From c14359df74322829ebae268316b2efd74800d100 Mon Sep 17 00:00:00 2001
From: Alf Eaton
Date: Wed, 4 Oct 2023 09:12:28 +0100
Subject: [PATCH] Remove o-error update script (#15070)

GitOrigin-RevId: dfa0c582453eadc6834056a7c8a0832ddb878b70
---
 libraries/o-error/doc/update-readme.js | 49 -
 libraries/o-error/package.json | 2 -
 package-lock.json | 2003 --------------
 services/track-changes/.eslintignore | 1 -
 .../track-changes/.github/ISSUE_TEMPLATE.md | 38 -
 .../.github/PULL_REQUEST_TEMPLATE.md | 48 -
 services/track-changes/.gitignore | 7 -
 services/track-changes/.mocharc.json | 3 -
 services/track-changes/.nvmrc | 1 -
 services/track-changes/Dockerfile | 27 -
 services/track-changes/LICENSE | 662 -----
 services/track-changes/Makefile | 110 -
 services/track-changes/README.md | 20 -
 services/track-changes/app.js | 164 --
 .../track-changes/app/js/DiffGenerator.js | 343 ---
 services/track-changes/app/js/DiffManager.js | 184 --
 services/track-changes/app/js/DocIterator.js | 42 -
 .../track-changes/app/js/DocstoreManager.js | 52 -
 .../app/js/DocumentUpdaterManager.js | 128 -
 services/track-changes/app/js/Errors.js | 5 -
 .../track-changes/app/js/HealthChecker.js | 84 -
 .../track-changes/app/js/HttpController.js | 364 ---
 services/track-changes/app/js/LockManager.js | 161 --
 services/track-changes/app/js/MongoAWS.js | 197 --
 services/track-changes/app/js/MongoManager.js | 200 --
 services/track-changes/app/js/PackManager.js | 1202 ---------
 services/track-changes/app/js/PackWorker.js | 211 --
 .../track-changes/app/js/ProjectIterator.js | 113 -
 services/track-changes/app/js/RedisManager.js | 169 --
 .../track-changes/app/js/RestoreManager.js | 46 -
 .../track-changes/app/js/UpdateCompressor.js | 326 ---
 .../track-changes/app/js/UpdateTrimmer.js | 66 -
 .../track-changes/app/js/UpdatesManager.js | 883 -------
 .../track-changes/app/js/WebApiManager.js | 112 -
 services/track-changes/app/js/ZipManager.js | 203 --
 services/track-changes/app/js/mongodb.js | 42 -
 .../track-changes/app/js/util/PackUtils.js | 18 -
 .../track-changes/app/lib/diff_match_patch.js | 2340 -----------------
 services/track-changes/buildscript.txt | 9 -
 .../track-changes/config/settings.defaults.js | 90 -
 services/track-changes/docker-compose.ci.yml | 74 -
 services/track-changes/docker-compose.yml | 77 -
 services/track-changes/pack.sh | 24 -
 services/track-changes/package.json | 54 -
 services/track-changes/scripts/flush_all.js | 27 -
 .../test/acceptance/deps/Dockerfile.s3mock | 4 -
 .../test/acceptance/deps/healthcheck.sh | 9 -
 .../acceptance/js/AppendingUpdatesTests.js | 587 -----
 .../acceptance/js/ArchivingUpdatesTests.js | 319 ---
 .../test/acceptance/js/ExportProjectTests.js | 34 -
 .../acceptance/js/FlushingUpdatesTests.js | 277 --
 .../test/acceptance/js/GettingADiffTests.js | 127 -
 .../test/acceptance/js/GettingUpdatesTests.js | 185 --
 .../test/acceptance/js/LockManagerTests.js | 64 -
 .../test/acceptance/js/RestoringVersions.js | 116 -
 .../acceptance/js/helpers/MockDocStoreApi.js | 50 -
 .../js/helpers/MockDocUpdaterApi.js | 87 -
 .../test/acceptance/js/helpers/MockWebApi.js | 72 -
 .../acceptance/js/helpers/TrackChangesApp.js | 64 -
 .../js/helpers/TrackChangesClient.js | 302 ---
 services/track-changes/test/setup.js | 21 -
 .../js/DiffGenerator/DiffGeneratorTests.js | 456 ----
 .../unit/js/DiffManager/DiffManagerTests.js | 443 ----
 .../test/unit/js/DocArchive/MongoAWS.js | 110 -
 .../DocumentUpdaterManagerTests.js | 218 --
 .../js/HttpController/HttpControllerTests.js | 200 --
 .../unit/js/LockManager/LockManagerTests.js | 315 ---
 .../unit/js/MongoManager/MongoManagerTests.js | 239 --
 .../unit/js/PackManager/PackManagerTests.js | 704 -----
 .../unit/js/RedisManager/RedisManagerTests.js | 163 --
 .../js/RestoreManager/RestoreManagerTests.js | 64 -
 .../UpdateCompressor/UpdateCompressorTests.js | 848 ------
 .../js/UpdateTrimmer/UpdateTrimmerTests.js | 182 --
 .../js/UpdatesManager/UpdatesManagerTests.js | 1333 ----------
 .../test/unit/js/Util/PackUtilsTests.js | 55 -
 .../js/WebApiManager/WebApiManagerTests.js | 208 --
 76 files changed, 18837 deletions(-)

delete mode 100755 libraries/o-error/doc/update-readme.js
delete mode 100644 services/track-changes/.eslintignore
delete mode 100644 services/track-changes/.github/ISSUE_TEMPLATE.md
delete mode 100644 services/track-changes/.github/PULL_REQUEST_TEMPLATE.md
delete mode 100644 services/track-changes/.gitignore
delete mode 100644 services/track-changes/.mocharc.json
delete mode 100644 services/track-changes/.nvmrc
delete mode 100644 services/track-changes/Dockerfile
delete mode 100644 services/track-changes/LICENSE
delete mode 100644 services/track-changes/Makefile
delete mode 100644 services/track-changes/README.md
delete mode 100644 services/track-changes/app.js
delete mode 100644 services/track-changes/app/js/DiffGenerator.js
delete mode 100644 services/track-changes/app/js/DiffManager.js
delete mode 100644 services/track-changes/app/js/DocIterator.js
delete mode 100644 services/track-changes/app/js/DocstoreManager.js
delete mode 100644 services/track-changes/app/js/DocumentUpdaterManager.js
delete mode 100644 services/track-changes/app/js/Errors.js
delete mode 100644 services/track-changes/app/js/HealthChecker.js
delete mode 100644 services/track-changes/app/js/HttpController.js
delete mode 100644 services/track-changes/app/js/LockManager.js
delete mode 100644 services/track-changes/app/js/MongoAWS.js
delete mode 100644 services/track-changes/app/js/MongoManager.js
delete mode 100644 services/track-changes/app/js/PackManager.js
delete mode 100644 services/track-changes/app/js/PackWorker.js
delete mode 100644 services/track-changes/app/js/ProjectIterator.js
delete mode 100644 services/track-changes/app/js/RedisManager.js
delete mode 100644 services/track-changes/app/js/RestoreManager.js
delete mode 100644 services/track-changes/app/js/UpdateCompressor.js
delete mode 100644 services/track-changes/app/js/UpdateTrimmer.js
delete mode 100644 services/track-changes/app/js/UpdatesManager.js
delete mode 100644 services/track-changes/app/js/WebApiManager.js
delete mode 100644 services/track-changes/app/js/ZipManager.js
delete mode 100644 services/track-changes/app/js/mongodb.js
delete mode 100644 services/track-changes/app/js/util/PackUtils.js
delete mode 100644 services/track-changes/app/lib/diff_match_patch.js
delete mode 100644 services/track-changes/buildscript.txt
delete mode 100755 services/track-changes/config/settings.defaults.js
delete mode 100644 services/track-changes/docker-compose.ci.yml
delete mode 100644 services/track-changes/docker-compose.yml
delete mode 100755 services/track-changes/pack.sh
delete mode 100644 services/track-changes/package.json
delete mode 100644 services/track-changes/scripts/flush_all.js
delete mode 100644 services/track-changes/test/acceptance/deps/Dockerfile.s3mock
delete mode 100644 services/track-changes/test/acceptance/deps/healthcheck.sh
delete mode 100644 services/track-changes/test/acceptance/js/AppendingUpdatesTests.js
delete mode 100644 services/track-changes/test/acceptance/js/ArchivingUpdatesTests.js
delete mode 100644 services/track-changes/test/acceptance/js/ExportProjectTests.js
delete mode 100644 services/track-changes/test/acceptance/js/FlushingUpdatesTests.js
delete mode 100644 services/track-changes/test/acceptance/js/GettingADiffTests.js
delete mode 100644 services/track-changes/test/acceptance/js/GettingUpdatesTests.js
delete mode 100644 services/track-changes/test/acceptance/js/LockManagerTests.js
delete mode 100644 services/track-changes/test/acceptance/js/RestoringVersions.js
delete mode 100644 services/track-changes/test/acceptance/js/helpers/MockDocStoreApi.js
delete mode 100644 services/track-changes/test/acceptance/js/helpers/MockDocUpdaterApi.js
delete mode 100644 services/track-changes/test/acceptance/js/helpers/MockWebApi.js
delete mode 100644 services/track-changes/test/acceptance/js/helpers/TrackChangesApp.js
delete mode 100644 services/track-changes/test/acceptance/js/helpers/TrackChangesClient.js
delete mode 100644 services/track-changes/test/setup.js
delete mode 100644 services/track-changes/test/unit/js/DiffGenerator/DiffGeneratorTests.js
delete mode 100644 services/track-changes/test/unit/js/DiffManager/DiffManagerTests.js
delete mode 100644 services/track-changes/test/unit/js/DocArchive/MongoAWS.js
delete mode 100644 services/track-changes/test/unit/js/DocumentUpdaterManager/DocumentUpdaterManagerTests.js
delete mode 100644 services/track-changes/test/unit/js/HttpController/HttpControllerTests.js
delete mode 100644 services/track-changes/test/unit/js/LockManager/LockManagerTests.js
delete mode 100644 services/track-changes/test/unit/js/MongoManager/MongoManagerTests.js
delete mode 100644 services/track-changes/test/unit/js/PackManager/PackManagerTests.js
delete mode 100644 services/track-changes/test/unit/js/RedisManager/RedisManagerTests.js
delete mode 100644 services/track-changes/test/unit/js/RestoreManager/RestoreManagerTests.js
delete mode 100644 services/track-changes/test/unit/js/UpdateCompressor/UpdateCompressorTests.js
delete mode 100644 services/track-changes/test/unit/js/UpdateTrimmer/UpdateTrimmerTests.js
delete mode 100644 services/track-changes/test/unit/js/UpdatesManager/UpdatesManagerTests.js
delete mode 100644 services/track-changes/test/unit/js/Util/PackUtilsTests.js
delete mode 100644 services/track-changes/test/unit/js/WebApiManager/WebApiManagerTests.js

diff --git a/libraries/o-error/doc/update-readme.js b/libraries/o-error/doc/update-readme.js
deleted file mode 100755
index 61a7d90f3c..0000000000
--- a/libraries/o-error/doc/update-readme.js
+++ /dev/null
@@ -1,49 +0,0 @@
-#!/usr/bin/env node
-
-const fs = require('fs')
-const jsdoc2md = require('jsdoc-to-markdown')
-const toc = require('markdown-toc')
-
-const README = 'README.md'
-const HEADER = '## OError API Reference'
-const FOOTER = ''
-
-async function main() {
-  const apiDocs = await jsdoc2md.render({ files: 'index.js' })
-  const apiDocLines = apiDocs.trim().split(/\r?\n/g)
-
-  // The first few lines don't make much sense when included in the README.
-  const apiDocStart = apiDocLines.indexOf('* [OError](#OError)')
-  if (apiDocStart === -1) {
-    console.error('API docs not in expected format for insertion.')
-    process.exit(1)
-  }
-  apiDocLines.splice(1, apiDocStart - 1)
-  apiDocLines.unshift(HEADER, '')
-
-  const readme = await fs.promises.readFile(README, { encoding: 'utf8' })
-  const readmeLines = readme.split(/\r?\n/g)
-
-  const apiStart = readmeLines.indexOf(HEADER)
-  const apiEnd = readmeLines.indexOf(FOOTER)
-
-  if (apiStart === -1 || apiEnd === -1) {
-    console.error('Could not find the API Reference section.')
-    process.exit(1)
-  }
-
-  Array.prototype.splice.apply(
-    readmeLines,
-    [apiStart, apiEnd - apiStart].concat(apiDocLines)
-  )
-
-  const readmeWithApi = readmeLines.join('\n')
-
-  let readmeWithApiAndToc = toc.insert(readmeWithApi)
-
-  // Unfortunately, the ⇒ breaks the generated TOC links.
-  readmeWithApiAndToc = readmeWithApiAndToc.replace(/-%E2%87%92-/g, '--')
-
-  await fs.promises.writeFile(README, readmeWithApiAndToc)
-}
-main()
diff --git a/libraries/o-error/package.json b/libraries/o-error/package.json
index a676fd9cba..ceea6a3495 100644
--- a/libraries/o-error/package.json
+++ b/libraries/o-error/package.json
@@ -39,8 +39,6 @@
     "@types/chai": "^4.3.0",
     "@types/node": "^18.17.4",
     "chai": "^4.3.6",
-    "jsdoc-to-markdown": "^7.1.0",
-    "markdown-toc": "^1.2.0",
     "mocha": "^10.2.0",
     "nyc": "^15.1.0",
     "typescript": "^5.0.4"
diff --git a/package-lock.json b/package-lock.json
index a37a118947..94d6c3ea8a 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -323,8 +323,6 @@
         "@types/chai": "^4.3.0",
         "@types/node": "^18.17.4",
         "chai": "^4.3.6",
-        "jsdoc-to-markdown": "^7.1.0",
-        "markdown-toc": "^1.2.0",
         "mocha": "^10.2.0",
         "nyc": "^15.1.0",
         "typescript": "^5.0.4"
@@ -19087,27 +19085,6 @@
         "node": ">=6"
       }
     },
-    "node_modules/ansi-escape-sequences": {
-      "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/ansi-escape-sequences/-/ansi-escape-sequences-4.1.0.tgz",
-      "integrity": "sha512-dzW9kHxH011uBsidTXd14JXgzye/YLb2LzeKZ4bsgl/Knwx8AtbSFkkGxagdNOoh0DlqHCmfiEjWKBaqjOanVw==",
-      "dev": true,
-      "dependencies": {
-        "array-back": "^3.0.1"
-      },
-      "engines": {
-        "node": ">=8.0.0"
-      }
-    },
-    "node_modules/ansi-escape-sequences/node_modules/array-back": {
-      "version": "3.1.0",
-      "resolved": "https://registry.npmjs.org/array-back/-/array-back-3.1.0.tgz",
-      "integrity": "sha512-TkuxA4UCOvxuDK6NZYXCalszEzj+TLszyASooky+i742l9TqsOdYCMJJupxRic61hwquNtppB3hgcuq9SVSH1Q==",
-      "dev": true,
-      "engines": {
-        "node": ">=6"
-      }
-    },
     "node_modules/ansi-escapes": {
       "version": "4.3.2",
       "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz",
@@ -19146,18 +19123,6 @@
         "ansi-html": "bin/ansi-html"
       }
     },
-    "node_modules/ansi-red": {
-      "version": "0.1.1",
-      "resolved": "https://registry.npmjs.org/ansi-red/-/ansi-red-0.1.1.tgz",
-      "integrity": "sha1-jGOPnRCAgAo1PJwoyKgcpHBdlGw=",
-      "dev": true,
-      "dependencies": {
-        "ansi-wrap": "0.1.0"
-      },
-      "engines": {
-        "node": ">=0.10.0"
-      }
-    },
     "node_modules/ansi-regex": {
       "version": "5.0.1",
       "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
@@ -19190,15 +19155,6 @@
       "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz",
       "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU="
     },
-    "node_modules/ansi-wrap": {
-      "version": "0.1.0",
-      "resolved": "https://registry.npmjs.org/ansi-wrap/-/ansi-wrap-0.1.0.tgz",
-      "integrity": "sha1-qCJQ3bABXponyoLoLqYDu/pF768=",
-      "dev": true,
-      "engines": {
-        "node": ">=0.10.0"
-      }
-    },
     "node_modules/anymatch": {
       "version":
"3.1.3", "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", @@ -19363,15 +19319,6 @@ "deep-equal": "^2.0.5" } }, - "node_modules/array-back": { - "version": "6.2.2", - "resolved": "https://registry.npmjs.org/array-back/-/array-back-6.2.2.tgz", - "integrity": "sha512-gUAZ7HPyb4SJczXAMUXMGAvI976JoK3qEx9v1FTmeYuJj0IBiaKttG1ydtGKdkfqWkIkouke7nG8ufGy77+Cvw==", - "dev": true, - "engines": { - "node": ">=12.17" - } - }, "node_modules/array-buffer-byte-length": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.0.tgz", @@ -19609,15 +19556,6 @@ "resolved": "https://registry.npmjs.org/auth0-id-generator/-/auth0-id-generator-0.2.0.tgz", "integrity": "sha512-sJVZrGls/XB7TEsAovv6GsGwsjDBhBy014w+9x/DNZH8OTV8F/uioMmT68ADWtfbvfkJaNCYNjRs1dOVFyNqbQ==" }, - "node_modules/autolinker": { - "version": "0.28.1", - "resolved": "https://registry.npmjs.org/autolinker/-/autolinker-0.28.1.tgz", - "integrity": "sha1-BlK0kYgYefB3XazgzcoyM5QqTkc=", - "dev": true, - "dependencies": { - "gulp-header": "^1.7.1" - } - }, "node_modules/autoprefixer": { "version": "9.8.8", "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-9.8.8.tgz", @@ -20961,29 +20899,6 @@ "object-hash": "^2.2.0" } }, - "node_modules/cache-point": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/cache-point/-/cache-point-2.0.0.tgz", - "integrity": "sha512-4gkeHlFpSKgm3vm2gJN5sPqfmijYRFYCQ6tv5cLw0xVmT6r1z1vd4FNnpuOREco3cBs1G709sZ72LdgddKvL5w==", - "dev": true, - "dependencies": { - "array-back": "^4.0.1", - "fs-then-native": "^2.0.0", - "mkdirp2": "^1.0.4" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/cache-point/node_modules/array-back": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/array-back/-/array-back-4.0.2.tgz", - "integrity": "sha512-NbdMezxqf94cnNfWLL7V/im0Ub+Anbb0IoZhvzie8+4HJ4nMQuzHuy49FkGYCJK2yAloZ3meiB6AVMClbrI1vg==", - "dev": true, - "engines": { - "node": ">=8" - } - }, "node_modules/cachedir": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/cachedir/-/cachedir-2.3.0.tgz", @@ -21639,33 +21554,6 @@ "node": ">=0.12.0" } }, - "node_modules/coffee-script": { - "version": "1.12.7", - "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.12.7.tgz", - "integrity": "sha512-fLeEhqwymYat/MpTPUjSKHVYYl0ec2mOyALEMLmzr5i1isuG+6jfI2j2d5oBO3VIzgUXgBVIcOT9uH1TFxBckw==", - "deprecated": "CoffeeScript on NPM has moved to \"coffeescript\" (no hyphen)", - "dev": true, - "bin": { - "cake": "bin/cake", - "coffee": "bin/coffee" - }, - "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/collect-all": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/collect-all/-/collect-all-1.0.4.tgz", - "integrity": "sha512-RKZhRwJtJEP5FWul+gkSMEnaK6H3AGPTTWOiRimCcs+rc/OmQE3Yhy1Q7A7KsdkG3ZXVdZq68Y6ONSdvkeEcKA==", - "dev": true, - "dependencies": { - "stream-connect": "^1.0.2", - "stream-via": "^1.0.4" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/color-convert": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", @@ -21721,94 +21609,6 @@ "node": ">= 0.8" } }, - "node_modules/command-line-args": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/command-line-args/-/command-line-args-5.2.1.tgz", - "integrity": "sha512-H4UfQhZyakIjC74I9d34fGYDwk3XpSr17QhEd0Q3I9Xq1CETHo4Hcuo87WyWHpAF1aSLjLRf5lD9ZGX2qStUvg==", - "dev": true, - "dependencies": { - "array-back": "^3.1.0", - "find-replace": 
"^3.0.0", - "lodash.camelcase": "^4.3.0", - "typical": "^4.0.0" - }, - "engines": { - "node": ">=4.0.0" - } - }, - "node_modules/command-line-args/node_modules/array-back": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/array-back/-/array-back-3.1.0.tgz", - "integrity": "sha512-TkuxA4UCOvxuDK6NZYXCalszEzj+TLszyASooky+i742l9TqsOdYCMJJupxRic61hwquNtppB3hgcuq9SVSH1Q==", - "dev": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/command-line-args/node_modules/typical": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/typical/-/typical-4.0.0.tgz", - "integrity": "sha512-VAH4IvQ7BDFYglMd7BPRDfLgxZZX4O4TFcRDA6EN5X7erNJJq+McIEp8np9aVtxrCJ6qx4GTYVfOWNjcqwZgRw==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/command-line-tool": { - "version": "0.8.0", - "resolved": "https://registry.npmjs.org/command-line-tool/-/command-line-tool-0.8.0.tgz", - "integrity": "sha512-Xw18HVx/QzQV3Sc5k1vy3kgtOeGmsKIqwtFFoyjI4bbcpSgnw2CWVULvtakyw4s6fhyAdI6soQQhXc2OzJy62g==", - "dev": true, - "dependencies": { - "ansi-escape-sequences": "^4.0.0", - "array-back": "^2.0.0", - "command-line-args": "^5.0.0", - "command-line-usage": "^4.1.0", - "typical": "^2.6.1" - }, - "engines": { - "node": ">=4.0.0" - } - }, - "node_modules/command-line-tool/node_modules/array-back": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/array-back/-/array-back-2.0.0.tgz", - "integrity": "sha512-eJv4pLLufP3g5kcZry0j6WXpIbzYw9GUB4mVJZno9wfwiBxbizTnHCw3VJb07cBihbFX48Y7oSrW9y+gt4glyw==", - "dev": true, - "dependencies": { - "typical": "^2.6.1" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/command-line-usage": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/command-line-usage/-/command-line-usage-4.1.0.tgz", - "integrity": "sha512-MxS8Ad995KpdAC0Jopo/ovGIroV/m0KHwzKfXxKag6FHOkGsH8/lv5yjgablcRxCJJC0oJeUMuO/gmaq+Wq46g==", - "dev": true, - "dependencies": { - "ansi-escape-sequences": "^4.0.0", - "array-back": "^2.0.0", - "table-layout": "^0.4.2", - "typical": "^2.6.1" - }, - "engines": { - "node": ">=4.0.0" - } - }, - "node_modules/command-line-usage/node_modules/array-back": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/array-back/-/array-back-2.0.0.tgz", - "integrity": "sha512-eJv4pLLufP3g5kcZry0j6WXpIbzYw9GUB4mVJZno9wfwiBxbizTnHCw3VJb07cBihbFX48Y7oSrW9y+gt4glyw==", - "dev": true, - "dependencies": { - "typical": "^2.6.1" - }, - "engines": { - "node": ">=4" - } - }, "node_modules/commander": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", @@ -21822,15 +21622,6 @@ "resolved": "https://registry.npmjs.org/common-path-prefix/-/common-path-prefix-3.0.0.tgz", "integrity": "sha512-QE33hToZseCH3jS0qN96O/bSh3kaw/h+Tq7ngyY9eWDUnTlTNUyqfqvCXioLe5Na5jFsL78ra/wuBU4iuEgd4w==" }, - "node_modules/common-sequence": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/common-sequence/-/common-sequence-2.0.2.tgz", - "integrity": "sha512-jAg09gkdkrDO9EWTdXfv80WWH3yeZl5oT69fGfedBNS9pXUKYInVJ1bJ+/ht2+Moeei48TmSbQDYMc8EOx9G0g==", - "dev": true, - "engines": { - "node": ">=8" - } - }, "node_modules/common-tags": { "version": "1.8.2", "resolved": "https://registry.npmjs.org/common-tags/-/common-tags-1.8.2.tgz", @@ -21977,15 +21768,6 @@ "safe-buffer": "~5.1.0" } }, - "node_modules/concat-with-sourcemaps": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/concat-with-sourcemaps/-/concat-with-sourcemaps-1.1.0.tgz", - "integrity": 
"sha512-4gEjHJFT9e+2W/77h/DS5SGUgwDaOwprX8L/gl5+3ixnzkVJJsZWDSelmN3Oilw3LNDZjZV0yqH1hLG3k6nghg==", - "dev": true, - "dependencies": { - "source-map": "^0.6.1" - } - }, "node_modules/config": { "version": "1.31.0", "resolved": "https://registry.npmjs.org/config/-/config-1.31.0.tgz", @@ -22007,24 +21789,6 @@ "proto-list": "~1.2.1" } }, - "node_modules/config-master": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/config-master/-/config-master-3.1.0.tgz", - "integrity": "sha1-ZnZjWQUFooO/JqSE1oSJ10xUhdo=", - "dev": true, - "dependencies": { - "walk-back": "^2.0.1" - } - }, - "node_modules/config-master/node_modules/walk-back": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/walk-back/-/walk-back-2.0.1.tgz", - "integrity": "sha1-VU4qnYdPrEeoywBr9EwvDEmYoKQ=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/config/node_modules/json5": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.2.tgz", @@ -24420,15 +24184,6 @@ "integrity": "sha1-gGZJMmzqp8qjMG112YXqJ0i6kTw=", "dev": true }, - "node_modules/diacritics-map": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/diacritics-map/-/diacritics-map-0.1.0.tgz", - "integrity": "sha1-bfwP+dAQAKLt8oZTccrDFulJd68=", - "dev": true, - "engines": { - "node": ">=0.8.0" - } - }, "node_modules/dicer": { "version": "0.2.5", "resolved": "https://registry.npmjs.org/dicer/-/dicer-0.2.5.tgz", @@ -24510,29 +24265,6 @@ "integrity": "sha512-UaachK0eL7neLyL2emXptVGyggGJKowJd24rqCZi9N2CDxuCQTk7wdePTgS7py4HMh+qxUI6zzTVinVwCDTbIA==", "dev": true }, - "node_modules/dmd": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/dmd/-/dmd-6.1.0.tgz", - "integrity": "sha512-0zQIJ873gay1scCTFZvHPWM9mVJBnaylB2NQDI8O9u8O32m00Jb6uxDKexZm8hjTRM7RiWe0FJ32pExHoXdwoQ==", - "dev": true, - "dependencies": { - "array-back": "^6.2.2", - "cache-point": "^2.0.0", - "common-sequence": "^2.0.2", - "file-set": "^4.0.2", - "handlebars": "^4.7.7", - "marked": "^4.0.12", - "object-get": "^2.1.1", - "reduce-flatten": "^3.0.1", - "reduce-unique": "^2.0.1", - "reduce-without": "^1.0.1", - "test-value": "^3.0.0", - "walk-back": "^5.1.0" - }, - "engines": { - "node": ">=12" - } - }, "node_modules/dnd-core": { "version": "11.1.3", "resolved": "https://registry.npmjs.org/dnd-core/-/dnd-core-11.1.3.tgz", @@ -26629,52 +26361,6 @@ "node": ">= 0.8.0" } }, - "node_modules/expand-range": { - "version": "1.8.2", - "resolved": "https://registry.npmjs.org/expand-range/-/expand-range-1.8.2.tgz", - "integrity": "sha1-opnv/TNf4nIeuujiV+x5ZE/IUzc=", - "dev": true, - "dependencies": { - "fill-range": "^2.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/expand-range/node_modules/fill-range": { - "version": "2.2.4", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-2.2.4.tgz", - "integrity": "sha512-cnrcCbj01+j2gTG921VZPnHbjmdAf8oQV/iGeV2kZxGSyfYjjTyY79ErsK1WJWMpw6DaApEX72binqJE+/d+5Q==", - "dev": true, - "dependencies": { - "is-number": "^2.1.0", - "isobject": "^2.0.0", - "randomatic": "^3.0.0", - "repeat-element": "^1.1.2", - "repeat-string": "^1.5.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/expand-range/node_modules/isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=", - "dev": true - }, - "node_modules/expand-range/node_modules/isobject": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/isobject/-/isobject-2.1.0.tgz", 
- "integrity": "sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk=", - "dev": true, - "dependencies": { - "isarray": "1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/expose-loader": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/expose-loader/-/expose-loader-4.1.0.tgz", @@ -27022,18 +26708,6 @@ "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" }, - "node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "dev": true, - "dependencies": { - "is-extendable": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/extract-zip": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/extract-zip/-/extract-zip-2.0.1.tgz", @@ -27324,28 +26998,6 @@ "node": "^10.12.0 || >=12.0.0" } }, - "node_modules/file-set": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/file-set/-/file-set-4.0.2.tgz", - "integrity": "sha512-fuxEgzk4L8waGXaAkd8cMr73Pm0FxOVkn8hztzUW7BAHhOGH90viQNXbiOsnecCWmfInqU6YmAMwxRMdKETceQ==", - "dev": true, - "dependencies": { - "array-back": "^5.0.0", - "glob": "^7.1.6" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/file-set/node_modules/array-back": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/array-back/-/array-back-5.0.0.tgz", - "integrity": "sha512-kgVWwJReZWmVuWOQKEOohXKJX+nD02JAZ54D1RRWlv8L0NebauKAaFxACKzB74RTclt1+WNz5KHaLRDAPZbDEw==", - "dev": true, - "engines": { - "node": ">=10" - } - }, "node_modules/file-system-cache": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/file-system-cache/-/file-system-cache-2.3.0.tgz", @@ -27537,27 +27189,6 @@ "node": ">=6" } }, - "node_modules/find-replace": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/find-replace/-/find-replace-3.0.0.tgz", - "integrity": "sha512-6Tb2myMioCAgv5kfvP5/PkZZ/ntTpVK39fHY7WkWBgvbeE+VHd/tZuZ4mrC+bxh4cfOZeYKVPaJIZtZXV7GNCQ==", - "dev": true, - "dependencies": { - "array-back": "^3.0.1" - }, - "engines": { - "node": ">=4.0.0" - } - }, - "node_modules/find-replace/node_modules/array-back": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/array-back/-/array-back-3.1.0.tgz", - "integrity": "sha512-TkuxA4UCOvxuDK6NZYXCalszEzj+TLszyASooky+i742l9TqsOdYCMJJupxRic61hwquNtppB3hgcuq9SVSH1Q==", - "dev": true, - "engines": { - "node": ">=6" - } - }, "node_modules/find-up": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", @@ -27682,15 +27313,6 @@ "is-callable": "^1.1.3" } }, - "node_modules/for-in": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/for-in/-/for-in-1.0.2.tgz", - "integrity": "sha1-gQaNKVqBQuwKxybG4iAMMPttXoA=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/foreach": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/foreach/-/foreach-2.0.5.tgz", @@ -27998,15 +27620,6 @@ "integrity": "sha512-GNanXlVr2pf02+sPN40XN8HG+ePaNcvM0q5mZBd668Obwb0yD5GiUbZOFgwn8kGMY6I3mdyDJzieUy3PTYyTRA==", "dev": true }, - "node_modules/fs-then-native": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/fs-then-native/-/fs-then-native-2.0.0.tgz", - "integrity": "sha1-GaEk2U2QwiyOBF8ujdbr6jbUjGc=", - "dev": true, - "engines": { - "node": ">=4.0.0" - } - }, "node_modules/fs.realpath": { "version": "1.0.0", "resolved": 
"https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", @@ -29095,44 +28708,6 @@ "lodash": "^4.17.15" } }, - "node_modules/gray-matter": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/gray-matter/-/gray-matter-2.1.1.tgz", - "integrity": "sha1-MELZrewqHe1qdwep7SOA+KF6Qw4=", - "dev": true, - "dependencies": { - "ansi-red": "^0.1.1", - "coffee-script": "^1.12.4", - "extend-shallow": "^2.0.1", - "js-yaml": "^3.8.1", - "toml": "^2.3.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/gray-matter/node_modules/argparse": { - "version": "1.0.10", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", - "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", - "dev": true, - "dependencies": { - "sprintf-js": "~1.0.2" - } - }, - "node_modules/gray-matter/node_modules/js-yaml": { - "version": "3.14.1", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", - "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", - "dev": true, - "dependencies": { - "argparse": "^1.0.7", - "esprima": "^4.0.0" - }, - "bin": { - "js-yaml": "bin/js-yaml.js" - } - }, "node_modules/gtoken": { "version": "5.3.1", "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.3.1.tgz", @@ -29161,58 +28736,6 @@ "node": ">=10" } }, - "node_modules/gulp-header": { - "version": "1.8.12", - "resolved": "https://registry.npmjs.org/gulp-header/-/gulp-header-1.8.12.tgz", - "integrity": "sha512-lh9HLdb53sC7XIZOYzTXM4lFuXElv3EVkSDhsd7DoJBj7hm+Ni7D3qYbb+Rr8DuM8nRanBvkVO9d7askreXGnQ==", - "deprecated": "Removed event-stream from gulp-header", - "dev": true, - "dependencies": { - "concat-with-sourcemaps": "*", - "lodash.template": "^4.4.0", - "through2": "^2.0.0" - } - }, - "node_modules/gulp-header/node_modules/isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=", - "dev": true - }, - "node_modules/gulp-header/node_modules/readable-stream": { - "version": "2.3.7", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", - "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", - "dev": true, - "dependencies": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "node_modules/gulp-header/node_modules/string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "dev": true, - "dependencies": { - "safe-buffer": "~5.1.0" - } - }, - "node_modules/gulp-header/node_modules/through2": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", - "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", - "dev": true, - "dependencies": { - "readable-stream": "~2.3.6", - "xtend": "~4.0.1" - } - }, "node_modules/gulp-sort": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/gulp-sort/-/gulp-sort-2.0.0.tgz", @@ -30586,15 +30109,6 @@ "object-assign": "^4.1.1" } }, - "node_modules/is-extendable": { - "version": "0.1.1", - "resolved": 
"https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", - "integrity": "sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/is-extglob": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", @@ -30729,18 +30243,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/is-number": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-2.1.0.tgz", - "integrity": "sha1-Afy7s5NGOlSPL0ZszhbezknbkI8=", - "dev": true, - "dependencies": { - "kind-of": "^3.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/is-number-object": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.6.tgz", @@ -30755,18 +30257,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/is-number/node_modules/kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "dev": true, - "dependencies": { - "is-buffer": "^1.1.5" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/is-obj": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz", @@ -31950,64 +31440,6 @@ "node": ">=12.0.0" } }, - "node_modules/jsdoc-api": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/jsdoc-api/-/jsdoc-api-7.2.0.tgz", - "integrity": "sha512-93YDnlm/OYTlLOFeNs4qAv0RBCJ0kGj67xQaWy8wrbk97Rw1EySitoOTHsTHXPEs3uyx2IStPKGrbE7LTnZXbA==", - "dev": true, - "dependencies": { - "array-back": "^6.2.2", - "cache-point": "^2.0.0", - "collect-all": "^1.0.4", - "file-set": "^4.0.2", - "fs-then-native": "^2.0.0", - "jsdoc": "^4.0.0", - "object-to-spawn-args": "^2.0.1", - "temp-path": "^1.0.0", - "walk-back": "^5.1.0" - }, - "engines": { - "node": ">=12.17" - } - }, - "node_modules/jsdoc-parse": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/jsdoc-parse/-/jsdoc-parse-6.1.0.tgz", - "integrity": "sha512-n/hDGQJa69IBun1yZAjqzV4gVR41+flZ3bIlm9fKvNe2Xjsd1/+zCo2+R9ls8LxtePgIWbpA1jU7xkB2lRdLLg==", - "dev": true, - "dependencies": { - "array-back": "^6.2.2", - "lodash.omit": "^4.5.0", - "lodash.pick": "^4.4.0", - "reduce-extract": "^1.0.0", - "sort-array": "^4.1.4", - "test-value": "^3.0.0" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/jsdoc-to-markdown": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/jsdoc-to-markdown/-/jsdoc-to-markdown-7.1.1.tgz", - "integrity": "sha512-CI86d63xAVNO+ENumWwmJ034lYe5iGU5GwjtTA11EuphP9tpnoi4hrKgR/J8uME0D+o4KUpVfwX1fjZhc8dEtg==", - "dev": true, - "dependencies": { - "array-back": "^6.2.2", - "command-line-tool": "^0.8.0", - "config-master": "^3.1.0", - "dmd": "^6.1.0", - "jsdoc-api": "^7.1.1", - "jsdoc-parse": "^6.1.0", - "walk-back": "^5.1.0" - }, - "bin": { - "jsdoc2md": "bin/cli.js" - }, - "engines": { - "node": ">=12.17" - } - }, "node_modules/jsdoc/node_modules/escape-string-regexp": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", @@ -32800,18 +32232,6 @@ "node": "> 0.8" } }, - "node_modules/lazy-cache": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/lazy-cache/-/lazy-cache-2.0.2.tgz", - "integrity": "sha1-uRkKT5EzVGlIQIWfio9whNiCImQ=", - "dev": true, - "dependencies": { - "set-getter": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/lazy-universal-dotenv": { "version": 
"4.0.0", "resolved": "https://registry.npmjs.org/lazy-universal-dotenv/-/lazy-universal-dotenv-4.0.0.tgz", @@ -33075,21 +32495,6 @@ "uc.micro": "^1.0.1" } }, - "node_modules/list-item": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/list-item/-/list-item-1.1.1.tgz", - "integrity": "sha1-DGXQDih8tmPMs8s4Sad+iewmilY=", - "dev": true, - "dependencies": { - "expand-range": "^1.8.1", - "extend-shallow": "^2.0.1", - "is-number": "^2.1.0", - "repeat-string": "^1.5.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/listr2": { "version": "3.14.0", "resolved": "https://registry.npmjs.org/listr2/-/listr2-3.14.0.tgz", @@ -33282,12 +32687,6 @@ "resolved": "https://registry.npmjs.org/lodash._objecttypes/-/lodash._objecttypes-2.4.1.tgz", "integrity": "sha1-fAt/admKH3ZSn4kLDNsbTf7BHBE=" }, - "node_modules/lodash._reinterpolate": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/lodash._reinterpolate/-/lodash._reinterpolate-3.0.0.tgz", - "integrity": "sha1-DM8tiRZq8Ds2Y8eWU4t1rG4RTZ0=", - "dev": true - }, "node_modules/lodash._releasearray": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._releasearray/-/lodash._releasearray-2.4.1.tgz", @@ -33486,29 +32885,11 @@ "resolved": "https://registry.npmjs.org/lodash.noop/-/lodash.noop-2.4.1.tgz", "integrity": "sha1-T7VPgWZS5a4Q6PcvcXo4jHMmU4o=" }, - "node_modules/lodash.omit": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/lodash.omit/-/lodash.omit-4.5.0.tgz", - "integrity": "sha1-brGa5aHuHdnfC5aeZs4Lf6MLXmA=", - "dev": true - }, "node_modules/lodash.once": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz", "integrity": "sha1-DdOXEhPHxW34gJd9UEyI+0cal6w=" }, - "node_modules/lodash.padend": { - "version": "4.6.1", - "resolved": "https://registry.npmjs.org/lodash.padend/-/lodash.padend-4.6.1.tgz", - "integrity": "sha1-U8y6BH0G4VjTEfRdpiX05J5vFm4=", - "dev": true - }, - "node_modules/lodash.pick": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/lodash.pick/-/lodash.pick-4.4.0.tgz", - "integrity": "sha1-UvBWEP/53tQiYRRB7R/BI6AwAbM=", - "dev": true - }, "node_modules/lodash.set": { "version": "4.3.2", "resolved": "https://registry.npmjs.org/lodash.set/-/lodash.set-4.3.2.tgz", @@ -33528,25 +32909,6 @@ "lodash._isnative": "~2.4.1" } }, - "node_modules/lodash.template": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/lodash.template/-/lodash.template-4.5.0.tgz", - "integrity": "sha512-84vYFxIkmidUiFxidA/KjjH9pAycqW+h980j7Fuz5qxRtO9pgB7MDFTdys1N7A5mcucRiDyEq4fusljItR1T/A==", - "dev": true, - "dependencies": { - "lodash._reinterpolate": "^3.0.0", - "lodash.templatesettings": "^4.0.0" - } - }, - "node_modules/lodash.templatesettings": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/lodash.templatesettings/-/lodash.templatesettings-4.2.0.tgz", - "integrity": "sha512-stgLz+i3Aa9mZgnjr/O+v9ruKZsPsndy7qPZOchbqk2cnTU1ZaldKK+v7m54WoKIyxiuMZTKT2H81F8BeAc3ZQ==", - "dev": true, - "dependencies": { - "lodash._reinterpolate": "^3.0.0" - } - }, "node_modules/lodash.throttle": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/lodash.throttle/-/lodash.throttle-4.1.1.tgz", @@ -33975,15 +33337,6 @@ "url": "https://github.com/fb55/entities?sponsor=1" } }, - "node_modules/markdown-link": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/markdown-link/-/markdown-link-0.1.1.tgz", - "integrity": "sha1-MsXGUZmmRXMWMi0eQinRNAfIx88=", - "dev": true, - "engines": { - "node": 
">=0.10.0" - } - }, "node_modules/markdown-to-jsx": { "version": "7.2.1", "resolved": "https://registry.npmjs.org/markdown-to-jsx/-/markdown-to-jsx-7.2.1.tgz", @@ -33995,32 +33348,6 @@ "react": ">= 0.14.0" } }, - "node_modules/markdown-toc": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/markdown-toc/-/markdown-toc-1.2.0.tgz", - "integrity": "sha512-eOsq7EGd3asV0oBfmyqngeEIhrbkc7XVP63OwcJBIhH2EpG2PzFcbZdhy1jutXSlRBBVMNXHvMtSr5LAxSUvUg==", - "dev": true, - "dependencies": { - "concat-stream": "^1.5.2", - "diacritics-map": "^0.1.0", - "gray-matter": "^2.1.0", - "lazy-cache": "^2.0.2", - "list-item": "^1.1.1", - "markdown-link": "^0.1.1", - "minimist": "^1.2.0", - "mixin-deep": "^1.1.3", - "object.pick": "^1.2.0", - "remarkable": "^1.7.1", - "repeat-string": "^1.6.1", - "strip-color": "^0.1.0" - }, - "bin": { - "markdown-toc": "cli.js" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/marked": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/marked/-/marked-4.1.0.tgz", @@ -34057,12 +33384,6 @@ "node": ">=0.10.0" } }, - "node_modules/math-random": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/math-random/-/math-random-1.0.4.tgz", - "integrity": "sha512-rUxjysqif/BZQH2yhd5Aaq7vXMSx9NdEsQcyA07uEzIvxgI7zIr33gGsh+RU0/XjmQpCW7RsVof1vlkvQVCK5A==", - "dev": true - }, "node_modules/mathjax": { "version": "2.7.9", "resolved": "https://registry.npmjs.org/mathjax/-/mathjax-2.7.9.tgz", @@ -34459,31 +33780,6 @@ "resolved": "jobs/mirror-documentation", "link": true }, - "node_modules/mixin-deep": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/mixin-deep/-/mixin-deep-1.3.2.tgz", - "integrity": "sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA==", - "dev": true, - "dependencies": { - "for-in": "^1.0.2", - "is-extendable": "^1.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/mixin-deep/node_modules/is-extendable": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-1.0.1.tgz", - "integrity": "sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==", - "dev": true, - "dependencies": { - "is-plain-object": "^2.0.4" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/mixme": { "version": "0.5.4", "resolved": "https://registry.npmjs.org/mixme/-/mixme-0.5.4.tgz", @@ -34519,12 +33815,6 @@ "resolved": "https://registry.npmjs.org/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz", "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==" }, - "node_modules/mkdirp2": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/mkdirp2/-/mkdirp2-1.0.5.tgz", - "integrity": "sha512-xOE9xbICroUDmG1ye2h4bZ8WBie9EGmACaco8K8cx6RlkJJrxGIqjGqztAI+NMhexXBcdGbSEzI6N3EJPevxZw==", - "dev": true - }, "node_modules/mmdb-lib": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/mmdb-lib/-/mmdb-lib-2.0.2.tgz", @@ -35678,12 +34968,6 @@ "node": ">=0.10.0" } }, - "node_modules/object-get": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/object-get/-/object-get-2.1.1.tgz", - "integrity": "sha512-7n4IpLMzGGcLEMiQKsNR7vCe+N5E9LORFrtNUVy4sO3dj9a3HedZCxEL2T7QuLhcHN1NBuBsMOKaOsAYI9IIvg==", - "dev": true - }, "node_modules/object-hash": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-2.2.0.tgz", @@ -35723,15 +35007,6 @@ "node": ">= 0.4" } }, - "node_modules/object-to-spawn-args": { - 
"version": "2.0.1", - "resolved": "https://registry.npmjs.org/object-to-spawn-args/-/object-to-spawn-args-2.0.1.tgz", - "integrity": "sha512-6FuKFQ39cOID+BMZ3QaphcC8Y4cw6LXBLyIgPU+OhIYwviJamPAn+4mITapnSBQrejB+NNp+FMskhD8Cq+Ys3w==", - "dev": true, - "engines": { - "node": ">=8.0.0" - } - }, "node_modules/object.assign": { "version": "4.1.4", "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.4.tgz", @@ -35809,18 +35084,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/object.pick": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/object.pick/-/object.pick-1.3.0.tgz", - "integrity": "sha1-h6EKxMFpS9Lhy/U1kaZhQftd10c=", - "dev": true, - "dependencies": { - "isobject": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/object.values": { "version": "1.1.6", "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.6.tgz", @@ -38265,29 +37528,6 @@ "node": ">= 0.8" } }, - "node_modules/randomatic": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/randomatic/-/randomatic-3.1.1.tgz", - "integrity": "sha512-TuDE5KxZ0J461RVjrJZCJc+J+zCkTb1MbH9AQUq68sMhOMcy9jLcb3BrZKgp9q9Ncltdg4QVqWrH02W2EFFVYw==", - "dev": true, - "dependencies": { - "is-number": "^4.0.0", - "kind-of": "^6.0.0", - "math-random": "^1.0.1" - }, - "engines": { - "node": ">= 0.10.0" - } - }, - "node_modules/randomatic/node_modules/is-number": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-4.0.0.tgz", - "integrity": "sha512-rSklcAIlf1OmFdyAqbnWTLVelsQ58uvZ66S/ZyawjWqIviTWCjg2PzVGw8WUA+nNuPTqb4wgA+NszrJ+08LlgQ==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/randombytes": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.0.3.tgz", @@ -39016,98 +38256,6 @@ "object-keys": "^1.1.0" } }, - "node_modules/reduce-extract": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/reduce-extract/-/reduce-extract-1.0.0.tgz", - "integrity": "sha1-Z/I4W+2mUGG19fQxJmLosIDKFSU=", - "dev": true, - "dependencies": { - "test-value": "^1.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/reduce-extract/node_modules/array-back": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/array-back/-/array-back-1.0.4.tgz", - "integrity": "sha1-ZEun8JX3/898Q7Xw3DnTwfA8Bjs=", - "dev": true, - "dependencies": { - "typical": "^2.6.0" - }, - "engines": { - "node": ">=0.12.0" - } - }, - "node_modules/reduce-extract/node_modules/test-value": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/test-value/-/test-value-1.1.0.tgz", - "integrity": "sha1-oJE29y7AQ9J8iTcHwrFZv6196T8=", - "dev": true, - "dependencies": { - "array-back": "^1.0.2", - "typical": "^2.4.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/reduce-flatten": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/reduce-flatten/-/reduce-flatten-3.0.1.tgz", - "integrity": "sha512-bYo+97BmUUOzg09XwfkwALt4PQH1M5L0wzKerBt6WLm3Fhdd43mMS89HiT1B9pJIqko/6lWx3OnV4J9f2Kqp5Q==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/reduce-unique": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/reduce-unique/-/reduce-unique-2.0.1.tgz", - "integrity": "sha512-x4jH/8L1eyZGR785WY+ePtyMNhycl1N2XOLxhCbzZFaqF4AXjLzqSxa2UHgJ2ZVR/HHyPOvl1L7xRnW8ye5MdA==", - "dev": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/reduce-without": { - "version": "1.0.1", - "resolved": 
"https://registry.npmjs.org/reduce-without/-/reduce-without-1.0.1.tgz", - "integrity": "sha1-aK0OrRGFXJo31OglbBW7+Hly/Iw=", - "dev": true, - "dependencies": { - "test-value": "^2.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/reduce-without/node_modules/array-back": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/array-back/-/array-back-1.0.4.tgz", - "integrity": "sha1-ZEun8JX3/898Q7Xw3DnTwfA8Bjs=", - "dev": true, - "dependencies": { - "typical": "^2.6.0" - }, - "engines": { - "node": ">=0.12.0" - } - }, - "node_modules/reduce-without/node_modules/test-value": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/test-value/-/test-value-2.1.0.tgz", - "integrity": "sha1-Edpv9nDzRxpztiXKTz/c97t0gpE=", - "dev": true, - "dependencies": { - "array-back": "^1.0.3", - "typical": "^2.6.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/redux": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/redux/-/redux-4.1.2.tgz", @@ -39280,31 +38428,6 @@ "url": "https://opencollective.com/unified" } }, - "node_modules/remarkable": { - "version": "1.7.4", - "resolved": "https://registry.npmjs.org/remarkable/-/remarkable-1.7.4.tgz", - "integrity": "sha512-e6NKUXgX95whv7IgddywbeN/ItCkWbISmc2DiqHJb0wTrqZIexqdco5b8Z3XZoo/48IdNVKM9ZCvTPJ4F5uvhg==", - "dev": true, - "dependencies": { - "argparse": "^1.0.10", - "autolinker": "~0.28.0" - }, - "bin": { - "remarkable": "bin/remarkable.js" - }, - "engines": { - "node": ">= 0.10.0" - } - }, - "node_modules/remarkable/node_modules/argparse": { - "version": "1.0.10", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", - "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", - "dev": true, - "dependencies": { - "sprintf-js": "~1.0.2" - } - }, "node_modules/remove-accents": { "version": "0.4.2", "resolved": "https://registry.npmjs.org/remove-accents/-/remove-accents-0.4.2.tgz", @@ -39329,24 +38452,6 @@ "strip-ansi": "^6.0.1" } }, - "node_modules/repeat-element": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/repeat-element/-/repeat-element-1.1.4.tgz", - "integrity": "sha512-LFiNfRcSu7KK3evMyYOuCzv3L10TW7yC1G2/+StMjK8Y6Vqd2MG7r/Qjw4ghtuCOjFvlnms/iMmLqpvW/ES/WQ==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/repeat-string": { - "version": "1.6.1", - "resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz", - "integrity": "sha1-jcrkcOHIirwtYA//Sndihtp15jc=", - "dev": true, - "engines": { - "node": ">=0.10" - } - }, "node_modules/replace-ext": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/replace-ext/-/replace-ext-2.0.0.tgz", @@ -40444,18 +39549,6 @@ "integrity": "sha512-edRH8mBKEWNVIVMKejNnuJxleqYE/ZSdcT8/Nem9/mmosx12pctd80s2Oy00KNZzrogMZS5mauK2/ymL1bvlvg==", "dev": true }, - "node_modules/set-getter": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/set-getter/-/set-getter-0.1.1.tgz", - "integrity": "sha512-9sVWOy+gthr+0G9DzqqLaYNA7+5OKkSmcqjL9cBpDEaZrr3ShQlyX2cZ/O/ozE41oxn/Tt0LGEM/w4Rub3A3gw==", - "dev": true, - "dependencies": { - "to-object-path": "^0.3.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/setprototypeof": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", @@ -40931,37 +40024,6 @@ "npm": ">= 3.0.0" } }, - "node_modules/sort-array": { - "version": "4.1.4", - "resolved": 
"https://registry.npmjs.org/sort-array/-/sort-array-4.1.4.tgz", - "integrity": "sha512-GVFN6Y1sHKrWaSYOJTk9093ZnrBMc9sP3nuhANU44S4xg3rE6W5Z5WyamuT8VpMBbssnetx5faKCua0LEmUnSw==", - "dev": true, - "dependencies": { - "array-back": "^5.0.0", - "typical": "^6.0.1" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/sort-array/node_modules/array-back": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/array-back/-/array-back-5.0.0.tgz", - "integrity": "sha512-kgVWwJReZWmVuWOQKEOohXKJX+nD02JAZ54D1RRWlv8L0NebauKAaFxACKzB74RTclt1+WNz5KHaLRDAPZbDEw==", - "dev": true, - "engines": { - "node": ">=10" - } - }, - "node_modules/sort-array/node_modules/typical": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/typical/-/typical-6.0.1.tgz", - "integrity": "sha512-+g3NEp7fJLe9DPa1TArHm9QAA7YciZmWnfAqEaFrBihQ7epOv9i99rjtgb6Iz0wh3WuQDjsCTDfgRoGnmHN81A==", - "dev": true, - "engines": { - "node": ">=10" - } - }, "node_modules/sortobject": { "version": "4.16.0", "resolved": "https://registry.npmjs.org/sortobject/-/sortobject-4.16.0.tgz", @@ -41298,30 +40360,6 @@ "streamx": "^2.13.2" } }, - "node_modules/stream-connect": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/stream-connect/-/stream-connect-1.0.2.tgz", - "integrity": "sha1-GLyB8u2zW4tdmoAJIAqYUxRCipc=", - "dev": true, - "dependencies": { - "array-back": "^1.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/stream-connect/node_modules/array-back": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/array-back/-/array-back-1.0.4.tgz", - "integrity": "sha1-ZEun8JX3/898Q7Xw3DnTwfA8Bjs=", - "dev": true, - "dependencies": { - "typical": "^2.6.0" - }, - "engines": { - "node": ">=0.12.0" - } - }, "node_modules/stream-events": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/stream-events/-/stream-events-1.0.5.tgz", @@ -41343,15 +40381,6 @@ "mixme": "^0.5.1" } }, - "node_modules/stream-via": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/stream-via/-/stream-via-1.0.4.tgz", - "integrity": "sha512-DBp0lSvX5G9KGRDTkR/R+a29H+Wk2xItOF+MpZLLNDWbEV9tGPnqLPxHEYjmiz8xGtJHRIqmI+hCjmNzqoA4nQ==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/streamifier": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/streamifier/-/streamifier-0.1.1.tgz", @@ -41545,15 +40574,6 @@ "node": ">=8" } }, - "node_modules/strip-color": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/strip-color/-/strip-color-0.1.0.tgz", - "integrity": "sha1-EG9l09PmotlAHKwOsM6LinArT3s=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/strip-final-newline": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", @@ -42118,34 +41138,6 @@ "resolved": "https://registry.npmjs.org/synchronous-promise/-/synchronous-promise-2.0.17.tgz", "integrity": "sha512-AsS729u2RHUfEra9xJrE39peJcc2stq2+poBXX8bcM08Y6g9j/i/PUzwNQqkaJde7Ntg1TO7bSREbR5sdosQ+g==" }, - "node_modules/table-layout": { - "version": "0.4.5", - "resolved": "https://registry.npmjs.org/table-layout/-/table-layout-0.4.5.tgz", - "integrity": "sha512-zTvf0mcggrGeTe/2jJ6ECkJHAQPIYEwDoqsiqBjI24mvRmQbInK5jq33fyypaCBxX08hMkfmdOqj6haT33EqWw==", - "dev": true, - "dependencies": { - "array-back": "^2.0.0", - "deep-extend": "~0.6.0", - "lodash.padend": "^4.6.1", - "typical": "^2.6.1", - "wordwrapjs": "^3.0.0" - }, - "engines": { - "node": ">=4.0.0" - } - }, - 
"node_modules/table-layout/node_modules/array-back": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/array-back/-/array-back-2.0.0.tgz", - "integrity": "sha512-eJv4pLLufP3g5kcZry0j6WXpIbzYw9GUB4mVJZno9wfwiBxbizTnHCw3VJb07cBihbFX48Y7oSrW9y+gt4glyw==", - "dev": true, - "dependencies": { - "typical": "^2.6.1" - }, - "engines": { - "node": ">=4" - } - }, "node_modules/tapable": { "version": "2.2.1", "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz", @@ -42336,12 +41328,6 @@ "node": ">=8" } }, - "node_modules/temp-path": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/temp-path/-/temp-path-1.0.0.tgz", - "integrity": "sha1-JLFUOXOrRCiW2a02fdnL2/r+kYs=", - "dev": true - }, "node_modules/temp/node_modules/rimraf": { "version": "2.6.3", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.6.3.tgz", @@ -42477,31 +41463,6 @@ "node": ">=8" } }, - "node_modules/test-value": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/test-value/-/test-value-3.0.0.tgz", - "integrity": "sha512-sVACdAWcZkSU9x7AOmJo5TqE+GyNJknHaHsMrR6ZnhjVlVN9Yx6FjHrsKZ3BjIpPCT68zYesPWkakrNupwfOTQ==", - "dev": true, - "dependencies": { - "array-back": "^2.0.0", - "typical": "^2.6.1" - }, - "engines": { - "node": ">=4.0.0" - } - }, - "node_modules/test-value/node_modules/array-back": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/array-back/-/array-back-2.0.0.tgz", - "integrity": "sha512-eJv4pLLufP3g5kcZry0j6WXpIbzYw9GUB4mVJZno9wfwiBxbizTnHCw3VJb07cBihbFX48Y7oSrW9y+gt4glyw==", - "dev": true, - "dependencies": { - "typical": "^2.6.1" - }, - "engines": { - "node": ">=4" - } - }, "node_modules/text-table": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", @@ -42689,30 +41650,6 @@ "node": ">=4" } }, - "node_modules/to-object-path": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/to-object-path/-/to-object-path-0.3.0.tgz", - "integrity": "sha1-KXWIt7Dn4KwI4E5nL4XB9JmeF68=", - "dev": true, - "dependencies": { - "kind-of": "^3.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/to-object-path/node_modules/kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "dev": true, - "dependencies": { - "is-buffer": "^1.1.5" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/to-regex-range": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", @@ -42801,12 +41738,6 @@ "resolved": "https://registry.npmjs.org/token-stream/-/token-stream-1.0.0.tgz", "integrity": "sha1-zCAOqyYT9BZtJ/+a/HylbUnfbrQ=" }, - "node_modules/toml": { - "version": "2.3.6", - "resolved": "https://registry.npmjs.org/toml/-/toml-2.3.6.tgz", - "integrity": "sha512-gVweAectJU3ebq//Ferr2JUY4WKSDe5N+z0FvjDncLGyHmIDoxgY/2Ie4qfEIDm4IS7OA6Rmdm7pdEEdMcV/xQ==", - "dev": true - }, "node_modules/topo": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/topo/-/topo-3.0.3.tgz", @@ -43756,15 +42687,6 @@ "node": ">=12" } }, - "node_modules/walk-back": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/walk-back/-/walk-back-5.1.0.tgz", - "integrity": "sha512-Uhxps5yZcVNbLEAnb+xaEEMdgTXl9qAQDzKYejG2AZ7qPwRQ81lozY9ECDbjLPNWm7YsO1IK5rsP1KoQzXAcGA==", - "dev": true, - "engines": { - "node": ">=12.17" - } - }, "node_modules/walkdir": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/walkdir/-/walkdir-0.4.1.tgz", @@ 
-44459,28 +43381,6 @@ "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz", "integrity": "sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus=" }, - "node_modules/wordwrapjs": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/wordwrapjs/-/wordwrapjs-3.0.0.tgz", - "integrity": "sha512-mO8XtqyPvykVCsrwj5MlOVWvSnCdT+C+QVbm6blradR7JExAhbkZ7hZ9A+9NUtwzSqrlUo9a67ws0EiILrvRpw==", - "dev": true, - "dependencies": { - "reduce-flatten": "^1.0.1", - "typical": "^2.6.1" - }, - "engines": { - "node": ">=4.0.0" - } - }, - "node_modules/wordwrapjs/node_modules/reduce-flatten": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/reduce-flatten/-/reduce-flatten-1.0.1.tgz", - "integrity": "sha1-JYx479FT3fk8tWEjf2EYTzaW4yc=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/worker-loader": { "version": "3.0.8", "resolved": "https://registry.npmjs.org/worker-loader/-/worker-loader-3.0.8.tgz", @@ -54316,8 +53216,6 @@ "@types/chai": "^4.3.0", "@types/node": "^18.17.4", "chai": "^4.3.6", - "jsdoc-to-markdown": "^7.1.0", - "markdown-toc": "^1.2.0", "mocha": "^10.2.0", "nyc": "^15.1.0", "typescript": "^5.0.4" @@ -64069,23 +62967,6 @@ "integrity": "sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==", "dev": true }, - "ansi-escape-sequences": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/ansi-escape-sequences/-/ansi-escape-sequences-4.1.0.tgz", - "integrity": "sha512-dzW9kHxH011uBsidTXd14JXgzye/YLb2LzeKZ4bsgl/Knwx8AtbSFkkGxagdNOoh0DlqHCmfiEjWKBaqjOanVw==", - "dev": true, - "requires": { - "array-back": "^3.0.1" - }, - "dependencies": { - "array-back": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/array-back/-/array-back-3.1.0.tgz", - "integrity": "sha512-TkuxA4UCOvxuDK6NZYXCalszEzj+TLszyASooky+i742l9TqsOdYCMJJupxRic61hwquNtppB3hgcuq9SVSH1Q==", - "dev": true - } - } - }, "ansi-escapes": { "version": "4.3.2", "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", @@ -64108,15 +62989,6 @@ "resolved": "https://registry.npmjs.org/ansi-html-community/-/ansi-html-community-0.0.8.tgz", "integrity": "sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw==" }, - "ansi-red": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/ansi-red/-/ansi-red-0.1.1.tgz", - "integrity": "sha1-jGOPnRCAgAo1PJwoyKgcpHBdlGw=", - "dev": true, - "requires": { - "ansi-wrap": "0.1.0" - } - }, "ansi-regex": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", @@ -64145,12 +63017,6 @@ } } }, - "ansi-wrap": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/ansi-wrap/-/ansi-wrap-0.1.0.tgz", - "integrity": "sha1-qCJQ3bABXponyoLoLqYDu/pF768=", - "dev": true - }, "anymatch": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", @@ -64284,12 +63150,6 @@ "deep-equal": "^2.0.5" } }, - "array-back": { - "version": "6.2.2", - "resolved": "https://registry.npmjs.org/array-back/-/array-back-6.2.2.tgz", - "integrity": "sha512-gUAZ7HPyb4SJczXAMUXMGAvI976JoK3qEx9v1FTmeYuJj0IBiaKttG1ydtGKdkfqWkIkouke7nG8ufGy77+Cvw==", - "dev": true - }, "array-buffer-byte-length": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.0.tgz", @@ -64478,15 +63338,6 @@ "resolved": "https://registry.npmjs.org/auth0-id-generator/-/auth0-id-generator-0.2.0.tgz", "integrity": 
"sha512-sJVZrGls/XB7TEsAovv6GsGwsjDBhBy014w+9x/DNZH8OTV8F/uioMmT68ADWtfbvfkJaNCYNjRs1dOVFyNqbQ==" }, - "autolinker": { - "version": "0.28.1", - "resolved": "https://registry.npmjs.org/autolinker/-/autolinker-0.28.1.tgz", - "integrity": "sha1-BlK0kYgYefB3XazgzcoyM5QqTkc=", - "dev": true, - "requires": { - "gulp-header": "^1.7.1" - } - }, "autoprefixer": { "version": "9.8.8", "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-9.8.8.tgz", @@ -65533,25 +64384,6 @@ "object-hash": "^2.2.0" } }, - "cache-point": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/cache-point/-/cache-point-2.0.0.tgz", - "integrity": "sha512-4gkeHlFpSKgm3vm2gJN5sPqfmijYRFYCQ6tv5cLw0xVmT6r1z1vd4FNnpuOREco3cBs1G709sZ72LdgddKvL5w==", - "dev": true, - "requires": { - "array-back": "^4.0.1", - "fs-then-native": "^2.0.0", - "mkdirp2": "^1.0.4" - }, - "dependencies": { - "array-back": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/array-back/-/array-back-4.0.2.tgz", - "integrity": "sha512-NbdMezxqf94cnNfWLL7V/im0Ub+Anbb0IoZhvzie8+4HJ4nMQuzHuy49FkGYCJK2yAloZ3meiB6AVMClbrI1vg==", - "dev": true - } - } - }, "cachedir": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/cachedir/-/cachedir-2.3.0.tgz", @@ -66043,22 +64875,6 @@ "resolved": "https://registry.npmjs.org/co-use/-/co-use-1.1.0.tgz", "integrity": "sha1-xrs83xDLc17Kqdru2kbXJclKTmI=" }, - "coffee-script": { - "version": "1.12.7", - "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.12.7.tgz", - "integrity": "sha512-fLeEhqwymYat/MpTPUjSKHVYYl0ec2mOyALEMLmzr5i1isuG+6jfI2j2d5oBO3VIzgUXgBVIcOT9uH1TFxBckw==", - "dev": true - }, - "collect-all": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/collect-all/-/collect-all-1.0.4.tgz", - "integrity": "sha512-RKZhRwJtJEP5FWul+gkSMEnaK6H3AGPTTWOiRimCcs+rc/OmQE3Yhy1Q7A7KsdkG3ZXVdZq68Y6ONSdvkeEcKA==", - "dev": true, - "requires": { - "stream-connect": "^1.0.2", - "stream-via": "^1.0.4" - } - }, "color-convert": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", @@ -66102,79 +64918,6 @@ "delayed-stream": "~1.0.0" } }, - "command-line-args": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/command-line-args/-/command-line-args-5.2.1.tgz", - "integrity": "sha512-H4UfQhZyakIjC74I9d34fGYDwk3XpSr17QhEd0Q3I9Xq1CETHo4Hcuo87WyWHpAF1aSLjLRf5lD9ZGX2qStUvg==", - "dev": true, - "requires": { - "array-back": "^3.1.0", - "find-replace": "^3.0.0", - "lodash.camelcase": "^4.3.0", - "typical": "^4.0.0" - }, - "dependencies": { - "array-back": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/array-back/-/array-back-3.1.0.tgz", - "integrity": "sha512-TkuxA4UCOvxuDK6NZYXCalszEzj+TLszyASooky+i742l9TqsOdYCMJJupxRic61hwquNtppB3hgcuq9SVSH1Q==", - "dev": true - }, - "typical": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/typical/-/typical-4.0.0.tgz", - "integrity": "sha512-VAH4IvQ7BDFYglMd7BPRDfLgxZZX4O4TFcRDA6EN5X7erNJJq+McIEp8np9aVtxrCJ6qx4GTYVfOWNjcqwZgRw==", - "dev": true - } - } - }, - "command-line-tool": { - "version": "0.8.0", - "resolved": "https://registry.npmjs.org/command-line-tool/-/command-line-tool-0.8.0.tgz", - "integrity": "sha512-Xw18HVx/QzQV3Sc5k1vy3kgtOeGmsKIqwtFFoyjI4bbcpSgnw2CWVULvtakyw4s6fhyAdI6soQQhXc2OzJy62g==", - "dev": true, - "requires": { - "ansi-escape-sequences": "^4.0.0", - "array-back": "^2.0.0", - "command-line-args": "^5.0.0", - "command-line-usage": "^4.1.0", - "typical": "^2.6.1" - }, - "dependencies": { - "array-back": 
{ - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/array-back/-/array-back-2.0.0.tgz", - "integrity": "sha512-eJv4pLLufP3g5kcZry0j6WXpIbzYw9GUB4mVJZno9wfwiBxbizTnHCw3VJb07cBihbFX48Y7oSrW9y+gt4glyw==", - "dev": true, - "requires": { - "typical": "^2.6.1" - } - } - } - }, - "command-line-usage": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/command-line-usage/-/command-line-usage-4.1.0.tgz", - "integrity": "sha512-MxS8Ad995KpdAC0Jopo/ovGIroV/m0KHwzKfXxKag6FHOkGsH8/lv5yjgablcRxCJJC0oJeUMuO/gmaq+Wq46g==", - "dev": true, - "requires": { - "ansi-escape-sequences": "^4.0.0", - "array-back": "^2.0.0", - "table-layout": "^0.4.2", - "typical": "^2.6.1" - }, - "dependencies": { - "array-back": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/array-back/-/array-back-2.0.0.tgz", - "integrity": "sha512-eJv4pLLufP3g5kcZry0j6WXpIbzYw9GUB4mVJZno9wfwiBxbizTnHCw3VJb07cBihbFX48Y7oSrW9y+gt4glyw==", - "dev": true, - "requires": { - "typical": "^2.6.1" - } - } - } - }, "commander": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", @@ -66185,12 +64928,6 @@ "resolved": "https://registry.npmjs.org/common-path-prefix/-/common-path-prefix-3.0.0.tgz", "integrity": "sha512-QE33hToZseCH3jS0qN96O/bSh3kaw/h+Tq7ngyY9eWDUnTlTNUyqfqvCXioLe5Na5jFsL78ra/wuBU4iuEgd4w==" }, - "common-sequence": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/common-sequence/-/common-sequence-2.0.2.tgz", - "integrity": "sha512-jAg09gkdkrDO9EWTdXfv80WWH3yeZl5oT69fGfedBNS9pXUKYInVJ1bJ+/ht2+Moeei48TmSbQDYMc8EOx9G0g==", - "dev": true - }, "common-tags": { "version": "1.8.2", "resolved": "https://registry.npmjs.org/common-tags/-/common-tags-1.8.2.tgz", @@ -66322,15 +65059,6 @@ } } }, - "concat-with-sourcemaps": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/concat-with-sourcemaps/-/concat-with-sourcemaps-1.1.0.tgz", - "integrity": "sha512-4gEjHJFT9e+2W/77h/DS5SGUgwDaOwprX8L/gl5+3ixnzkVJJsZWDSelmN3Oilw3LNDZjZV0yqH1hLG3k6nghg==", - "dev": true, - "requires": { - "source-map": "^0.6.1" - } - }, "config": { "version": "1.31.0", "resolved": "https://registry.npmjs.org/config/-/config-1.31.0.tgz", @@ -66359,23 +65087,6 @@ "proto-list": "~1.2.1" } }, - "config-master": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/config-master/-/config-master-3.1.0.tgz", - "integrity": "sha1-ZnZjWQUFooO/JqSE1oSJ10xUhdo=", - "dev": true, - "requires": { - "walk-back": "^2.0.1" - }, - "dependencies": { - "walk-back": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/walk-back/-/walk-back-2.0.1.tgz", - "integrity": "sha1-VU4qnYdPrEeoywBr9EwvDEmYoKQ=", - "dev": true - } - } - }, "connect": { "version": "3.7.0", "resolved": "https://registry.npmjs.org/connect/-/connect-3.7.0.tgz", @@ -68048,12 +66759,6 @@ "integrity": "sha1-gGZJMmzqp8qjMG112YXqJ0i6kTw=", "dev": true }, - "diacritics-map": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/diacritics-map/-/diacritics-map-0.1.0.tgz", - "integrity": "sha1-bfwP+dAQAKLt8oZTccrDFulJd68=", - "dev": true - }, "dicer": { "version": "0.2.5", "resolved": "https://registry.npmjs.org/dicer/-/dicer-0.2.5.tgz", @@ -68126,26 +66831,6 @@ "integrity": "sha512-UaachK0eL7neLyL2emXptVGyggGJKowJd24rqCZi9N2CDxuCQTk7wdePTgS7py4HMh+qxUI6zzTVinVwCDTbIA==", "dev": true }, - "dmd": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/dmd/-/dmd-6.1.0.tgz", - "integrity": 
"sha512-0zQIJ873gay1scCTFZvHPWM9mVJBnaylB2NQDI8O9u8O32m00Jb6uxDKexZm8hjTRM7RiWe0FJ32pExHoXdwoQ==", - "dev": true, - "requires": { - "array-back": "^6.2.2", - "cache-point": "^2.0.0", - "common-sequence": "^2.0.2", - "file-set": "^4.0.2", - "handlebars": "^4.7.7", - "marked": "^4.0.12", - "object-get": "^2.1.1", - "reduce-flatten": "^3.0.1", - "reduce-unique": "^2.0.1", - "reduce-without": "^1.0.1", - "test-value": "^3.0.0", - "walk-back": "^5.1.0" - } - }, "dnd-core": { "version": "11.1.3", "resolved": "https://registry.npmjs.org/dnd-core/-/dnd-core-11.1.3.tgz", @@ -69731,45 +68416,6 @@ "resolved": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz", "integrity": "sha1-BjJjj42HfMghB9MKD/8aF8uhzQw=" }, - "expand-range": { - "version": "1.8.2", - "resolved": "https://registry.npmjs.org/expand-range/-/expand-range-1.8.2.tgz", - "integrity": "sha1-opnv/TNf4nIeuujiV+x5ZE/IUzc=", - "dev": true, - "requires": { - "fill-range": "^2.1.0" - }, - "dependencies": { - "fill-range": { - "version": "2.2.4", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-2.2.4.tgz", - "integrity": "sha512-cnrcCbj01+j2gTG921VZPnHbjmdAf8oQV/iGeV2kZxGSyfYjjTyY79ErsK1WJWMpw6DaApEX72binqJE+/d+5Q==", - "dev": true, - "requires": { - "is-number": "^2.1.0", - "isobject": "^2.0.0", - "randomatic": "^3.0.0", - "repeat-element": "^1.1.2", - "repeat-string": "^1.5.2" - } - }, - "isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=", - "dev": true - }, - "isobject": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/isobject/-/isobject-2.1.0.tgz", - "integrity": "sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk=", - "dev": true, - "requires": { - "isarray": "1.0.0" - } - } - } - }, "expose-loader": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/expose-loader/-/expose-loader-4.1.0.tgz", @@ -70038,15 +68684,6 @@ "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" }, - "extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "dev": true, - "requires": { - "is-extendable": "^0.1.0" - } - }, "extract-zip": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/extract-zip/-/extract-zip-2.0.1.tgz", @@ -70276,24 +68913,6 @@ "flat-cache": "^3.0.4" } }, - "file-set": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/file-set/-/file-set-4.0.2.tgz", - "integrity": "sha512-fuxEgzk4L8waGXaAkd8cMr73Pm0FxOVkn8hztzUW7BAHhOGH90viQNXbiOsnecCWmfInqU6YmAMwxRMdKETceQ==", - "dev": true, - "requires": { - "array-back": "^5.0.0", - "glob": "^7.1.6" - }, - "dependencies": { - "array-back": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/array-back/-/array-back-5.0.0.tgz", - "integrity": "sha512-kgVWwJReZWmVuWOQKEOohXKJX+nD02JAZ54D1RRWlv8L0NebauKAaFxACKzB74RTclt1+WNz5KHaLRDAPZbDEw==", - "dev": true - } - } - }, "file-system-cache": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/file-system-cache/-/file-system-cache-2.3.0.tgz", @@ -70454,23 +69073,6 @@ } } }, - "find-replace": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/find-replace/-/find-replace-3.0.0.tgz", - "integrity": "sha512-6Tb2myMioCAgv5kfvP5/PkZZ/ntTpVK39fHY7WkWBgvbeE+VHd/tZuZ4mrC+bxh4cfOZeYKVPaJIZtZXV7GNCQ==", - "dev": true, - "requires": { - 
"array-back": "^3.0.1" - }, - "dependencies": { - "array-back": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/array-back/-/array-back-3.1.0.tgz", - "integrity": "sha512-TkuxA4UCOvxuDK6NZYXCalszEzj+TLszyASooky+i742l9TqsOdYCMJJupxRic61hwquNtppB3hgcuq9SVSH1Q==", - "dev": true - } - } - }, "find-up": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", @@ -70558,12 +69160,6 @@ "is-callable": "^1.1.3" } }, - "for-in": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/for-in/-/for-in-1.0.2.tgz", - "integrity": "sha1-gQaNKVqBQuwKxybG4iAMMPttXoA=", - "dev": true - }, "foreach": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/foreach/-/foreach-2.0.5.tgz", @@ -70792,12 +69388,6 @@ "integrity": "sha512-GNanXlVr2pf02+sPN40XN8HG+ePaNcvM0q5mZBd668Obwb0yD5GiUbZOFgwn8kGMY6I3mdyDJzieUy3PTYyTRA==", "dev": true }, - "fs-then-native": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/fs-then-native/-/fs-then-native-2.0.0.tgz", - "integrity": "sha1-GaEk2U2QwiyOBF8ujdbr6jbUjGc=", - "dev": true - }, "fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", @@ -71623,40 +70213,6 @@ "lodash": "^4.17.15" } }, - "gray-matter": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/gray-matter/-/gray-matter-2.1.1.tgz", - "integrity": "sha1-MELZrewqHe1qdwep7SOA+KF6Qw4=", - "dev": true, - "requires": { - "ansi-red": "^0.1.1", - "coffee-script": "^1.12.4", - "extend-shallow": "^2.0.1", - "js-yaml": "^3.8.1", - "toml": "^2.3.2" - }, - "dependencies": { - "argparse": { - "version": "1.0.10", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", - "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", - "dev": true, - "requires": { - "sprintf-js": "~1.0.2" - } - }, - "js-yaml": { - "version": "3.14.1", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", - "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", - "dev": true, - "requires": { - "argparse": "^1.0.7", - "esprima": "^4.0.0" - } - } - } - }, "gtoken": { "version": "5.3.1", "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.3.1.tgz", @@ -71681,59 +70237,6 @@ } } }, - "gulp-header": { - "version": "1.8.12", - "resolved": "https://registry.npmjs.org/gulp-header/-/gulp-header-1.8.12.tgz", - "integrity": "sha512-lh9HLdb53sC7XIZOYzTXM4lFuXElv3EVkSDhsd7DoJBj7hm+Ni7D3qYbb+Rr8DuM8nRanBvkVO9d7askreXGnQ==", - "dev": true, - "requires": { - "concat-with-sourcemaps": "*", - "lodash.template": "^4.4.0", - "through2": "^2.0.0" - }, - "dependencies": { - "isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=", - "dev": true - }, - "readable-stream": { - "version": "2.3.7", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", - "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", - "dev": true, - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": 
"sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "dev": true, - "requires": { - "safe-buffer": "~5.1.0" - } - }, - "through2": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", - "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", - "dev": true, - "requires": { - "readable-stream": "~2.3.6", - "xtend": "~4.0.1" - } - } - } - }, "gulp-sort": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/gulp-sort/-/gulp-sort-2.0.0.tgz", @@ -72788,12 +71291,6 @@ "object-assign": "^4.1.1" } }, - "is-extendable": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", - "integrity": "sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik=", - "dev": true - }, "is-extglob": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", @@ -72880,26 +71377,6 @@ "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.2.tgz", "integrity": "sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==" }, - "is-number": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-2.1.0.tgz", - "integrity": "sha1-Afy7s5NGOlSPL0ZszhbezknbkI8=", - "dev": true, - "requires": { - "kind-of": "^3.0.2" - }, - "dependencies": { - "kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "dev": true, - "requires": { - "is-buffer": "^1.1.5" - } - } - } - }, "is-number-object": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.6.tgz", @@ -73801,52 +72278,6 @@ } } }, - "jsdoc-api": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/jsdoc-api/-/jsdoc-api-7.2.0.tgz", - "integrity": "sha512-93YDnlm/OYTlLOFeNs4qAv0RBCJ0kGj67xQaWy8wrbk97Rw1EySitoOTHsTHXPEs3uyx2IStPKGrbE7LTnZXbA==", - "dev": true, - "requires": { - "array-back": "^6.2.2", - "cache-point": "^2.0.0", - "collect-all": "^1.0.4", - "file-set": "^4.0.2", - "fs-then-native": "^2.0.0", - "jsdoc": "^4.0.0", - "object-to-spawn-args": "^2.0.1", - "temp-path": "^1.0.0", - "walk-back": "^5.1.0" - } - }, - "jsdoc-parse": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/jsdoc-parse/-/jsdoc-parse-6.1.0.tgz", - "integrity": "sha512-n/hDGQJa69IBun1yZAjqzV4gVR41+flZ3bIlm9fKvNe2Xjsd1/+zCo2+R9ls8LxtePgIWbpA1jU7xkB2lRdLLg==", - "dev": true, - "requires": { - "array-back": "^6.2.2", - "lodash.omit": "^4.5.0", - "lodash.pick": "^4.4.0", - "reduce-extract": "^1.0.0", - "sort-array": "^4.1.4", - "test-value": "^3.0.0" - } - }, - "jsdoc-to-markdown": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/jsdoc-to-markdown/-/jsdoc-to-markdown-7.1.1.tgz", - "integrity": "sha512-CI86d63xAVNO+ENumWwmJ034lYe5iGU5GwjtTA11EuphP9tpnoi4hrKgR/J8uME0D+o4KUpVfwX1fjZhc8dEtg==", - "dev": true, - "requires": { - "array-back": "^6.2.2", - "command-line-tool": "^0.8.0", - "config-master": "^3.1.0", - "dmd": "^6.1.0", - "jsdoc-api": "^7.1.1", - "jsdoc-parse": "^6.1.0", - "walk-back": "^5.1.0" - } - }, "jsdom": { "version": "19.0.0", "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-19.0.0.tgz", @@ -74453,15 +72884,6 @@ "integrity": "sha1-eZllXoZGwX8In90YfRUNMyTVRRM=", "dev": true }, - "lazy-cache": { - "version": "2.0.2", - "resolved": 
"https://registry.npmjs.org/lazy-cache/-/lazy-cache-2.0.2.tgz", - "integrity": "sha1-uRkKT5EzVGlIQIWfio9whNiCImQ=", - "dev": true, - "requires": { - "set-getter": "^0.1.0" - } - }, "lazy-universal-dotenv": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/lazy-universal-dotenv/-/lazy-universal-dotenv-4.0.0.tgz", @@ -74661,18 +73083,6 @@ "uc.micro": "^1.0.1" } }, - "list-item": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/list-item/-/list-item-1.1.1.tgz", - "integrity": "sha1-DGXQDih8tmPMs8s4Sad+iewmilY=", - "dev": true, - "requires": { - "expand-range": "^1.8.1", - "extend-shallow": "^2.0.1", - "is-number": "^2.1.0", - "repeat-string": "^1.5.2" - } - }, "listr2": { "version": "3.14.0", "resolved": "https://registry.npmjs.org/listr2/-/listr2-3.14.0.tgz", @@ -74841,12 +73251,6 @@ "resolved": "https://registry.npmjs.org/lodash._objecttypes/-/lodash._objecttypes-2.4.1.tgz", "integrity": "sha1-fAt/admKH3ZSn4kLDNsbTf7BHBE=" }, - "lodash._reinterpolate": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/lodash._reinterpolate/-/lodash._reinterpolate-3.0.0.tgz", - "integrity": "sha1-DM8tiRZq8Ds2Y8eWU4t1rG4RTZ0=", - "dev": true - }, "lodash._releasearray": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash._releasearray/-/lodash._releasearray-2.4.1.tgz", @@ -75045,29 +73449,11 @@ "resolved": "https://registry.npmjs.org/lodash.noop/-/lodash.noop-2.4.1.tgz", "integrity": "sha1-T7VPgWZS5a4Q6PcvcXo4jHMmU4o=" }, - "lodash.omit": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/lodash.omit/-/lodash.omit-4.5.0.tgz", - "integrity": "sha1-brGa5aHuHdnfC5aeZs4Lf6MLXmA=", - "dev": true - }, "lodash.once": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz", "integrity": "sha1-DdOXEhPHxW34gJd9UEyI+0cal6w=" }, - "lodash.padend": { - "version": "4.6.1", - "resolved": "https://registry.npmjs.org/lodash.padend/-/lodash.padend-4.6.1.tgz", - "integrity": "sha1-U8y6BH0G4VjTEfRdpiX05J5vFm4=", - "dev": true - }, - "lodash.pick": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/lodash.pick/-/lodash.pick-4.4.0.tgz", - "integrity": "sha1-UvBWEP/53tQiYRRB7R/BI6AwAbM=", - "dev": true - }, "lodash.set": { "version": "4.3.2", "resolved": "https://registry.npmjs.org/lodash.set/-/lodash.set-4.3.2.tgz", @@ -75087,25 +73473,6 @@ "lodash._isnative": "~2.4.1" } }, - "lodash.template": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/lodash.template/-/lodash.template-4.5.0.tgz", - "integrity": "sha512-84vYFxIkmidUiFxidA/KjjH9pAycqW+h980j7Fuz5qxRtO9pgB7MDFTdys1N7A5mcucRiDyEq4fusljItR1T/A==", - "dev": true, - "requires": { - "lodash._reinterpolate": "^3.0.0", - "lodash.templatesettings": "^4.0.0" - } - }, - "lodash.templatesettings": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/lodash.templatesettings/-/lodash.templatesettings-4.2.0.tgz", - "integrity": "sha512-stgLz+i3Aa9mZgnjr/O+v9ruKZsPsndy7qPZOchbqk2cnTU1ZaldKK+v7m54WoKIyxiuMZTKT2H81F8BeAc3ZQ==", - "dev": true, - "requires": { - "lodash._reinterpolate": "^3.0.0" - } - }, "lodash.throttle": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/lodash.throttle/-/lodash.throttle-4.1.1.tgz", @@ -75449,38 +73816,12 @@ "integrity": "sha512-jRW30YGywD2ESXDc+l17AiritL0uVaSnWsb26f+68qaW9zgbIIr1f4v2Nsvc0+s0Z2N3uX6t/yAw7BwCQ1wMsA==", "requires": {} }, - "markdown-link": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/markdown-link/-/markdown-link-0.1.1.tgz", - "integrity": 
"sha1-MsXGUZmmRXMWMi0eQinRNAfIx88=", - "dev": true - }, "markdown-to-jsx": { "version": "7.2.1", "resolved": "https://registry.npmjs.org/markdown-to-jsx/-/markdown-to-jsx-7.2.1.tgz", "integrity": "sha512-9HrdzBAo0+sFz9ZYAGT5fB8ilzTW+q6lPocRxrIesMO+aB40V9MgFfbfMXxlGjf22OpRy+IXlvVaQenicdpgbg==", "requires": {} }, - "markdown-toc": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/markdown-toc/-/markdown-toc-1.2.0.tgz", - "integrity": "sha512-eOsq7EGd3asV0oBfmyqngeEIhrbkc7XVP63OwcJBIhH2EpG2PzFcbZdhy1jutXSlRBBVMNXHvMtSr5LAxSUvUg==", - "dev": true, - "requires": { - "concat-stream": "^1.5.2", - "diacritics-map": "^0.1.0", - "gray-matter": "^2.1.0", - "lazy-cache": "^2.0.2", - "list-item": "^1.1.1", - "markdown-link": "^0.1.1", - "minimist": "^1.2.0", - "mixin-deep": "^1.1.3", - "object.pick": "^1.2.0", - "remarkable": "^1.7.1", - "repeat-string": "^1.6.1", - "strip-color": "^0.1.0" - } - }, "marked": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/marked/-/marked-4.1.0.tgz", @@ -75508,12 +73849,6 @@ "integrity": "sha1-+4lBvl9evol55xjmJzsXjlhpRWU=", "dev": true }, - "math-random": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/math-random/-/math-random-1.0.4.tgz", - "integrity": "sha512-rUxjysqif/BZQH2yhd5Aaq7vXMSx9NdEsQcyA07uEzIvxgI7zIr33gGsh+RU0/XjmQpCW7RsVof1vlkvQVCK5A==", - "dev": true - }, "mathjax": { "version": "2.7.9", "resolved": "https://registry.npmjs.org/mathjax/-/mathjax-2.7.9.tgz", @@ -75848,27 +74183,6 @@ } } }, - "mixin-deep": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/mixin-deep/-/mixin-deep-1.3.2.tgz", - "integrity": "sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA==", - "dev": true, - "requires": { - "for-in": "^1.0.2", - "is-extendable": "^1.0.1" - }, - "dependencies": { - "is-extendable": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-1.0.1.tgz", - "integrity": "sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==", - "dev": true, - "requires": { - "is-plain-object": "^2.0.4" - } - } - } - }, "mixme": { "version": "0.5.4", "resolved": "https://registry.npmjs.org/mixme/-/mixme-0.5.4.tgz", @@ -75895,12 +74209,6 @@ "resolved": "https://registry.npmjs.org/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz", "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==" }, - "mkdirp2": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/mkdirp2/-/mkdirp2-1.0.5.tgz", - "integrity": "sha512-xOE9xbICroUDmG1ye2h4bZ8WBie9EGmACaco8K8cx6RlkJJrxGIqjGqztAI+NMhexXBcdGbSEzI6N3EJPevxZw==", - "dev": true - }, "mmdb-lib": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/mmdb-lib/-/mmdb-lib-2.0.2.tgz", @@ -76798,12 +75106,6 @@ "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=" }, - "object-get": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/object-get/-/object-get-2.1.1.tgz", - "integrity": "sha512-7n4IpLMzGGcLEMiQKsNR7vCe+N5E9LORFrtNUVy4sO3dj9a3HedZCxEL2T7QuLhcHN1NBuBsMOKaOsAYI9IIvg==", - "dev": true - }, "object-hash": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-2.2.0.tgz", @@ -76828,12 +75130,6 @@ "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", "integrity": 
"sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==" }, - "object-to-spawn-args": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/object-to-spawn-args/-/object-to-spawn-args-2.0.1.tgz", - "integrity": "sha512-6FuKFQ39cOID+BMZ3QaphcC8Y4cw6LXBLyIgPU+OhIYwviJamPAn+4mITapnSBQrejB+NNp+FMskhD8Cq+Ys3w==", - "dev": true - }, "object.assign": { "version": "4.1.4", "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.4.tgz", @@ -76887,15 +75183,6 @@ "es-abstract": "^1.20.4" } }, - "object.pick": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/object.pick/-/object.pick-1.3.0.tgz", - "integrity": "sha1-h6EKxMFpS9Lhy/U1kaZhQftd10c=", - "dev": true, - "requires": { - "isobject": "^3.0.1" - } - }, "object.values": { "version": "1.1.6", "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.6.tgz", @@ -78932,25 +77219,6 @@ "resolved": "https://registry.npmjs.org/random-bytes/-/random-bytes-1.0.0.tgz", "integrity": "sha1-T2ih3Arli9P7lYSMMDJNt11kNgs=" }, - "randomatic": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/randomatic/-/randomatic-3.1.1.tgz", - "integrity": "sha512-TuDE5KxZ0J461RVjrJZCJc+J+zCkTb1MbH9AQUq68sMhOMcy9jLcb3BrZKgp9q9Ncltdg4QVqWrH02W2EFFVYw==", - "dev": true, - "requires": { - "is-number": "^4.0.0", - "kind-of": "^6.0.0", - "math-random": "^1.0.1" - }, - "dependencies": { - "is-number": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-4.0.0.tgz", - "integrity": "sha512-rSklcAIlf1OmFdyAqbnWTLVelsQ58uvZ66S/ZyawjWqIviTWCjg2PzVGw8WUA+nNuPTqb4wgA+NszrJ+08LlgQ==", - "dev": true - } - } - }, "randombytes": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.0.3.tgz", @@ -79511,78 +77779,6 @@ "object-keys": "^1.1.0" } }, - "reduce-extract": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/reduce-extract/-/reduce-extract-1.0.0.tgz", - "integrity": "sha1-Z/I4W+2mUGG19fQxJmLosIDKFSU=", - "dev": true, - "requires": { - "test-value": "^1.0.1" - }, - "dependencies": { - "array-back": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/array-back/-/array-back-1.0.4.tgz", - "integrity": "sha1-ZEun8JX3/898Q7Xw3DnTwfA8Bjs=", - "dev": true, - "requires": { - "typical": "^2.6.0" - } - }, - "test-value": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/test-value/-/test-value-1.1.0.tgz", - "integrity": "sha1-oJE29y7AQ9J8iTcHwrFZv6196T8=", - "dev": true, - "requires": { - "array-back": "^1.0.2", - "typical": "^2.4.2" - } - } - } - }, - "reduce-flatten": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/reduce-flatten/-/reduce-flatten-3.0.1.tgz", - "integrity": "sha512-bYo+97BmUUOzg09XwfkwALt4PQH1M5L0wzKerBt6WLm3Fhdd43mMS89HiT1B9pJIqko/6lWx3OnV4J9f2Kqp5Q==", - "dev": true - }, - "reduce-unique": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/reduce-unique/-/reduce-unique-2.0.1.tgz", - "integrity": "sha512-x4jH/8L1eyZGR785WY+ePtyMNhycl1N2XOLxhCbzZFaqF4AXjLzqSxa2UHgJ2ZVR/HHyPOvl1L7xRnW8ye5MdA==", - "dev": true - }, - "reduce-without": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/reduce-without/-/reduce-without-1.0.1.tgz", - "integrity": "sha1-aK0OrRGFXJo31OglbBW7+Hly/Iw=", - "dev": true, - "requires": { - "test-value": "^2.0.0" - }, - "dependencies": { - "array-back": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/array-back/-/array-back-1.0.4.tgz", - "integrity": 
"sha1-ZEun8JX3/898Q7Xw3DnTwfA8Bjs=", - "dev": true, - "requires": { - "typical": "^2.6.0" - } - }, - "test-value": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/test-value/-/test-value-2.1.0.tgz", - "integrity": "sha1-Edpv9nDzRxpztiXKTz/c97t0gpE=", - "dev": true, - "requires": { - "array-back": "^1.0.3", - "typical": "^2.6.0" - } - } - } - }, "redux": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/redux/-/redux-4.1.2.tgz", @@ -79716,27 +77912,6 @@ "unist-util-visit": "^2.0.0" } }, - "remarkable": { - "version": "1.7.4", - "resolved": "https://registry.npmjs.org/remarkable/-/remarkable-1.7.4.tgz", - "integrity": "sha512-e6NKUXgX95whv7IgddywbeN/ItCkWbISmc2DiqHJb0wTrqZIexqdco5b8Z3XZoo/48IdNVKM9ZCvTPJ4F5uvhg==", - "dev": true, - "requires": { - "argparse": "^1.0.10", - "autolinker": "~0.28.0" - }, - "dependencies": { - "argparse": { - "version": "1.0.10", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", - "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", - "dev": true, - "requires": { - "sprintf-js": "~1.0.2" - } - } - } - }, "remove-accents": { "version": "0.4.2", "resolved": "https://registry.npmjs.org/remove-accents/-/remove-accents-0.4.2.tgz", @@ -79761,18 +77936,6 @@ "strip-ansi": "^6.0.1" } }, - "repeat-element": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/repeat-element/-/repeat-element-1.1.4.tgz", - "integrity": "sha512-LFiNfRcSu7KK3evMyYOuCzv3L10TW7yC1G2/+StMjK8Y6Vqd2MG7r/Qjw4ghtuCOjFvlnms/iMmLqpvW/ES/WQ==", - "dev": true - }, - "repeat-string": { - "version": "1.6.1", - "resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz", - "integrity": "sha1-jcrkcOHIirwtYA//Sndihtp15jc=", - "dev": true - }, "replace-ext": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/replace-ext/-/replace-ext-2.0.0.tgz", @@ -80616,15 +78779,6 @@ "integrity": "sha512-edRH8mBKEWNVIVMKejNnuJxleqYE/ZSdcT8/Nem9/mmosx12pctd80s2Oy00KNZzrogMZS5mauK2/ymL1bvlvg==", "dev": true }, - "set-getter": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/set-getter/-/set-getter-0.1.1.tgz", - "integrity": "sha512-9sVWOy+gthr+0G9DzqqLaYNA7+5OKkSmcqjL9cBpDEaZrr3ShQlyX2cZ/O/ozE41oxn/Tt0LGEM/w4Rub3A3gw==", - "dev": true, - "requires": { - "to-object-path": "^0.3.0" - } - }, "setprototypeof": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", @@ -80987,30 +79141,6 @@ "smart-buffer": "^4.2.0" } }, - "sort-array": { - "version": "4.1.4", - "resolved": "https://registry.npmjs.org/sort-array/-/sort-array-4.1.4.tgz", - "integrity": "sha512-GVFN6Y1sHKrWaSYOJTk9093ZnrBMc9sP3nuhANU44S4xg3rE6W5Z5WyamuT8VpMBbssnetx5faKCua0LEmUnSw==", - "dev": true, - "requires": { - "array-back": "^5.0.0", - "typical": "^6.0.1" - }, - "dependencies": { - "array-back": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/array-back/-/array-back-5.0.0.tgz", - "integrity": "sha512-kgVWwJReZWmVuWOQKEOohXKJX+nD02JAZ54D1RRWlv8L0NebauKAaFxACKzB74RTclt1+WNz5KHaLRDAPZbDEw==", - "dev": true - }, - "typical": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/typical/-/typical-6.0.1.tgz", - "integrity": "sha512-+g3NEp7fJLe9DPa1TArHm9QAA7YciZmWnfAqEaFrBihQ7epOv9i99rjtgb6Iz0wh3WuQDjsCTDfgRoGnmHN81A==", - "dev": true - } - } - }, "sortobject": { "version": "4.16.0", "resolved": "https://registry.npmjs.org/sortobject/-/sortobject-4.16.0.tgz", @@ -81279,26 +79409,6 @@ "streamx": "^2.13.2" } }, - 
"stream-connect": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/stream-connect/-/stream-connect-1.0.2.tgz", - "integrity": "sha1-GLyB8u2zW4tdmoAJIAqYUxRCipc=", - "dev": true, - "requires": { - "array-back": "^1.0.2" - }, - "dependencies": { - "array-back": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/array-back/-/array-back-1.0.4.tgz", - "integrity": "sha1-ZEun8JX3/898Q7Xw3DnTwfA8Bjs=", - "dev": true, - "requires": { - "typical": "^2.6.0" - } - } - } - }, "stream-events": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/stream-events/-/stream-events-1.0.5.tgz", @@ -81320,12 +79430,6 @@ "mixme": "^0.5.1" } }, - "stream-via": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/stream-via/-/stream-via-1.0.4.tgz", - "integrity": "sha512-DBp0lSvX5G9KGRDTkR/R+a29H+Wk2xItOF+MpZLLNDWbEV9tGPnqLPxHEYjmiz8xGtJHRIqmI+hCjmNzqoA4nQ==", - "dev": true - }, "streamifier": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/streamifier/-/streamifier-0.1.1.tgz", @@ -81466,12 +79570,6 @@ "integrity": "sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==", "dev": true }, - "strip-color": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/strip-color/-/strip-color-0.1.0.tgz", - "integrity": "sha1-EG9l09PmotlAHKwOsM6LinArT3s=", - "dev": true - }, "strip-final-newline": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", @@ -81926,30 +80024,6 @@ "resolved": "https://registry.npmjs.org/synchronous-promise/-/synchronous-promise-2.0.17.tgz", "integrity": "sha512-AsS729u2RHUfEra9xJrE39peJcc2stq2+poBXX8bcM08Y6g9j/i/PUzwNQqkaJde7Ntg1TO7bSREbR5sdosQ+g==" }, - "table-layout": { - "version": "0.4.5", - "resolved": "https://registry.npmjs.org/table-layout/-/table-layout-0.4.5.tgz", - "integrity": "sha512-zTvf0mcggrGeTe/2jJ6ECkJHAQPIYEwDoqsiqBjI24mvRmQbInK5jq33fyypaCBxX08hMkfmdOqj6haT33EqWw==", - "dev": true, - "requires": { - "array-back": "^2.0.0", - "deep-extend": "~0.6.0", - "lodash.padend": "^4.6.1", - "typical": "^2.6.1", - "wordwrapjs": "^3.0.0" - }, - "dependencies": { - "array-back": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/array-back/-/array-back-2.0.0.tgz", - "integrity": "sha512-eJv4pLLufP3g5kcZry0j6WXpIbzYw9GUB4mVJZno9wfwiBxbizTnHCw3VJb07cBihbFX48Y7oSrW9y+gt4glyw==", - "dev": true, - "requires": { - "typical": "^2.6.1" - } - } - } - }, "tapable": { "version": "2.2.1", "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz", @@ -82106,12 +80180,6 @@ "resolved": "https://registry.npmjs.org/temp-dir/-/temp-dir-2.0.0.tgz", "integrity": "sha512-aoBAniQmmwtcKp/7BzsH8Cxzv8OL736p7v1ihGb5e9DJ9kTwGWHrQrVB5+lfVDzfGrdRzXch+ig7LHaY1JTOrg==" }, - "temp-path": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/temp-path/-/temp-path-1.0.0.tgz", - "integrity": "sha1-JLFUOXOrRCiW2a02fdnL2/r+kYs=", - "dev": true - }, "tempy": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/tempy/-/tempy-1.0.1.tgz", @@ -82194,27 +80262,6 @@ "minimatch": "^3.0.4" } }, - "test-value": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/test-value/-/test-value-3.0.0.tgz", - "integrity": "sha512-sVACdAWcZkSU9x7AOmJo5TqE+GyNJknHaHsMrR6ZnhjVlVN9Yx6FjHrsKZ3BjIpPCT68zYesPWkakrNupwfOTQ==", - "dev": true, - "requires": { - "array-back": "^2.0.0", - "typical": "^2.6.1" - }, - "dependencies": { - "array-back": { - "version": "2.0.0", - "resolved": 
"https://registry.npmjs.org/array-back/-/array-back-2.0.0.tgz", - "integrity": "sha512-eJv4pLLufP3g5kcZry0j6WXpIbzYw9GUB4mVJZno9wfwiBxbizTnHCw3VJb07cBihbFX48Y7oSrW9y+gt4glyw==", - "dev": true, - "requires": { - "typical": "^2.6.1" - } - } - } - }, "text-table": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", @@ -82369,26 +80416,6 @@ "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", "integrity": "sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4=" }, - "to-object-path": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/to-object-path/-/to-object-path-0.3.0.tgz", - "integrity": "sha1-KXWIt7Dn4KwI4E5nL4XB9JmeF68=", - "dev": true, - "requires": { - "kind-of": "^3.0.2" - }, - "dependencies": { - "kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "dev": true, - "requires": { - "is-buffer": "^1.1.5" - } - } - } - }, "to-regex-range": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", @@ -82460,12 +80487,6 @@ "resolved": "https://registry.npmjs.org/token-stream/-/token-stream-1.0.0.tgz", "integrity": "sha1-zCAOqyYT9BZtJ/+a/HylbUnfbrQ=" }, - "toml": { - "version": "2.3.6", - "resolved": "https://registry.npmjs.org/toml/-/toml-2.3.6.tgz", - "integrity": "sha512-gVweAectJU3ebq//Ferr2JUY4WKSDe5N+z0FvjDncLGyHmIDoxgY/2Ie4qfEIDm4IS7OA6Rmdm7pdEEdMcV/xQ==", - "dev": true - }, "topo": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/topo/-/topo-3.0.3.tgz", @@ -83187,12 +81208,6 @@ "xml-name-validator": "^4.0.0" } }, - "walk-back": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/walk-back/-/walk-back-5.1.0.tgz", - "integrity": "sha512-Uhxps5yZcVNbLEAnb+xaEEMdgTXl9qAQDzKYejG2AZ7qPwRQ81lozY9ECDbjLPNWm7YsO1IK5rsP1KoQzXAcGA==", - "dev": true - }, "walkdir": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/walkdir/-/walkdir-0.4.1.tgz", @@ -83710,24 +81725,6 @@ "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz", "integrity": "sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus=" }, - "wordwrapjs": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/wordwrapjs/-/wordwrapjs-3.0.0.tgz", - "integrity": "sha512-mO8XtqyPvykVCsrwj5MlOVWvSnCdT+C+QVbm6blradR7JExAhbkZ7hZ9A+9NUtwzSqrlUo9a67ws0EiILrvRpw==", - "dev": true, - "requires": { - "reduce-flatten": "^1.0.1", - "typical": "^2.6.1" - }, - "dependencies": { - "reduce-flatten": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/reduce-flatten/-/reduce-flatten-1.0.1.tgz", - "integrity": "sha1-JYx479FT3fk8tWEjf2EYTzaW4yc=", - "dev": true - } - } - }, "worker-loader": { "version": "3.0.8", "resolved": "https://registry.npmjs.org/worker-loader/-/worker-loader-3.0.8.tgz", diff --git a/services/track-changes/.eslintignore b/services/track-changes/.eslintignore deleted file mode 100644 index c59a33a4df..0000000000 --- a/services/track-changes/.eslintignore +++ /dev/null @@ -1 +0,0 @@ -app/lib/diff_match_patch.js diff --git a/services/track-changes/.github/ISSUE_TEMPLATE.md b/services/track-changes/.github/ISSUE_TEMPLATE.md deleted file mode 100644 index e0093aa90c..0000000000 --- a/services/track-changes/.github/ISSUE_TEMPLATE.md +++ /dev/null @@ -1,38 +0,0 @@ - - -## Steps to Reproduce - - - -1. -2. -3. 
- -## Expected Behaviour - - -## Observed Behaviour - - - -## Context - - -## Technical Info - - -* URL: -* Browser Name and version: -* Operating System and version (desktop or mobile): -* Signed in as: -* Project and/or file: - -## Analysis - - -## Who Needs to Know? - - - -- -- diff --git a/services/track-changes/.github/PULL_REQUEST_TEMPLATE.md b/services/track-changes/.github/PULL_REQUEST_TEMPLATE.md deleted file mode 100644 index 12bb2eeb3f..0000000000 --- a/services/track-changes/.github/PULL_REQUEST_TEMPLATE.md +++ /dev/null @@ -1,48 +0,0 @@ - - - - - -### Description - - - -#### Screenshots - - - -#### Related Issues / PRs - - - -### Review - - - -#### Potential Impact - - - -#### Manual Testing Performed - -- [ ] -- [ ] - -#### Accessibility - - - -### Deployment - - - -#### Deployment Checklist - -- [ ] Update documentation not included in the PR (if any) -- [ ] - -#### Metrics and Monitoring - - - -#### Who Needs to Know? diff --git a/services/track-changes/.gitignore b/services/track-changes/.gitignore deleted file mode 100644 index 008dc714ba..0000000000 --- a/services/track-changes/.gitignore +++ /dev/null @@ -1,7 +0,0 @@ -**.swp -node_modules/ -forever/ -*.js.map - -# managed by dev-environment$ bin/update_build_scripts -.npmrc diff --git a/services/track-changes/.mocharc.json b/services/track-changes/.mocharc.json deleted file mode 100644 index dc3280aa96..0000000000 --- a/services/track-changes/.mocharc.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "require": "test/setup.js" -} diff --git a/services/track-changes/.nvmrc b/services/track-changes/.nvmrc deleted file mode 100644 index 02c8b485ed..0000000000 --- a/services/track-changes/.nvmrc +++ /dev/null @@ -1 +0,0 @@ -18.18.0 diff --git a/services/track-changes/Dockerfile b/services/track-changes/Dockerfile deleted file mode 100644 index 4c316f41cd..0000000000 --- a/services/track-changes/Dockerfile +++ /dev/null @@ -1,27 +0,0 @@ -# This file was auto-generated, do not edit it directly. -# Instead run bin/update_build_scripts from -# https://github.com/overleaf/internal/ - -FROM node:18.18.0 as base - -WORKDIR /overleaf/services/track-changes - -# Google Cloud Storage needs a writable $HOME/.config for resumable uploads -# (see https://googleapis.dev/nodejs/storage/latest/File.html#createWriteStream) -RUN mkdir /home/node/.config && chown node:node /home/node/.config - -FROM base as app - -COPY package.json package-lock.json /overleaf/ -COPY services/track-changes/package.json /overleaf/services/track-changes/ -COPY libraries/ /overleaf/libraries/ -COPY patches/ /overleaf/patches/ - -RUN cd /overleaf && npm ci --quiet - -COPY services/track-changes/ /overleaf/services/track-changes/ - -FROM app -USER node - -CMD ["node", "--expose-gc", "app.js"] diff --git a/services/track-changes/LICENSE b/services/track-changes/LICENSE deleted file mode 100644 index ac8619dcb9..0000000000 --- a/services/track-changes/LICENSE +++ /dev/null @@ -1,662 +0,0 @@ - - GNU AFFERO GENERAL PUBLIC LICENSE - Version 3, 19 November 2007 - - Copyright (C) 2007 Free Software Foundation, Inc. - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - Preamble - - The GNU Affero General Public License is a free, copyleft license for -software and other kinds of works, specifically designed to ensure -cooperation with the community in the case of network server software. 
- - The licenses for most software and other practical works are designed -to take away your freedom to share and change the works. By contrast, -our General Public Licenses are intended to guarantee your freedom to -share and change all versions of a program--to make sure it remains free -software for all its users. - - When we speak of free software, we are referring to freedom, not -price. Our General Public Licenses are designed to make sure that you -have the freedom to distribute copies of free software (and charge for -them if you wish), that you receive source code or can get it if you -want it, that you can change the software or use pieces of it in new -free programs, and that you know you can do these things. - - Developers that use our General Public Licenses protect your rights -with two steps: (1) assert copyright on the software, and (2) offer -you this License which gives you legal permission to copy, distribute -and/or modify the software. - - A secondary benefit of defending all users' freedom is that -improvements made in alternate versions of the program, if they -receive widespread use, become available for other developers to -incorporate. Many developers of free software are heartened and -encouraged by the resulting cooperation. However, in the case of -software used on network servers, this result may fail to come about. -The GNU General Public License permits making a modified version and -letting the public access it on a server without ever releasing its -source code to the public. - - The GNU Affero General Public License is designed specifically to -ensure that, in such cases, the modified source code becomes available -to the community. It requires the operator of a network server to -provide the source code of the modified version running there to the -users of that server. Therefore, public use of a modified version, on -a publicly accessible server, gives the public access to the source -code of the modified version. - - An older license, called the Affero General Public License and -published by Affero, was designed to accomplish similar goals. This is -a different license, not a version of the Affero GPL, but Affero has -released a new version of the Affero GPL which permits relicensing under -this license. - - The precise terms and conditions for copying, distribution and -modification follow. - - TERMS AND CONDITIONS - - 0. Definitions. - - "This License" refers to version 3 of the GNU Affero General Public License. - - "Copyright" also means copyright-like laws that apply to other kinds of -works, such as semiconductor masks. - - "The Program" refers to any copyrightable work licensed under this -License. Each licensee is addressed as "you". "Licensees" and -"recipients" may be individuals or organizations. - - To "modify" a work means to copy from or adapt all or part of the work -in a fashion requiring copyright permission, other than the making of an -exact copy. The resulting work is called a "modified version" of the -earlier work or a work "based on" the earlier work. - - A "covered work" means either the unmodified Program or a work based -on the Program. - - To "propagate" a work means to do anything with it that, without -permission, would make you directly or secondarily liable for -infringement under applicable copyright law, except executing it on a -computer or modifying a private copy. 
Propagation includes copying, -distribution (with or without modification), making available to the -public, and in some countries other activities as well. - - To "convey" a work means any kind of propagation that enables other -parties to make or receive copies. Mere interaction with a user through -a computer network, with no transfer of a copy, is not conveying. - - An interactive user interface displays "Appropriate Legal Notices" -to the extent that it includes a convenient and prominently visible -feature that (1) displays an appropriate copyright notice, and (2) -tells the user that there is no warranty for the work (except to the -extent that warranties are provided), that licensees may convey the -work under this License, and how to view a copy of this License. If -the interface presents a list of user commands or options, such as a -menu, a prominent item in the list meets this criterion. - - 1. Source Code. - - The "source code" for a work means the preferred form of the work -for making modifications to it. "Object code" means any non-source -form of a work. - - A "Standard Interface" means an interface that either is an official -standard defined by a recognized standards body, or, in the case of -interfaces specified for a particular programming language, one that -is widely used among developers working in that language. - - The "System Libraries" of an executable work include anything, other -than the work as a whole, that (a) is included in the normal form of -packaging a Major Component, but which is not part of that Major -Component, and (b) serves only to enable use of the work with that -Major Component, or to implement a Standard Interface for which an -implementation is available to the public in source code form. A -"Major Component", in this context, means a major essential component -(kernel, window system, and so on) of the specific operating system -(if any) on which the executable work runs, or a compiler used to -produce the work, or an object code interpreter used to run it. - - The "Corresponding Source" for a work in object code form means all -the source code needed to generate, install, and (for an executable -work) run the object code and to modify the work, including scripts to -control those activities. However, it does not include the work's -System Libraries, or general-purpose tools or generally available free -programs which are used unmodified in performing those activities but -which are not part of the work. For example, Corresponding Source -includes interface definition files associated with source files for -the work, and the source code for shared libraries and dynamically -linked subprograms that the work is specifically designed to require, -such as by intimate data communication or control flow between those -subprograms and other parts of the work. - - The Corresponding Source need not include anything that users -can regenerate automatically from other parts of the Corresponding -Source. - - The Corresponding Source for a work in source code form is that -same work. - - 2. Basic Permissions. - - All rights granted under this License are granted for the term of -copyright on the Program, and are irrevocable provided the stated -conditions are met. This License explicitly affirms your unlimited -permission to run the unmodified Program. The output from running a -covered work is covered by this License only if the output, given its -content, constitutes a covered work. 
This License acknowledges your -rights of fair use or other equivalent, as provided by copyright law. - - You may make, run and propagate covered works that you do not -convey, without conditions so long as your license otherwise remains -in force. You may convey covered works to others for the sole purpose -of having them make modifications exclusively for you, or provide you -with facilities for running those works, provided that you comply with -the terms of this License in conveying all material for which you do -not control copyright. Those thus making or running the covered works -for you must do so exclusively on your behalf, under your direction -and control, on terms that prohibit them from making any copies of -your copyrighted material outside their relationship with you. - - Conveying under any other circumstances is permitted solely under -the conditions stated below. Sublicensing is not allowed; section 10 -makes it unnecessary. - - 3. Protecting Users' Legal Rights From Anti-Circumvention Law. - - No covered work shall be deemed part of an effective technological -measure under any applicable law fulfilling obligations under article -11 of the WIPO copyright treaty adopted on 20 December 1996, or -similar laws prohibiting or restricting circumvention of such -measures. - - When you convey a covered work, you waive any legal power to forbid -circumvention of technological measures to the extent such circumvention -is effected by exercising rights under this License with respect to -the covered work, and you disclaim any intention to limit operation or -modification of the work as a means of enforcing, against the work's -users, your or third parties' legal rights to forbid circumvention of -technological measures. - - 4. Conveying Verbatim Copies. - - You may convey verbatim copies of the Program's source code as you -receive it, in any medium, provided that you conspicuously and -appropriately publish on each copy an appropriate copyright notice; -keep intact all notices stating that this License and any -non-permissive terms added in accord with section 7 apply to the code; -keep intact all notices of the absence of any warranty; and give all -recipients a copy of this License along with the Program. - - You may charge any price or no price for each copy that you convey, -and you may offer support or warranty protection for a fee. - - 5. Conveying Modified Source Versions. - - You may convey a work based on the Program, or the modifications to -produce it from the Program, in the form of source code under the -terms of section 4, provided that you also meet all of these conditions: - - a) The work must carry prominent notices stating that you modified - it, and giving a relevant date. - - b) The work must carry prominent notices stating that it is - released under this License and any conditions added under section - 7. This requirement modifies the requirement in section 4 to - "keep intact all notices". - - c) You must license the entire work, as a whole, under this - License to anyone who comes into possession of a copy. This - License will therefore apply, along with any applicable section 7 - additional terms, to the whole of the work, and all its parts, - regardless of how they are packaged. This License gives no - permission to license the work in any other way, but it does not - invalidate such permission if you have separately received it. 
- - d) If the work has interactive user interfaces, each must display - Appropriate Legal Notices; however, if the Program has interactive - interfaces that do not display Appropriate Legal Notices, your - work need not make them do so. - - A compilation of a covered work with other separate and independent -works, which are not by their nature extensions of the covered work, -and which are not combined with it such as to form a larger program, -in or on a volume of a storage or distribution medium, is called an -"aggregate" if the compilation and its resulting copyright are not -used to limit the access or legal rights of the compilation's users -beyond what the individual works permit. Inclusion of a covered work -in an aggregate does not cause this License to apply to the other -parts of the aggregate. - - 6. Conveying Non-Source Forms. - - You may convey a covered work in object code form under the terms -of sections 4 and 5, provided that you also convey the -machine-readable Corresponding Source under the terms of this License, -in one of these ways: - - a) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by the - Corresponding Source fixed on a durable physical medium - customarily used for software interchange. - - b) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by a - written offer, valid for at least three years and valid for as - long as you offer spare parts or customer support for that product - model, to give anyone who possesses the object code either (1) a - copy of the Corresponding Source for all the software in the - product that is covered by this License, on a durable physical - medium customarily used for software interchange, for a price no - more than your reasonable cost of physically performing this - conveying of source, or (2) access to copy the - Corresponding Source from a network server at no charge. - - c) Convey individual copies of the object code with a copy of the - written offer to provide the Corresponding Source. This - alternative is allowed only occasionally and noncommercially, and - only if you received the object code with such an offer, in accord - with subsection 6b. - - d) Convey the object code by offering access from a designated - place (gratis or for a charge), and offer equivalent access to the - Corresponding Source in the same way through the same place at no - further charge. You need not require recipients to copy the - Corresponding Source along with the object code. If the place to - copy the object code is a network server, the Corresponding Source - may be on a different server (operated by you or a third party) - that supports equivalent copying facilities, provided you maintain - clear directions next to the object code saying where to find the - Corresponding Source. Regardless of what server hosts the - Corresponding Source, you remain obligated to ensure that it is - available for as long as needed to satisfy these requirements. - - e) Convey the object code using peer-to-peer transmission, provided - you inform other peers where the object code and Corresponding - Source of the work are being offered to the general public at no - charge under subsection 6d. - - A separable portion of the object code, whose source code is excluded -from the Corresponding Source as a System Library, need not be -included in conveying the object code work. 
- - A "User Product" is either (1) a "consumer product", which means any -tangible personal property which is normally used for personal, family, -or household purposes, or (2) anything designed or sold for incorporation -into a dwelling. In determining whether a product is a consumer product, -doubtful cases shall be resolved in favor of coverage. For a particular -product received by a particular user, "normally used" refers to a -typical or common use of that class of product, regardless of the status -of the particular user or of the way in which the particular user -actually uses, or expects or is expected to use, the product. A product -is a consumer product regardless of whether the product has substantial -commercial, industrial or non-consumer uses, unless such uses represent -the only significant mode of use of the product. - - "Installation Information" for a User Product means any methods, -procedures, authorization keys, or other information required to install -and execute modified versions of a covered work in that User Product from -a modified version of its Corresponding Source. The information must -suffice to ensure that the continued functioning of the modified object -code is in no case prevented or interfered with solely because -modification has been made. - - If you convey an object code work under this section in, or with, or -specifically for use in, a User Product, and the conveying occurs as -part of a transaction in which the right of possession and use of the -User Product is transferred to the recipient in perpetuity or for a -fixed term (regardless of how the transaction is characterized), the -Corresponding Source conveyed under this section must be accompanied -by the Installation Information. But this requirement does not apply -if neither you nor any third party retains the ability to install -modified object code on the User Product (for example, the work has -been installed in ROM). - - The requirement to provide Installation Information does not include a -requirement to continue to provide support service, warranty, or updates -for a work that has been modified or installed by the recipient, or for -the User Product in which it has been modified or installed. Access to a -network may be denied when the modification itself materially and -adversely affects the operation of the network or violates the rules and -protocols for communication across the network. - - Corresponding Source conveyed, and Installation Information provided, -in accord with this section must be in a format that is publicly -documented (and with an implementation available to the public in -source code form), and must require no special password or key for -unpacking, reading or copying. - - 7. Additional Terms. - - "Additional permissions" are terms that supplement the terms of this -License by making exceptions from one or more of its conditions. -Additional permissions that are applicable to the entire Program shall -be treated as though they were included in this License, to the extent -that they are valid under applicable law. If additional permissions -apply only to part of the Program, that part may be used separately -under those permissions, but the entire Program remains governed by -this License without regard to the additional permissions. - - When you convey a copy of a covered work, you may at your option -remove any additional permissions from that copy, or from any part of -it. 
(Additional permissions may be written to require their own -removal in certain cases when you modify the work.) You may place -additional permissions on material, added by you to a covered work, -for which you have or can give appropriate copyright permission. - - Notwithstanding any other provision of this License, for material you -add to a covered work, you may (if authorized by the copyright holders of -that material) supplement the terms of this License with terms: - - a) Disclaiming warranty or limiting liability differently from the - terms of sections 15 and 16 of this License; or - - b) Requiring preservation of specified reasonable legal notices or - author attributions in that material or in the Appropriate Legal - Notices displayed by works containing it; or - - c) Prohibiting misrepresentation of the origin of that material, or - requiring that modified versions of such material be marked in - reasonable ways as different from the original version; or - - d) Limiting the use for publicity purposes of names of licensors or - authors of the material; or - - e) Declining to grant rights under trademark law for use of some - trade names, trademarks, or service marks; or - - f) Requiring indemnification of licensors and authors of that - material by anyone who conveys the material (or modified versions of - it) with contractual assumptions of liability to the recipient, for - any liability that these contractual assumptions directly impose on - those licensors and authors. - - All other non-permissive additional terms are considered "further -restrictions" within the meaning of section 10. If the Program as you -received it, or any part of it, contains a notice stating that it is -governed by this License along with a term that is a further -restriction, you may remove that term. If a license document contains -a further restriction but permits relicensing or conveying under this -License, you may add to a covered work material governed by the terms -of that license document, provided that the further restriction does -not survive such relicensing or conveying. - - If you add terms to a covered work in accord with this section, you -must place, in the relevant source files, a statement of the -additional terms that apply to those files, or a notice indicating -where to find the applicable terms. - - Additional terms, permissive or non-permissive, may be stated in the -form of a separately written license, or stated as exceptions; -the above requirements apply either way. - - 8. Termination. - - You may not propagate or modify a covered work except as expressly -provided under this License. Any attempt otherwise to propagate or -modify it is void, and will automatically terminate your rights under -this License (including any patent licenses granted under the third -paragraph of section 11). - - However, if you cease all violation of this License, then your -license from a particular copyright holder is reinstated (a) -provisionally, unless and until the copyright holder explicitly and -finally terminates your license, and (b) permanently, if the copyright -holder fails to notify you of the violation by some reasonable means -prior to 60 days after the cessation. 
- - Moreover, your license from a particular copyright holder is -reinstated permanently if the copyright holder notifies you of the -violation by some reasonable means, this is the first time you have -received notice of violation of this License (for any work) from that -copyright holder, and you cure the violation prior to 30 days after -your receipt of the notice. - - Termination of your rights under this section does not terminate the -licenses of parties who have received copies or rights from you under -this License. If your rights have been terminated and not permanently -reinstated, you do not qualify to receive new licenses for the same -material under section 10. - - 9. Acceptance Not Required for Having Copies. - - You are not required to accept this License in order to receive or -run a copy of the Program. Ancillary propagation of a covered work -occurring solely as a consequence of using peer-to-peer transmission -to receive a copy likewise does not require acceptance. However, -nothing other than this License grants you permission to propagate or -modify any covered work. These actions infringe copyright if you do -not accept this License. Therefore, by modifying or propagating a -covered work, you indicate your acceptance of this License to do so. - - 10. Automatic Licensing of Downstream Recipients. - - Each time you convey a covered work, the recipient automatically -receives a license from the original licensors, to run, modify and -propagate that work, subject to this License. You are not responsible -for enforcing compliance by third parties with this License. - - An "entity transaction" is a transaction transferring control of an -organization, or substantially all assets of one, or subdividing an -organization, or merging organizations. If propagation of a covered -work results from an entity transaction, each party to that -transaction who receives a copy of the work also receives whatever -licenses to the work the party's predecessor in interest had or could -give under the previous paragraph, plus a right to possession of the -Corresponding Source of the work from the predecessor in interest, if -the predecessor has it or can get it with reasonable efforts. - - You may not impose any further restrictions on the exercise of the -rights granted or affirmed under this License. For example, you may -not impose a license fee, royalty, or other charge for exercise of -rights granted under this License, and you may not initiate litigation -(including a cross-claim or counterclaim in a lawsuit) alleging that -any patent claim is infringed by making, using, selling, offering for -sale, or importing the Program or any portion of it. - - 11. Patents. - - A "contributor" is a copyright holder who authorizes use under this -License of the Program or a work on which the Program is based. The -work thus licensed is called the contributor's "contributor version". - - A contributor's "essential patent claims" are all patent claims -owned or controlled by the contributor, whether already acquired or -hereafter acquired, that would be infringed by some manner, permitted -by this License, of making, using, or selling its contributor version, -but do not include claims that would be infringed only as a -consequence of further modification of the contributor version. For -purposes of this definition, "control" includes the right to grant -patent sublicenses in a manner consistent with the requirements of -this License. 
- - Each contributor grants you a non-exclusive, worldwide, royalty-free -patent license under the contributor's essential patent claims, to -make, use, sell, offer for sale, import and otherwise run, modify and -propagate the contents of its contributor version. - - In the following three paragraphs, a "patent license" is any express -agreement or commitment, however denominated, not to enforce a patent -(such as an express permission to practice a patent or covenant not to -sue for patent infringement). To "grant" such a patent license to a -party means to make such an agreement or commitment not to enforce a -patent against the party. - - If you convey a covered work, knowingly relying on a patent license, -and the Corresponding Source of the work is not available for anyone -to copy, free of charge and under the terms of this License, through a -publicly available network server or other readily accessible means, -then you must either (1) cause the Corresponding Source to be so -available, or (2) arrange to deprive yourself of the benefit of the -patent license for this particular work, or (3) arrange, in a manner -consistent with the requirements of this License, to extend the patent -license to downstream recipients. "Knowingly relying" means you have -actual knowledge that, but for the patent license, your conveying the -covered work in a country, or your recipient's use of the covered work -in a country, would infringe one or more identifiable patents in that -country that you have reason to believe are valid. - - If, pursuant to or in connection with a single transaction or -arrangement, you convey, or propagate by procuring conveyance of, a -covered work, and grant a patent license to some of the parties -receiving the covered work authorizing them to use, propagate, modify -or convey a specific copy of the covered work, then the patent license -you grant is automatically extended to all recipients of the covered -work and works based on it. - - A patent license is "discriminatory" if it does not include within -the scope of its coverage, prohibits the exercise of, or is -conditioned on the non-exercise of one or more of the rights that are -specifically granted under this License. You may not convey a covered -work if you are a party to an arrangement with a third party that is -in the business of distributing software, under which you make payment -to the third party based on the extent of your activity of conveying -the work, and under which the third party grants, to any of the -parties who would receive the covered work from you, a discriminatory -patent license (a) in connection with copies of the covered work -conveyed by you (or copies made from those copies), or (b) primarily -for and in connection with specific products or compilations that -contain the covered work, unless you entered into that arrangement, -or that patent license was granted, prior to 28 March 2007. - - Nothing in this License shall be construed as excluding or limiting -any implied license or other defenses to infringement that may -otherwise be available to you under applicable patent law. - - 12. No Surrender of Others' Freedom. - - If conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. 
If you cannot convey a -covered work so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you may -not convey it at all. For example, if you agree to terms that obligate you -to collect a royalty for further conveying from those to whom you convey -the Program, the only way you could satisfy both those terms and this -License would be to refrain entirely from conveying the Program. - - 13. Remote Network Interaction; Use with the GNU General Public License. - - Notwithstanding any other provision of this License, if you modify the -Program, your modified version must prominently offer all users -interacting with it remotely through a computer network (if your version -supports such interaction) an opportunity to receive the Corresponding -Source of your version by providing access to the Corresponding Source -from a network server at no charge, through some standard or customary -means of facilitating copying of software. This Corresponding Source -shall include the Corresponding Source for any work covered by version 3 -of the GNU General Public License that is incorporated pursuant to the -following paragraph. - - Notwithstanding any other provision of this License, you have -permission to link or combine any covered work with a work licensed -under version 3 of the GNU General Public License into a single -combined work, and to convey the resulting work. The terms of this -License will continue to apply to the part which is the covered work, -but the work with which it is combined will remain governed by version -3 of the GNU General Public License. - - 14. Revised Versions of this License. - - The Free Software Foundation may publish revised and/or new versions of -the GNU Affero General Public License from time to time. Such new versions -will be similar in spirit to the present version, but may differ in detail to -address new problems or concerns. - - Each version is given a distinguishing version number. If the -Program specifies that a certain numbered version of the GNU Affero General -Public License "or any later version" applies to it, you have the -option of following the terms and conditions either of that numbered -version or of any later version published by the Free Software -Foundation. If the Program does not specify a version number of the -GNU Affero General Public License, you may choose any version ever published -by the Free Software Foundation. - - If the Program specifies that a proxy can decide which future -versions of the GNU Affero General Public License can be used, that proxy's -public statement of acceptance of a version permanently authorizes you -to choose that version for the Program. - - Later license versions may give you additional or different -permissions. However, no additional obligations are imposed on any -author or copyright holder as a result of your choosing to follow a -later version. - - 15. Disclaimer of Warranty. - - THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY -APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT -HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY -OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, -THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM -IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF -ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 
- - 16. Limitation of Liability. - - IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING -WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS -THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY -GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE -USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF -DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD -PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), -EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF -SUCH DAMAGES. - - 17. Interpretation of Sections 15 and 16. - - If the disclaimer of warranty and limitation of liability provided -above cannot be given local legal effect according to their terms, -reviewing courts shall apply local law that most closely approximates -an absolute waiver of all civil liability in connection with the -Program, unless a warranty or assumption of liability accompanies a -copy of the Program in return for a fee. - - END OF TERMS AND CONDITIONS - - How to Apply These Terms to Your New Programs - - If you develop a new program, and you want it to be of the greatest -possible use to the public, the best way to achieve this is to make it -free software which everyone can redistribute and change under these terms. - - To do so, attach the following notices to the program. It is safest -to attach them to the start of each source file to most effectively -state the exclusion of warranty; and each file should have at least -the "copyright" line and a pointer to where the full notice is found. - - - <one line to give the program's name and a brief idea of what it does.> - Copyright (C) <year> <name of author> - - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU Affero General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU Affero General Public License for more details. - - You should have received a copy of the GNU Affero General Public License - along with this program. If not, see <https://www.gnu.org/licenses/>. - -Also add information on how to contact you by electronic and paper mail. - - If your software can interact with users remotely through a computer -network, you should also make sure that it provides a way for users to -get its source. For example, if your program is a web application, its -interface could display a "Source" link that leads users to an archive -of the code. There are many ways you could offer source, and different -solutions will be better for different programs; see section 13 for the -specific requirements. - - You should also get your employer (if you work as a programmer) or school, -if any, to sign a "copyright disclaimer" for the program, if necessary. -For more information on this, and how to apply and follow the GNU AGPL, see -<https://www.gnu.org/licenses/>. diff --git a/services/track-changes/Makefile b/services/track-changes/Makefile deleted file mode 100644 index 9874879e60..0000000000 --- a/services/track-changes/Makefile +++ /dev/null @@ -1,110 +0,0 @@ -# This file was auto-generated, do not edit it directly.
-# Instead run bin/update_build_scripts from -# https://github.com/overleaf/internal/ - -BUILD_NUMBER ?= local -BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) -PROJECT_NAME = track-changes -BUILD_DIR_NAME = $(shell pwd | xargs basename | tr -cd '[a-zA-Z0-9_.\-]') - -DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml -DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \ - BRANCH_NAME=$(BRANCH_NAME) \ - PROJECT_NAME=$(PROJECT_NAME) \ - MOCHA_GREP=${MOCHA_GREP} \ - docker-compose ${DOCKER_COMPOSE_FLAGS} - -DOCKER_COMPOSE_TEST_ACCEPTANCE = \ - COMPOSE_PROJECT_NAME=test_acceptance_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) - -DOCKER_COMPOSE_TEST_UNIT = \ - COMPOSE_PROJECT_NAME=test_unit_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) - -clean: - -docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) - -docker rmi gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) - -$(DOCKER_COMPOSE_TEST_UNIT) down --rmi local - -$(DOCKER_COMPOSE_TEST_ACCEPTANCE) down --rmi local - -HERE=$(shell pwd) -MONOREPO=$(shell cd ../../ && pwd) -# Run the linting commands in the scope of the monorepo. -# Eslint and prettier (plus some configs) are on the root. -RUN_LINTING = docker run --rm -v $(MONOREPO):$(MONOREPO) -w $(HERE) node:18.18.0 npm run --silent - -format: - $(RUN_LINTING) format - -format_fix: - $(RUN_LINTING) format:fix - -lint: - $(RUN_LINTING) lint - -lint_fix: - $(RUN_LINTING) lint:fix - -test: format lint test_unit test_acceptance - -test_unit: -ifneq (,$(wildcard test/unit)) - $(DOCKER_COMPOSE_TEST_UNIT) run --rm test_unit - $(MAKE) test_unit_clean -endif - -test_clean: test_unit_clean -test_unit_clean: -ifneq (,$(wildcard test/unit)) - $(DOCKER_COMPOSE_TEST_UNIT) down -v -t 0 -endif - -test_acceptance: test_acceptance_clean test_acceptance_pre_run test_acceptance_run - $(MAKE) test_acceptance_clean - -test_acceptance_debug: test_acceptance_clean test_acceptance_pre_run test_acceptance_run_debug - $(MAKE) test_acceptance_clean - -test_acceptance_run: -ifneq (,$(wildcard test/acceptance)) - $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance -endif - -test_acceptance_run_debug: -ifneq (,$(wildcard test/acceptance)) - $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run -p 127.0.0.9:19999:19999 --rm test_acceptance npm run test:acceptance -- --inspect=0.0.0.0:19999 --inspect-brk -endif - -test_clean: test_acceptance_clean -test_acceptance_clean: - $(DOCKER_COMPOSE_TEST_ACCEPTANCE) down -v -t 0 - -test_acceptance_pre_run: - $(DOCKER_COMPOSE_TEST_ACCEPTANCE) up -d mongo - $(DOCKER_COMPOSE_TEST_ACCEPTANCE) exec -T mongo sh -c ' \ - while ! mongo --eval "db.version()" > /dev/null; do \ - echo "Waiting for Mongo..."; \ - sleep 1; \ - done; \ - mongo --eval "rs.initiate({ _id: \"overleaf\", members: [ { _id: 0, host: \"mongo:27017\" } ] })"' -ifneq (,$(wildcard test/acceptance/js/scripts/pre-run)) - $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance test/acceptance/js/scripts/pre-run -endif - -benchmarks: - $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance npm run benchmarks - -build: - docker build --pull --tag ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ - --tag gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ - --file Dockerfile \ - ../.. 
- -tar: - $(DOCKER_COMPOSE) up tar - -publish: - - docker push $(DOCKER_REPO)/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) - - -.PHONY: clean test test_unit test_acceptance test_clean benchmarks build publish diff --git a/services/track-changes/README.md b/services/track-changes/README.md deleted file mode 100644 index 6a0f1d79ee..0000000000 --- a/services/track-changes/README.md +++ /dev/null @@ -1,20 +0,0 @@ -overleaf/track-changes -======================== - -An API for converting raw editor updates into a compressed and browseable history. - -Acceptance tests can be run with the command -``` -AWS_BUCKET=<bucket-name> AWS_ACCESS_KEY_ID=<aws-access-key> AWS_SECRET_ACCESS_KEY=<aws-secret-access-key> make test -``` -where `bucket-name`, `aws-access-key` and `aws-secret-access-key` are the credentials for an AWS S3 bucket. - - - - -License ------- - -The code in this repository is released under the GNU AFFERO GENERAL PUBLIC LICENSE, version 3. A copy can be found in the `LICENSE` file. - -Copyright (c) Overleaf, 2014-2019. diff --git a/services/track-changes/app.js b/services/track-changes/app.js deleted file mode 100644 index 862994f7a0..0000000000 --- a/services/track-changes/app.js +++ /dev/null @@ -1,164 +0,0 @@ -/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * DS103: Rewrite code to no longer use __guard__ - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -const Metrics = require('@overleaf/metrics') -Metrics.initialize('track-changes') -const Settings = require('@overleaf/settings') -const logger = require('@overleaf/logger') -const TrackChangesLogger = logger.initialize('track-changes').logger - -if ((Settings.sentry != null ? Settings.sentry.dsn : undefined) != null) { - logger.initializeErrorReporting(Settings.sentry.dsn) -} - -// log updates as truncated strings -const truncateFn = updates => - JSON.parse( - JSON.stringify(updates, function (key, value) { - let len - if (typeof value === 'string' && (len = value.length) > 80) { - return ( - value.substr(0, 32) + - `...(message of length ${len} truncated)...` + - value.substr(-32) - ) - } else { - return value - } - }) - ) -TrackChangesLogger.addSerializers({ - rawUpdate: truncateFn, - rawUpdates: truncateFn, - newUpdates: truncateFn, - lastUpdate: truncateFn, -}) - -const Path = require('path') - -Metrics.memory.monitor(logger) -Metrics.open_sockets.monitor() - -const childProcess = require('child_process') - -const mongodb = require('./app/js/mongodb') -const HttpController = require('./app/js/HttpController') -const express = require('express') -const bodyParser = require('body-parser') - -const app = express() - -app.use(bodyParser.json()) - -app.use(Metrics.http.monitor(logger)) - -Metrics.injectMetricsRoute(app) - -app.post('/project/:project_id/doc/:doc_id/flush', HttpController.flushDoc) - -app.get('/project/:project_id/doc/:doc_id/diff', HttpController.getDiff) - -app.get('/project/:project_id/doc/:doc_id/check', HttpController.checkDoc) - -app.get('/project/:project_id/updates', HttpController.getUpdates) -app.get('/project/:project_id/export', HttpController.exportProject) - -app.get('/project/:project_id/zip', HttpController.zipProject) - -app.post('/project/:project_id/flush', HttpController.flushProject) - -app.post( - '/project/:project_id/doc/:doc_id/version/:version/restore', - HttpController.restore -) - -app.post('/project/:project_id/doc/:doc_id/push', HttpController.pushDocHistory)
- -app.post('/project/:project_id/doc/:doc_id/pull', HttpController.pullDocHistory) - -app.post('/flush/all', HttpController.flushAll) -app.post('/check/dangling', HttpController.checkDanglingUpdates) - -let packWorker = null // use a single packing worker - -app.post('/pack', function (req, res, next) { - if (packWorker != null) { - return res.send('pack already running') - } else { - logger.debug('running pack') - packWorker = childProcess.fork( - Path.join(__dirname, '/app/js/PackWorker.js'), - [ - req.query.limit || 1000, - req.query.delay || 1000, - req.query.timeout || 30 * 60 * 1000, - ] - ) - packWorker.on('exit', function (code, signal) { - logger.debug({ code, signal }, 'history auto pack exited') - return (packWorker = null) - }) - return res.send('pack started') - } -}) - -app.get('/status', (req, res, next) => res.send('track-changes is alive')) - -app.get('/oops', function (req, res, next) { - throw new Error('dummy test error') -}) - -app.get('/check_lock', HttpController.checkLock) - -app.get('/health_check', HttpController.healthCheck) - -app.use(function (error, req, res, next) { - logger.error({ err: error, req }, 'an internal error occurred') - return res.sendStatus(500) -}) - -const port = - __guard__( - Settings.internal != null ? Settings.internal.trackchanges : undefined, - x => x.port - ) || 3015 -const host = - __guard__( - Settings.internal != null ? Settings.internal.trackchanges : undefined, - x1 => x1.host - ) || 'localhost' - -if (!module.parent) { - // Called directly - mongodb - .waitForDb() - .then(() => { - app.listen(port, host, function (error) { - if (error != null) { - return logger.error( - { err: error }, - 'could not start track-changes server' - ) - } else { - return logger.debug( - `trackchanges starting up, listening on ${host}:${port}` - ) - } - }) - }) - .catch(err => { - logger.fatal({ err }, 'Cannot connect to mongo. Exiting.') - process.exit(1) - }) -} - -module.exports = app - -function __guard__(value, transform) { - return typeof value !== 'undefined' && value !== null - ? transform(value) - : undefined -} diff --git a/services/track-changes/app/js/DiffGenerator.js b/services/track-changes/app/js/DiffGenerator.js deleted file mode 100644 index 7f1a420a37..0000000000 --- a/services/track-changes/app/js/DiffGenerator.js +++ /dev/null @@ -1,343 +0,0 @@ -/* eslint-disable - no-proto, - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint.
-/* - * decaffeinate suggestions: - * DS101: Remove unnecessary use of Array.from - * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -let DiffGenerator -function ConsistencyError(message) { - const error = new Error(message) - error.name = 'ConsistencyError' - error.__proto__ = ConsistencyError.prototype - return error -} -ConsistencyError.prototype.__proto__ = Error.prototype - -const logger = require('@overleaf/logger') - -module.exports = DiffGenerator = { - ConsistencyError, - - rewindUpdate(content, update) { - for (let j = update.op.length - 1, i = j; j >= 0; j--, i = j) { - const op = update.op[i] - if (op.broken !== true) { - try { - content = DiffGenerator.rewindOp(content, op) - } catch (e) { - if (e instanceof ConsistencyError && i === update.op.length - 1) { - // catch known case where the last op in an array has been - // merged into a later op - logger.warn( - { err: e, update, op: JSON.stringify(op) }, - 'marking op as broken' - ) - op.broken = true - } else { - throw e // rethrow the exception - } - } - } - } - return content - }, - - rewindOp(content, op) { - let p - if (op.i != null) { - // ShareJS will accept an op where p > content.length when applied, - // and it applies as though p == content.length. However, the op is - // passed to us with the original p > content.length. Detect if that - // is the case with this op, and shift p back appropriately to match - // ShareJS if so. - ;({ p } = op) - const maxP = content.length - op.i.length - if (p > maxP) { - logger.warn({ maxP, p }, 'truncating position to content length') - p = maxP - op.p = p // fix out of range offsets to avoid invalid history exports in ZipManager - } - - const textToBeRemoved = content.slice(p, p + op.i.length) - if (op.i !== textToBeRemoved) { - throw new ConsistencyError( - `Inserted content, '${op.i}', does not match text to be removed, '${textToBeRemoved}'` - ) - } - - return content.slice(0, p) + content.slice(p + op.i.length) - } else if (op.d != null) { - if (op.p > content.length) { - op.p = content.length // fix out of range offsets to avoid invalid history exports in ZipManager - } - return content.slice(0, op.p) + op.d + content.slice(op.p) - } else { - return content - } - }, - - rewindUpdates(content, updates) { - for (const update of Array.from(updates.reverse())) { - try { - content = DiffGenerator.rewindUpdate(content, update) - } catch (e) { - e.attempted_update = update // keep a record of the attempted update - throw e // rethrow the exception - } - } - return content - }, - - buildDiff(initialContent, updates) { - let diff = [{ u: initialContent }] - for (const update of Array.from(updates)) { - diff = DiffGenerator.applyUpdateToDiff(diff, update) - } - diff = DiffGenerator.compressDiff(diff) - return diff - }, - - compressDiff(diff) { - const newDiff = [] - for (const part of Array.from(diff)) { - const lastPart = newDiff[newDiff.length - 1] - if ( - lastPart != null && - (lastPart.meta != null ? lastPart.meta.user : undefined) != null && - (part.meta != null ?
part.meta.user : undefined) != null - ) { - if ( - lastPart.i != null && - part.i != null && - lastPart.meta.user.id === part.meta.user.id - ) { - lastPart.i += part.i - lastPart.meta.start_ts = Math.min( - lastPart.meta.start_ts, - part.meta.start_ts - ) - lastPart.meta.end_ts = Math.max( - lastPart.meta.end_ts, - part.meta.end_ts - ) - } else if ( - lastPart.d != null && - part.d != null && - lastPart.meta.user.id === part.meta.user.id - ) { - lastPart.d += part.d - lastPart.meta.start_ts = Math.min( - lastPart.meta.start_ts, - part.meta.start_ts - ) - lastPart.meta.end_ts = Math.max( - lastPart.meta.end_ts, - part.meta.end_ts - ) - } else { - newDiff.push(part) - } - } else { - newDiff.push(part) - } - } - return newDiff - }, - - applyOpToDiff(diff, op, meta) { - let consumedDiff - const position = 0 - - let remainingDiff = diff.slice() - ;({ consumedDiff, remainingDiff } = DiffGenerator._consumeToOffset( - remainingDiff, - op.p - )) - const newDiff = consumedDiff - - if (op.i != null) { - newDiff.push({ - i: op.i, - meta, - }) - } else if (op.d != null) { - ;({ consumedDiff, remainingDiff } = - DiffGenerator._consumeDiffAffectedByDeleteOp(remainingDiff, op, meta)) - newDiff.push(...Array.from(consumedDiff || [])) - } - - newDiff.push(...Array.from(remainingDiff || [])) - - return newDiff - }, - - applyUpdateToDiff(diff, update) { - for (const op of Array.from(update.op)) { - if (op.broken !== true) { - diff = DiffGenerator.applyOpToDiff(diff, op, update.meta) - } - } - return diff - }, - - _consumeToOffset(remainingDiff, totalOffset) { - let part - const consumedDiff = [] - let position = 0 - while ((part = remainingDiff.shift())) { - const length = DiffGenerator._getLengthOfDiffPart(part) - if (part.d != null) { - consumedDiff.push(part) - } else if (position + length >= totalOffset) { - const partOffset = totalOffset - position - if (partOffset > 0) { - consumedDiff.push(DiffGenerator._slicePart(part, 0, partOffset)) - } - if (partOffset < length) { - remainingDiff.unshift(DiffGenerator._slicePart(part, partOffset)) - } - break - } else { - position += length - consumedDiff.push(part) - } - } - - return { - consumedDiff, - remainingDiff, - } - }, - - _consumeDiffAffectedByDeleteOp(remainingDiff, deleteOp, meta) { - const consumedDiff = [] - let remainingOp = deleteOp - while (remainingOp && remainingDiff.length > 0) { - let newPart - ;({ newPart, remainingDiff, remainingOp } = - DiffGenerator._consumeDeletedPart(remainingDiff, remainingOp, meta)) - if (newPart != null) { - consumedDiff.push(newPart) - } - } - return { - consumedDiff, - remainingDiff, - } - }, - - _consumeDeletedPart(remainingDiff, op, meta) { - let deletedContent, newPart, remainingOp - const part = remainingDiff.shift() - const partLength = DiffGenerator._getLengthOfDiffPart(part) - - if (part.d != null) { - // Skip existing deletes - remainingOp = op - newPart = part - } else if (partLength > op.d.length) { - // Only the first bit of the part has been deleted - const remainingPart = DiffGenerator._slicePart(part, op.d.length) - remainingDiff.unshift(remainingPart) - - deletedContent = DiffGenerator._getContentOfPart(part).slice( - 0, - op.d.length - ) - if (deletedContent !== op.d) { - throw new ConsistencyError( - `deleted content, '${deletedContent}', does not match delete op, '${op.d}'` - ) - } - - if (part.u != null) { - newPart = { - d: op.d, - meta, - } - } else if (part.i != null) { - newPart = null - } - - remainingOp = null - } else if (partLength === op.d.length) { - // The entire part has been deleted, 
but it is the last part - - deletedContent = DiffGenerator._getContentOfPart(part) - if (deletedContent !== op.d) { - throw new ConsistencyError( - `deleted content, '${deletedContent}', does not match delete op, '${op.d}'` - ) - } - - if (part.u != null) { - newPart = { - d: op.d, - meta, - } - } else if (part.i != null) { - newPart = null - } - - remainingOp = null - } else if (partLength < op.d.length) { - // The entire part has been deleted and there is more - - deletedContent = DiffGenerator._getContentOfPart(part) - const opContent = op.d.slice(0, deletedContent.length) - if (deletedContent !== opContent) { - throw new ConsistencyError( - `deleted content, '${deletedContent}', does not match delete op, '${opContent}'` - ) - } - - if (part.u) { - newPart = { - d: part.u, - meta, - } - } else if (part.i != null) { - newPart = null - } - - remainingOp = { - p: op.p, - d: op.d.slice(DiffGenerator._getLengthOfDiffPart(part)), - } - } - - return { - newPart, - remainingDiff, - remainingOp, - } - }, - - _slicePart(basePart, from, to) { - let part - if (basePart.u != null) { - part = { u: basePart.u.slice(from, to) } - } else if (basePart.i != null) { - part = { i: basePart.i.slice(from, to) } - } - if (basePart.meta != null) { - part.meta = basePart.meta - } - return part - }, - - _getLengthOfDiffPart(part) { - return (part.u || part.d || part.i || '').length - }, - - _getContentOfPart(part) { - return part.u || part.d || part.i || '' - }, -} diff --git a/services/track-changes/app/js/DiffManager.js b/services/track-changes/app/js/DiffManager.js deleted file mode 100644 index b7ac801a2b..0000000000 --- a/services/track-changes/app/js/DiffManager.js +++ /dev/null @@ -1,184 +0,0 @@ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS101: Remove unnecessary use of Array.from - * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -let DiffManager -const UpdatesManager = require('./UpdatesManager') -const DocumentUpdaterManager = require('./DocumentUpdaterManager') -const DiffGenerator = require('./DiffGenerator') -const logger = require('@overleaf/logger') - -module.exports = DiffManager = { - getLatestDocAndUpdates(projectId, docId, fromVersion, callback) { - // Get updates last, since then they must be ahead and it - // might be possible to rewind to the same version as the doc. 
- if (callback == null) { - callback = function () {} - } - return DocumentUpdaterManager.getDocument( - projectId, - docId, - function (error, content, version) { - if (error != null) { - return callback(error) - } - if (fromVersion == null) { - // If we haven't been given a version, just return latest doc and no updates - return callback(null, content, version, []) - } - return UpdatesManager.getDocUpdatesWithUserInfo( - projectId, - docId, - { from: fromVersion }, - function (error, updates) { - if (error != null) { - return callback(error) - } - return callback(null, content, version, updates) - } - ) - } - ) - }, - - getDiff(projectId, docId, fromVersion, toVersion, callback) { - if (callback == null) { - callback = function () {} - } - return DiffManager.getDocumentBeforeVersion( - projectId, - docId, - fromVersion, - function (error, startingContent, updates) { - let diff - if (error != null) { - if (error.message === 'broken-history') { - return callback(null, 'history unavailable') - } else { - return callback(error) - } - } - - const updatesToApply = [] - for (const update of Array.from(updates.slice().reverse())) { - if (update.v <= toVersion) { - updatesToApply.push(update) - } - } - - try { - diff = DiffGenerator.buildDiff(startingContent, updatesToApply) - } catch (e) { - return callback(e) - } - - return callback(null, diff) - } - ) - }, - - getDocumentBeforeVersion(projectId, docId, version, _callback) { - // Whichever order we get the latest document and the latest updates, - // there is potential for updates to be applied between them so that - // they do not return the same 'latest' versions. - // If this happens, we just retry and hopefully get them at compatible - // versions. - let retry - if (_callback == null) { - _callback = function () {} - } - let retries = 3 - const callback = function (error, ...args) { - if (error != null) { - if (error.retry && retries > 0) { - logger.warn( - { error, projectId, docId, version, retries }, - 'retrying getDocumentBeforeVersion' - ) - return retry() - } else { - return _callback(error) - } - } else { - return _callback(null, ...Array.from(args)) - } - } - - return (retry = function () { - retries-- - return DiffManager._tryGetDocumentBeforeVersion( - projectId, - docId, - version, - callback - ) - })() - }, - - _tryGetDocumentBeforeVersion(projectId, docId, version, callback) { - if (callback == null) { - callback = function () {} - } - logger.debug( - { projectId, docId, version }, - 'getting document before version' - ) - return DiffManager.getLatestDocAndUpdates( - projectId, - docId, - version, - function (error, content, version, updates) { - let startingContent - if (error != null) { - return callback(error) - } - - // bail out if we hit a broken update - for (const u of Array.from(updates)) { - if (u.broken) { - return callback(new Error('broken-history')) - } - } - - // discard any updates which are ahead of this document version - while ((updates[0] != null ? updates[0].v : undefined) >= version) { - updates.shift() - } - - const lastUpdate = updates[0] - if (lastUpdate != null && lastUpdate.v !== version - 1) { - error = new Error( - `latest update version, ${lastUpdate.v}, does not match doc version, ${version}` - ) - error.retry = true - return callback(error) - } - - logger.debug( - { - docVersion: version, - lastUpdateVersion: lastUpdate != null ?
lastUpdate.v : undefined, - updateCount: updates.length, - }, - 'rewinding updates' - ) - - const tryUpdates = updates.slice().reverse() - - try { - startingContent = DiffGenerator.rewindUpdates(content, tryUpdates) - // tryUpdates is reversed, and any unapplied ops are marked as broken - } catch (e) { - return callback(e) - } - - return callback(null, startingContent, tryUpdates) - } - ) - }, -} diff --git a/services/track-changes/app/js/DocIterator.js b/services/track-changes/app/js/DocIterator.js deleted file mode 100644 index d78cf02b65..0000000000 --- a/services/track-changes/app/js/DocIterator.js +++ /dev/null @@ -1,42 +0,0 @@ -module.exports = class DocIterator { - constructor(packs, getPackByIdFn) { - this.getPackByIdFn = getPackByIdFn - // sort packs in descending order by version (i.e. most recent first) - const byVersion = (a, b) => b.v - a.v - this.packs = packs.slice().sort(byVersion) - this.queue = [] - } - - next(callback) { - const update = this.queue.shift() - if (update) { - return callback(null, update) - } - if (!this.packs.length) { - this._done = true - return callback(null) - } - const nextPack = this.packs[0] - this.getPackByIdFn( - nextPack.project_id, - nextPack.doc_id, - nextPack._id, - (err, pack) => { - if (err != null) { - return callback(err) - } - this.packs.shift() // have now retrieved this pack, remove it - for (const op of pack.pack.reverse()) { - op.doc_id = nextPack.doc_id - op.project_id = nextPack.project_id - this.queue.push(op) - } - return this.next(callback) - } - ) - } - - done() { - return this._done - } -} diff --git a/services/track-changes/app/js/DocstoreManager.js b/services/track-changes/app/js/DocstoreManager.js deleted file mode 100644 index 46de335678..0000000000 --- a/services/track-changes/app/js/DocstoreManager.js +++ /dev/null @@ -1,52 +0,0 @@ -const request = require('request') -const logger = require('@overleaf/logger') -const Settings = require('@overleaf/settings') -const Errors = require('./Errors') - -function peekDocument(projectId, docId, callback) { - const url = `${Settings.apis.docstore.url}/project/${projectId}/doc/${docId}/peek` - logger.debug({ projectId, docId }, 'getting doc from docstore') - request.get(url, function (error, res, body) { - if (error != null) { - return callback(error) - } - if (res.statusCode >= 200 && res.statusCode < 300) { - try { - body = JSON.parse(body) - } catch (error) { - return callback(error) - } - logger.debug( - { projectId, docId, version: body.version }, - 'got doc from docstore' - ) - return callback(null, body.lines.join('\n'), body.version) - } else if (res.statusCode === 404) { - return callback( - new Errors.NotFoundError('doc not found', { projectId, docId }) - ) - } else { - return callback( - new Error( - `docstore returned a non-success status code: ${res.statusCode}` - ) - ) - } - }) -} - -module.exports = { - promises: { - peekDocument: (projectId, docId) => { - return new Promise((resolve, reject) => { - peekDocument(projectId, docId, (err, content, version) => { - if (err) { - reject(err) - } else { - resolve([content, version]) - } - }) - }) - }, - }, -} diff --git a/services/track-changes/app/js/DocumentUpdaterManager.js b/services/track-changes/app/js/DocumentUpdaterManager.js deleted file mode 100644 index 26a6d3ab4b..0000000000 --- a/services/track-changes/app/js/DocumentUpdaterManager.js +++ /dev/null @@ -1,128 +0,0 @@ -/* eslint-disable - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. 
-/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -let DocumentUpdaterManager -const request = require('request') -const logger = require('@overleaf/logger') -const Settings = require('@overleaf/settings') -const Errors = require('./Errors') - -module.exports = DocumentUpdaterManager = { - _requestDocument(projectId, docId, url, callback) { - if (callback == null) { - callback = function () {} - } - - logger.debug({ projectId, docId }, 'getting doc from document updater') - return request.get(url, function (error, res, body) { - if (error != null) { - return callback(error) - } - if (res.statusCode >= 200 && res.statusCode < 300) { - try { - body = JSON.parse(body) - } catch (error1) { - error = error1 - return callback(error) - } - logger.debug( - { projectId, docId, version: body.version }, - 'got doc from document updater' - ) - return callback(null, body.lines.join('\n'), body.version) - } else { - error = new Error( - `doc updater returned a non-success status code: ${res.statusCode}` - ) - logger.error( - { err: error, projectId, docId, url }, - 'error accessing doc updater' - ) - if (res.statusCode === 404) { - return callback( - new Errors.NotFoundError('doc not found', { - projectId, - docId, - }) - ) - } else { - return callback(error) - } - } - }) - }, - - getDocument(projectId, docId, callback) { - const url = `${Settings.apis.documentupdater.url}/project/${projectId}/doc/${docId}` - DocumentUpdaterManager._requestDocument(projectId, docId, url, callback) - }, - - peekDocument(projectId, docId, callback) { - const url = `${Settings.apis.documentupdater.url}/project/${projectId}/doc/${docId}/peek` - DocumentUpdaterManager._requestDocument(projectId, docId, url, callback) - }, - - setDocument(projectId, docId, content, userId, callback) { - if (callback == null) { - callback = function () {} - } - const url = `${Settings.apis.documentupdater.url}/project/${projectId}/doc/${docId}` - logger.debug({ projectId, docId }, 'setting doc in document updater') - return request.post( - { - url, - json: { - lines: content.split('\n'), - source: 'restore', - user_id: userId, - undoing: true, - }, - }, - function (error, res, body) { - if (error != null) { - return callback(error) - } - if (res.statusCode >= 200 && res.statusCode < 300) { - return callback(null) - } else { - error = new Error( - `doc updater returned a non-success status code: ${res.statusCode}` - ) - logger.error( - { err: error, projectId, docId, url }, - 'error accessing doc updater' - ) - return callback(error) - } - } - ) - }, -} - -module.exports.promises = { - // peekDocument returns two arguments so we can't use util.promisify, which only handles a single argument, we need - // to treat it as a special case.
- peekDocument: (projectId, docId) => { - return new Promise((resolve, reject) => { - DocumentUpdaterManager.peekDocument( - projectId, - docId, - (err, content, version) => { - if (err) { - reject(err) - } else { - resolve([content, version]) - } - } - ) - }) - }, -} diff --git a/services/track-changes/app/js/Errors.js b/services/track-changes/app/js/Errors.js deleted file mode 100644 index a59bb1a707..0000000000 --- a/services/track-changes/app/js/Errors.js +++ /dev/null @@ -1,5 +0,0 @@ -const OError = require('@overleaf/o-error') - -class NotFoundError extends OError {} - -module.exports = { NotFoundError } diff --git a/services/track-changes/app/js/HealthChecker.js b/services/track-changes/app/js/HealthChecker.js deleted file mode 100644 index 9cb5efb182..0000000000 --- a/services/track-changes/app/js/HealthChecker.js +++ /dev/null @@ -1,84 +0,0 @@ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -const { ObjectId } = require('./mongodb') -const request = require('request') -const async = require('async') -const settings = require('@overleaf/settings') -const { port } = settings.internal.trackchanges -const logger = require('@overleaf/logger') -const LockManager = require('./LockManager') - -module.exports = { - check(callback) { - const projectId = ObjectId(settings.trackchanges.healthCheck.project_id) - const url = `http://localhost:${port}/project/${projectId}` - logger.debug({ projectId }, 'running health check') - const jobs = [ - cb => - request.get( - { url: `http://localhost:${port}/check_lock`, timeout: 3000 }, - function (err, res, body) { - if (err != null) { - logger.err( - { err, projectId }, - 'error checking lock for health check' - ) - return cb(err) - } else if ((res != null ? res.statusCode : undefined) !== 200) { - return cb( - new Error(`status code not 200, it's ${res.statusCode}`) - ) - } else { - return cb() - } - } - ), - cb => - request.post( - { url: `${url}/flush`, timeout: 10000 }, - function (err, res, body) { - if (err != null) { - logger.err({ err, projectId }, 'error flushing for health check') - return cb(err) - } else if ((res != null ? res.statusCode : undefined) !== 204) { - return cb( - new Error(`status code not 204, it's ${res.statusCode}`) - ) - } else { - return cb() - } - } - ), - cb => - request.get( - { url: `${url}/updates`, timeout: 10000 }, - function (err, res, body) { - if (err != null) { - logger.err( - { err, projectId }, - 'error getting updates for health check' - ) - return cb(err) - } else if ((res != null ? res.statusCode : undefined) !== 200) { - return cb( - new Error(`status code not 200, it's ${res.statusCode}`) - ) - } else { - return cb() - } - } - ), - ] - return async.series(jobs, callback) - }, - - checkLock(callback) { - return LockManager.healthCheck(callback) - }, -} diff --git a/services/track-changes/app/js/HttpController.js b/services/track-changes/app/js/HttpController.js deleted file mode 100644 index 4b85ce00df..0000000000 --- a/services/track-changes/app/js/HttpController.js +++ /dev/null @@ -1,364 +0,0 @@ -/* eslint-disable - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. 
-/* - * decaffeinate suggestions: - * DS101: Remove unnecessary use of Array.from - * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -let HttpController -const UpdatesManager = require('./UpdatesManager') -const DiffManager = require('./DiffManager') -const PackManager = require('./PackManager') -const RestoreManager = require('./RestoreManager') -const ZipManager = require('./ZipManager') -const logger = require('@overleaf/logger') -const HealthChecker = require('./HealthChecker') -const _ = require('underscore') -const Path = require('path') -const { pipeline } = require('stream') - -module.exports = HttpController = { - flushDoc(req, res, next) { - if (next == null) { - next = function () {} - } - const { doc_id: docId } = req.params - const { project_id: projectId } = req.params - logger.debug({ projectId, docId }, 'compressing doc history') - return UpdatesManager.processUncompressedUpdatesWithLock( - projectId, - docId, - function (error) { - if (error != null) { - return next(error) - } - return res.sendStatus(204) - } - ) - }, - - flushProject(req, res, next) { - if (next == null) { - next = function () {} - } - const { project_id: projectId } = req.params - logger.debug({ projectId }, 'compressing project history') - return UpdatesManager.processUncompressedUpdatesForProject( - projectId, - function (error) { - if (error != null) { - return next(error) - } - return res.sendStatus(204) - } - ) - }, - - flushAll(req, res, next) { - // limit on projects to flush or -1 for all (default) - if (next == null) { - next = function () {} - } - const limit = req.query.limit != null ? 
parseInt(req.query.limit, 10) : -1 - logger.debug({ limit }, 'flushing all projects') - return UpdatesManager.flushAll(limit, function (error, result) { - if (error != null) { - return next(error) - } - const { failed, succeeded, all } = result - const status = `${succeeded.length} succeeded, ${failed.length} failed` - if (limit === 0) { - return res - .status(200) - .send(`${status}\nwould flush:\n${all.join('\n')}\n`) - } else if (failed.length > 0) { - logger.debug({ failed, succeeded }, 'error flushing projects') - return res - .status(500) - .send(`${status}\nfailed to flush:\n${failed.join('\n')}\n`) - } else { - return res - .status(200) - .send( - `${status}\nflushed ${succeeded.length} projects of ${all.length}\n` - ) - } - }) - }, - - checkDanglingUpdates(req, res, next) { - if (next == null) { - next = function () {} - } - logger.debug('checking dangling updates') - return UpdatesManager.getDanglingUpdates(function (error, result) { - if (error != null) { - return next(error) - } - if (result.length > 0) { - logger.debug({ dangling: result }, 'found dangling updates') - return res.status(500).send(`dangling updates:\n${result.join('\n')}\n`) - } else { - return res.status(200).send('no dangling updates found\n') - } - }) - }, - - checkDoc(req, res, next) { - if (next == null) { - next = function () {} - } - const { doc_id: docId } = req.params - const { project_id: projectId } = req.params - logger.debug({ projectId, docId }, 'checking doc history') - return DiffManager.getDocumentBeforeVersion( - projectId, - docId, - 1, - function (error, document, rewoundUpdates) { - if (error != null) { - return next(error) - } - const broken = [] - for (const update of Array.from(rewoundUpdates)) { - for (const op of Array.from(update.op)) { - if (op.broken === true) { - broken.push(op) - } - } - } - if (broken.length > 0) { - return res.send(broken) - } else { - return res.sendStatus(204) - } - } - ) - }, - - getDiff(req, res, next) { - let from, to - if (next == null) { - next = function () {} - } - const { doc_id: docId } = req.params - const { project_id: projectId } = req.params - - if (req.query.from != null) { - from = parseInt(req.query.from, 10) - } else { - from = null - } - if (req.query.to != null) { - to = parseInt(req.query.to, 10) - } else { - to = null - } - - logger.debug({ projectId, docId, from, to }, 'getting diff') - return DiffManager.getDiff( - projectId, - docId, - from, - to, - function (error, diff) { - if (error != null) { - return next(error) - } - return res.json({ diff }) - } - ) - }, - - getUpdates(req, res, next) { - let before, minCount - if (next == null) { - next = function () {} - } - const { project_id: projectId } = req.params - - if (req.query.before != null) { - before = parseInt(req.query.before, 10) - } - if (req.query.min_count != null) { - minCount = parseInt(req.query.min_count, 10) - } - - return UpdatesManager.getSummarizedProjectUpdates( - projectId, - { before, min_count: minCount }, - function (error, updates, nextBeforeTimestamp) { - if (error != null) { - return next(error) - } - return res.json({ - updates, - nextBeforeTimestamp, - }) - } - ) - }, - - zipProject(req, res, next) { - const { project_id: projectId } = req.params - logger.debug({ projectId }, 'exporting project history as zip file') - ZipManager.makeTempDirectory((err, tmpdir) => { - if (err) { - return next(err) - } - const zipFilePath = Path.join(tmpdir, 'export.zip') - ZipManager.exportProject(projectId, zipFilePath, err => { - if (err) { - 
ZipManager.cleanupTempDirectory(tmpdir) - return next(err) - } - res.download(zipFilePath, `${projectId}-track-changes.zip`, err => { - ZipManager.cleanupTempDirectory(tmpdir) - if (err && !res.headersSent) { - return next(err) - } - }) - }) - }) - }, - - exportProject(req, res, next) { - // The project history can be huge: - // - updates can weigh MBs for insert/delete of full doc - // - multiple updates form a pack - // Flush updates per pack onto the wire. - const { project_id: projectId } = req.params - logger.debug({ projectId }, 'exporting project history') - UpdatesManager.exportProject( - projectId, - function (err, { updates, userIds }, confirmWrite) { - const abortStreaming = req.destroyed || res.finished || res.destroyed - if (abortStreaming) { - // Tell the producer to stop emitting data - if (confirmWrite) confirmWrite(new Error('stop')) - return - } - const hasStartedStreamingResponse = res.headersSent - if (err) { - logger.error({ projectId, err }, 'export failed') - if (!hasStartedStreamingResponse) { - // Generate a nice 500 - return next(err) - } else { - // Stop streaming - return res.destroy() - } - } - // Compose the response incrementally - const isFirstWrite = !hasStartedStreamingResponse - const isLastWrite = updates.length === 0 - if (isFirstWrite) { - // The first write will emit the 200 status, headers and start of the - // response payload (open array) - res.setHeader('Content-Type', 'application/json') - res.setHeader('Trailer', 'X-User-Ids') - res.writeHead(200) - res.write('[') - } - if (!isFirstWrite && !isLastWrite) { - // Starting from the 2nd non-empty write, emit a continuing comma. - // write 1: [updates1 - // write 2: ,updates2 - // write 3: ,updates3 - // write N: ] - res.write(',') - } - - // Every write will emit a blob onto the response stream: - // '[update1,update2,...]' - // ^^^^^^^^^^^^^^^^^^^ - res.write(JSON.stringify(updates).slice(1, -1), confirmWrite) - - if (isLastWrite) { - // The last write will have no updates and will finish the response - // payload (close array) and emit the userIds as trailer.
- res.addTrailers({ 'X-User-Ids': JSON.stringify(userIds) }) - res.end(']') - } - } - ) - }, - - restore(req, res, next) { - if (next == null) { - next = function () {} - } - let { doc_id: docId, project_id: projectId, version } = req.params - const userId = req.headers['x-user-id'] - version = parseInt(version, 10) - return RestoreManager.restoreToBeforeVersion( - projectId, - docId, - version, - userId, - function (error) { - if (error != null) { - return next(error) - } - return res.sendStatus(204) - } - ) - }, - - pushDocHistory(req, res, next) { - if (next == null) { - next = function () {} - } - const { project_id: projectId } = req.params - const { doc_id: docId } = req.params - logger.debug({ projectId, docId }, 'pushing all finalised changes to s3') - return PackManager.pushOldPacks(projectId, docId, function (error) { - if (error != null) { - return next(error) - } - return res.sendStatus(204) - }) - }, - - pullDocHistory(req, res, next) { - if (next == null) { - next = function () {} - } - const { project_id: projectId } = req.params - const { doc_id: docId } = req.params - logger.debug({ projectId, docId }, 'pulling all packs from s3') - return PackManager.pullOldPacks(projectId, docId, function (error) { - if (error != null) { - return next(error) - } - return res.sendStatus(204) - }) - }, - - healthCheck(req, res) { - return HealthChecker.check(function (err) { - if (err != null) { - logger.err({ err }, 'error performing health check') - return res.sendStatus(500) - } else { - return res.sendStatus(200) - } - }) - }, - - checkLock(req, res) { - return HealthChecker.checkLock(function (err) { - if (err != null) { - logger.err({ err }, 'error performing lock check') - return res.sendStatus(500) - } else { - return res.sendStatus(200) - } - }) - }, -} diff --git a/services/track-changes/app/js/LockManager.js b/services/track-changes/app/js/LockManager.js deleted file mode 100644 index 3886fca8fb..0000000000 --- a/services/track-changes/app/js/LockManager.js +++ /dev/null @@ -1,161 +0,0 @@ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. 
-/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -let LockManager -const Settings = require('@overleaf/settings') -const redis = require('@overleaf/redis-wrapper') -const rclient = redis.createClient(Settings.redis.lock) -const os = require('os') -const crypto = require('crypto') -const logger = require('@overleaf/logger') - -const HOST = os.hostname() -const PID = process.pid -const RND = crypto.randomBytes(4).toString('hex') -let COUNT = 0 - -module.exports = LockManager = { - LOCK_TEST_INTERVAL: 50, // 50ms between each test of the lock - MAX_LOCK_WAIT_TIME: 10000, // 10s maximum time to spend trying to get the lock - LOCK_TTL: 300, // seconds (allow 5 minutes for any operation to complete) - - // Use a signed lock value as described in - // http://redis.io/topics/distlock#correct-implementation-with-a-single-instance - // to prevent accidental unlocking by multiple processes - randomLock() { - const time = Date.now() - return `locked:host=${HOST}:pid=${PID}:random=${RND}:time=${time}:count=${COUNT++}` - }, - - unlockScript: - 'if redis.call("get", KEYS[1]) == ARGV[1] then return redis.call("del", KEYS[1]) else return 0 end', - - tryLock(key, callback) { - if (callback == null) { - callback = function () {} - } - const lockValue = LockManager.randomLock() - return rclient.set( - key, - lockValue, - 'EX', - this.LOCK_TTL, - 'NX', - function (err, gotLock) { - if (err != null) { - return callback(err) - } - if (gotLock === 'OK') { - return callback(err, true, lockValue) - } else { - return callback(err, false) - } - } - ) - }, - - getLock(key, callback) { - let attempt - if (callback == null) { - callback = function () {} - } - const startTime = Date.now() - return (attempt = function () { - if (Date.now() - startTime > LockManager.MAX_LOCK_WAIT_TIME) { - const e = new Error('Timeout') - e.key = key - return callback(e) - } - - return LockManager.tryLock(key, function (error, gotLock, lockValue) { - if (error != null) { - return callback(error) - } - if (gotLock) { - return callback(null, lockValue) - } else { - return setTimeout(attempt, LockManager.LOCK_TEST_INTERVAL) - } - }) - })() - }, - - checkLock(key, callback) { - if (callback == null) { - callback = function () {} - } - return rclient.exists(key, function (err, exists) { - if (err != null) { - return callback(err) - } - exists = parseInt(exists) - if (exists === 1) { - return callback(err, false) - } else { - return callback(err, true) - } - }) - }, - - releaseLock(key, lockValue, callback) { - return rclient.eval( - LockManager.unlockScript, - 1, - key, - lockValue, - function (err, result) { - if (err != null) { - return callback(err) - } - if (result != null && result !== 1) { - // successful unlock should release exactly one key - logger.error( - { key, lockValue, redisErr: err, redisResult: result }, - 'unlocking error' - ) - return callback(new Error('tried to release timed out lock')) - } - return callback(err, result) - } - ) - }, - - runWithLock(key, runner, callback) { - if (callback == null) { - callback = function () {} - } - return LockManager.getLock(key, function (error, lockValue) { - if (error != null) { - return callback(error) - } - return runner(error1 => - LockManager.releaseLock(key, lockValue, function (error2) { - error = error1 || error2 - if (error != null) { - return callback(error) - } - return 
callback() - }) - ) - }) - }, - - healthCheck(callback) { - const action = releaseLock => releaseLock() - return LockManager.runWithLock( - `HistoryLock:HealthCheck:host=${HOST}:pid=${PID}:random=${RND}`, - action, - callback - ) - }, - - close(callback) { - rclient.quit() - return rclient.once('end', callback) - }, -} diff --git a/services/track-changes/app/js/MongoAWS.js b/services/track-changes/app/js/MongoAWS.js deleted file mode 100644 index 2fe7bb4b27..0000000000 --- a/services/track-changes/app/js/MongoAWS.js +++ /dev/null @@ -1,197 +0,0 @@ -/* eslint-disable - no-return-assign, - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS101: Remove unnecessary use of Array.from - * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -let MongoAWS -const settings = require('@overleaf/settings') -const logger = require('@overleaf/logger') -const AWS = require('aws-sdk') -const S3S = require('s3-streams') -const { db, ObjectId } = require('./mongodb') -const JSONStream = require('JSONStream') -const ReadlineStream = require('byline') -const zlib = require('zlib') -const Metrics = require('@overleaf/metrics') - -const DAYS = 24 * 3600 * 1000 // one day in milliseconds - -const createStream = function (streamConstructor, projectId, docId, packId) { - const AWS_CONFIG = { - accessKeyId: settings.trackchanges.s3.key, - secretAccessKey: settings.trackchanges.s3.secret, - endpoint: settings.trackchanges.s3.endpoint, - s3ForcePathStyle: settings.trackchanges.s3.pathStyle, - } - - return streamConstructor(new AWS.S3(AWS_CONFIG), { - Bucket: settings.trackchanges.stores.doc_history, - Key: projectId + '/changes-' + docId + '/pack-' + packId, - }) -} - -module.exports = MongoAWS = { - archivePack(projectId, docId, packId, _callback) { - if (_callback == null) { - _callback = function () {} - } - const callback = function (...args) { - _callback(...Array.from(args || [])) - return (_callback = function () {}) - } - - const query = { - _id: ObjectId(packId), - doc_id: ObjectId(docId), - } - - if (projectId == null) { - return callback(new Error('invalid project id')) - } - if (docId == null) { - return callback(new Error('invalid doc id')) - } - if (packId == null) { - return callback(new Error('invalid pack id')) - } - - logger.debug({ projectId, docId, packId }, 'uploading data to s3') - - const upload = createStream(S3S.WriteStream, projectId, docId, packId) - - return db.docHistory.findOne(query, function (err, result) { - if (err != null) { - return callback(err) - } - if (result == null) { - return callback(new Error('cannot find pack to send to s3')) - } - if (result.expiresAt != null) { - return callback(new Error('refusing to send pack with TTL to s3')) - } - const uncompressedData = JSON.stringify(result) - if (uncompressedData.indexOf('\u0000') !== -1) { - const error = new Error('null bytes found in upload') - logger.error({ err: error, projectId, docId, packId }, error.message) - return callback(error) - } - return zlib.gzip(uncompressedData, function (err, buf) { - logger.debug( - { - projectId, - docId, - packId, - origSize: uncompressedData.length, - newSize: buf.length, - }, - 'compressed pack' - ) - if (err != null) { - return callback(err) - } - upload.on('error', err => callback(err)) - upload.on('finish', function 
() { - Metrics.inc('archive-pack') - logger.debug({ projectId, docId, packId }, 'upload to s3 completed') - return callback(null) - }) - upload.write(buf) - return upload.end() - }) - }) - }, - - readArchivedPack(projectId, docId, packId, _callback) { - if (_callback == null) { - _callback = function () {} - } - const callback = function (...args) { - _callback(...Array.from(args || [])) - return (_callback = function () {}) - } - - if (projectId == null) { - return callback(new Error('invalid project id')) - } - if (docId == null) { - return callback(new Error('invalid doc id')) - } - if (packId == null) { - return callback(new Error('invalid pack id')) - } - - logger.debug({ projectId, docId, packId }, 'downloading data from s3') - - const download = createStream(S3S.ReadStream, projectId, docId, packId) - - const inputStream = download - .on('open', obj => 1) - .on('error', err => callback(err)) - - const gunzip = zlib.createGunzip() - gunzip.setEncoding('utf8') - gunzip.on('error', function (err) { - logger.debug( - { projectId, docId, packId, err }, - 'error uncompressing gzip stream' - ) - return callback(err) - }) - - const outputStream = inputStream.pipe(gunzip) - const parts = [] - outputStream.on('error', err => callback(err)) - outputStream.on('end', function () { - let object - logger.debug({ projectId, docId, packId }, 'download from s3 completed') - try { - object = JSON.parse(parts.join('')) - } catch (e) { - return callback(e) - } - object._id = ObjectId(object._id) - object.doc_id = ObjectId(object.doc_id) - object.project_id = ObjectId(object.project_id) - for (const op of Array.from(object.pack)) { - if (op._id != null) { - op._id = ObjectId(op._id) - } - } - return callback(null, object) - }) - return outputStream.on('data', data => parts.push(data)) - }, - - unArchivePack(projectId, docId, packId, callback) { - if (callback == null) { - callback = function () {} - } - return MongoAWS.readArchivedPack( - projectId, - docId, - packId, - function (err, object) { - if (err != null) { - return callback(err) - } - Metrics.inc('unarchive-pack') - // allow the object to expire, we can always retrieve it again - object.expiresAt = new Date(Date.now() + 7 * DAYS) - logger.debug({ projectId, docId, packId }, 'inserting object from s3') - return db.docHistory.insertOne(object, (err, confirmation) => { - if (err) return callback(err) - object._id = confirmation.insertedId - callback(null, object) - }) - } - ) - }, -} diff --git a/services/track-changes/app/js/MongoManager.js b/services/track-changes/app/js/MongoManager.js deleted file mode 100644 index 19408143af..0000000000 --- a/services/track-changes/app/js/MongoManager.js +++ /dev/null @@ -1,200 +0,0 @@ -/* eslint-disable - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. 
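// The MongoAWS module above ships each pack to S3 as gzipped JSON (refusing
// uploads that contain null bytes) and reverses the process on download. A
// self-contained sketch of that encode/decode round-trip using node's zlib,
// with the S3 streaming and the ObjectId re-hydration step left out:

const zlib = require('zlib')

function encodePack(pack, callback) {
  const json = JSON.stringify(pack)
  if (json.indexOf('\u0000') !== -1) {
    // same guard as the original: never upload corrupted data
    return callback(new Error('null bytes found in upload'))
  }
  zlib.gzip(json, callback) // callback(err, gzippedBuffer)
}

function decodePack(buf, callback) {
  zlib.gunzip(buf, function (err, json) {
    if (err) return callback(err)
    try {
      callback(null, JSON.parse(json.toString('utf8')))
    } catch (parseErr) {
      callback(parseErr)
    }
  })
}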
-/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -let MongoManager -const { db, ObjectId } = require('./mongodb') -const PackManager = require('./PackManager') -const async = require('async') -const _ = require('underscore') -const metrics = require('@overleaf/metrics') -const logger = require('@overleaf/logger') - -module.exports = MongoManager = { - getLastCompressedUpdate(docId, callback) { - if (callback == null) { - callback = function () {} - } - return db.docHistory - .find( - { doc_id: ObjectId(docId.toString()) }, - // only return the last entry in a pack - { projection: { pack: { $slice: -1 } } } - ) - .sort({ v: -1 }) - .limit(1) - .toArray(function (error, compressedUpdates) { - if (error != null) { - return callback(error) - } - return callback(null, compressedUpdates[0] || null) - }) - }, - - peekLastCompressedUpdate(docId, callback) { - // under normal use we pass back the last update as - // callback(null,update,version). - // - // when we have an existing last update but want to force a new one - // to start, we pass it back as callback(null,null,version), just - // giving the version so we can check consistency. - if (callback == null) { - callback = function () {} - } - return MongoManager.getLastCompressedUpdate( - docId, - function (error, update) { - if (error != null) { - return callback(error) - } - if (update != null) { - if (update.broken) { - // marked as broken so we will force a new op - return callback(null, null) - } else if (update.pack != null) { - if (update.finalised) { - // no more ops can be appended - return callback( - null, - null, - update.pack[0] != null ? update.pack[0].v : undefined - ) - } else { - return callback( - null, - update, - update.pack[0] != null ? update.pack[0].v : undefined - ) - } - } else { - return callback(null, update, update.v) - } - } else { - return PackManager.getLastPackFromIndex( - docId, - function (error, pack) { - if (error != null) { - return callback(error) - } - if ( - (pack != null ? pack.inS3 : undefined) != null && - (pack != null ? 
pack.v_end : undefined) != null - ) { - return callback(null, null, pack.v_end) - } - return callback(null, null) - } - ) - } - } - ) - }, - - backportProjectId(projectId, docId, callback) { - if (callback == null) { - callback = function () {} - } - return db.docHistory.updateMany( - { - doc_id: ObjectId(docId.toString()), - project_id: { $exists: false }, - }, - { - $set: { project_id: ObjectId(projectId.toString()) }, - }, - callback - ) - }, - - getProjectMetaData(projectId, callback) { - if (callback == null) { - callback = function () {} - } - return db.projectHistoryMetaData.findOne( - { - project_id: ObjectId(projectId.toString()), - }, - callback - ) - }, - - setProjectMetaData(projectId, metadata, callback) { - if (callback == null) { - callback = function () {} - } - return db.projectHistoryMetaData.updateOne( - { - project_id: ObjectId(projectId), - }, - { - $set: metadata, - }, - { - upsert: true, - }, - callback - ) - }, - - upgradeHistory(projectId, callback) { - // preserve the project's existing history - if (callback == null) { - callback = function () {} - } - return db.docHistory.updateMany( - { - project_id: ObjectId(projectId), - temporary: true, - expiresAt: { $exists: true }, - }, - { - $set: { temporary: false }, - $unset: { expiresAt: '' }, - }, - callback - ) - }, - - ensureIndices() { - // For finding all updates that go into a diff for a doc - db.docHistory.ensureIndex({ doc_id: 1, v: 1 }, { background: true }) - // For finding all updates that affect a project - db.docHistory.ensureIndex( - { project_id: 1, 'meta.end_ts': 1 }, - { background: true } - ) - // For finding updates that don't yet have a project_id and need it inserting - db.docHistory.ensureIndex( - { doc_id: 1, project_id: 1 }, - { background: true } - ) - // For finding project meta-data - db.projectHistoryMetaData.ensureIndex( - { project_id: 1 }, - { background: true } - ) - // TTL index for auto deleting week old temporary ops - db.docHistory.ensureIndex( - { expiresAt: 1 }, - { expireAfterSeconds: 0, background: true } - ) - // For finding packs to be checked for archiving - db.docHistory.ensureIndex({ last_checked: 1 }, { background: true }) - // For finding archived packs - return db.docHistoryIndex.ensureIndex( - { project_id: 1 }, - { background: true } - ) - }, -} -;['getLastCompressedUpdate', 'getProjectMetaData', 'setProjectMetaData'].map( - method => - metrics.timeAsyncMethod(MongoManager, method, 'mongo.MongoManager', logger) -) diff --git a/services/track-changes/app/js/PackManager.js b/services/track-changes/app/js/PackManager.js deleted file mode 100644 index 7043bf043e..0000000000 --- a/services/track-changes/app/js/PackManager.js +++ /dev/null @@ -1,1202 +0,0 @@ -/* eslint-disable - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. 
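// ensureIndices above relies on a MongoDB TTL index ({ expiresAt: 1 } with
// expireAfterSeconds: 0), which deletes each document once the server clock
// passes the Date stored in its expiresAt field; that is what reaps temporary
// packs about a week after they are written. A sketch of that interaction
// (database and collection names here are assumptions):

const { MongoClient } = require('mongodb')

const DAYS = 24 * 3600 * 1000

async function demoTtlReaping(mongoUri) {
  const client = await MongoClient.connect(mongoUri)
  const docHistory = client.db('history').collection('docHistory')
  // expireAfterSeconds: 0 => expire exactly at each document's expiresAt
  await docHistory.createIndex({ expiresAt: 1 }, { expireAfterSeconds: 0 })
  await docHistory.insertOne({
    pack: [],
    temporary: true,
    expiresAt: new Date(Date.now() + 7 * DAYS), // removed ~7 days from now
  })
  await client.close()
}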
-/* - * decaffeinate suggestions: - * DS101: Remove unnecessary use of Array.from - * DS102: Remove unnecessary code created because of implicit returns - * DS205: Consider reworking code to avoid use of IIFEs - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -let PackManager -const async = require('async') -const _ = require('underscore') -const Bson = require('bson') -const BSON = new Bson() -const { db, ObjectId } = require('./mongodb') -const logger = require('@overleaf/logger') -const LockManager = require('./LockManager') -const MongoAWS = require('./MongoAWS') -const Metrics = require('@overleaf/metrics') -const ProjectIterator = require('./ProjectIterator') -const DocIterator = require('./DocIterator') -const Settings = require('@overleaf/settings') -const util = require('util') -const keys = Settings.redis.lock.key_schema - -// Sharejs operations are stored in a 'pack' object -// -// e.g. a single sharejs update looks like -// -// { -// "doc_id" : 549dae9e0a2a615c0c7f0c98, -// "project_id" : 549dae9c0a2a615c0c7f0c8c, -// "op" : [ {"p" : 6981, "d" : "?" } ], -// "meta" : { "user_id" : 52933..., "start_ts" : 1422310693931, "end_ts" : 1422310693931 }, -// "v" : 17082 -// } -// -// and a pack looks like this -// -// { -// "doc_id" : 549dae9e0a2a615c0c7f0c98, -// "project_id" : 549dae9c0a2a615c0c7f0c8c, -// "pack" : [ U1, U2, U3, ...., UN], -// "meta" : { "user_id" : 52933..., "start_ts" : 1422310693931, "end_ts" : 1422310693931 }, -// "v" : 17082 -// "v_end" : ... -// } -// -// where U1, U2, U3, .... are single updates stripped of their -// doc_id and project_id fields (which are the same for all the -// updates in the pack). -// -// The pack itself has v and meta fields, this makes it possible to -// treat packs and single updates in a similar way. -// -// The v field of the pack itself is from the first entry U1, the -// v_end field from UN. The meta.end_ts field of the pack itself is -// from the last entry UN, the meta.start_ts field from U1. - -const DAYS = 24 * 3600 * 1000 // one day in milliseconds - -module.exports = PackManager = { - MAX_SIZE: 1024 * 1024, // make these configurable parameters - MAX_COUNT: 1024, - - insertCompressedUpdates( - projectId, - docId, - lastUpdate, - newUpdates, - temporary, - callback - ) { - if (callback == null) { - callback = function () {} - } - if (newUpdates.length === 0) { - return callback() - } - - // never append permanent ops to a pack that will expire - if ( - (lastUpdate != null ? lastUpdate.expiresAt : undefined) != null && - !temporary - ) { - lastUpdate = null - } - - const updatesToFlush = [] - const updatesRemaining = newUpdates.slice() - - let n = (lastUpdate != null ? lastUpdate.n : undefined) || 0 - let sz = (lastUpdate != null ? 
lastUpdate.sz : undefined) || 0 - - while ( - updatesRemaining.length && - n < PackManager.MAX_COUNT && - sz < PackManager.MAX_SIZE - ) { - const nextUpdate = updatesRemaining[0] - const nextUpdateSize = BSON.calculateObjectSize(nextUpdate) - if (nextUpdateSize + sz > PackManager.MAX_SIZE && n > 0) { - break - } - n++ - sz += nextUpdateSize - updatesToFlush.push(updatesRemaining.shift()) - } - - return PackManager.flushCompressedUpdates( - projectId, - docId, - lastUpdate, - updatesToFlush, - temporary, - function (error) { - if (error != null) { - return callback(error) - } - return PackManager.insertCompressedUpdates( - projectId, - docId, - null, - updatesRemaining, - temporary, - callback - ) - } - ) - }, - - flushCompressedUpdates( - projectId, - docId, - lastUpdate, - newUpdates, - temporary, - callback - ) { - if (callback == null) { - callback = function () {} - } - if (newUpdates.length === 0) { - return callback() - } - - let canAppend = false - // check if it is safe to append to an existing pack - if (lastUpdate != null) { - if (!temporary && lastUpdate.expiresAt == null) { - // permanent pack appends to permanent pack - canAppend = true - } - const age = - Date.now() - - (lastUpdate.meta != null ? lastUpdate.meta.start_ts : undefined) - if (temporary && lastUpdate.expiresAt != null && age < 1 * DAYS) { - // temporary pack appends to temporary pack if same day - canAppend = true - } - } - - if (canAppend) { - return PackManager.appendUpdatesToExistingPack( - projectId, - docId, - lastUpdate, - newUpdates, - temporary, - callback - ) - } else { - return PackManager.insertUpdatesIntoNewPack( - projectId, - docId, - newUpdates, - temporary, - callback - ) - } - }, - - insertUpdatesIntoNewPack(projectId, docId, newUpdates, temporary, callback) { - if (callback == null) { - callback = function () {} - } - const first = newUpdates[0] - const last = newUpdates[newUpdates.length - 1] - const n = newUpdates.length - const sz = BSON.calculateObjectSize(newUpdates) - const newPack = { - project_id: ObjectId(projectId.toString()), - doc_id: ObjectId(docId.toString()), - pack: newUpdates, - n, - sz, - meta: { - start_ts: first.meta.start_ts, - end_ts: last.meta.end_ts, - }, - v: first.v, - v_end: last.v, - temporary, - } - if (temporary) { - newPack.expiresAt = new Date(Date.now() + 7 * DAYS) - newPack.last_checked = new Date(Date.now() + 30 * DAYS) // never check temporary packs - } - logger.debug( - { projectId, docId, newUpdates }, - 'inserting updates into new pack' - ) - return db.docHistory.insertOne(newPack, function (err) { - if (err != null) { - return callback(err) - } - Metrics.inc(`insert-pack-${temporary ? 
'temporary' : 'permanent'}`) - if (temporary) { - return callback() - } else { - return PackManager.updateIndex(projectId, docId, callback) - } - }) - }, - - appendUpdatesToExistingPack( - projectId, - docId, - lastUpdate, - newUpdates, - temporary, - callback - ) { - if (callback == null) { - callback = function () {} - } - const first = newUpdates[0] - const last = newUpdates[newUpdates.length - 1] - const n = newUpdates.length - const sz = BSON.calculateObjectSize(newUpdates) - const query = { - _id: lastUpdate._id, - project_id: ObjectId(projectId.toString()), - doc_id: ObjectId(docId.toString()), - pack: { $exists: true }, - } - const update = { - $push: { - pack: { $each: newUpdates }, - }, - $inc: { - n, - sz, - }, - $set: { - 'meta.end_ts': last.meta.end_ts, - v_end: last.v, - }, - } - if (lastUpdate.expiresAt && temporary) { - update.$set.expiresAt = new Date(Date.now() + 7 * DAYS) - } - logger.debug( - { projectId, docId, lastUpdate, newUpdates }, - 'appending updates to existing pack' - ) - Metrics.inc(`append-pack-${temporary ? 'temporary' : 'permanent'}`) - return db.docHistory.updateOne(query, update, callback) - }, - - // Retrieve all changes for a document - - getOpsByVersionRange(projectId, docId, fromVersion, toVersion, callback) { - if (callback == null) { - callback = function () {} - } - return PackManager.loadPacksByVersionRange( - projectId, - docId, - fromVersion, - toVersion, - function (error) { - if (error) return callback(error) - const query = { doc_id: ObjectId(docId.toString()) } - if (toVersion != null) { - query.v = { $lte: toVersion } - } - if (fromVersion != null) { - query.v_end = { $gte: fromVersion } - } - // console.log "query:", query - return db.docHistory - .find(query) - .sort({ v: -1 }) - .toArray(function (err, result) { - if (err != null) { - return callback(err) - } - // console.log "getOpsByVersionRange:", err, result - const updates = [] - const opInRange = function (op, from, to) { - if (fromVersion != null && op.v < fromVersion) { - return false - } - if (toVersion != null && op.v > toVersion) { - return false - } - return true - } - for (const docHistory of Array.from(result)) { - // console.log 'adding', docHistory.pack - for (const op of Array.from(docHistory.pack.reverse())) { - if (opInRange(op, fromVersion, toVersion)) { - op.project_id = docHistory.project_id - op.doc_id = docHistory.doc_id - // console.log "added op", op.v, fromVersion, toVersion - updates.push(op) - } - } - } - return callback(null, updates) - }) - } - ) - }, - - loadPacksByVersionRange(projectId, docId, fromVersion, toVersion, callback) { - return PackManager.getIndex(docId, function (err, indexResult) { - let pack - if (err != null) { - return callback(err) - } - const indexPacks = - (indexResult != null ? 
indexResult.packs : undefined) || [] - const packInRange = function (pack, from, to) { - if (fromVersion != null && pack.v_end < fromVersion) { - return false - } - if (toVersion != null && pack.v > toVersion) { - return false - } - return true - } - const neededIds = (() => { - const result = [] - for (pack of Array.from(indexPacks)) { - if (packInRange(pack, fromVersion, toVersion)) { - result.push(pack._id) - } - } - return result - })() - if (neededIds.length) { - return PackManager.fetchPacksIfNeeded( - projectId, - docId, - neededIds, - callback - ) - } else { - return callback() - } - }) - }, - - fetchPacksIfNeeded(projectId, docId, packIds, callback) { - let id - return db.docHistory - .find({ _id: { $in: packIds.map(ObjectId) } }, { projection: { _id: 1 } }) - .toArray(function (err, loadedPacks) { - if (err != null) { - return callback(err) - } - const allPackIds = (() => { - const result1 = [] - for (id of Array.from(packIds)) { - result1.push(id.toString()) - } - return result1 - })() - const loadedPackIds = Array.from(loadedPacks).map(pack => - pack._id.toString() - ) - const packIdsToFetch = _.difference(allPackIds, loadedPackIds) - logger.debug( - { projectId, docId, loadedPackIds, allPackIds, packIdsToFetch }, - 'analysed packs' - ) - if (packIdsToFetch.length === 0) { - return callback() - } - return async.eachLimit( - packIdsToFetch, - 4, - (packId, cb) => MongoAWS.unArchivePack(projectId, docId, packId, cb), - function (err) { - if (err != null) { - return callback(err) - } - logger.debug({ projectId, docId }, 'done unarchiving') - return callback() - } - ) - }) - }, - - findAllDocsInProject(projectId, callback) { - const docIdSet = new Set() - async.series( - [ - cb => { - db.docHistory - .find( - { project_id: ObjectId(projectId) }, - { projection: { pack: false } } - ) - .toArray((err, packs) => { - if (err) return callback(err) - packs.forEach(pack => { - docIdSet.add(pack.doc_id.toString()) - }) - return cb() - }) - }, - cb => { - db.docHistoryIndex - .find({ project_id: ObjectId(projectId) }) - .toArray((err, indexes) => { - if (err) return callback(err) - indexes.forEach(index => { - docIdSet.add(index._id.toString()) - }) - return cb() - }) - }, - ], - err => { - if (err) return callback(err) - callback(null, [...docIdSet]) - } - ) - }, - - // rewrite any query using doc_id to use _id instead - // (because docHistoryIndex uses the doc_id) - - _rewriteQueryForIndex(query) { - const indexQuery = _.omit(query, 'doc_id') - if ('doc_id' in query) { - indexQuery._id = query.doc_id - } - return indexQuery - }, - - // Retrieve all changes across a project - - _findPacks(query, sortKeys, callback) { - // get all the docHistory Entries - return db.docHistory - .find(query, { projection: { pack: false } }) - .sort(sortKeys) - .toArray(function (err, packs) { - let pack - if (err != null) { - return callback(err) - } - const allPacks = [] - const seenIds = {} - for (pack of Array.from(packs)) { - allPacks.push(pack) - seenIds[pack._id] = true - } - const indexQuery = PackManager._rewriteQueryForIndex(query) - return db.docHistoryIndex - .find(indexQuery) - .toArray(function (err, indexes) { - if (err != null) { - return callback(err) - } - for (const index of Array.from(indexes)) { - for (pack of Array.from(index.packs)) { - if (!seenIds[pack._id]) { - pack.project_id = index.project_id - pack.doc_id = index._id - pack.fromIndex = true - allPacks.push(pack) - seenIds[pack._id] = true - } - } - } - return callback(null, allPacks) - }) - }) - }, - - 
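// _findPacks above merges two sources: pack headers still in docHistory and
// pack entries recorded in docHistoryIndex, keyed on _id so a pack present in
// both places (indexed but not yet expired from docHistory) is only reported
// once. The merge step, restated as a hedged standalone sketch:

function mergePackSources(livePacks, indexes) {
  const seenIds = {}
  const allPacks = []
  for (const pack of livePacks) {
    allPacks.push(pack)
    seenIds[pack._id] = true
  }
  for (const index of indexes) {
    for (const pack of index.packs) {
      if (!seenIds[pack._id]) {
        // index entries omit these fields, so restore them from the index doc
        pack.project_id = index.project_id
        pack.doc_id = index._id
        pack.fromIndex = true
        allPacks.push(pack)
        seenIds[pack._id] = true
      }
    }
  }
  return allPacks
}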
makeProjectIterator(projectId, before, callback) { - PackManager._findPacks( - { project_id: ObjectId(projectId) }, - { 'meta.end_ts': -1 }, - function (err, allPacks) { - if (err) return callback(err) - callback( - null, - new ProjectIterator(allPacks, before, PackManager.getPackById) - ) - } - ) - }, - - makeDocIterator(docId, callback) { - PackManager._findPacks( - { doc_id: ObjectId(docId) }, - { v: -1 }, - function (err, allPacks) { - if (err) return callback(err) - callback(null, new DocIterator(allPacks, PackManager.getPackById)) - } - ) - }, - - getPackById(projectId, docId, packId, callback) { - return db.docHistory.findOne({ _id: packId }, function (err, pack) { - if (err != null) { - return callback(err) - } - if (pack == null) { - return MongoAWS.unArchivePack(projectId, docId, packId, callback) - } else if (pack.expiresAt != null && pack.temporary === false) { - // we only need to touch the TTL when listing the changes in the project - // because diffs on individual documents are always done after that - return PackManager.increaseTTL(pack, callback) - // only do this for cached packs, not temporary ones to avoid older packs - // being kept longer than newer ones (which messes up the last update version) - } else { - return callback(null, pack) - } - }) - }, - - increaseTTL(pack, callback) { - if (pack.expiresAt < new Date(Date.now() + 6 * DAYS)) { - // update cache expiry since we are using this pack - return db.docHistory.updateOne( - { _id: pack._id }, - { $set: { expiresAt: new Date(Date.now() + 7 * DAYS) } }, - err => callback(err, pack) - ) - } else { - return callback(null, pack) - } - }, - - // Manage docHistoryIndex collection - - getIndex(docId, callback) { - return db.docHistoryIndex.findOne( - { _id: ObjectId(docId.toString()) }, - callback - ) - }, - - getPackFromIndex(docId, packId, callback) { - return db.docHistoryIndex.findOne( - { _id: ObjectId(docId.toString()), 'packs._id': packId }, - { projection: { 'packs.$': 1 } }, - callback - ) - }, - - getLastPackFromIndex(docId, callback) { - return db.docHistoryIndex.findOne( - { _id: ObjectId(docId.toString()) }, - { projection: { packs: { $slice: -1 } } }, - function (err, indexPack) { - if (err != null) { - return callback(err) - } - if (indexPack == null) { - return callback() - } - return callback(null, indexPack[0]) - } - ) - }, - - getIndexWithKeys(docId, callback) { - return PackManager.getIndex(docId, function (err, index) { - if (err != null) { - return callback(err) - } - if (index == null) { - return callback() - } - for (const pack of Array.from( - (index != null ? 
index.packs : undefined) || [] - )) { - index[pack._id] = pack - } - return callback(null, index) - }) - }, - - initialiseIndex(projectId, docId, callback) { - return PackManager.findCompletedPacks( - projectId, - docId, - function (err, packs) { - // console.log 'err', err, 'packs', packs, packs?.length - if (err != null) { - return callback(err) - } - if (packs == null) { - return callback() - } - return PackManager.insertPacksIntoIndexWithLock( - projectId, - docId, - packs, - callback - ) - } - ) - }, - - updateIndex(projectId, docId, callback) { - // find all packs prior to current pack - return PackManager.findUnindexedPacks( - projectId, - docId, - function (err, newPacks) { - if (err != null) { - return callback(err) - } - if (newPacks == null || newPacks.length === 0) { - return callback() - } - return PackManager.insertPacksIntoIndexWithLock( - projectId, - docId, - newPacks, - function (err) { - if (err != null) { - return callback(err) - } - logger.debug( - { projectId, docId, newPacks }, - 'added new packs to index' - ) - return callback() - } - ) - } - ) - }, - - findCompletedPacks(projectId, docId, callback) { - const query = { - doc_id: ObjectId(docId.toString()), - expiresAt: { $exists: false }, - } - return db.docHistory - .find(query, { projection: { pack: false } }) - .sort({ v: 1 }) - .toArray(function (err, packs) { - if (err != null) { - return callback(err) - } - if (packs == null) { - return callback() - } - if (!(packs != null ? packs.length : undefined)) { - return callback() - } - const last = packs.pop() // discard the last pack, if it's still in progress - if (last.finalised) { - packs.push(last) - } // it's finalised so we push it back to archive it - return callback(null, packs) - }) - }, - - findPacks(projectId, docId, callback) { - const query = { - doc_id: ObjectId(docId.toString()), - expiresAt: { $exists: false }, - } - return db.docHistory - .find(query, { projection: { pack: false } }) - .sort({ v: 1 }) - .toArray(function (err, packs) { - if (err != null) { - return callback(err) - } - if (packs == null) { - return callback() - } - if (!(packs != null ? packs.length : undefined)) { - return callback() - } - return callback(null, packs) - }) - }, - - findUnindexedPacks(projectId, docId, callback) { - return PackManager.getIndexWithKeys(docId, function (err, indexResult) { - if (err != null) { - return callback(err) - } - return PackManager.findCompletedPacks( - projectId, - docId, - function (err, historyPacks) { - let pack - if (err != null) { - return callback(err) - } - if (historyPacks == null) { - return callback() - } - // select only the new packs not already in the index - let newPacks = (() => { - const result = [] - for (pack of Array.from(historyPacks)) { - if ( - (indexResult != null ? 
indexResult[pack._id] : undefined) == - null - ) { - result.push(pack) - } - } - return result - })() - newPacks = (() => { - const result1 = [] - for (pack of Array.from(newPacks)) { - result1.push( - _.omit( - pack, - 'doc_id', - 'project_id', - 'n', - 'sz', - 'last_checked', - 'finalised' - ) - ) - } - return result1 - })() - if (newPacks.length) { - logger.debug( - { projectId, docId, n: newPacks.length }, - 'found new packs' - ) - } - return callback(null, newPacks) - } - ) - }) - }, - - insertPacksIntoIndexWithLock(projectId, docId, newPacks, callback) { - return LockManager.runWithLock( - keys.historyIndexLock({ doc_id: docId }), - releaseLock => - PackManager._insertPacksIntoIndex( - projectId, - docId, - newPacks, - releaseLock - ), - callback - ) - }, - - _insertPacksIntoIndex(projectId, docId, newPacks, callback) { - return db.docHistoryIndex.updateOne( - { _id: ObjectId(docId.toString()) }, - { - $setOnInsert: { project_id: ObjectId(projectId.toString()) }, - $push: { - packs: { $each: newPacks, $sort: { v: 1 } }, - }, - }, - { - upsert: true, - }, - callback - ) - }, - - // Archiving packs to S3 - - archivePack(projectId, docId, packId, callback) { - const clearFlagOnError = function (err, cb) { - if (err != null) { - // clear the inS3 flag on error - return PackManager.clearPackAsArchiveInProgress( - projectId, - docId, - packId, - function (err2) { - if (err2 != null) { - return cb(err2) - } - return cb(err) - } - ) - } else { - return cb() - } - } - return async.series( - [ - cb => - PackManager.checkArchiveNotInProgress(projectId, docId, packId, cb), - cb => - PackManager.markPackAsArchiveInProgress(projectId, docId, packId, cb), - cb => - MongoAWS.archivePack(projectId, docId, packId, err => - clearFlagOnError(err, cb) - ), - cb => - PackManager.checkArchivedPack(projectId, docId, packId, err => - clearFlagOnError(err, cb) - ), - cb => PackManager.markPackAsArchived(projectId, docId, packId, cb), - cb => - PackManager.setTTLOnArchivedPack(projectId, docId, packId, callback), - ], - callback - ) - }, - - checkArchivedPack(projectId, docId, packId, callback) { - return db.docHistory.findOne({ _id: packId }, function (err, pack) { - if (err != null) { - return callback(err) - } - if (pack == null) { - return callback(new Error('pack not found')) - } - return MongoAWS.readArchivedPack( - projectId, - docId, - packId, - function (err, result) { - if (err) return callback(err) - delete result.last_checked - delete pack.last_checked - // need to compare ids as ObjectIds with .equals() - for (const key of ['_id', 'project_id', 'doc_id']) { - if (result[key].equals(pack[key])) { - result[key] = pack[key] - } - } - for (let i = 0; i < result.pack.length; i++) { - const op = result.pack[i] - if (op._id != null && op._id.equals(pack.pack[i]._id)) { - op._id = pack.pack[i]._id - } - } - if (_.isEqual(pack, result)) { - return callback() - } else { - logger.err( - { - pack, - result, - jsondiff: JSON.stringify(pack) === JSON.stringify(result), - }, - 'difference when comparing packs' - ) - return callback( - new Error('pack retrieved from s3 does not match pack in mongo') - ) - } - } - ) - }) - }, - // Extra methods to test archive/unarchive for a doc_id - - pushOldPacks(projectId, docId, callback) { - return PackManager.findPacks(projectId, docId, function (err, packs) { - if (err != null) { - return callback(err) - } - if (!(packs != null ? 
packs.length : undefined)) { - return callback() - } - return PackManager.processOldPack( - projectId, - docId, - packs[0]._id, - callback - ) - }) - }, - - pullOldPacks(projectId, docId, callback) { - return PackManager.loadPacksByVersionRange( - projectId, - docId, - null, - null, - callback - ) - }, - - // Processing old packs via worker - - processOldPack(projectId, docId, packId, callback) { - const markAsChecked = err => - PackManager.markPackAsChecked(projectId, docId, packId, function (err2) { - if (err2 != null) { - return callback(err2) - } - return callback(err) - }) - logger.debug({ projectId, docId }, 'processing old packs') - return db.docHistory.findOne({ _id: packId }, function (err, pack) { - if (err != null) { - return markAsChecked(err) - } - if (pack == null) { - return markAsChecked() - } - if (pack.expiresAt != null) { - return callback() - } // return directly - return PackManager.finaliseIfNeeded( - projectId, - docId, - pack._id, - pack, - function (err) { - if (err != null) { - return markAsChecked(err) - } - return PackManager.updateIndexIfNeeded( - projectId, - docId, - function (err) { - if (err != null) { - return markAsChecked(err) - } - return PackManager.findUnarchivedPacks( - projectId, - docId, - function (err, unarchivedPacks) { - if (err != null) { - return markAsChecked(err) - } - if ( - !(unarchivedPacks != null - ? unarchivedPacks.length - : undefined) - ) { - logger.debug( - { projectId, docId }, - 'no packs need archiving' - ) - return markAsChecked() - } - return async.eachSeries( - unarchivedPacks, - (pack, cb) => - PackManager.archivePack(projectId, docId, pack._id, cb), - function (err) { - if (err != null) { - return markAsChecked(err) - } - logger.debug({ projectId, docId }, 'done processing') - return markAsChecked() - } - ) - } - ) - } - ) - } - ) - }) - }, - - finaliseIfNeeded(projectId, docId, packId, pack, callback) { - const sz = pack.sz / (1024 * 1024) // in fractions of a megabyte - const n = pack.n / 1024 // in fraction of 1024 ops - const age = (Date.now() - pack.meta.end_ts) / DAYS - if (age < 30) { - // always keep if less than 1 month old - logger.debug({ projectId, docId, packId, age }, 'less than 30 days old') - return callback() - } - // compute an archiving threshold which decreases for each month of age - const archiveThreshold = 30 / age - if (sz > archiveThreshold || n > archiveThreshold || age > 90) { - logger.debug( - { projectId, docId, packId, age, archiveThreshold, sz, n }, - 'meets archive threshold' - ) - return PackManager.markPackAsFinalisedWithLock( - projectId, - docId, - packId, - callback - ) - } else { - logger.debug( - { projectId, docId, packId, age, archiveThreshold, sz, n }, - 'does not meet archive threshold' - ) - return callback() - } - }, - - markPackAsFinalisedWithLock(projectId, docId, packId, callback) { - return LockManager.runWithLock( - keys.historyLock({ doc_id: docId }), - releaseLock => - PackManager._markPackAsFinalised(projectId, docId, packId, releaseLock), - callback - ) - }, - - _markPackAsFinalised(projectId, docId, packId, callback) { - logger.debug({ projectId, docId, packId }, 'marking pack as finalised') - return db.docHistory.updateOne( - { _id: packId }, - { $set: { finalised: true } }, - callback - ) - }, - - updateIndexIfNeeded(projectId, docId, callback) { - logger.debug({ projectId, docId }, 'archiving old packs') - return PackManager.getIndexWithKeys(docId, function (err, index) { - if (err != null) { - return callback(err) - } - if (index == null) { - return 
PackManager.initialiseIndex(projectId, docId, callback) - } else { - return PackManager.updateIndex(projectId, docId, callback) - } - }) - }, - - markPackAsChecked(projectId, docId, packId, callback) { - logger.debug({ projectId, docId, packId }, 'marking pack as checked') - return db.docHistory.updateOne( - { _id: packId }, - { $currentDate: { last_checked: true } }, - callback - ) - }, - - findUnarchivedPacks(projectId, docId, callback) { - return PackManager.getIndex(docId, function (err, indexResult) { - if (err != null) { - return callback(err) - } - const indexPacks = - (indexResult != null ? indexResult.packs : undefined) || [] - const unArchivedPacks = (() => { - const result = [] - for (const pack of Array.from(indexPacks)) { - if (pack.inS3 == null) { - result.push(pack) - } - } - return result - })() - if (unArchivedPacks.length) { - logger.debug( - { projectId, docId, n: unArchivedPacks.length }, - 'find unarchived packs' - ) - } - return callback(null, unArchivedPacks) - }) - }, - - // Archive locking flags - - checkArchiveNotInProgress(projectId, docId, packId, callback) { - logger.debug( - { projectId, docId, packId }, - 'checking if archive in progress' - ) - return PackManager.getPackFromIndex(docId, packId, function (err, result) { - if (err != null) { - return callback(err) - } - if (result == null) { - return callback(new Error('pack not found in index')) - } - if (result.inS3) { - return callback(new Error('pack archiving already done')) - } else if (result.inS3 != null) { - return callback(new Error('pack archiving already in progress')) - } else { - return callback() - } - }) - }, - - markPackAsArchiveInProgress(projectId, docId, packId, callback) { - logger.debug( - { projectId, docId }, - 'marking pack as archive in progress status' - ) - return db.docHistoryIndex.findOneAndUpdate( - { - _id: ObjectId(docId.toString()), - packs: { $elemMatch: { _id: packId, inS3: { $exists: false } } }, - }, - { $set: { 'packs.$.inS3': false } }, - { projection: { 'packs.$': 1 } }, - function (err, result) { - if (err != null) { - return callback(err) - } - if (!result.value) { - return callback(new Error('archive is already in progress')) - } - logger.debug( - { projectId, docId, packId }, - 'marked as archive in progress' - ) - return callback() - } - ) - }, - - clearPackAsArchiveInProgress(projectId, docId, packId, callback) { - logger.debug( - { projectId, docId, packId }, - 'clearing as archive in progress' - ) - return db.docHistoryIndex.updateOne( - { - _id: ObjectId(docId.toString()), - packs: { $elemMatch: { _id: packId, inS3: false } }, - }, - { $unset: { 'packs.$.inS3': true } }, - callback - ) - }, - - markPackAsArchived(projectId, docId, packId, callback) { - logger.debug({ projectId, docId, packId }, 'marking pack as archived') - return db.docHistoryIndex.findOneAndUpdate( - { - _id: ObjectId(docId.toString()), - packs: { $elemMatch: { _id: packId, inS3: false } }, - }, - { $set: { 'packs.$.inS3': true } }, - { projection: { 'packs.$': 1 } }, - function (err, result) { - if (err != null) { - return callback(err) - } - if (!result.value) { - return callback(new Error('archive is not marked as progress')) - } - logger.debug({ projectId, docId, packId }, 'marked as archived') - return callback() - } - ) - }, - - setTTLOnArchivedPack(projectId, docId, packId, callback) { - return db.docHistory.updateOne( - { _id: packId }, - { $set: { expiresAt: new Date(Date.now() + 1 * DAYS) } }, - function (err) { - if (err) { - return callback(err) - } - logger.debug({ projectId, 
docId, packId }, 'set expiry on pack') - return callback() - } - ) - }, -} - -module.exports.promises = { - getOpsByVersionRange: util.promisify(PackManager.getOpsByVersionRange), - findAllDocsInProject: util.promisify(PackManager.findAllDocsInProject), - makeDocIterator: util.promisify(PackManager.makeDocIterator), -} - -// _getOneDayInFutureWithRandomDelay: -> -// thirtyMins = 1000 * 60 * 30 -// randomThirtyMinMax = Math.ceil(Math.random() * thirtyMins) -// return new Date(Date.now() + randomThirtyMinMax + 1*DAYS) diff --git a/services/track-changes/app/js/PackWorker.js b/services/track-changes/app/js/PackWorker.js deleted file mode 100644 index 88a3af5799..0000000000 --- a/services/track-changes/app/js/PackWorker.js +++ /dev/null @@ -1,211 +0,0 @@ -/* eslint-disable - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS101: Remove unnecessary use of Array.from - * DS102: Remove unnecessary code created because of implicit returns - * DS103: Rewrite code to no longer use __guard__ - * DS205: Consider reworking code to avoid use of IIFEs - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -let LIMIT, pending -let projectId, docId -const { callbackify } = require('util') -const Settings = require('@overleaf/settings') -const async = require('async') -const _ = require('underscore') -const { db, ObjectId, waitForDb, closeDb } = require('./mongodb') -const fs = require('fs') -const Metrics = require('@overleaf/metrics') -Metrics.initialize('track-changes') -const logger = require('@overleaf/logger') -logger.initialize('track-changes-packworker') -if ((Settings.sentry != null ? Settings.sentry.dsn : undefined) != null) { - logger.initializeErrorReporting(Settings.sentry.dsn) -} - -const DAYS = 24 * 3600 * 1000 - -const LockManager = require('./LockManager') -const PackManager = require('./PackManager') - -// this worker script is forked by the main process to look for -// document histories which can be archived - -const source = process.argv[2] -const DOCUMENT_PACK_DELAY = Number(process.argv[3]) || 1000 -const TIMEOUT = Number(process.argv[4]) || 30 * 60 * 1000 -let COUNT = 0 // number processed -let TOTAL = 0 // total number to process - -if (!source.match(/^[0-9]+$/)) { - const file = fs.readFileSync(source) - const result = (() => { - const result1 = [] - for (const line of Array.from(file.toString().split('\n'))) { - ;[projectId, docId] = Array.from(line.split(' ')) - result1.push({ doc_id: docId, project_id: projectId }) - } - return result1 - })() - pending = _.filter(result, row => - __guard__(row != null ? 
row.doc_id : undefined, x => - x.match(/^[a-f0-9]{24}$/) - ) - ) -} else { - LIMIT = Number(process.argv[2]) || 1000 -} - -let shutDownRequested = false -const shutDownTimer = setTimeout(function () { - logger.debug('pack timed out, requesting shutdown') - // start the shutdown on the next pack - shutDownRequested = true - // do a hard shutdown after a further 5 minutes - const hardTimeout = setTimeout(function () { - logger.error('HARD TIMEOUT in pack archive worker') - return process.exit() - }, 5 * 60 * 1000) - return hardTimeout.unref() -}, TIMEOUT) - -logger.debug( - `checking for updates, limit=${LIMIT}, delay=${DOCUMENT_PACK_DELAY}, timeout=${TIMEOUT}` -) - -const finish = function () { - if (shutDownTimer != null) { - logger.debug('cancelling timeout') - clearTimeout(shutDownTimer) - } - logger.debug('closing db') - callbackify(closeDb)(function () { - logger.debug('closing LockManager Redis Connection') - return LockManager.close(function () { - logger.debug( - { processedCount: COUNT, allCount: TOTAL }, - 'ready to exit from pack archive worker' - ) - const hardTimeout = setTimeout(function () { - logger.error('hard exit from pack archive worker') - return process.exit(1) - }, 5 * 1000) - return hardTimeout.unref() - }) - }) -} - -process.on('exit', code => logger.debug({ code }, 'pack archive worker exited')) - -const processUpdates = pending => - async.eachSeries( - pending, - function (result, callback) { - let _id - ;({ _id, project_id: projectId, doc_id: docId } = result) - COUNT++ - logger.debug({ projectId, docId }, `processing ${COUNT}/${TOTAL}`) - if (projectId == null || docId == null) { - logger.debug( - { projectId, docId }, - 'skipping pack, missing project/doc id' - ) - return callback() - } - const handler = function (err, result) { - if (err != null && err.code === 'InternalError' && err.retryable) { - logger.warn( - { err, result }, - 'ignoring S3 error in pack archive worker' - ) - // Ignore any s3 errors due to random problems - err = null - } - if (err != null) { - logger.error({ err, result }, 'error in pack archive worker') - return callback(err) - } - if (shutDownRequested) { - logger.warn('shutting down pack archive worker') - return callback(new Error('shutdown')) - } - return setTimeout(() => callback(err, result), DOCUMENT_PACK_DELAY) - } - if (_id == null) { - return PackManager.pushOldPacks(projectId, docId, handler) - } else { - return PackManager.processOldPack(projectId, docId, _id, handler) - } - }, - function (err, results) { - if (err != null && err.message !== 'shutdown') { - logger.error({ err }, 'error in pack archive worker processUpdates') - } - return finish() - } - ) -// find the packs which can be archived - -const ObjectIdFromDate = function (date) { - const id = Math.floor(date.getTime() / 1000).toString(16) + '0000000000000000' - return ObjectId(id) -} - -// new approach, two passes -// find packs to be marked as finalised:true, those which have a newer pack present -// then only consider finalised:true packs for archiving - -waitForDb() - .then(() => { - if (pending != null) { - logger.debug(`got ${pending.length} entries from ${source}`) - processUpdates(pending) - } else { - processFromOneWeekAgo() - } - }) - .catch(err => { - logger.fatal({ err }, 'cannot connect to mongo, exiting') - process.exit(1) - }) - -function processFromOneWeekAgo() { - const oneWeekAgo = new Date(Date.now() - 7 * DAYS) - db.docHistory - .find( - { - expiresAt: { $exists: false }, - project_id: { $exists: true }, - v_end: { $exists: true }, - _id: { $lt: 
ObjectIdFromDate(oneWeekAgo) }, - last_checked: { $lt: oneWeekAgo }, - }, - { projection: { _id: 1, doc_id: 1, project_id: 1 } } - ) - .sort({ - last_checked: 1, - }) - .limit(LIMIT) - .toArray(function (err, results) { - if (err != null) { - logger.debug({ err }, 'error checking for updates') - finish() - return - } - pending = _.uniq(results, false, result => result.doc_id.toString()) - TOTAL = pending.length - logger.debug(`found ${TOTAL} documents to archive`) - return processUpdates(pending) - }) -} - -function __guard__(value, transform) { - return typeof value !== 'undefined' && value !== null - ? transform(value) - : undefined -} diff --git a/services/track-changes/app/js/ProjectIterator.js b/services/track-changes/app/js/ProjectIterator.js deleted file mode 100644 index 2b9da401b3..0000000000 --- a/services/track-changes/app/js/ProjectIterator.js +++ /dev/null @@ -1,113 +0,0 @@ -/* eslint-disable - no-unmodified-loop-condition, - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS101: Remove unnecessary use of Array.from - * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -let ProjectIterator -const Heap = require('heap') - -module.exports = - ProjectIterator = - ProjectIterator = - class ProjectIterator { - constructor(packs, before, getPackByIdFn) { - this.before = before - this.getPackByIdFn = getPackByIdFn - const byEndTs = (a, b) => - b.meta.end_ts - a.meta.end_ts || a.fromIndex - b.fromIndex - this.packs = packs.slice().sort(byEndTs) - this.queue = new Heap(byEndTs) - } - - next(callback) { - // what's up next - // console.log ">>> top item", iterator.packs[0] - const iterator = this - const { before } = this - const { queue } = iterator - const opsToReturn = [] - let nextPack = iterator.packs[0] - let lowWaterMark = - (nextPack != null ? nextPack.meta.end_ts : undefined) || 0 - let nextItem = queue.peek() - - // console.log "queue empty?", queue.empty() - // console.log "nextItem", nextItem - // console.log "nextItem.meta.end_ts", nextItem?.meta.end_ts - // console.log "lowWaterMark", lowWaterMark - - while ( - before != null && - (nextPack != null ? nextPack.meta.start_ts : undefined) > before - ) { - // discard pack that is outside range - iterator.packs.shift() - nextPack = iterator.packs[0] - lowWaterMark = - (nextPack != null ? nextPack.meta.end_ts : undefined) || 0 - } - - if ( - (queue.empty() || - (nextItem != null ? nextItem.meta.end_ts : undefined) <= - lowWaterMark) && - nextPack != null - ) { - // retrieve the next pack and populate the queue - return this.getPackByIdFn( - nextPack.project_id, - nextPack.doc_id, - nextPack._id, - function (err, pack) { - if (err != null) { - return callback(err) - } - iterator.packs.shift() // have now retrieved this pack, remove it - // console.log "got pack", pack - for (const op of Array.from(pack.pack)) { - // console.log "adding op", op - if (before == null || op.meta.end_ts < before) { - op.doc_id = nextPack.doc_id - op.project_id = nextPack.project_id - queue.push(op) - } - } - // now try again - return iterator.next(callback) - } - ) - } - - // console.log "nextItem", nextItem, "lowWaterMark", lowWaterMark - while ( - nextItem != null && - (nextItem != null ? 
nextItem.meta.end_ts : undefined) > lowWaterMark - ) { - opsToReturn.push(nextItem) - queue.pop() - nextItem = queue.peek() - } - - // console.log "queue empty?", queue.empty() - // console.log "nextPack", nextPack? - - if (queue.empty() && nextPack == null) { - // got everything - iterator._done = true - } - - return callback(null, opsToReturn) - } - - done() { - return this._done - } - } diff --git a/services/track-changes/app/js/RedisManager.js b/services/track-changes/app/js/RedisManager.js deleted file mode 100644 index aa90aa0d80..0000000000 --- a/services/track-changes/app/js/RedisManager.js +++ /dev/null @@ -1,169 +0,0 @@ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS101: Remove unnecessary use of Array.from - * DS102: Remove unnecessary code created because of implicit returns - * DS205: Consider reworking code to avoid use of IIFEs - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -let RedisManager -const Settings = require('@overleaf/settings') -const redis = require('@overleaf/redis-wrapper') -const rclient = redis.createClient(Settings.redis.history) -const Keys = Settings.redis.history.key_schema -const async = require('async') - -module.exports = RedisManager = { - getOldestDocUpdates(docId, batchSize, callback) { - if (callback == null) { - callback = function () {} - } - const key = Keys.uncompressedHistoryOps({ doc_id: docId }) - return rclient.lrange(key, 0, batchSize - 1, callback) - }, - - expandDocUpdates(jsonUpdates, callback) { - let rawUpdates - if (callback == null) { - callback = function () {} - } - try { - rawUpdates = Array.from(jsonUpdates || []).map(update => - JSON.parse(update) - ) - } catch (e) { - return callback(e) - } - return callback(null, rawUpdates) - }, - - deleteAppliedDocUpdates(projectId, docId, docUpdates, callback) { - if (callback == null) { - callback = function () {} - } - const multi = rclient.multi() - // Delete all the updates which have been applied (exact match) - for (const update of Array.from(docUpdates || [])) { - multi.lrem(Keys.uncompressedHistoryOps({ doc_id: docId }), 1, update) - } - return multi.exec(function (error, results) { - if (error != null) { - return callback(error) - } - // It's ok to delete the doc_id from the set here. Even though the list - // of updates may not be empty, we will continue to process it until it is. - return rclient.srem( - Keys.docsWithHistoryOps({ project_id: projectId }), - docId, - function (error) { - if (error != null) { - return callback(error) - } - return callback(null) - } - ) - }) - }, - - getDocIdsWithHistoryOps(projectId, callback) { - if (callback == null) { - callback = function () {} - } - return rclient.smembers( - Keys.docsWithHistoryOps({ project_id: projectId }), - callback - ) - }, - - // iterate over keys asynchronously using redis scan (non-blocking) - // handle all the cluster nodes or single redis server - _getKeys(pattern, callback) { - const nodes = (typeof rclient.nodes === 'function' - ? 
rclient.nodes('master') - : undefined) || [rclient] - const doKeyLookupForNode = (node, cb) => - RedisManager._getKeysFromNode(node, pattern, cb) - return async.concatSeries(nodes, doKeyLookupForNode, callback) - }, - - _getKeysFromNode(node, pattern, callback) { - let cursor = 0 // redis iterator - const keySet = {} // use hash to avoid duplicate results - // scan over all keys looking for pattern - const doIteration = cb => - node.scan( - cursor, - 'MATCH', - pattern, - 'COUNT', - 1000, - function (error, reply) { - let keys - if (error != null) { - return callback(error) - } - ;[cursor, keys] = Array.from(reply) - for (const key of Array.from(keys)) { - keySet[key] = true - } - if (cursor === '0') { - // note redis returns string result not numeric - return callback(null, Object.keys(keySet)) - } else { - return doIteration() - } - } - ) - return doIteration() - }, - - // extract ids from keys like DocsWithHistoryOps:57fd0b1f53a8396d22b2c24b - // or DocsWithHistoryOps:{57fd0b1f53a8396d22b2c24b} (for redis cluster) - _extractIds(keyList) { - const ids = (() => { - const result = [] - for (const key of Array.from(keyList)) { - const m = key.match(/:\{?([0-9a-f]{24})\}?/) // extract object id - result.push(m[1]) - } - return result - })() - return ids - }, - - getProjectIdsWithHistoryOps(callback) { - if (callback == null) { - callback = function () {} - } - return RedisManager._getKeys( - Keys.docsWithHistoryOps({ project_id: '*' }), - function (error, projectKeys) { - if (error != null) { - return callback(error) - } - const projectIds = RedisManager._extractIds(projectKeys) - return callback(error, projectIds) - } - ) - }, - - getAllDocIdsWithHistoryOps(callback) { - // return all the docids, to find dangling history entries after - // everything is flushed. - if (callback == null) { - callback = function () {} - } - return RedisManager._getKeys( - Keys.uncompressedHistoryOps({ doc_id: '*' }), - function (error, docKeys) { - if (error != null) { - return callback(error) - } - const docIds = RedisManager._extractIds(docKeys) - return callback(error, docIds) - } - ) - }, -} diff --git a/services/track-changes/app/js/RestoreManager.js b/services/track-changes/app/js/RestoreManager.js deleted file mode 100644 index d176a21f4c..0000000000 --- a/services/track-changes/app/js/RestoreManager.js +++ /dev/null @@ -1,46 +0,0 @@ -/* eslint-disable - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. 
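// RedisManager above enumerates keys with SCAN (cursor/MATCH/COUNT) rather
// than KEYS, so the lookup never blocks the server, and it accumulates keys
// in an object because SCAN may return the same key more than once. The same
// loop in a compact async form (an ioredis client is assumed):

const Redis = require('ioredis')

async function scanKeys(client, pattern) {
  const seen = {} // object-as-set: SCAN can yield duplicates
  let cursor = '0'
  do {
    const [nextCursor, keys] = await client.scan(
      cursor,
      'MATCH',
      pattern,
      'COUNT',
      1000
    )
    for (const key of keys) seen[key] = true
    cursor = nextCursor
  } while (cursor !== '0') // redis signals completion with cursor '0'
  return Object.keys(seen)
}

// e.g. scanKeys(new Redis(), 'DocsWithHistoryOps:*')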
-/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -let RestoreManager -const DocumentUpdaterManager = require('./DocumentUpdaterManager') -const DiffManager = require('./DiffManager') -const logger = require('@overleaf/logger') - -module.exports = RestoreManager = { - restoreToBeforeVersion(projectId, docId, version, userId, callback) { - if (callback == null) { - callback = function () {} - } - logger.debug({ projectId, docId, version, userId }, 'restoring document') - return DiffManager.getDocumentBeforeVersion( - projectId, - docId, - version, - function (error, content) { - if (error != null) { - return callback(error) - } - return DocumentUpdaterManager.setDocument( - projectId, - docId, - content, - userId, - function (error) { - if (error != null) { - return callback(error) - } - return callback() - } - ) - } - ) - }, -} diff --git a/services/track-changes/app/js/UpdateCompressor.js b/services/track-changes/app/js/UpdateCompressor.js deleted file mode 100644 index 5a530bc6cf..0000000000 --- a/services/track-changes/app/js/UpdateCompressor.js +++ /dev/null @@ -1,326 +0,0 @@ -/* eslint-disable - new-cap, - no-throw-literal, - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS101: Remove unnecessary use of Array.from - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -let oneMinute, twoMegabytes, UpdateCompressor -const strInject = (s1, pos, s2) => s1.slice(0, pos) + s2 + s1.slice(pos) -const strRemove = (s1, pos, length) => s1.slice(0, pos) + s1.slice(pos + length) - -const { diff_match_patch: diffMatchPatch } = require('../lib/diff_match_patch') -const dmp = new diffMatchPatch() - -module.exports = UpdateCompressor = { - NOOP: 'noop', - - // Updates come from the doc updater in format - // { - // op: [ { ... op1 ... }, { ... op2 ... } ] - // meta: { ts: ..., user_id: ... } - // } - // but it's easier to work with one op per update, so convert these updates to - // our compressed format - // [{ - // op: op1 - // meta: { start_ts: ... , end_ts: ..., user_id: ... } - // }, { - // op: op2 - // meta: { start_ts: ... , end_ts: ..., user_id: ... } - // }] - convertToSingleOpUpdates(updates) { - const splitUpdates = [] - for (const update of Array.from(updates)) { - // Reject any non-insert or delete ops, i.e.
comments - const ops = update.op.filter(o => o.i != null || o.d != null) - if (ops.length === 0) { - splitUpdates.push({ - op: UpdateCompressor.NOOP, - meta: { - start_ts: update.meta.start_ts || update.meta.ts, - end_ts: update.meta.end_ts || update.meta.ts, - user_id: update.meta.user_id, - }, - v: update.v, - }) - } else { - for (const op of Array.from(ops)) { - splitUpdates.push({ - op, - meta: { - start_ts: update.meta.start_ts || update.meta.ts, - end_ts: update.meta.end_ts || update.meta.ts, - user_id: update.meta.user_id, - }, - v: update.v, - }) - } - } - } - return splitUpdates - }, - - concatUpdatesWithSameVersion(updates) { - const concattedUpdates = [] - for (const update of Array.from(updates)) { - const lastUpdate = concattedUpdates[concattedUpdates.length - 1] - if (lastUpdate != null && lastUpdate.v === update.v) { - if (update.op !== UpdateCompressor.NOOP) { - lastUpdate.op.push(update.op) - } - } else { - const nextUpdate = { - op: [], - meta: update.meta, - v: update.v, - } - if (update.op !== UpdateCompressor.NOOP) { - nextUpdate.op.push(update.op) - } - concattedUpdates.push(nextUpdate) - } - } - return concattedUpdates - }, - - compressRawUpdates(lastPreviousUpdate, rawUpdates) { - if (lastPreviousUpdate?.op?.length > 1) { - // if the last previous update was an array op, don't compress onto it. - // This avoids cases where array length changes but version number doesn't - return [lastPreviousUpdate].concat( - UpdateCompressor.compressRawUpdates(null, rawUpdates) - ) - } - if (lastPreviousUpdate != null) { - rawUpdates = [lastPreviousUpdate].concat(rawUpdates) - } - let updates = UpdateCompressor.convertToSingleOpUpdates(rawUpdates) - updates = UpdateCompressor.compressUpdates(updates) - return UpdateCompressor.concatUpdatesWithSameVersion(updates) - }, - - compressUpdates(updates) { - if (updates.length === 0) { - return [] - } - - let compressedUpdates = [updates.shift()] - for (const update of Array.from(updates)) { - const lastCompressedUpdate = compressedUpdates.pop() - if (lastCompressedUpdate != null) { - compressedUpdates = compressedUpdates.concat( - UpdateCompressor._concatTwoUpdates(lastCompressedUpdate, update) - ) - } else { - compressedUpdates.push(update) - } - } - - return compressedUpdates - }, - - MAX_TIME_BETWEEN_UPDATES: (oneMinute = 60 * 1000), - MAX_UPDATE_SIZE: (twoMegabytes = 2 * 1024 * 1024), - - _concatTwoUpdates(firstUpdate, secondUpdate) { - let offset - firstUpdate = { - op: firstUpdate.op, - meta: { - user_id: firstUpdate.meta.user_id || null, - start_ts: firstUpdate.meta.start_ts || firstUpdate.meta.ts, - end_ts: firstUpdate.meta.end_ts || firstUpdate.meta.ts, - }, - v: firstUpdate.v, - } - secondUpdate = { - op: secondUpdate.op, - meta: { - user_id: secondUpdate.meta.user_id || null, - start_ts: secondUpdate.meta.start_ts || secondUpdate.meta.ts, - end_ts: secondUpdate.meta.end_ts || secondUpdate.meta.ts, - }, - v: secondUpdate.v, - } - - if (firstUpdate.meta.user_id !== secondUpdate.meta.user_id) { - return [firstUpdate, secondUpdate] - } - - if ( - secondUpdate.meta.start_ts - firstUpdate.meta.end_ts > - UpdateCompressor.MAX_TIME_BETWEEN_UPDATES - ) { - return [firstUpdate, secondUpdate] - } - - const firstOp = firstUpdate.op - const secondOp = secondUpdate.op - - const firstSize = - (firstOp.i != null ? firstOp.i.length : undefined) || - (firstOp.d != null ? firstOp.d.length : undefined) - const secondSize = - (secondOp.i != null ? secondOp.i.length : undefined) || - (secondOp.d != null ?
secondOp.d.length : undefined) - - // Two inserts - if ( - firstOp.i != null && - secondOp.i != null && - firstOp.p <= secondOp.p && - secondOp.p <= firstOp.p + firstOp.i.length && - firstSize + secondSize < UpdateCompressor.MAX_UPDATE_SIZE - ) { - return [ - { - meta: { - start_ts: firstUpdate.meta.start_ts, - end_ts: secondUpdate.meta.end_ts, - user_id: firstUpdate.meta.user_id, - }, - op: { - p: firstOp.p, - i: strInject(firstOp.i, secondOp.p - firstOp.p, secondOp.i), - }, - v: secondUpdate.v, - }, - ] - // Two deletes - } else if ( - firstOp.d != null && - secondOp.d != null && - secondOp.p <= firstOp.p && - firstOp.p <= secondOp.p + secondOp.d.length && - firstSize + secondSize < UpdateCompressor.MAX_UPDATE_SIZE - ) { - return [ - { - meta: { - start_ts: firstUpdate.meta.start_ts, - end_ts: secondUpdate.meta.end_ts, - user_id: firstUpdate.meta.user_id, - }, - op: { - p: secondOp.p, - d: strInject(secondOp.d, firstOp.p - secondOp.p, firstOp.d), - }, - v: secondUpdate.v, - }, - ] - // An insert and then a delete - } else if ( - firstOp.i != null && - secondOp.d != null && - firstOp.p <= secondOp.p && - secondOp.p <= firstOp.p + firstOp.i.length - ) { - offset = secondOp.p - firstOp.p - const insertedText = firstOp.i.slice(offset, offset + secondOp.d.length) - // Only trim the insert when the delete is fully contained within it - if (insertedText === secondOp.d) { - const insert = strRemove(firstOp.i, offset, secondOp.d.length) - return [ - { - meta: { - start_ts: firstUpdate.meta.start_ts, - end_ts: secondUpdate.meta.end_ts, - user_id: firstUpdate.meta.user_id, - }, - op: { - p: firstOp.p, - i: insert, - }, - v: secondUpdate.v, - }, - ] - } else { - // This will only happen if the delete extends outside the insert - return [firstUpdate, secondUpdate] - } - - // A delete then an insert at the same place, likely a copy-paste of a chunk of content - } else if ( - firstOp.d != null && - secondOp.i != null && - firstOp.p === secondOp.p - ) { - offset = firstOp.p - const diffOps = this.diffAsShareJsOps(firstOp.d, secondOp.i) - if (diffOps.length === 0) { - return [ - { - // Noop - meta: { - start_ts: firstUpdate.meta.start_ts, - end_ts: secondUpdate.meta.end_ts, - user_id: firstUpdate.meta.user_id, - }, - op: { - p: firstOp.p, - i: '', - }, - v: secondUpdate.v, - }, - ] - } else { - return diffOps.map(function (op) { - op.p += offset - return { - meta: { - start_ts: firstUpdate.meta.start_ts, - end_ts: secondUpdate.meta.end_ts, - user_id: firstUpdate.meta.user_id, - }, - op, - v: secondUpdate.v, - } - }) - } - } else { - return [firstUpdate, secondUpdate] - } - }, - - ADDED: 1, - REMOVED: -1, - UNCHANGED: 0, - diffAsShareJsOps(before, after, callback) { - if (callback == null) { - callback = function () {} - } - const diffs = dmp.diff_main(before, after) - dmp.diff_cleanupSemantic(diffs) - - const ops = [] - let position = 0 - for (const diff of Array.from(diffs)) { - const type = diff[0] - const content = diff[1] - if (type === this.ADDED) { - ops.push({ - i: content, - p: position, - }) - position += content.length - } else if (type === this.REMOVED) { - ops.push({ - d: content, - p: position, - }) - } else if (type === this.UNCHANGED) { - position += content.length - } else { - throw 'Unknown type' - } - } - return ops - }, -} diff --git a/services/track-changes/app/js/UpdateTrimmer.js b/services/track-changes/app/js/UpdateTrimmer.js deleted file mode 100644 index a97aad689b..0000000000 --- a/services/track-changes/app/js/UpdateTrimmer.js +++ /dev/null @@ -1,66 +0,0 @@ -/*
eslint-disable - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -let UpdateTrimmer -const MongoManager = require('./MongoManager') -const WebApiManager = require('./WebApiManager') -const logger = require('@overleaf/logger') - -module.exports = UpdateTrimmer = { - shouldTrimUpdates(projectId, callback) { - if (callback == null) { - callback = function () {} - } - return MongoManager.getProjectMetaData( - projectId, - function (error, metadata) { - if (error != null) { - return callback(error) - } - if (metadata != null ? metadata.preserveHistory : undefined) { - return callback(null, false) - } else { - return WebApiManager.getProjectDetails( - projectId, - function (error, details) { - if (error != null) { - return callback(error) - } - logger.debug({ projectId, details }, 'got details') - if (details?.features?.versioning) { - return MongoManager.setProjectMetaData( - projectId, - { preserveHistory: true }, - function (error) { - if (error != null) { - return callback(error) - } - return MongoManager.upgradeHistory( - projectId, - function (error) { - if (error != null) { - return callback(error) - } - return callback(null, false) - } - ) - } - ) - } else { - return callback(null, true) - } - } - ) - } - } - ) - }, -} diff --git a/services/track-changes/app/js/UpdatesManager.js b/services/track-changes/app/js/UpdatesManager.js deleted file mode 100644 index 9fae4375b9..0000000000 --- a/services/track-changes/app/js/UpdatesManager.js +++ /dev/null @@ -1,883 +0,0 @@ -/* eslint-disable - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS101: Remove unnecessary use of Array.from - * DS102: Remove unnecessary code created because of implicit returns - * DS103: Rewrite code to no longer use __guard__ - * DS205: Consider reworking code to avoid use of IIFEs - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -let fiveMinutes, UpdatesManager -const MongoManager = require('./MongoManager') -const PackManager = require('./PackManager') -const RedisManager = require('./RedisManager') -const UpdateCompressor = require('./UpdateCompressor') -const LockManager = require('./LockManager') -const WebApiManager = require('./WebApiManager') -const UpdateTrimmer = require('./UpdateTrimmer') -const logger = require('@overleaf/logger') -const async = require('async') -const _ = require('underscore') -const Settings = require('@overleaf/settings') -const keys = Settings.redis.lock.key_schema -const util = require('util') - -module.exports = UpdatesManager = { - compressAndSaveRawUpdates(projectId, docId, rawUpdates, temporary, callback) { - let i - if (callback == null) { - callback = function () {} - } - const { length } = rawUpdates - if (length === 0) { - return callback() - } - - // check that ops are in the correct order - for (i = 0; i < rawUpdates.length; i++) { - const op = rawUpdates[i] - if (i > 0) { - const thisVersion = op != null ? 
op.v : undefined - const prevVersion = rawUpdates[i - 1]?.v - if (!(prevVersion < thisVersion)) { - logger.error( - { - projectId, - docId, - rawUpdates, - temporary, - thisVersion, - prevVersion, - }, - 'op versions out of order' - ) - } - } - } - - // FIXME: we no longer need the lastCompressedUpdate, so change functions not to need it - // CORRECTION: we do use it to log the time in case of error - return MongoManager.peekLastCompressedUpdate( - docId, - function (error, lastCompressedUpdate, lastVersion) { - // lastCompressedUpdate is the most recent update in Mongo, and - // lastVersion is its sharejs version number. - // - // The peekLastCompressedUpdate method may pass the update back - // as 'null' (for example if the previous compressed update has - // been archived). In this case it can still pass back the - // lastVersion from the update to allow us to check consistency. - let op - if (error != null) { - return callback(error) - } - - // Ensure that raw updates start where lastVersion left off - if (lastVersion != null) { - const discardedUpdates = [] - rawUpdates = rawUpdates.slice(0) - while (rawUpdates[0] != null && rawUpdates[0].v <= lastVersion) { - discardedUpdates.push(rawUpdates.shift()) - } - if (discardedUpdates.length) { - logger.error( - { projectId, docId, discardedUpdates, temporary, lastVersion }, - 'discarded updates already present' - ) - } - - if (rawUpdates[0] != null && rawUpdates[0].v !== lastVersion + 1) { - const ts = lastCompressedUpdate?.meta?.end_ts - const lastTimestamp = ts != null ? new Date(ts) : 'unknown time' - error = new Error( - `Tried to apply raw op at version ${rawUpdates[0].v} to last compressed update with version ${lastVersion} from ${lastTimestamp}` - ) - logger.error( - { - err: error, - docId, - projectId, - prevEndTs: ts, - temporary, - lastCompressedUpdate, - }, - 'inconsistent doc versions' - ) - if ( - (Settings.trackchanges != null - ? Settings.trackchanges.continueOnError - : undefined) && - rawUpdates[0].v > lastVersion + 1 - ) { - // we have lost some ops - continue to write into the database, we can't recover at this point - lastCompressedUpdate = null - } else { - return callback(error) - } - } - } - - if (rawUpdates.length === 0) { - return callback() - } - - // some old large ops in redis need to be rejected, they predate - // the size limit that now prevents them going through the system - const REJECT_LARGE_OP_SIZE = 4 * 1024 * 1024 - for (const rawUpdate of Array.from(rawUpdates)) { - const opSizes = (() => { - const result = [] - for (op of Array.from( - (rawUpdate != null ? rawUpdate.op : undefined) || [] - )) { - result.push( - (op.i != null ? op.i.length : undefined) || - (op.d != null ? op.d.length : undefined) - ) - } - return result - })() - const size = _.max(opSizes) - if (size > REJECT_LARGE_OP_SIZE) { - error = new Error( - `dropped op exceeding maximum allowed size of ${REJECT_LARGE_OP_SIZE}` - ) - logger.error( - { err: error, docId, projectId, size, rawUpdate }, - 'dropped op - too big' - ) - rawUpdate.op = [] - } - } - - const compressedUpdates = UpdateCompressor.compressRawUpdates( - null, - rawUpdates - ) - return PackManager.insertCompressedUpdates( - projectId, - docId, - lastCompressedUpdate, - compressedUpdates, - temporary, - function (error, result) { - if (error != null) { - return callback(error) - } - if (result != null) { - logger.debug( - { - projectId, - docId, - origV: - lastCompressedUpdate != null - ? 
lastCompressedUpdate.v - : undefined, - newV: result.v, - }, - 'inserted updates into pack' - ) - } - return callback() - } - ) - } - ) - }, - - // Check whether the updates are temporary (per-project property) - _prepareProjectForUpdates(projectId, callback) { - if (callback == null) { - callback = function () {} - } - return UpdateTrimmer.shouldTrimUpdates( - projectId, - function (error, temporary) { - if (error != null) { - return callback(error) - } - return callback(null, temporary) - } - ) - }, - - // Check for project id on document history (per-document property) - _prepareDocForUpdates(projectId, docId, callback) { - if (callback == null) { - callback = function () {} - } - return MongoManager.backportProjectId(projectId, docId, function (error) { - if (error != null) { - return callback(error) - } - return callback(null) - }) - }, - - // Apply updates for specific project/doc after preparing at project and doc level - REDIS_READ_BATCH_SIZE: 100, - processUncompressedUpdates(projectId, docId, temporary, callback) { - // get the updates as strings from redis (so we can delete them after they are applied) - if (callback == null) { - callback = function () {} - } - return RedisManager.getOldestDocUpdates( - docId, - UpdatesManager.REDIS_READ_BATCH_SIZE, - function (error, docUpdates) { - if (error != null) { - return callback(error) - } - const { length } = docUpdates - // parse the redis strings into ShareJs updates - return RedisManager.expandDocUpdates( - docUpdates, - function (error, rawUpdates) { - if (error != null) { - logger.err( - { projectId, docId, docUpdates }, - 'failed to parse docUpdates' - ) - return callback(error) - } - logger.debug( - { projectId, docId, rawUpdates }, - 'retrieved raw updates from redis' - ) - return UpdatesManager.compressAndSaveRawUpdates( - projectId, - docId, - rawUpdates, - temporary, - function (error) { - if (error != null) { - return callback(error) - } - logger.debug( - { projectId, docId }, - 'compressed and saved doc updates' - ) - // delete the applied updates from redis - return RedisManager.deleteAppliedDocUpdates( - projectId, - docId, - docUpdates, - function (error) { - if (error != null) { - return callback(error) - } - if (length === UpdatesManager.REDIS_READ_BATCH_SIZE) { - // There might be more updates - logger.debug( - { projectId, docId }, - 'continuing processing updates' - ) - return setTimeout( - () => - UpdatesManager.processUncompressedUpdates( - projectId, - docId, - temporary, - callback - ), - 0 - ) - } else { - logger.debug( - { projectId, docId }, - 'all raw updates processed' - ) - return callback() - } - } - ) - } - ) - } - ) - } - ) - }, - - // Process updates for a doc when we flush it individually - processUncompressedUpdatesWithLock(projectId, docId, callback) { - if (callback == null) { - callback = function () {} - } - return UpdatesManager._prepareProjectForUpdates( - projectId, - function (error, temporary) { - if (error != null) { - return callback(error) - } - return UpdatesManager._processUncompressedUpdatesForDocWithLock( - projectId, - docId, - temporary, - callback - ) - } - ) - }, - - // Process updates for a doc when the whole project is flushed (internal method) - _processUncompressedUpdatesForDocWithLock( - projectId, - docId, - temporary, - callback - ) { - if (callback == null) { - callback = function () {} - } - return UpdatesManager._prepareDocForUpdates( - projectId, - docId, - function (error) { - if (error != null) { - return callback(error) - } - return LockManager.runWithLock( - 
keys.historyLock({ doc_id: docId }), - releaseLock => - UpdatesManager.processUncompressedUpdates( - projectId, - docId, - temporary, - releaseLock - ), - callback - ) - } - ) - }, - - // Process all updates for a project, only check project-level information once - processUncompressedUpdatesForProject(projectId, callback) { - if (callback == null) { - callback = function () {} - } - return RedisManager.getDocIdsWithHistoryOps( - projectId, - function (error, docIds) { - if (error != null) { - return callback(error) - } - return UpdatesManager._prepareProjectForUpdates( - projectId, - function (error, temporary) { - if (error) return callback(error) - const jobs = [] - for (const docId of Array.from(docIds)) { - ;(docId => - jobs.push(cb => - UpdatesManager._processUncompressedUpdatesForDocWithLock( - projectId, - docId, - temporary, - cb - ) - ))(docId) - } - return async.parallelLimit(jobs, 5, callback) - } - ) - } - ) - }, - - // flush all outstanding changes - flushAll(limit, callback) { - if (callback == null) { - callback = function () {} - } - return RedisManager.getProjectIdsWithHistoryOps(function ( - error, - projectIds - ) { - let projectId - if (error != null) { - return callback(error) - } - logger.debug( - { - count: projectIds != null ? projectIds.length : undefined, - projectIds, - }, - 'found projects' - ) - const jobs = [] - projectIds = _.shuffle(projectIds) // randomise to avoid hitting same projects each time - const selectedProjects = - limit < 0 ? projectIds : projectIds.slice(0, limit) - for (projectId of Array.from(selectedProjects)) { - ;(projectId => - jobs.push(cb => - UpdatesManager.processUncompressedUpdatesForProject( - projectId, - err => cb(null, { failed: err != null, project_id: projectId }) - ) - ))(projectId) - } - return async.series(jobs, function (error, result) { - let x - if (error != null) { - return callback(error) - } - const failedProjects = (() => { - const result1 = [] - for (x of Array.from(result)) { - if (x.failed) { - result1.push(x.project_id) - } - } - return result1 - })() - const succeededProjects = (() => { - const result2 = [] - for (x of Array.from(result)) { - if (!x.failed) { - result2.push(x.project_id) - } - } - return result2 - })() - return callback(null, { - failed: failedProjects, - succeeded: succeededProjects, - all: projectIds, - }) - }) - }) - }, - - getDanglingUpdates(callback) { - if (callback == null) { - callback = function () {} - } - return RedisManager.getAllDocIdsWithHistoryOps(function (error, allDocIds) { - if (error != null) { - return callback(error) - } - return RedisManager.getProjectIdsWithHistoryOps(function ( - error, - allProjectIds - ) { - if (error != null) { - return callback(error) - } - // function to get doc_ids for each project - const task = cb => - async.concatSeries( - allProjectIds, - RedisManager.getDocIdsWithHistoryOps, - cb - ) - // find the dangling doc ids - return task(function (error, projectDocIds) { - if (error) return callback(error) - const danglingDocIds = _.difference(allDocIds, projectDocIds) - logger.debug( - { allDocIds, allProjectIds, projectDocIds, danglingDocIds }, - 'checking for dangling doc ids' - ) - return callback(null, danglingDocIds) - }) - }) - }) - }, - - getDocUpdates(projectId, docId, options, callback) { - if (options == null) { - options = {} - } - if (callback == null) { - callback = function () {} - } - return UpdatesManager.processUncompressedUpdatesWithLock( - projectId, - docId, - function (error) { - if (error != null) { - return callback(error) - } - 
// console.log "options", options - return PackManager.getOpsByVersionRange( - projectId, - docId, - options.from, - options.to, - function (error, updates) { - if (error != null) { - return callback(error) - } - return callback(null, updates) - } - ) - } - ) - }, - - getDocUpdatesWithUserInfo(projectId, docId, options, callback) { - if (options == null) { - options = {} - } - if (callback == null) { - callback = function () {} - } - return UpdatesManager.getDocUpdates( - projectId, - docId, - options, - function (error, updates) { - if (error != null) { - return callback(error) - } - return UpdatesManager.fillUserInfo(updates, function (error, updates) { - if (error != null) { - return callback(error) - } - return callback(null, updates) - }) - } - ) - }, - - getSummarizedProjectUpdates(projectId, options, callback) { - if (options == null) { - options = {} - } - if (callback == null) { - callback = function () {} - } - if (!options.min_count) { - options.min_count = 25 - } - let summarizedUpdates = [] - const { before } = options - let nextBeforeTimestamp = null - return UpdatesManager.processUncompressedUpdatesForProject( - projectId, - function (error) { - if (error != null) { - return callback(error) - } - return PackManager.makeProjectIterator( - projectId, - before, - function (err, iterator) { - if (err != null) { - return callback(err) - } - // repeatedly get updates and pass them through the summariser to get an final output with user info - return async.whilst( - cb => - // console.log "checking iterator.done", iterator.done() - cb( - null, - summarizedUpdates.length < options.min_count && - !iterator.done() - ), - cb => - iterator.next(function (err, partialUpdates) { - if (err != null) { - return callback(err) - } - // logger.log {partialUpdates}, 'got partialUpdates' - if (partialUpdates.length === 0) { - return cb() - } // # FIXME should try to avoid this happening - nextBeforeTimestamp = - partialUpdates[partialUpdates.length - 1].meta.end_ts - // add the updates to the summary list - summarizedUpdates = UpdatesManager._summarizeUpdates( - partialUpdates, - summarizedUpdates - ) - return cb() - }), - - () => - // finally done all updates - // console.log 'summarized Updates', summarizedUpdates - UpdatesManager.fillSummarizedUserInfo( - summarizedUpdates, - function (err, results) { - if (err != null) { - return callback(err) - } - return callback( - null, - results, - !iterator.done() ? nextBeforeTimestamp : undefined - ) - } - ) - ) - } - ) - } - ) - }, - - exportProject(projectId, consumer) { - // Flush anything before collecting updates. - UpdatesManager.processUncompressedUpdatesForProject(projectId, err => { - if (err) return consumer(err) - - // Fetch all the packs. - const before = undefined - PackManager.makeProjectIterator(projectId, before, (err, iterator) => { - if (err) return consumer(err) - - const accumulatedUserIds = new Set() - - async.whilst( - cb => cb(null, !iterator.done()), - - cb => - iterator.next((err, updatesFromASinglePack) => { - if (err) return cb(err) - - if (updatesFromASinglePack.length === 0) { - // This should not happen when `iterator.done() == false`. - // Emitting an empty array would signal the consumer the final - // call. - return cb() - } - updatesFromASinglePack.forEach(update => { - accumulatedUserIds.add( - // Super defensive access on update details. - String(update && update.meta && update.meta.user_id) - ) - }) - // Emit updates and wait for the consumer. 
- consumer(null, { updates: updatesFromASinglePack }, cb) - }), - - err => { - if (err) return consumer(err) - - // Adding undefined can happen for broken updates. - accumulatedUserIds.delete('undefined') - - consumer(null, { - updates: [], - userIds: Array.from(accumulatedUserIds).sort(), - }) - } - ) - }) - }) - }, - - fetchUserInfo(users, callback) { - if (callback == null) { - callback = function () {} - } - const jobs = [] - const fetchedUserInfo = {} - for (const userId in users) { - ;(userId => - jobs.push(callback => - WebApiManager.getUserInfo(userId, function (error, userInfo) { - if (error != null) { - return callback(error) - } - fetchedUserInfo[userId] = userInfo - return callback() - }) - ))(userId) - } - - return async.series(jobs, function (err) { - if (err != null) { - return callback(err) - } - return callback(null, fetchedUserInfo) - }) - }, - - fillUserInfo(updates, callback) { - let update, userId - if (callback == null) { - callback = function () {} - } - const users = {} - for (update of Array.from(updates)) { - ;({ user_id: userId } = update.meta) - if (UpdatesManager._validUserId(userId)) { - users[userId] = true - } - } - - return UpdatesManager.fetchUserInfo( - users, - function (error, fetchedUserInfo) { - if (error != null) { - return callback(error) - } - for (update of Array.from(updates)) { - ;({ user_id: userId } = update.meta) - delete update.meta.user_id - if (UpdatesManager._validUserId(userId)) { - update.meta.user = fetchedUserInfo[userId] - } - } - return callback(null, updates) - } - ) - }, - - fillSummarizedUserInfo(updates, callback) { - let update, userId, userIds - if (callback == null) { - callback = function () {} - } - const users = {} - for (update of Array.from(updates)) { - userIds = update.meta.user_ids || [] - for (userId of Array.from(userIds)) { - if (UpdatesManager._validUserId(userId)) { - users[userId] = true - } - } - } - - return UpdatesManager.fetchUserInfo( - users, - function (error, fetchedUserInfo) { - if (error != null) { - return callback(error) - } - for (update of Array.from(updates)) { - userIds = update.meta.user_ids || [] - update.meta.users = [] - delete update.meta.user_ids - for (userId of Array.from(userIds)) { - if (UpdatesManager._validUserId(userId)) { - update.meta.users.push(fetchedUserInfo[userId]) - } else { - update.meta.users.push(null) - } - } - } - return callback(null, updates) - } - ) - }, - - _validUserId(userId) { - if (userId == null) { - return false - } else { - return !!userId.match(/^[a-f0-9]{24}$/) - } - }, - - TIME_BETWEEN_DISTINCT_UPDATES: (fiveMinutes = 5 * 60 * 1000), - SPLIT_ON_DELETE_SIZE: 16, // characters - _summarizeUpdates(updates, existingSummarizedUpdates) { - if (existingSummarizedUpdates == null) { - existingSummarizedUpdates = [] - } - const summarizedUpdates = existingSummarizedUpdates.slice() - let previousUpdateWasBigDelete = false - for (const update of Array.from(updates)) { - let docId - const earliestUpdate = summarizedUpdates[summarizedUpdates.length - 1] - let shouldConcat = false - - // If a user inserts some text, then deletes a big chunk including that text, - // the update we show might concat the insert and delete, and there will be no sign - // of that insert having happened, or be able to restore to it (restoring after a big delete is common). - // So, we split the summary on 'big' deletes. 
However, we're stepping backwards in time with - most recent changes considered first, so if this update is a big delete, we want to start - a new summarized update next time, hence we monitor the previous update. - if (previousUpdateWasBigDelete) { - shouldConcat = false - } else if ( - earliestUpdate && - earliestUpdate.meta.end_ts - update.meta.start_ts < - this.TIME_BETWEEN_DISTINCT_UPDATES - ) { - // We're going backwards in time through the updates, so only combine if this update starts less than 5 minutes before - // the end of current summarized block, so no block spans more than 5 minutes. - shouldConcat = true - } - - let isBigDelete = false - for (const op of Array.from(update.op || [])) { - if (op.d != null && op.d.length > this.SPLIT_ON_DELETE_SIZE) { - isBigDelete = true - } - } - - previousUpdateWasBigDelete = isBigDelete - - if (shouldConcat) { - // check if the user in this update is already present in the earliest update, - // if not, add them to the users list of the earliest update - earliestUpdate.meta.user_ids = _.union(earliestUpdate.meta.user_ids, [ - update.meta.user_id, - ]) - - docId = update.doc_id.toString() - const doc = earliestUpdate.docs[docId] - if (doc != null) { - doc.fromV = Math.min(doc.fromV, update.v) - doc.toV = Math.max(doc.toV, update.v) - } else { - earliestUpdate.docs[docId] = { - fromV: update.v, - toV: update.v, - } - } - - earliestUpdate.meta.start_ts = Math.min( - earliestUpdate.meta.start_ts, - update.meta.start_ts - ) - earliestUpdate.meta.end_ts = Math.max( - earliestUpdate.meta.end_ts, - update.meta.end_ts - ) - } else { - const newUpdate = { - meta: { - user_ids: [], - start_ts: update.meta.start_ts, - end_ts: update.meta.end_ts, - }, - docs: {}, - } - - newUpdate.docs[update.doc_id.toString()] = { - fromV: update.v, - toV: update.v, - } - newUpdate.meta.user_ids.push(update.meta.user_id) - summarizedUpdates.push(newUpdate) - } - } - - return summarizedUpdates - }, -} - -module.exports.promises = { - processUncompressedUpdatesForProject: util.promisify( - UpdatesManager.processUncompressedUpdatesForProject - ), -} - -function __guard__(value, transform) { - return typeof value !== 'undefined' && value !== null - ? transform(value) - : undefined -} diff --git a/services/track-changes/app/js/WebApiManager.js b/services/track-changes/app/js/WebApiManager.js deleted file mode 100644 index b98f0285b2..0000000000 --- a/services/track-changes/app/js/WebApiManager.js +++ /dev/null @@ -1,112 +0,0 @@ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -let WebApiManager -const request = require('requestretry') // allow retry on error https://github.com/FGRibreau/node-request-retry -const logger = require('@overleaf/logger') -const Settings = require('@overleaf/settings') - -// Don't let HTTP calls hang for a long time -const MAX_HTTP_REQUEST_LENGTH = 15000 // 15 seconds - -// DEPRECATED! This method of getting user details via track-changes is deprecated -// in the way we lay out our services. -// Instead, web should be responsible for collecting the raw data (user_ids) and -// filling it out with calls to other services. All API calls should create a -// tree-like structure as much as possible, with web as the root.
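It is worth pinning down the shape that _summarizeUpdates above produces, since fillSummarizedUserInfo consumes it and the user_ids it accumulates are exactly what WebApiManager below resolves into user records. A sketch with illustrative values (ids and timestamps are invented for the example):

// One summarized block: updates within TIME_BETWEEN_DISTINCT_UPDATES (five
// minutes) of each other are folded into a single entry that may span
// several docs; fromV/toV give the version range covered per doc.
const exampleSummarizedUpdate = {
  meta: {
    user_ids: ['57fd0b1f53a8396d22b2c24b'], // later replaced by a users array
    start_ts: 1696400000000, // earliest start_ts folded into the block
    end_ts: 1696400120000, // latest end_ts folded into the block
  },
  docs: {
    '507f1f77bcf86cd799439011': { fromV: 10, toV: 14 },
  },
}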
-module.exports = WebApiManager = { - sendRequest(url, callback) { - if (callback == null) { - callback = function () {} - } - return request.get( - { - url: `${Settings.apis.web.url}${url}`, - timeout: MAX_HTTP_REQUEST_LENGTH, - maxAttempts: 2, // for node-request-retry - auth: { - user: Settings.apis.web.user, - pass: Settings.apis.web.pass, - sendImmediately: true, - }, - }, - function (error, res, body) { - if (error != null) { - return callback(error) - } - if (res.statusCode === 404) { - logger.debug({ url }, 'got 404 from web api') - return callback(null, null) - } - if (res.statusCode >= 200 && res.statusCode < 300) { - return callback(null, body) - } else { - error = new Error( - `web returned a non-success status code: ${res.statusCode} (attempts: ${res.attempts})` - ) - return callback(error) - } - } - ) - }, - - getUserInfo(userId, callback) { - if (callback == null) { - callback = function () {} - } - const url = `/user/${userId}/personal_info` - logger.debug({ userId }, 'getting user info from web') - return WebApiManager.sendRequest(url, function (error, body) { - let user - if (error != null) { - logger.error({ err: error, userId, url }, 'error accessing web') - return callback(error) - } - - if (body === null) { - logger.error({ userId, url }, 'no user found') - return callback(null, null) - } - try { - user = JSON.parse(body) - } catch (error1) { - error = error1 - return callback(error) - } - return callback(null, { - id: user.id, - email: user.email, - first_name: user.first_name, - last_name: user.last_name, - }) - }) - }, - - getProjectDetails(projectId, callback) { - if (callback == null) { - callback = function () {} - } - const url = `/project/${projectId}/details` - logger.debug({ projectId }, 'getting project details from web') - return WebApiManager.sendRequest(url, function (error, body) { - let project - if (error != null) { - logger.error({ err: error, projectId, url }, 'error accessing web') - return callback(error) - } - - try { - project = JSON.parse(body) - } catch (error1) { - error = error1 - return callback(error) - } - return callback(null, project) - }) - }, -} diff --git a/services/track-changes/app/js/ZipManager.js b/services/track-changes/app/js/ZipManager.js deleted file mode 100644 index d6a5d8419a..0000000000 --- a/services/track-changes/app/js/ZipManager.js +++ /dev/null @@ -1,203 +0,0 @@ -const logger = require('@overleaf/logger') -const UpdatesManager = require('./UpdatesManager') -const DiffGenerator = require('./DiffGenerator') -const DocumentUpdaterManager = require('./DocumentUpdaterManager') -const DocstoreManager = require('./DocstoreManager') -const Errors = require('./Errors') -const PackManager = require('./PackManager') -const yazl = require('yazl') -const util = require('util') -const stream = require('stream') -const fs = require('fs') -const os = require('os') -const Path = require('path') -const { packsAreDuplicated } = require('./util/PackUtils') - -const streamPipeline = util.promisify(stream.pipeline) - -// look in docstore or docupdater for the latest version of the document -async function getLatestContent(projectId, docId, lastUpdateVersion) { - const [docstoreContent, docstoreVersion] = - await DocstoreManager.promises.peekDocument(projectId, docId) - - // if docstore is out of date, check for a newer version in docupdater - // and return that instead - if (docstoreVersion <= lastUpdateVersion) { - const [docupdaterContent, docupdaterVersion] = - await DocumentUpdaterManager.promises.peekDocument(projectId, docId) - if 
(docupdaterVersion > docstoreVersion) { - return [docupdaterContent, docupdaterVersion] - } - } - - return [docstoreContent, docstoreVersion] -} - -async function rewindDoc(projectId, docId, zipfile) { - logger.debug({ projectId, docId }, 'rewinding document') - - // Prepare to rewind content - - const docIterator = await PackManager.promises.makeDocIterator(docId) - - const getUpdate = util.promisify(docIterator.next).bind(docIterator) - - const lastUpdate = await getUpdate() - if (!lastUpdate) { - return null - } - - const lastUpdateVersion = lastUpdate.v - - let latestContent - let version - try { - ;[latestContent, version] = await getLatestContent( - projectId, - docId, - lastUpdateVersion - ) - } catch (err) { - if (err instanceof Errors.NotFoundError) { - // Doc not found in docstore. We can't build its history - return null - } else { - throw err - } - } - - const id = docId.toString() - - const contentEndPath = `${id}/content/end/${version}` - zipfile.addBuffer(Buffer.from(latestContent), contentEndPath) - - const metadata = { - id, - version, - content: { - end: { - path: contentEndPath, - version, - }, - }, - updates: [], - } - - let content = latestContent - let v = version - let update = lastUpdate - let previousUpdate = null - - while (update) { - if (packsAreDuplicated(update, previousUpdate)) { - previousUpdate = update - update = await getUpdate() - continue - } - - const updatePath = `${id}/updates/${update.v}` - - try { - content = DiffGenerator.rewindUpdate(content, update) - // filter out any known "broken ops" as these may be recoverable - update.op = update.op.filter(op => !op.broken) - // only store the update in the zip file when we have applied it - // successfully, and after filtering out broken ops. - zipfile.addBuffer(Buffer.from(JSON.stringify(update)), updatePath, { - mtime: new Date(update.meta.start_ts), - }) - v = update.v - } catch (e) { - e.attempted_update = update // keep a record of the attempted update - logger.warn({ projectId, docId, err: e }, 'rewind error') - break // stop attempting to rewind on error - } - - metadata.updates.push({ - path: updatePath, - version: update.v, - ts: update.meta.start_ts, - doc_length: content.length, - }) - previousUpdate = update - update = await getUpdate() - } - - const contentStartPath = `${id}/content/start/${v}` - zipfile.addBuffer(Buffer.from(content), contentStartPath) - - metadata.content.start = { - path: contentStartPath, - version: v, - } - - return metadata -} - -async function generateZip(projectId, zipfile) { - await UpdatesManager.promises.processUncompressedUpdatesForProject(projectId) - const docIds = await PackManager.promises.findAllDocsInProject(projectId) - const manifest = { projectId, docs: [] } - for (const docId of docIds) { - const doc = await rewindDoc(projectId, docId, zipfile) - if (doc) { - manifest.docs.push(doc) - } - } - zipfile.addBuffer( - Buffer.from(JSON.stringify(manifest, null, 2)), - 'manifest.json' - ) - zipfile.addBuffer( - Buffer.from( - `History Migration Data - -This zip file contains a copy of the raw history for this project. - -If this file is present in a project it means the history could not -be fully recovered or migrated. 
- -A new history should have been created starting at the datestamp of -this file.` - ), - 'README.txt' - ) - zipfile.end() -} - -async function exportProject(projectId, zipPath) { - const zipfile = new yazl.ZipFile() - const pipeline = streamPipeline( - zipfile.outputStream, - fs.createWriteStream(zipPath) - ) - await generateZip(projectId, zipfile) - await pipeline -} - -/** - * Create a temporary directory for use with exportProject() - */ -async function makeTempDirectory() { - const tmpdir = await fs.promises.mkdtemp( - (await fs.promises.realpath(os.tmpdir())) + Path.sep - ) - return tmpdir -} - -/** - * Clean up a temporary directory made with makeTempDirectory() - */ -function cleanupTempDirectory(tmpdir) { - fs.promises.rm(tmpdir, { recursive: true, force: true }).catch(err => { - if (err) { - logger.warn({ err, tmpdir }, 'Failed to clean up temp directory') - } - }) -} - -module.exports = { - exportProject: util.callbackify(exportProject), - makeTempDirectory: util.callbackify(makeTempDirectory), - cleanupTempDirectory, -} diff --git a/services/track-changes/app/js/mongodb.js b/services/track-changes/app/js/mongodb.js deleted file mode 100644 index a345d5ce70..0000000000 --- a/services/track-changes/app/js/mongodb.js +++ /dev/null @@ -1,42 +0,0 @@ -const Settings = require('@overleaf/settings') -const { MongoClient, ObjectId } = require('mongodb') - -const clientPromise = MongoClient.connect( - Settings.mongo.url, - Settings.mongo.options -) - -let setupDbPromise -async function waitForDb() { - if (!setupDbPromise) { - setupDbPromise = setupDb() - } - await setupDbPromise -} - -const db = {} -async function setupDb() { - const internalDb = (await clientPromise).db() - - db.docHistory = internalDb.collection('docHistory') - db.docHistoryIndex = internalDb.collection('docHistoryIndex') - db.projectHistoryMetaData = internalDb.collection('projectHistoryMetaData') -} - -async function closeDb() { - let client - try { - client = await clientPromise - } catch (e) { - // there is nothing to close - return - } - return client.close() -} - -module.exports = { - db, - ObjectId, - closeDb, - waitForDb, -} diff --git a/services/track-changes/app/js/util/PackUtils.js b/services/track-changes/app/js/util/PackUtils.js deleted file mode 100644 index fd373094ce..0000000000 --- a/services/track-changes/app/js/util/PackUtils.js +++ /dev/null @@ -1,18 +0,0 @@ -const _ = require('lodash') - -/** - * Compares a deep equality of Packs excluding _id - */ -function packsAreDuplicated(pack1, pack2) { - return Boolean( - pack1 && - pack2 && - pack1.v === pack2.v && - _.isEqual(pack1.meta, pack2.meta) && - _.isEqual(pack1.op, pack2.op) - ) -} - -module.exports = { - packsAreDuplicated, -} diff --git a/services/track-changes/app/lib/diff_match_patch.js b/services/track-changes/app/lib/diff_match_patch.js deleted file mode 100644 index ebaaf5f32a..0000000000 --- a/services/track-changes/app/lib/diff_match_patch.js +++ /dev/null @@ -1,2340 +0,0 @@ -/* eslint-disable */ -/** - * Diff Match and Patch - * - * Copyright 2006 Google Inc. - * http://code.google.com/p/google-diff-match-patch/ - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/** - * @fileoverview Computes the difference between two texts to create a patch. - * Applies the patch onto another text, allowing for errors. - * @author fraser@google.com (Neil Fraser) - */ - -/** - * Class containing the diff, match and patch methods. - * @constructor - */ -function diff_match_patch() { - // Defaults. - // Redefine these in your program to override the defaults. - - // Number of seconds to map a diff before giving up (0 for infinity). - this.Diff_Timeout = 1.0 - // Cost of an empty edit operation in terms of edit characters. - this.Diff_EditCost = 4 - // At what point is no match declared (0.0 = perfection, 1.0 = very loose). - this.Match_Threshold = 0.5 - // How far to search for a match (0 = exact location, 1000+ = broad match). - // A match this many characters away from the expected location will add - // 1.0 to the score (0.0 is a perfect match). - this.Match_Distance = 1000 - // When deleting a large block of text (over ~64 characters), how close do - // the contents have to be to match the expected contents. (0.0 = perfection, - // 1.0 = very loose). Note that Match_Threshold controls how closely the - // end points of a delete need to match. - this.Patch_DeleteThreshold = 0.5 - // Chunk size for context length. - this.Patch_Margin = 4 - - // The number of bits in an int. - this.Match_MaxBits = 32 -} - -// DIFF FUNCTIONS - -/** - * The data structure representing a diff is an array of tuples: - * [[DIFF_DELETE, 'Hello'], [DIFF_INSERT, 'Goodbye'], [DIFF_EQUAL, ' world.']] - * which means: delete 'Hello', add 'Goodbye' and keep ' world.' - */ -var DIFF_DELETE = -1 -var DIFF_INSERT = 1 -var DIFF_EQUAL = 0 - -/** @typedef {{0: number, 1: string}} */ -diff_match_patch.Diff - -/** - * Find the differences between two texts. Simplifies the problem by stripping - * any common prefix or suffix off the texts before diffing. - * @param {string} text1 Old string to be diffed. - * @param {string} text2 New string to be diffed. - * @param {boolean=} opt_checklines Optional speedup flag. If present and false, - * then don't run a line-level diff first to identify the changed areas. - * Defaults to true, which does a faster, slightly less optimal diff. - * @param {number} opt_deadline Optional time when the diff should be complete - * by. Used internally for recursive calls. Users should set DiffTimeout - * instead. - * @return {!Array.} Array of diff tuples. - */ -diff_match_patch.prototype.diff_main = function ( - text1, - text2, - opt_checklines, - opt_deadline -) { - // Set a deadline by which time the diff must be complete. - if (typeof opt_deadline === 'undefined') { - if (this.Diff_Timeout <= 0) { - opt_deadline = Number.MAX_VALUE - } else { - opt_deadline = new Date().getTime() + this.Diff_Timeout * 1000 - } - } - var deadline = opt_deadline - - // Check for null inputs. - if (text1 == null || text2 == null) { - throw new Error('Null input. (diff_main)') - } - - // Check for equality (speedup). 
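This vendored library is large, so a quick usage sketch may help orient readers before the internals: diff_main returns the tuple array documented above, and diff_cleanupSemantic (the pass UpdateCompressor.diffAsShareJsOps relies on) merges cosmetically noisy fragments. Values here are illustrative:

// Sketch only: typical use of this API.
var dmp = new diff_match_patch()
var diffs = dmp.diff_main('the cat sat', 'the big cat sat')
dmp.diff_cleanupSemantic(diffs)
// diffs is now [[DIFF_EQUAL, 'the '], [DIFF_INSERT, 'big '], [DIFF_EQUAL, 'cat sat']]
// i.e. keep 'the ', insert 'big ', keep 'cat sat'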
- if (text1 == text2) { - if (text1) { - return [[DIFF_EQUAL, text1]] - } - return [] - } - - if (typeof opt_checklines === 'undefined') { - opt_checklines = true - } - var checklines = opt_checklines - - // Trim off common prefix (speedup). - var commonlength = this.diff_commonPrefix(text1, text2) - var commonprefix = text1.substring(0, commonlength) - text1 = text1.substring(commonlength) - text2 = text2.substring(commonlength) - - // Trim off common suffix (speedup). - commonlength = this.diff_commonSuffix(text1, text2) - var commonsuffix = text1.substring(text1.length - commonlength) - text1 = text1.substring(0, text1.length - commonlength) - text2 = text2.substring(0, text2.length - commonlength) - - // Compute the diff on the middle block. - var diffs = this.diff_compute_(text1, text2, checklines, deadline) - - // Restore the prefix and suffix. - if (commonprefix) { - diffs.unshift([DIFF_EQUAL, commonprefix]) - } - if (commonsuffix) { - diffs.push([DIFF_EQUAL, commonsuffix]) - } - this.diff_cleanupMerge(diffs) - return diffs -} - -/** - * Find the differences between two texts. Assumes that the texts do not - * have any common prefix or suffix. - * @param {string} text1 Old string to be diffed. - * @param {string} text2 New string to be diffed. - * @param {boolean} checklines Speedup flag. If false, then don't run a - * line-level diff first to identify the changed areas. - * If true, then run a faster, slightly less optimal diff. - * @param {number} deadline Time when the diff should be complete by. - * @return {!Array.} Array of diff tuples. - * @private - */ -diff_match_patch.prototype.diff_compute_ = function ( - text1, - text2, - checklines, - deadline -) { - var diffs - - if (!text1) { - // Just add some text (speedup). - return [[DIFF_INSERT, text2]] - } - - if (!text2) { - // Just delete some text (speedup). - return [[DIFF_DELETE, text1]] - } - - var longtext = text1.length > text2.length ? text1 : text2 - var shorttext = text1.length > text2.length ? text2 : text1 - var i = longtext.indexOf(shorttext) - if (i != -1) { - // Shorter text is inside the longer text (speedup). - diffs = [ - [DIFF_INSERT, longtext.substring(0, i)], - [DIFF_EQUAL, shorttext], - [DIFF_INSERT, longtext.substring(i + shorttext.length)], - ] - // Swap insertions for deletions if diff is reversed. - if (text1.length > text2.length) { - diffs[0][0] = diffs[2][0] = DIFF_DELETE - } - return diffs - } - - if (shorttext.length == 1) { - // Single character string. - // After the previous speedup, the character can't be an equality. - return [ - [DIFF_DELETE, text1], - [DIFF_INSERT, text2], - ] - } - - // Check to see if the problem can be split in two. - var hm = this.diff_halfMatch_(text1, text2) - if (hm) { - // A half-match was found, sort out the return data. - var text1_a = hm[0] - var text1_b = hm[1] - var text2_a = hm[2] - var text2_b = hm[3] - var mid_common = hm[4] - // Send both pairs off for separate processing. - var diffs_a = this.diff_main(text1_a, text2_a, checklines, deadline) - var diffs_b = this.diff_main(text1_b, text2_b, checklines, deadline) - // Merge the results. - return diffs_a.concat([[DIFF_EQUAL, mid_common]], diffs_b) - } - - if (checklines && text1.length > 100 && text2.length > 100) { - return this.diff_lineMode_(text1, text2, deadline) - } - - return this.diff_bisect_(text1, text2, deadline) -} - -/** - * Do a quick line-level diff on both strings, then rediff the parts for - * greater accuracy. - * This speedup can produce non-minimal diffs. 
- * @param {string} text1 Old string to be diffed. - * @param {string} text2 New string to be diffed. - * @param {number} deadline Time when the diff should be complete by. - * @return {!Array.} Array of diff tuples. - * @private - */ -diff_match_patch.prototype.diff_lineMode_ = function (text1, text2, deadline) { - // Scan the text on a line-by-line basis first. - var a = this.diff_linesToChars_(text1, text2) - text1 = a.chars1 - text2 = a.chars2 - var linearray = a.lineArray - - var diffs = this.diff_main(text1, text2, false, deadline) - - // Convert the diff back to original text. - this.diff_charsToLines_(diffs, linearray) - // Eliminate freak matches (e.g. blank lines) - this.diff_cleanupSemantic(diffs) - - // Rediff any replacement blocks, this time character-by-character. - // Add a dummy entry at the end. - diffs.push([DIFF_EQUAL, '']) - var pointer = 0 - var count_delete = 0 - var count_insert = 0 - var text_delete = '' - var text_insert = '' - while (pointer < diffs.length) { - switch (diffs[pointer][0]) { - case DIFF_INSERT: - count_insert++ - text_insert += diffs[pointer][1] - break - case DIFF_DELETE: - count_delete++ - text_delete += diffs[pointer][1] - break - case DIFF_EQUAL: - // Upon reaching an equality, check for prior redundancies. - if (count_delete >= 1 && count_insert >= 1) { - // Delete the offending records and add the merged ones. - diffs.splice( - pointer - count_delete - count_insert, - count_delete + count_insert - ) - pointer = pointer - count_delete - count_insert - var a = this.diff_main(text_delete, text_insert, false, deadline) - for (var j = a.length - 1; j >= 0; j--) { - diffs.splice(pointer, 0, a[j]) - } - pointer = pointer + a.length - } - count_insert = 0 - count_delete = 0 - text_delete = '' - text_insert = '' - break - } - pointer++ - } - diffs.pop() // Remove the dummy entry at the end. - - return diffs -} - -/** - * Find the 'middle snake' of a diff, split the problem in two - * and return the recursively constructed diff. - * See Myers 1986 paper: An O(ND) Difference Algorithm and Its Variations. - * @param {string} text1 Old string to be diffed. - * @param {string} text2 New string to be diffed. - * @param {number} deadline Time at which to bail if not yet complete. - * @return {!Array.} Array of diff tuples. - * @private - */ -diff_match_patch.prototype.diff_bisect_ = function (text1, text2, deadline) { - // Cache the text lengths to prevent multiple calls. - var text1_length = text1.length - var text2_length = text2.length - var max_d = Math.ceil((text1_length + text2_length) / 2) - var v_offset = max_d - var v_length = 2 * max_d - var v1 = new Array(v_length) - var v2 = new Array(v_length) - // Setting all elements to -1 is faster in Chrome & Firefox than mixing - // integers and undefined. - for (var x = 0; x < v_length; x++) { - v1[x] = -1 - v2[x] = -1 - } - v1[v_offset + 1] = 0 - v2[v_offset + 1] = 0 - var delta = text1_length - text2_length - // If the total number of characters is odd, then the front path will collide - // with the reverse path. - var front = delta % 2 != 0 - // Offsets for start and end of k loop. - // Prevents mapping of space beyond the grid. - var k1start = 0 - var k1end = 0 - var k2start = 0 - var k2end = 0 - for (var d = 0; d < max_d; d++) { - // Bail out if deadline is reached. - if (new Date().getTime() > deadline) { - break - } - - // Walk the front path one step. 
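For readers new to Myers's O(ND) algorithm, the bookkeeping in the loops below is the subtle part: v1[k] records the furthest x reached on diagonal k = x - y after d edit steps, and the snake-following while loops consume matching characters for free. A toy, forward-only version (no reverse path, no deadline, returning just the edit distance) shows the same idea; this is a simplified sketch, not this library's code:

// Sketch only: single-direction Myers edit distance.
function myersEditDistance(text1, text2) {
  var max = text1.length + text2.length
  var v = { 1: 0 } // diagonal k -> furthest x reached so far
  for (var d = 0; d <= max; d++) {
    for (var k = -d; k <= d; k += 2) {
      // step down (insert) or right (delete), from whichever neighbouring
      // diagonal reached further in the previous round
      var x = k === -d || (k !== d && v[k - 1] < v[k + 1]) ? v[k + 1] : v[k - 1] + 1
      var y = x - k
      // follow the 'snake' of matching characters
      while (x < text1.length && y < text2.length && text1.charAt(x) === text2.charAt(y)) {
        x++
        y++
      }
      v[k] = x
      if (x >= text1.length && y >= text2.length) {
        return d // d insertions/deletions suffice
      }
    }
  }
  return max
}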
- for (var k1 = -d + k1start; k1 <= d - k1end; k1 += 2) { - var k1_offset = v_offset + k1 - var x1 - if (k1 == -d || (k1 != d && v1[k1_offset - 1] < v1[k1_offset + 1])) { - x1 = v1[k1_offset + 1] - } else { - x1 = v1[k1_offset - 1] + 1 - } - var y1 = x1 - k1 - while ( - x1 < text1_length && - y1 < text2_length && - text1.charAt(x1) == text2.charAt(y1) - ) { - x1++ - y1++ - } - v1[k1_offset] = x1 - if (x1 > text1_length) { - // Ran off the right of the graph. - k1end += 2 - } else if (y1 > text2_length) { - // Ran off the bottom of the graph. - k1start += 2 - } else if (front) { - var k2_offset = v_offset + delta - k1 - if (k2_offset >= 0 && k2_offset < v_length && v2[k2_offset] != -1) { - // Mirror x2 onto top-left coordinate system. - var x2 = text1_length - v2[k2_offset] - if (x1 >= x2) { - // Overlap detected. - return this.diff_bisectSplit_(text1, text2, x1, y1, deadline) - } - } - } - } - - // Walk the reverse path one step. - for (var k2 = -d + k2start; k2 <= d - k2end; k2 += 2) { - var k2_offset = v_offset + k2 - var x2 - if (k2 == -d || (k2 != d && v2[k2_offset - 1] < v2[k2_offset + 1])) { - x2 = v2[k2_offset + 1] - } else { - x2 = v2[k2_offset - 1] + 1 - } - var y2 = x2 - k2 - while ( - x2 < text1_length && - y2 < text2_length && - text1.charAt(text1_length - x2 - 1) == - text2.charAt(text2_length - y2 - 1) - ) { - x2++ - y2++ - } - v2[k2_offset] = x2 - if (x2 > text1_length) { - // Ran off the left of the graph. - k2end += 2 - } else if (y2 > text2_length) { - // Ran off the top of the graph. - k2start += 2 - } else if (!front) { - var k1_offset = v_offset + delta - k2 - if (k1_offset >= 0 && k1_offset < v_length && v1[k1_offset] != -1) { - var x1 = v1[k1_offset] - var y1 = v_offset + x1 - k1_offset - // Mirror x2 onto top-left coordinate system. - x2 = text1_length - x2 - if (x1 >= x2) { - // Overlap detected. - return this.diff_bisectSplit_(text1, text2, x1, y1, deadline) - } - } - } - } - } - // Diff took too long and hit the deadline or - // number of diffs equals number of characters, no commonality at all. - return [ - [DIFF_DELETE, text1], - [DIFF_INSERT, text2], - ] -} - -/** - * Given the location of the 'middle snake', split the diff in two parts - * and recurse. - * @param {string} text1 Old string to be diffed. - * @param {string} text2 New string to be diffed. - * @param {number} x Index of split point in text1. - * @param {number} y Index of split point in text2. - * @param {number} deadline Time at which to bail if not yet complete. - * @return {!Array.} Array of diff tuples. - * @private - */ -diff_match_patch.prototype.diff_bisectSplit_ = function ( - text1, - text2, - x, - y, - deadline -) { - var text1a = text1.substring(0, x) - var text2a = text2.substring(0, y) - var text1b = text1.substring(x) - var text2b = text2.substring(y) - - // Compute both diffs serially. - var diffs = this.diff_main(text1a, text2a, false, deadline) - var diffsb = this.diff_main(text1b, text2b, false, deadline) - - return diffs.concat(diffsb) -} - -/** - * Split two texts into an array of strings. Reduce the texts to a string of - * hashes where each Unicode character represents one line. - * @param {string} text1 First string. - * @param {string} text2 Second string. - * @return {{chars1: string, chars2: string, lineArray: !Array.}} - * An object containing the encoded text1, the encoded text2 and - * the array of unique strings. - * The zeroth element of the array of unique strings is intentionally blank. 
- * @private - */ -diff_match_patch.prototype.diff_linesToChars_ = function (text1, text2) { - var lineArray = [] // e.g. lineArray[4] == 'Hello\n' - var lineHash = {} // e.g. lineHash['Hello\n'] == 4 - - // '\x00' is a valid character, but various debuggers don't like it. - // So we'll insert a junk entry to avoid generating a null character. - lineArray[0] = '' - - /** - * Split a text into an array of strings. Reduce the texts to a string of - * hashes where each Unicode character represents one line. - * Modifies linearray and linehash through being a closure. - * @param {string} text String to encode. - * @return {string} Encoded string. - * @private - */ - function diff_linesToCharsMunge_(text) { - var chars = '' - // Walk the text, pulling out a substring for each line. - // text.split('\n') would would temporarily double our memory footprint. - // Modifying text would create many large strings to garbage collect. - var lineStart = 0 - var lineEnd = -1 - // Keeping our own length variable is faster than looking it up. - var lineArrayLength = lineArray.length - while (lineEnd < text.length - 1) { - lineEnd = text.indexOf('\n', lineStart) - if (lineEnd == -1) { - lineEnd = text.length - 1 - } - var line = text.substring(lineStart, lineEnd + 1) - lineStart = lineEnd + 1 - - if ( - lineHash.hasOwnProperty - ? lineHash.hasOwnProperty(line) - : lineHash[line] !== undefined - ) { - chars += String.fromCharCode(lineHash[line]) - } else { - chars += String.fromCharCode(lineArrayLength) - lineHash[line] = lineArrayLength - lineArray[lineArrayLength++] = line - } - } - return chars - } - - var chars1 = diff_linesToCharsMunge_(text1) - var chars2 = diff_linesToCharsMunge_(text2) - return { chars1: chars1, chars2: chars2, lineArray: lineArray } -} - -/** - * Rehydrate the text in a diff from a string of line hashes to real lines of - * text. - * @param {!Array.} diffs Array of diff tuples. - * @param {!Array.} lineArray Array of unique strings. - * @private - */ -diff_match_patch.prototype.diff_charsToLines_ = function (diffs, lineArray) { - for (var x = 0; x < diffs.length; x++) { - var chars = diffs[x][1] - var text = [] - for (var y = 0; y < chars.length; y++) { - text[y] = lineArray[chars.charCodeAt(y)] - } - diffs[x][1] = text.join('') - } -} - -/** - * Determine the common prefix of two strings. - * @param {string} text1 First string. - * @param {string} text2 Second string. - * @return {number} The number of characters common to the start of each - * string. - */ -diff_match_patch.prototype.diff_commonPrefix = function (text1, text2) { - // Quick check for common null cases. - if (!text1 || !text2 || text1.charAt(0) != text2.charAt(0)) { - return 0 - } - // Binary search. - // Performance analysis: http://neil.fraser.name/news/2007/10/09/ - var pointermin = 0 - var pointermax = Math.min(text1.length, text2.length) - var pointermid = pointermax - var pointerstart = 0 - while (pointermin < pointermid) { - if ( - text1.substring(pointerstart, pointermid) == - text2.substring(pointerstart, pointermid) - ) { - pointermin = pointermid - pointerstart = pointermin - } else { - pointermax = pointermid - } - pointermid = Math.floor((pointermax - pointermin) / 2 + pointermin) - } - return pointermid -} - -/** - * Determine the common suffix of two strings. - * @param {string} text1 First string. - * @param {string} text2 Second string. - * @return {number} The number of characters common to the end of each string. 
- */ -diff_match_patch.prototype.diff_commonSuffix = function (text1, text2) { - // Quick check for common null cases. - if ( - !text1 || - !text2 || - text1.charAt(text1.length - 1) != text2.charAt(text2.length - 1) - ) { - return 0 - } - // Binary search. - // Performance analysis: http://neil.fraser.name/news/2007/10/09/ - var pointermin = 0 - var pointermax = Math.min(text1.length, text2.length) - var pointermid = pointermax - var pointerend = 0 - while (pointermin < pointermid) { - if ( - text1.substring(text1.length - pointermid, text1.length - pointerend) == - text2.substring(text2.length - pointermid, text2.length - pointerend) - ) { - pointermin = pointermid - pointerend = pointermin - } else { - pointermax = pointermid - } - pointermid = Math.floor((pointermax - pointermin) / 2 + pointermin) - } - return pointermid -} - -/** - * Determine if the suffix of one string is the prefix of another. - * @param {string} text1 First string. - * @param {string} text2 Second string. - * @return {number} The number of characters common to the end of the first - * string and the start of the second string. - * @private - */ -diff_match_patch.prototype.diff_commonOverlap_ = function (text1, text2) { - // Cache the text lengths to prevent multiple calls. - var text1_length = text1.length - var text2_length = text2.length - // Eliminate the null case. - if (text1_length == 0 || text2_length == 0) { - return 0 - } - // Truncate the longer string. - if (text1_length > text2_length) { - text1 = text1.substring(text1_length - text2_length) - } else if (text1_length < text2_length) { - text2 = text2.substring(0, text1_length) - } - var text_length = Math.min(text1_length, text2_length) - // Quick check for the worst case. - if (text1 == text2) { - return text_length - } - - // Start by looking for a single character match - // and increase length until no match is found. - // Performance analysis: http://neil.fraser.name/news/2010/11/04/ - var best = 0 - var length = 1 - while (true) { - var pattern = text1.substring(text_length - length) - var found = text2.indexOf(pattern) - if (found == -1) { - return best - } - length += found - if ( - found == 0 || - text1.substring(text_length - length) == text2.substring(0, length) - ) { - best = length - length++ - } - } -} - -/** - * Do the two texts share a substring which is at least half the length of the - * longer text? - * This speedup can produce non-minimal diffs. - * @param {string} text1 First string. - * @param {string} text2 Second string. - * @return {Array.} Five element Array, containing the prefix of - * text1, the suffix of text1, the prefix of text2, the suffix of - * text2 and the common middle. Or null if there was no match. - * @private - */ -diff_match_patch.prototype.diff_halfMatch_ = function (text1, text2) { - if (this.Diff_Timeout <= 0) { - // Don't risk returning a non-optimal diff if we have unlimited time. - return null - } - var longtext = text1.length > text2.length ? text1 : text2 - var shorttext = text1.length > text2.length ? text2 : text1 - if (longtext.length < 4 || shorttext.length * 2 < longtext.length) { - return null // Pointless. - } - var dmp = this // 'this' becomes 'window' in a closure. - - /** - * Does a substring of shorttext exist within longtext such that the substring - * is at least half the length of longtext? - * Closure, but does not reference any external variables. - * @param {string} longtext Longer string. - * @param {string} shorttext Shorter string. 
- * @param {number} i Start index of quarter length substring within longtext. - * @return {Array.} Five element Array, containing the prefix of - * longtext, the suffix of longtext, the prefix of shorttext, the suffix - * of shorttext and the common middle. Or null if there was no match. - * @private - */ - function diff_halfMatchI_(longtext, shorttext, i) { - // Start with a 1/4 length substring at position i as a seed. - var seed = longtext.substring(i, i + Math.floor(longtext.length / 4)) - var j = -1 - var best_common = '' - var best_longtext_a, best_longtext_b, best_shorttext_a, best_shorttext_b - while ((j = shorttext.indexOf(seed, j + 1)) != -1) { - var prefixLength = dmp.diff_commonPrefix( - longtext.substring(i), - shorttext.substring(j) - ) - var suffixLength = dmp.diff_commonSuffix( - longtext.substring(0, i), - shorttext.substring(0, j) - ) - if (best_common.length < suffixLength + prefixLength) { - best_common = - shorttext.substring(j - suffixLength, j) + - shorttext.substring(j, j + prefixLength) - best_longtext_a = longtext.substring(0, i - suffixLength) - best_longtext_b = longtext.substring(i + prefixLength) - best_shorttext_a = shorttext.substring(0, j - suffixLength) - best_shorttext_b = shorttext.substring(j + prefixLength) - } - } - if (best_common.length * 2 >= longtext.length) { - return [ - best_longtext_a, - best_longtext_b, - best_shorttext_a, - best_shorttext_b, - best_common, - ] - } else { - return null - } - } - - // First check if the second quarter is the seed for a half-match. - var hm1 = diff_halfMatchI_( - longtext, - shorttext, - Math.ceil(longtext.length / 4) - ) - // Check again based on the third quarter. - var hm2 = diff_halfMatchI_( - longtext, - shorttext, - Math.ceil(longtext.length / 2) - ) - var hm - if (!hm1 && !hm2) { - return null - } else if (!hm2) { - hm = hm1 - } else if (!hm1) { - hm = hm2 - } else { - // Both matched. Select the longest. - hm = hm1[4].length > hm2[4].length ? hm1 : hm2 - } - - // A half-match was found, sort out the return data. - var text1_a, text1_b, text2_a, text2_b - if (text1.length > text2.length) { - text1_a = hm[0] - text1_b = hm[1] - text2_a = hm[2] - text2_b = hm[3] - } else { - text2_a = hm[0] - text2_b = hm[1] - text1_a = hm[2] - text1_b = hm[3] - } - var mid_common = hm[4] - return [text1_a, text1_b, text2_a, text2_b, mid_common] -} - -/** - * Reduce the number of edits by eliminating semantically trivial equalities. - * @param {!Array.} diffs Array of diff tuples. - */ -diff_match_patch.prototype.diff_cleanupSemantic = function (diffs) { - var changes = false - var equalities = [] // Stack of indices where equalities are found. - var equalitiesLength = 0 // Keeping our own length var is faster in JS. - /** @type {?string} */ - var lastequality = null - // Always equal to diffs[equalities[equalitiesLength - 1]][1] - var pointer = 0 // Index of current position. - // Number of characters that changed prior to the equality. - var length_insertions1 = 0 - var length_deletions1 = 0 - // Number of characters that changed after the equality. - var length_insertions2 = 0 - var length_deletions2 = 0 - while (pointer < diffs.length) { - if (diffs[pointer][0] == DIFF_EQUAL) { - // Equality found. - equalities[equalitiesLength++] = pointer - length_insertions1 = length_insertions2 - length_deletions1 = length_deletions2 - length_insertions2 = 0 - length_deletions2 = 0 - lastequality = diffs[pointer][1] - } else { - // An insertion or deletion. 
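      // (Illustrative aside, added by the editor; not from the upstream
      // file.) The elimination below folds a small equality wedged between
      // edits into one clean delete/insert pair; from the upstream tests:
      //   var diffs = [[DIFF_DELETE, 'a'], [DIFF_EQUAL, 'b'], [DIFF_DELETE, 'c']]
      //   new diff_match_patch().diff_cleanupSemantic(diffs)
      //   // diffs is now [[DIFF_DELETE, 'abc'], [DIFF_INSERT, 'b']]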
-      if (diffs[pointer][0] == DIFF_INSERT) {
-        length_insertions2 += diffs[pointer][1].length
-      } else {
-        length_deletions2 += diffs[pointer][1].length
-      }
-      // Eliminate an equality that is smaller or equal to the edits on both
-      // sides of it.
-      if (
-        lastequality &&
-        lastequality.length <=
-          Math.max(length_insertions1, length_deletions1) &&
-        lastequality.length <= Math.max(length_insertions2, length_deletions2)
-      ) {
-        // Duplicate record.
-        diffs.splice(equalities[equalitiesLength - 1], 0, [
-          DIFF_DELETE,
-          lastequality,
-        ])
-        // Change second copy to insert.
-        diffs[equalities[equalitiesLength - 1] + 1][0] = DIFF_INSERT
-        // Throw away the equality we just deleted.
-        equalitiesLength--
-        // Throw away the previous equality (it needs to be reevaluated).
-        equalitiesLength--
-        pointer = equalitiesLength > 0 ? equalities[equalitiesLength - 1] : -1
-        length_insertions1 = 0 // Reset the counters.
-        length_deletions1 = 0
-        length_insertions2 = 0
-        length_deletions2 = 0
-        lastequality = null
-        changes = true
-      }
-    }
-    pointer++
-  }
-
-  // Normalize the diff.
-  if (changes) {
-    this.diff_cleanupMerge(diffs)
-  }
-  this.diff_cleanupSemanticLossless(diffs)
-
-  // Find any overlaps between deletions and insertions.
-  // e.g: <del>abcxxx</del><ins>xxxdef</ins>
-  //   -> <del>abc</del>xxx<ins>def</ins>
-  // e.g: <del>xxxabc</del><ins>defxxx</ins>
-  //   -> <ins>def</ins>xxx<del>abc</del>
-  // Only extract an overlap if it is as big as the edit ahead or behind it.
-  pointer = 1
-  while (pointer < diffs.length) {
-    if (
-      diffs[pointer - 1][0] == DIFF_DELETE &&
-      diffs[pointer][0] == DIFF_INSERT
-    ) {
-      var deletion = diffs[pointer - 1][1]
-      var insertion = diffs[pointer][1]
-      var overlap_length1 = this.diff_commonOverlap_(deletion, insertion)
-      var overlap_length2 = this.diff_commonOverlap_(insertion, deletion)
-      if (overlap_length1 >= overlap_length2) {
-        if (
-          overlap_length1 >= deletion.length / 2 ||
-          overlap_length1 >= insertion.length / 2
-        ) {
-          // Overlap found. Insert an equality and trim the surrounding edits.
-          diffs.splice(pointer, 0, [
-            DIFF_EQUAL,
-            insertion.substring(0, overlap_length1),
-          ])
-          diffs[pointer - 1][1] = deletion.substring(
-            0,
-            deletion.length - overlap_length1
-          )
-          diffs[pointer + 1][1] = insertion.substring(overlap_length1)
-          pointer++
-        }
-      } else {
-        if (
-          overlap_length2 >= deletion.length / 2 ||
-          overlap_length2 >= insertion.length / 2
-        ) {
-          // Reverse overlap found.
-          // Insert an equality and swap and trim the surrounding edits.
-          diffs.splice(pointer, 0, [
-            DIFF_EQUAL,
-            deletion.substring(0, overlap_length2),
-          ])
-          diffs[pointer - 1][0] = DIFF_INSERT
-          diffs[pointer - 1][1] = insertion.substring(
-            0,
-            insertion.length - overlap_length2
-          )
-          diffs[pointer + 1][0] = DIFF_DELETE
-          diffs[pointer + 1][1] = deletion.substring(overlap_length2)
-          pointer++
-        }
-      }
-      pointer++
-    }
-    pointer++
-  }
-}
-
-/**
- * Look for single edits surrounded on both sides by equalities
- * which can be shifted sideways to align the edit to a word boundary.
- * e.g: The c<ins>at c</ins>ame. -> The <ins>cat </ins>came.
- * @param {!Array.<!Array.<number|string>>} diffs Array of diff tuples.
- */
-diff_match_patch.prototype.diff_cleanupSemanticLossless = function (diffs) {
-  /**
-   * Given two strings, compute a score representing whether the internal
-   * boundary falls on logical boundaries.
-   * Scores range from 6 (best) to 0 (worst).
-   * Closure, but does not reference any external variables.
-   * @param {string} one First string.
-   * @param {string} two Second string.
-   * @return {number} The score.
- * @private - */ - function diff_cleanupSemanticScore_(one, two) { - if (!one || !two) { - // Edges are the best. - return 6 - } - - // Each port of this function behaves slightly differently due to - // subtle differences in each language's definition of things like - // 'whitespace'. Since this function's purpose is largely cosmetic, - // the choice has been made to use each language's native features - // rather than force total conformity. - var char1 = one.charAt(one.length - 1) - var char2 = two.charAt(0) - var nonAlphaNumeric1 = char1.match(diff_match_patch.nonAlphaNumericRegex_) - var nonAlphaNumeric2 = char2.match(diff_match_patch.nonAlphaNumericRegex_) - var whitespace1 = - nonAlphaNumeric1 && char1.match(diff_match_patch.whitespaceRegex_) - var whitespace2 = - nonAlphaNumeric2 && char2.match(diff_match_patch.whitespaceRegex_) - var lineBreak1 = - whitespace1 && char1.match(diff_match_patch.linebreakRegex_) - var lineBreak2 = - whitespace2 && char2.match(diff_match_patch.linebreakRegex_) - var blankLine1 = - lineBreak1 && one.match(diff_match_patch.blanklineEndRegex_) - var blankLine2 = - lineBreak2 && two.match(diff_match_patch.blanklineStartRegex_) - - if (blankLine1 || blankLine2) { - // Five points for blank lines. - return 5 - } else if (lineBreak1 || lineBreak2) { - // Four points for line breaks. - return 4 - } else if (nonAlphaNumeric1 && !whitespace1 && whitespace2) { - // Three points for end of sentences. - return 3 - } else if (whitespace1 || whitespace2) { - // Two points for whitespace. - return 2 - } else if (nonAlphaNumeric1 || nonAlphaNumeric2) { - // One point for non-alphanumeric. - return 1 - } - return 0 - } - - var pointer = 1 - // Intentionally ignore the first and last element (don't need checking). - while (pointer < diffs.length - 1) { - if ( - diffs[pointer - 1][0] == DIFF_EQUAL && - diffs[pointer + 1][0] == DIFF_EQUAL - ) { - // This is a single edit surrounded by equalities. - var equality1 = diffs[pointer - 1][1] - var edit = diffs[pointer][1] - var equality2 = diffs[pointer + 1][1] - - // First, shift the edit as far left as possible. - var commonOffset = this.diff_commonSuffix(equality1, edit) - if (commonOffset) { - var commonString = edit.substring(edit.length - commonOffset) - equality1 = equality1.substring(0, equality1.length - commonOffset) - edit = commonString + edit.substring(0, edit.length - commonOffset) - equality2 = commonString + equality2 - } - - // Second, step character by character right, looking for the best fit. - var bestEquality1 = equality1 - var bestEdit = edit - var bestEquality2 = equality2 - var bestScore = - diff_cleanupSemanticScore_(equality1, edit) + - diff_cleanupSemanticScore_(edit, equality2) - while (edit.charAt(0) === equality2.charAt(0)) { - equality1 += edit.charAt(0) - edit = edit.substring(1) + equality2.charAt(0) - equality2 = equality2.substring(1) - var score = - diff_cleanupSemanticScore_(equality1, edit) + - diff_cleanupSemanticScore_(edit, equality2) - // The >= encourages trailing rather than leading whitespace on edits. - if (score >= bestScore) { - bestScore = score - bestEquality1 = equality1 - bestEdit = edit - bestEquality2 = equality2 - } - } - - if (diffs[pointer - 1][1] != bestEquality1) { - // We have an improvement, save it back to the diff. 
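          // (Illustrative aside, added by the editor; not from the upstream
          // file.) From the upstream tests, this alignment rewrites
          //   [[DIFF_EQUAL, 'The c'], [DIFF_INSERT, 'at c'], [DIFF_EQUAL, 'ame.']]
          // as
          //   [[DIFF_EQUAL, 'The '], [DIFF_INSERT, 'cat '], [DIFF_EQUAL, 'came.']]
          // so the inserted text starts on a word boundary.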
- if (bestEquality1) { - diffs[pointer - 1][1] = bestEquality1 - } else { - diffs.splice(pointer - 1, 1) - pointer-- - } - diffs[pointer][1] = bestEdit - if (bestEquality2) { - diffs[pointer + 1][1] = bestEquality2 - } else { - diffs.splice(pointer + 1, 1) - pointer-- - } - } - } - pointer++ - } -} - -// Define some regex patterns for matching boundaries. -diff_match_patch.nonAlphaNumericRegex_ = /[^a-zA-Z0-9]/ -diff_match_patch.whitespaceRegex_ = /\s/ -diff_match_patch.linebreakRegex_ = /[\r\n]/ -diff_match_patch.blanklineEndRegex_ = /\n\r?\n$/ -diff_match_patch.blanklineStartRegex_ = /^\r?\n\r?\n/ - -/** - * Reduce the number of edits by eliminating operationally trivial equalities. - * @param {!Array.} diffs Array of diff tuples. - */ -diff_match_patch.prototype.diff_cleanupEfficiency = function (diffs) { - var changes = false - var equalities = [] // Stack of indices where equalities are found. - var equalitiesLength = 0 // Keeping our own length var is faster in JS. - /** @type {?string} */ - var lastequality = null - // Always equal to diffs[equalities[equalitiesLength - 1]][1] - var pointer = 0 // Index of current position. - // Is there an insertion operation before the last equality. - var pre_ins = false - // Is there a deletion operation before the last equality. - var pre_del = false - // Is there an insertion operation after the last equality. - var post_ins = false - // Is there a deletion operation after the last equality. - var post_del = false - while (pointer < diffs.length) { - if (diffs[pointer][0] == DIFF_EQUAL) { - // Equality found. - if ( - diffs[pointer][1].length < this.Diff_EditCost && - (post_ins || post_del) - ) { - // Candidate found. - equalities[equalitiesLength++] = pointer - pre_ins = post_ins - pre_del = post_del - lastequality = diffs[pointer][1] - } else { - // Not a candidate, and can never become one. - equalitiesLength = 0 - lastequality = null - } - post_ins = post_del = false - } else { - // An insertion or deletion. - if (diffs[pointer][0] == DIFF_DELETE) { - post_del = true - } else { - post_ins = true - } - /* - * Five types to be split: - * ABXYCD - * AXCD - * ABXC - * AXCD - * ABXC - */ - if ( - lastequality && - ((pre_ins && pre_del && post_ins && post_del) || - (lastequality.length < this.Diff_EditCost / 2 && - pre_ins + pre_del + post_ins + post_del == 3)) - ) { - // Duplicate record. - diffs.splice(equalities[equalitiesLength - 1], 0, [ - DIFF_DELETE, - lastequality, - ]) - // Change second copy to insert. - diffs[equalities[equalitiesLength - 1] + 1][0] = DIFF_INSERT - equalitiesLength-- // Throw away the equality we just deleted; - lastequality = null - if (pre_ins && pre_del) { - // No changes made which could affect previous entry, keep going. - post_ins = post_del = true - equalitiesLength = 0 - } else { - equalitiesLength-- // Throw away the previous equality. - pointer = equalitiesLength > 0 ? equalities[equalitiesLength - 1] : -1 - post_ins = post_del = false - } - changes = true - } - } - pointer++ - } - - if (changes) { - this.diff_cleanupMerge(diffs) - } -} - -/** - * Reorder and merge like edit sections. Merge equalities. - * Any edit section can move as long as it doesn't cross an equality. - * @param {!Array.} diffs Array of diff tuples. - */ -diff_match_patch.prototype.diff_cleanupMerge = function (diffs) { - diffs.push([DIFF_EQUAL, '']) // Add a dummy entry at the end. 
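  // (Editor's note; not from the upstream file.) The 'Five types to be
  // split' list in diff_cleanupEfficiency above lost its markup in this
  // copy: upstream, A and C are wrapped in <ins> and B and D in <del>.
  // The effect, from the upstream tests (default Diff_EditCost of 4):
  //   [[DIFF_DELETE, 'ab'], [DIFF_INSERT, '12'], [DIFF_EQUAL, 'xyz'],
  //    [DIFF_DELETE, 'cd'], [DIFF_INSERT, '34']]
  //   => [[DIFF_DELETE, 'abxyzcd'], [DIFF_INSERT, '12xyz34']]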
- var pointer = 0 - var count_delete = 0 - var count_insert = 0 - var text_delete = '' - var text_insert = '' - var commonlength - while (pointer < diffs.length) { - switch (diffs[pointer][0]) { - case DIFF_INSERT: - count_insert++ - text_insert += diffs[pointer][1] - pointer++ - break - case DIFF_DELETE: - count_delete++ - text_delete += diffs[pointer][1] - pointer++ - break - case DIFF_EQUAL: - // Upon reaching an equality, check for prior redundancies. - if (count_delete + count_insert > 1) { - if (count_delete !== 0 && count_insert !== 0) { - // Factor out any common prefixies. - commonlength = this.diff_commonPrefix(text_insert, text_delete) - if (commonlength !== 0) { - if ( - pointer - count_delete - count_insert > 0 && - diffs[pointer - count_delete - count_insert - 1][0] == - DIFF_EQUAL - ) { - diffs[pointer - count_delete - count_insert - 1][1] += - text_insert.substring(0, commonlength) - } else { - diffs.splice(0, 0, [ - DIFF_EQUAL, - text_insert.substring(0, commonlength), - ]) - pointer++ - } - text_insert = text_insert.substring(commonlength) - text_delete = text_delete.substring(commonlength) - } - // Factor out any common suffixies. - commonlength = this.diff_commonSuffix(text_insert, text_delete) - if (commonlength !== 0) { - diffs[pointer][1] = - text_insert.substring(text_insert.length - commonlength) + - diffs[pointer][1] - text_insert = text_insert.substring( - 0, - text_insert.length - commonlength - ) - text_delete = text_delete.substring( - 0, - text_delete.length - commonlength - ) - } - } - // Delete the offending records and add the merged ones. - if (count_delete === 0) { - diffs.splice(pointer - count_insert, count_delete + count_insert, [ - DIFF_INSERT, - text_insert, - ]) - } else if (count_insert === 0) { - diffs.splice(pointer - count_delete, count_delete + count_insert, [ - DIFF_DELETE, - text_delete, - ]) - } else { - diffs.splice( - pointer - count_delete - count_insert, - count_delete + count_insert, - [DIFF_DELETE, text_delete], - [DIFF_INSERT, text_insert] - ) - } - pointer = - pointer - - count_delete - - count_insert + - (count_delete ? 1 : 0) + - (count_insert ? 1 : 0) + - 1 - } else if (pointer !== 0 && diffs[pointer - 1][0] == DIFF_EQUAL) { - // Merge this equality with the previous one. - diffs[pointer - 1][1] += diffs[pointer][1] - diffs.splice(pointer, 1) - } else { - pointer++ - } - count_insert = 0 - count_delete = 0 - text_delete = '' - text_insert = '' - break - } - } - if (diffs[diffs.length - 1][1] === '') { - diffs.pop() // Remove the dummy entry at the end. - } - - // Second pass: look for single edits surrounded on both sides by equalities - // which can be shifted sideways to eliminate an equality. - // e.g: ABAC -> ABAC - var changes = false - pointer = 1 - // Intentionally ignore the first and last element (don't need checking). - while (pointer < diffs.length - 1) { - if ( - diffs[pointer - 1][0] == DIFF_EQUAL && - diffs[pointer + 1][0] == DIFF_EQUAL - ) { - // This is a single edit surrounded by equalities. - if ( - diffs[pointer][1].substring( - diffs[pointer][1].length - diffs[pointer - 1][1].length - ) == diffs[pointer - 1][1] - ) { - // Shift the edit over the previous equality. 
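        // (Editor's note; not from the upstream file.) The 'e.g: ABAC ->
        // ABAC' comment above lost its markup in this copy; upstream it
        // reads 'e.g: A<ins>BA</ins>C -> <ins>AB</ins>AC', i.e. an edit of
        // 'BA' after the first 'A' is slid left to 'AB' so that the
        // equalities on either side can merge.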
-        diffs[pointer][1] =
-          diffs[pointer - 1][1] +
-          diffs[pointer][1].substring(
-            0,
-            diffs[pointer][1].length - diffs[pointer - 1][1].length
-          )
-        diffs[pointer + 1][1] = diffs[pointer - 1][1] + diffs[pointer + 1][1]
-        diffs.splice(pointer - 1, 1)
-        changes = true
-      } else if (
-        diffs[pointer][1].substring(0, diffs[pointer + 1][1].length) ==
-        diffs[pointer + 1][1]
-      ) {
-        // Shift the edit over the next equality.
-        diffs[pointer - 1][1] += diffs[pointer + 1][1]
-        diffs[pointer][1] =
-          diffs[pointer][1].substring(diffs[pointer + 1][1].length) +
-          diffs[pointer + 1][1]
-        diffs.splice(pointer + 1, 1)
-        changes = true
-      }
-    }
-    pointer++
-  }
-  // If shifts were made, the diff needs reordering and another shift sweep.
-  if (changes) {
-    this.diff_cleanupMerge(diffs)
-  }
-}
-
-/**
- * loc is a location in text1, compute and return the equivalent location in
- * text2.
- * e.g. 'The cat' vs 'The big cat', 1->1, 5->8
- * @param {!Array.<!Array.<number|string>>} diffs Array of diff tuples.
- * @param {number} loc Location within text1.
- * @return {number} Location within text2.
- */
-diff_match_patch.prototype.diff_xIndex = function (diffs, loc) {
-  var chars1 = 0
-  var chars2 = 0
-  var last_chars1 = 0
-  var last_chars2 = 0
-  var x
-  for (x = 0; x < diffs.length; x++) {
-    if (diffs[x][0] !== DIFF_INSERT) {
-      // Equality or deletion.
-      chars1 += diffs[x][1].length
-    }
-    if (diffs[x][0] !== DIFF_DELETE) {
-      // Equality or insertion.
-      chars2 += diffs[x][1].length
-    }
-    if (chars1 > loc) {
-      // Overshot the location.
-      break
-    }
-    last_chars1 = chars1
-    last_chars2 = chars2
-  }
-  // Was the location deleted?
-  if (diffs.length != x && diffs[x][0] === DIFF_DELETE) {
-    return last_chars2
-  }
-  // Add the remaining character length.
-  return last_chars2 + (loc - last_chars1)
-}
-
-/**
- * Convert a diff array into a pretty HTML report.
- * @param {!Array.<!Array.<number|string>>} diffs Array of diff tuples.
- * @return {string} HTML representation.
- */
-diff_match_patch.prototype.diff_prettyHtml = function (diffs) {
-  var html = []
-  var pattern_amp = /&/g
-  var pattern_lt = /</g
-  var pattern_gt = />/g
-  var pattern_para = /\n/g
-  for (var x = 0; x < diffs.length; x++) {
-    var op = diffs[x][0] // Operation (insert, delete, equal)
-    var data = diffs[x][1] // Text of change.
-    var text = data
-      .replace(pattern_amp, '&amp;')
-      .replace(pattern_lt, '&lt;')
-      .replace(pattern_gt, '&gt;')
-      .replace(pattern_para, '&para;<br>')
-    switch (op) {
-      case DIFF_INSERT:
-        html[x] = '<ins style="background:#e6ffe6;">' + text + '</ins>'
-        break
-      case DIFF_DELETE:
-        html[x] = '<del style="background:#ffe6e6;">' + text + '</del>'
-        break
-      case DIFF_EQUAL:
-        html[x] = '<span>' + text + '</span>'
-        break
-    }
-  }
-  return html.join('')
-}
-
-/**
- * Compute and return the source text (all equalities and deletions).
- * @param {!Array.<!Array.<number|string>>} diffs Array of diff tuples.
- * @return {string} Source text.
- */
-diff_match_patch.prototype.diff_text1 = function (diffs) {
-  var text = []
-  for (var x = 0; x < diffs.length; x++) {
-    if (diffs[x][0] !== DIFF_INSERT) {
-      text[x] = diffs[x][1]
-    }
-  }
-  return text.join('')
-}
-
-/**
- * Compute and return the destination text (all equalities and insertions).
- * @param {!Array.<!Array.<number|string>>} diffs Array of diff tuples.
- * @return {string} Destination text.
- */
-diff_match_patch.prototype.diff_text2 = function (diffs) {
-  var text = []
-  for (var x = 0; x < diffs.length; x++) {
-    if (diffs[x][0] !== DIFF_DELETE) {
-      text[x] = diffs[x][1]
-    }
-  }
-  return text.join('')
-}
-
-/**
- * Compute the Levenshtein distance; the number of inserted, deleted or
- * substituted characters.
- * @param {!Array.<!Array.<number|string>>} diffs Array of diff tuples.
- * @return {number} Number of changes.
- */
-diff_match_patch.prototype.diff_levenshtein = function (diffs) {
-  var levenshtein = 0
-  var insertions = 0
-  var deletions = 0
-  for (var x = 0; x < diffs.length; x++) {
-    var op = diffs[x][0]
-    var data = diffs[x][1]
-    switch (op) {
-      case DIFF_INSERT:
-        insertions += data.length
-        break
-      case DIFF_DELETE:
-        deletions += data.length
-        break
-      case DIFF_EQUAL:
-        // A deletion and an insertion is one substitution.
-        levenshtein += Math.max(insertions, deletions)
-        insertions = 0
-        deletions = 0
-        break
-    }
-  }
-  levenshtein += Math.max(insertions, deletions)
-  return levenshtein
-}
-
-/**
- * Crush the diff into an encoded string which describes the operations
- * required to transform text1 into text2.
- * E.g. =3\t-2\t+ing -> Keep 3 chars, delete 2 chars, insert 'ing'.
- * Operations are tab-separated. Inserted text is escaped using %xx notation.
- * @param {!Array.<!Array.<number|string>>} diffs Array of diff tuples.
- * @return {string} Delta text.
- */
-diff_match_patch.prototype.diff_toDelta = function (diffs) {
-  var text = []
-  for (var x = 0; x < diffs.length; x++) {
-    switch (diffs[x][0]) {
-      case DIFF_INSERT:
-        text[x] = '+' + encodeURI(diffs[x][1])
-        break
-      case DIFF_DELETE:
-        text[x] = '-' + diffs[x][1].length
-        break
-      case DIFF_EQUAL:
-        text[x] = '=' + diffs[x][1].length
-        break
-    }
-  }
-  return text.join('\t').replace(/%20/g, ' ')
-}
-
-/**
- * Given the original text1, and an encoded string which describes the
- * operations required to transform text1 into text2, compute the full diff.
- * @param {string} text1 Source string for the diff.
- * @param {string} delta Delta text.
- * @return {!Array.<!Array.<number|string>>} Array of diff tuples.
- * @throws {!Error} If invalid input.
- */
-diff_match_patch.prototype.diff_fromDelta = function (text1, delta) {
-  var diffs = []
-  var diffsLength = 0 // Keeping our own length var is faster in JS.
-  var pointer = 0 // Cursor in text1
-  var tokens = delta.split(/\t/g)
-  for (var x = 0; x < tokens.length; x++) {
-    // Each token begins with a one character parameter which specifies the
-    // operation of this token (delete, insert, equality).
-    var param = tokens[x].substring(1)
-    switch (tokens[x].charAt(0)) {
-      case '+':
-        try {
-          diffs[diffsLength++] = [DIFF_INSERT, decodeURI(param)]
-        } catch (ex) {
-          // Malformed URI sequence.
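          // (Illustrative aside, added by the editor; not from the upstream
          // file.) The delta format round-trips; from the upstream tests,
          //   [[DIFF_EQUAL, 'jump'], [DIFF_DELETE, 's'], [DIFF_INSERT, 'ed'],
          //    [DIFF_EQUAL, ' over '], [DIFF_DELETE, 'the'], [DIFF_INSERT, 'a'],
          //    [DIFF_EQUAL, ' lazy']]
          // encodes as '=4\t-1\t+ed\t=6\t-3\t+a\t=5', and diff_fromDelta with
          // text1 = 'jumps over the lazy' rebuilds the same tuples.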
- throw new Error('Illegal escape in diff_fromDelta: ' + param) - } - break - case '-': - // Fall through. - case '=': - var n = parseInt(param, 10) - if (isNaN(n) || n < 0) { - throw new Error('Invalid number in diff_fromDelta: ' + param) - } - var text = text1.substring(pointer, (pointer += n)) - if (tokens[x].charAt(0) == '=') { - diffs[diffsLength++] = [DIFF_EQUAL, text] - } else { - diffs[diffsLength++] = [DIFF_DELETE, text] - } - break - default: - // Blank tokens are ok (from a trailing \t). - // Anything else is an error. - if (tokens[x]) { - throw new Error( - 'Invalid diff operation in diff_fromDelta: ' + tokens[x] - ) - } - } - } - if (pointer != text1.length) { - throw new Error( - 'Delta length (' + - pointer + - ') does not equal source text length (' + - text1.length + - ').' - ) - } - return diffs -} - -// MATCH FUNCTIONS - -/** - * Locate the best instance of 'pattern' in 'text' near 'loc'. - * @param {string} text The text to search. - * @param {string} pattern The pattern to search for. - * @param {number} loc The location to search around. - * @return {number} Best match index or -1. - */ -diff_match_patch.prototype.match_main = function (text, pattern, loc) { - // Check for null inputs. - if (text == null || pattern == null || loc == null) { - throw new Error('Null input. (match_main)') - } - - loc = Math.max(0, Math.min(loc, text.length)) - if (text == pattern) { - // Shortcut (potentially not guaranteed by the algorithm) - return 0 - } else if (!text.length) { - // Nothing to match. - return -1 - } else if (text.substring(loc, loc + pattern.length) == pattern) { - // Perfect match at the perfect spot! (Includes case of null pattern) - return loc - } else { - // Do a fuzzy compare. - return this.match_bitap_(text, pattern, loc) - } -} - -/** - * Locate the best instance of 'pattern' in 'text' near 'loc' using the - * Bitap algorithm. - * @param {string} text The text to search. - * @param {string} pattern The pattern to search for. - * @param {number} loc The location to search around. - * @return {number} Best match index or -1. - * @private - */ -diff_match_patch.prototype.match_bitap_ = function (text, pattern, loc) { - if (pattern.length > this.Match_MaxBits) { - throw new Error('Pattern too long for this browser.') - } - - // Initialise the alphabet. - var s = this.match_alphabet_(pattern) - - var dmp = this // 'this' becomes 'window' in a closure. - - /** - * Compute and return the score for a match with e errors and x location. - * Accesses loc and pattern through being a closure. - * @param {number} e Number of errors in match. - * @param {number} x Location of match. - * @return {number} Overall score for match (0.0 = good, 1.0 = bad). - * @private - */ - function match_bitapScore_(e, x) { - var accuracy = e / pattern.length - var proximity = Math.abs(loc - x) - if (!dmp.Match_Distance) { - // Dodge divide by zero error. - return proximity ? 1.0 : accuracy - } - return accuracy + proximity / dmp.Match_Distance - } - - // Highest score beyond which we give up. - var score_threshold = this.Match_Threshold - // Is there a nearby exact match? (speedup) - var best_loc = text.indexOf(pattern, loc) - if (best_loc != -1) { - score_threshold = Math.min(match_bitapScore_(0, best_loc), score_threshold) - // What about in the other direction? (speedup) - best_loc = text.lastIndexOf(pattern, loc + pattern.length) - if (best_loc != -1) { - score_threshold = Math.min( - match_bitapScore_(0, best_loc), - score_threshold - ) - } - } - - // Initialise the bit arrays. 
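  // (Illustrative aside, added by the editor; not from the upstream file.)
  // Bitap tolerates errors near loc; from the upstream tests (run with
  // Match_Threshold 0.5 and Match_Distance 100):
  //   var dmp = new diff_match_patch()
  //   dmp.match_main('abcdefghijk', 'fgh', 5)     // => 5, exact match
  //   dmp.match_bitap_('abcdefghijk', 'efxhi', 0) // => 4, one error allowed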
- var matchmask = 1 << (pattern.length - 1) - best_loc = -1 - - var bin_min, bin_mid - var bin_max = pattern.length + text.length - var last_rd - for (var d = 0; d < pattern.length; d++) { - // Scan for the best match; each iteration allows for one more error. - // Run a binary search to determine how far from 'loc' we can stray at this - // error level. - bin_min = 0 - bin_mid = bin_max - while (bin_min < bin_mid) { - if (match_bitapScore_(d, loc + bin_mid) <= score_threshold) { - bin_min = bin_mid - } else { - bin_max = bin_mid - } - bin_mid = Math.floor((bin_max - bin_min) / 2 + bin_min) - } - // Use the result from this iteration as the maximum for the next. - bin_max = bin_mid - var start = Math.max(1, loc - bin_mid + 1) - var finish = Math.min(loc + bin_mid, text.length) + pattern.length - - var rd = Array(finish + 2) - rd[finish + 1] = (1 << d) - 1 - for (var j = finish; j >= start; j--) { - // The alphabet (s) is a sparse hash, so the following line generates - // warnings. - var charMatch = s[text.charAt(j - 1)] - if (d === 0) { - // First pass: exact match. - rd[j] = ((rd[j + 1] << 1) | 1) & charMatch - } else { - // Subsequent passes: fuzzy match. - rd[j] = - (((rd[j + 1] << 1) | 1) & charMatch) | - (((last_rd[j + 1] | last_rd[j]) << 1) | 1) | - last_rd[j + 1] - } - if (rd[j] & matchmask) { - var score = match_bitapScore_(d, j - 1) - // This match will almost certainly be better than any existing match. - // But check anyway. - if (score <= score_threshold) { - // Told you so. - score_threshold = score - best_loc = j - 1 - if (best_loc > loc) { - // When passing loc, don't exceed our current distance from loc. - start = Math.max(1, 2 * loc - best_loc) - } else { - // Already passed loc, downhill from here on in. - break - } - } - } - } - // No hope for a (better) match at greater error levels. - if (match_bitapScore_(d + 1, loc) > score_threshold) { - break - } - last_rd = rd - } - return best_loc -} - -/** - * Initialise the alphabet for the Bitap algorithm. - * @param {string} pattern The text to encode. - * @return {!Object} Hash of character locations. - * @private - */ -diff_match_patch.prototype.match_alphabet_ = function (pattern) { - var s = {} - for (var i = 0; i < pattern.length; i++) { - s[pattern.charAt(i)] = 0 - } - for (var i = 0; i < pattern.length; i++) { - s[pattern.charAt(i)] |= 1 << (pattern.length - i - 1) - } - return s -} - -// PATCH FUNCTIONS - -/** - * Increase the context until it is unique, - * but don't let the pattern expand beyond Match_MaxBits. - * @param {!diff_match_patch.patch_obj} patch The patch to grow. - * @param {string} text Source text. - * @private - */ -diff_match_patch.prototype.patch_addContext_ = function (patch, text) { - if (text.length == 0) { - return - } - var pattern = text.substring(patch.start2, patch.start2 + patch.length1) - var padding = 0 - - // Look for the first and last matches of pattern in text. If two different - // matches are found, increase the pattern length. - while ( - text.indexOf(pattern) != text.lastIndexOf(pattern) && - pattern.length < this.Match_MaxBits - this.Patch_Margin - this.Patch_Margin - ) { - padding += this.Patch_Margin - pattern = text.substring( - patch.start2 - padding, - patch.start2 + patch.length1 + padding - ) - } - // Add one chunk for good luck. - padding += this.Patch_Margin - - // Add the prefix. - var prefix = text.substring(patch.start2 - padding, patch.start2) - if (prefix) { - patch.diffs.unshift([DIFF_EQUAL, prefix]) - } - // Add the suffix. 
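  // (Illustrative aside, added by the editor; not from the upstream file.)
  // From the upstream tests: a bare '-jump\n+somersault\n' hunk applied to
  // 'The quick brown fox jumps over the lazy dog.' gains Patch_Margin (4)
  // characters of context on each side, becoming
  //   @@ -17,12 +17,18 @@\n fox \n-jump\n+somersault\n s ov\n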
- var suffix = text.substring( - patch.start2 + patch.length1, - patch.start2 + patch.length1 + padding - ) - if (suffix) { - patch.diffs.push([DIFF_EQUAL, suffix]) - } - - // Roll back the start points. - patch.start1 -= prefix.length - patch.start2 -= prefix.length - // Extend the lengths. - patch.length1 += prefix.length + suffix.length - patch.length2 += prefix.length + suffix.length -} - -/** - * Compute a list of patches to turn text1 into text2. - * Use diffs if provided, otherwise compute it ourselves. - * There are four ways to call this function, depending on what data is - * available to the caller: - * Method 1: - * a = text1, b = text2 - * Method 2: - * a = diffs - * Method 3 (optimal): - * a = text1, b = diffs - * Method 4 (deprecated, use method 3): - * a = text1, b = text2, c = diffs - * - * @param {string|!Array.} a text1 (methods 1,3,4) or - * Array of diff tuples for text1 to text2 (method 2). - * @param {string|!Array.} opt_b text2 (methods 1,4) or - * Array of diff tuples for text1 to text2 (method 3) or undefined (method 2). - * @param {string|!Array.} opt_c Array of diff tuples - * for text1 to text2 (method 4) or undefined (methods 1,2,3). - * @return {!Array.} Array of Patch objects. - */ -diff_match_patch.prototype.patch_make = function (a, opt_b, opt_c) { - var text1, diffs - if ( - typeof a === 'string' && - typeof opt_b === 'string' && - typeof opt_c === 'undefined' - ) { - // Method 1: text1, text2 - // Compute diffs from text1 and text2. - text1 = /** @type {string} */ (a) - diffs = this.diff_main(text1, /** @type {string} */ (opt_b), true) - if (diffs.length > 2) { - this.diff_cleanupSemantic(diffs) - this.diff_cleanupEfficiency(diffs) - } - } else if ( - a && - typeof a === 'object' && - typeof opt_b === 'undefined' && - typeof opt_c === 'undefined' - ) { - // Method 2: diffs - // Compute text1 from diffs. - diffs = /** @type {!Array.} */ (a) - text1 = this.diff_text1(diffs) - } else if ( - typeof a === 'string' && - opt_b && - typeof opt_b === 'object' && - typeof opt_c === 'undefined' - ) { - // Method 3: text1, diffs - text1 = /** @type {string} */ (a) - diffs = /** @type {!Array.} */ (opt_b) - } else if ( - typeof a === 'string' && - typeof opt_b === 'string' && - opt_c && - typeof opt_c === 'object' - ) { - // Method 4: text1, text2, diffs - // text2 is not used. - text1 = /** @type {string} */ (a) - diffs = /** @type {!Array.} */ (opt_c) - } else { - throw new Error('Unknown call format to patch_make.') - } - - if (diffs.length === 0) { - return [] // Get rid of the null case. - } - var patches = [] - var patch = new diff_match_patch.patch_obj() - var patchDiffLength = 0 // Keeping our own length var is faster in JS. - var char_count1 = 0 // Number of characters into the text1 string. - var char_count2 = 0 // Number of characters into the text2 string. - // Start with text1 (prepatch_text) and apply the diffs until we arrive at - // text2 (postpatch_text). We recreate the patches one by one to determine - // context info. - var prepatch_text = text1 - var postpatch_text = text1 - for (var x = 0; x < diffs.length; x++) { - var diff_type = diffs[x][0] - var diff_text = diffs[x][1] - - if (!patchDiffLength && diff_type !== DIFF_EQUAL) { - // A new patch starts here. 
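      // (Illustrative aside, added by the editor; not from the upstream
      // file.) The common call is method 1 followed by serialisation:
      //   var dmp = new diff_match_patch()
      //   var patches = dmp.patch_make('The cat sat.', 'The hat sat.')
      //   dmp.patch_toText(patches) // => a single '@@ ... @@' hunk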
- patch.start1 = char_count1 - patch.start2 = char_count2 - } - - switch (diff_type) { - case DIFF_INSERT: - patch.diffs[patchDiffLength++] = diffs[x] - patch.length2 += diff_text.length - postpatch_text = - postpatch_text.substring(0, char_count2) + - diff_text + - postpatch_text.substring(char_count2) - break - case DIFF_DELETE: - patch.length1 += diff_text.length - patch.diffs[patchDiffLength++] = diffs[x] - postpatch_text = - postpatch_text.substring(0, char_count2) + - postpatch_text.substring(char_count2 + diff_text.length) - break - case DIFF_EQUAL: - if ( - diff_text.length <= 2 * this.Patch_Margin && - patchDiffLength && - diffs.length != x + 1 - ) { - // Small equality inside a patch. - patch.diffs[patchDiffLength++] = diffs[x] - patch.length1 += diff_text.length - patch.length2 += diff_text.length - } else if (diff_text.length >= 2 * this.Patch_Margin) { - // Time for a new patch. - if (patchDiffLength) { - this.patch_addContext_(patch, prepatch_text) - patches.push(patch) - patch = new diff_match_patch.patch_obj() - patchDiffLength = 0 - // Unlike Unidiff, our patch lists have a rolling context. - // http://code.google.com/p/google-diff-match-patch/wiki/Unidiff - // Update prepatch text & pos to reflect the application of the - // just completed patch. - prepatch_text = postpatch_text - char_count1 = char_count2 - } - } - break - } - - // Update the current character count. - if (diff_type !== DIFF_INSERT) { - char_count1 += diff_text.length - } - if (diff_type !== DIFF_DELETE) { - char_count2 += diff_text.length - } - } - // Pick up the leftover patch if not empty. - if (patchDiffLength) { - this.patch_addContext_(patch, prepatch_text) - patches.push(patch) - } - - return patches -} - -/** - * Given an array of patches, return another array that is identical. - * @param {!Array.} patches Array of Patch objects. - * @return {!Array.} Array of Patch objects. - */ -diff_match_patch.prototype.patch_deepCopy = function (patches) { - // Making deep copies is hard in JavaScript. - var patchesCopy = [] - for (var x = 0; x < patches.length; x++) { - var patch = patches[x] - var patchCopy = new diff_match_patch.patch_obj() - patchCopy.diffs = [] - for (var y = 0; y < patch.diffs.length; y++) { - patchCopy.diffs[y] = patch.diffs[y].slice() - } - patchCopy.start1 = patch.start1 - patchCopy.start2 = patch.start2 - patchCopy.length1 = patch.length1 - patchCopy.length2 = patch.length2 - patchesCopy[x] = patchCopy - } - return patchesCopy -} - -/** - * Merge a set of patches onto the text. Return a patched text, as well - * as a list of true/false values indicating which patches were applied. - * @param {!Array.} patches Array of Patch objects. - * @param {string} text Old text. - * @return {!Array.>} Two element Array, containing the - * new text and an array of boolean values. - */ -diff_match_patch.prototype.patch_apply = function (patches, text) { - if (patches.length == 0) { - return [text, []] - } - - // Deep copy the patches so that no changes are made to originals. - patches = this.patch_deepCopy(patches) - - var nullPadding = this.patch_addPadding(patches) - text = nullPadding + text + nullPadding - - this.patch_splitMax(patches) - // delta keeps track of the offset between the expected and actual location - // of the previous patch. If there are patches expected at positions 10 and - // 20, but the first patch was found at 12, delta is 2 and the second patch - // has an effective expected position of 22. 
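  // (Illustrative aside, added by the editor; not from the upstream file.)
  // The fuzzy matching lets patches survive drift in the target text, e.g.:
  //   var dmp = new diff_match_patch()
  //   var patches = dmp.patch_make('The cat sat.', 'The hat sat.')
  //   dmp.patch_apply(patches, 'XX The cat sat.')
  //   // => ['XX The hat sat.', [true]]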
- var delta = 0 - var results = [] - for (var x = 0; x < patches.length; x++) { - var expected_loc = patches[x].start2 + delta - var text1 = this.diff_text1(patches[x].diffs) - var start_loc - var end_loc = -1 - if (text1.length > this.Match_MaxBits) { - // patch_splitMax will only provide an oversized pattern in the case of - // a monster delete. - start_loc = this.match_main( - text, - text1.substring(0, this.Match_MaxBits), - expected_loc - ) - if (start_loc != -1) { - end_loc = this.match_main( - text, - text1.substring(text1.length - this.Match_MaxBits), - expected_loc + text1.length - this.Match_MaxBits - ) - if (end_loc == -1 || start_loc >= end_loc) { - // Can't find valid trailing context. Drop this patch. - start_loc = -1 - } - } - } else { - start_loc = this.match_main(text, text1, expected_loc) - } - if (start_loc == -1) { - // No match found. :( - results[x] = false - // Subtract the delta for this failed patch from subsequent patches. - delta -= patches[x].length2 - patches[x].length1 - } else { - // Found a match. :) - results[x] = true - delta = start_loc - expected_loc - var text2 - if (end_loc == -1) { - text2 = text.substring(start_loc, start_loc + text1.length) - } else { - text2 = text.substring(start_loc, end_loc + this.Match_MaxBits) - } - if (text1 == text2) { - // Perfect match, just shove the replacement text in. - text = - text.substring(0, start_loc) + - this.diff_text2(patches[x].diffs) + - text.substring(start_loc + text1.length) - } else { - // Imperfect match. Run a diff to get a framework of equivalent - // indices. - var diffs = this.diff_main(text1, text2, false) - if ( - text1.length > this.Match_MaxBits && - this.diff_levenshtein(diffs) / text1.length > - this.Patch_DeleteThreshold - ) { - // The end points match, but the content is unacceptably bad. - results[x] = false - } else { - this.diff_cleanupSemanticLossless(diffs) - var index1 = 0 - var index2 - for (var y = 0; y < patches[x].diffs.length; y++) { - var mod = patches[x].diffs[y] - if (mod[0] !== DIFF_EQUAL) { - index2 = this.diff_xIndex(diffs, index1) - } - if (mod[0] === DIFF_INSERT) { - // Insertion - text = - text.substring(0, start_loc + index2) + - mod[1] + - text.substring(start_loc + index2) - } else if (mod[0] === DIFF_DELETE) { - // Deletion - text = - text.substring(0, start_loc + index2) + - text.substring( - start_loc + this.diff_xIndex(diffs, index1 + mod[1].length) - ) - } - if (mod[0] !== DIFF_DELETE) { - index1 += mod[1].length - } - } - } - } - } - } - // Strip the padding off. - text = text.substring(nullPadding.length, text.length - nullPadding.length) - return [text, results] -} - -/** - * Add some padding on text start and end so that edges can match something. - * Intended to be called only from within patch_apply. - * @param {!Array.} patches Array of Patch objects. - * @return {string} The padding string added to each side. - */ -diff_match_patch.prototype.patch_addPadding = function (patches) { - var paddingLength = this.Patch_Margin - var nullPadding = '' - for (var x = 1; x <= paddingLength; x++) { - nullPadding += String.fromCharCode(x) - } - - // Bump all the patches forward. - for (var x = 0; x < patches.length; x++) { - patches[x].start1 += paddingLength - patches[x].start2 += paddingLength - } - - // Add some padding on start of first diff. - var patch = patches[0] - var diffs = patch.diffs - if (diffs.length == 0 || diffs[0][0] != DIFF_EQUAL) { - // Add nullPadding equality. 
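  // (Editor's note; not from the upstream file.) With the default
  // Patch_Margin of 4, nullPadding is the string '\x01\x02\x03\x04':
  // control characters that are very unlikely to occur in real text, so
  // patches anchored at the very start or end of a document still have
  // context to match against.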
- diffs.unshift([DIFF_EQUAL, nullPadding]) - patch.start1 -= paddingLength // Should be 0. - patch.start2 -= paddingLength // Should be 0. - patch.length1 += paddingLength - patch.length2 += paddingLength - } else if (paddingLength > diffs[0][1].length) { - // Grow first equality. - var extraLength = paddingLength - diffs[0][1].length - diffs[0][1] = nullPadding.substring(diffs[0][1].length) + diffs[0][1] - patch.start1 -= extraLength - patch.start2 -= extraLength - patch.length1 += extraLength - patch.length2 += extraLength - } - - // Add some padding on end of last diff. - patch = patches[patches.length - 1] - diffs = patch.diffs - if (diffs.length == 0 || diffs[diffs.length - 1][0] != DIFF_EQUAL) { - // Add nullPadding equality. - diffs.push([DIFF_EQUAL, nullPadding]) - patch.length1 += paddingLength - patch.length2 += paddingLength - } else if (paddingLength > diffs[diffs.length - 1][1].length) { - // Grow last equality. - var extraLength = paddingLength - diffs[diffs.length - 1][1].length - diffs[diffs.length - 1][1] += nullPadding.substring(0, extraLength) - patch.length1 += extraLength - patch.length2 += extraLength - } - - return nullPadding -} - -/** - * Look through the patches and break up any which are longer than the maximum - * limit of the match algorithm. - * Intended to be called only from within patch_apply. - * @param {!Array.} patches Array of Patch objects. - */ -diff_match_patch.prototype.patch_splitMax = function (patches) { - var patch_size = this.Match_MaxBits - for (var x = 0; x < patches.length; x++) { - if (patches[x].length1 <= patch_size) { - continue - } - var bigpatch = patches[x] - // Remove the big old patch. - patches.splice(x--, 1) - var start1 = bigpatch.start1 - var start2 = bigpatch.start2 - var precontext = '' - while (bigpatch.diffs.length !== 0) { - // Create one of several smaller patches. - var patch = new diff_match_patch.patch_obj() - var empty = true - patch.start1 = start1 - precontext.length - patch.start2 = start2 - precontext.length - if (precontext !== '') { - patch.length1 = patch.length2 = precontext.length - patch.diffs.push([DIFF_EQUAL, precontext]) - } - while ( - bigpatch.diffs.length !== 0 && - patch.length1 < patch_size - this.Patch_Margin - ) { - var diff_type = bigpatch.diffs[0][0] - var diff_text = bigpatch.diffs[0][1] - if (diff_type === DIFF_INSERT) { - // Insertions are harmless. - patch.length2 += diff_text.length - start2 += diff_text.length - patch.diffs.push(bigpatch.diffs.shift()) - empty = false - } else if ( - diff_type === DIFF_DELETE && - patch.diffs.length == 1 && - patch.diffs[0][0] == DIFF_EQUAL && - diff_text.length > 2 * patch_size - ) { - // This is a large deletion. Let it pass in one chunk. - patch.length1 += diff_text.length - start1 += diff_text.length - empty = false - patch.diffs.push([diff_type, diff_text]) - bigpatch.diffs.shift() - } else { - // Deletion or equality. Only take as much as we can stomach. - diff_text = diff_text.substring( - 0, - patch_size - patch.length1 - this.Patch_Margin - ) - patch.length1 += diff_text.length - start1 += diff_text.length - if (diff_type === DIFF_EQUAL) { - patch.length2 += diff_text.length - start2 += diff_text.length - } else { - empty = false - } - patch.diffs.push([diff_type, diff_text]) - if (diff_text == bigpatch.diffs[0][1]) { - bigpatch.diffs.shift() - } else { - bigpatch.diffs[0][1] = bigpatch.diffs[0][1].substring( - diff_text.length - ) - } - } - } - // Compute the head context for the next patch. 
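      // (Editor's note; not from the upstream file.) Match_MaxBits is 32 in
      // this JS port because match_bitap_ packs the pattern into one 32-bit
      // bitmask per character; a patch whose length1 exceeds that could
      // never be located by the matcher, hence this splitting pass.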
- precontext = this.diff_text2(patch.diffs) - precontext = precontext.substring(precontext.length - this.Patch_Margin) - // Append the end context for this patch. - var postcontext = this.diff_text1(bigpatch.diffs).substring( - 0, - this.Patch_Margin - ) - if (postcontext !== '') { - patch.length1 += postcontext.length - patch.length2 += postcontext.length - if ( - patch.diffs.length !== 0 && - patch.diffs[patch.diffs.length - 1][0] === DIFF_EQUAL - ) { - patch.diffs[patch.diffs.length - 1][1] += postcontext - } else { - patch.diffs.push([DIFF_EQUAL, postcontext]) - } - } - if (!empty) { - patches.splice(++x, 0, patch) - } - } - } -} - -/** - * Take a list of patches and return a textual representation. - * @param {!Array.} patches Array of Patch objects. - * @return {string} Text representation of patches. - */ -diff_match_patch.prototype.patch_toText = function (patches) { - var text = [] - for (var x = 0; x < patches.length; x++) { - text[x] = patches[x] - } - return text.join('') -} - -/** - * Parse a textual representation of patches and return a list of Patch objects. - * @param {string} textline Text representation of patches. - * @return {!Array.} Array of Patch objects. - * @throws {!Error} If invalid input. - */ -diff_match_patch.prototype.patch_fromText = function (textline) { - var patches = [] - if (!textline) { - return patches - } - var text = textline.split('\n') - var textPointer = 0 - var patchHeader = /^@@ -(\d+),?(\d*) \+(\d+),?(\d*) @@$/ - while (textPointer < text.length) { - var m = text[textPointer].match(patchHeader) - if (!m) { - throw new Error('Invalid patch string: ' + text[textPointer]) - } - var patch = new diff_match_patch.patch_obj() - patches.push(patch) - patch.start1 = parseInt(m[1], 10) - if (m[2] === '') { - patch.start1-- - patch.length1 = 1 - } else if (m[2] == '0') { - patch.length1 = 0 - } else { - patch.start1-- - patch.length1 = parseInt(m[2], 10) - } - - patch.start2 = parseInt(m[3], 10) - if (m[4] === '') { - patch.start2-- - patch.length2 = 1 - } else if (m[4] == '0') { - patch.length2 = 0 - } else { - patch.start2-- - patch.length2 = parseInt(m[4], 10) - } - textPointer++ - - while (textPointer < text.length) { - var sign = text[textPointer].charAt(0) - try { - var line = decodeURI(text[textPointer].substring(1)) - } catch (ex) { - // Malformed URI sequence. - throw new Error('Illegal escape in patch_fromText: ' + line) - } - if (sign == '-') { - // Deletion. - patch.diffs.push([DIFF_DELETE, line]) - } else if (sign == '+') { - // Insertion. - patch.diffs.push([DIFF_INSERT, line]) - } else if (sign == ' ') { - // Minor equality. - patch.diffs.push([DIFF_EQUAL, line]) - } else if (sign == '@') { - // Start of next patch. - break - } else if (sign === '') { - // Blank line? Whatever. - } else { - // WTF? - throw new Error('Invalid patch mode "' + sign + '" in: ' + line) - } - textPointer++ - } - } - return patches -} - -/** - * Class representing one patch operation. - * @constructor - */ -diff_match_patch.patch_obj = function () { - /** @type {!Array.} */ - this.diffs = [] - /** @type {?number} */ - this.start1 = null - /** @type {?number} */ - this.start2 = null - /** @type {number} */ - this.length1 = 0 - /** @type {number} */ - this.length2 = 0 -} - -/** - * Emmulate GNU diff's format. - * Header: @@ -382,8 +481,9 @@ - * Indicies are printed as 1-based, not 0-based. - * @return {string} The GNU diff string. 
- */ -diff_match_patch.patch_obj.prototype.toString = function () { - var coords1, coords2 - if (this.length1 === 0) { - coords1 = this.start1 + ',0' - } else if (this.length1 == 1) { - coords1 = this.start1 + 1 - } else { - coords1 = this.start1 + 1 + ',' + this.length1 - } - if (this.length2 === 0) { - coords2 = this.start2 + ',0' - } else if (this.length2 == 1) { - coords2 = this.start2 + 1 - } else { - coords2 = this.start2 + 1 + ',' + this.length2 - } - var text = ['@@ -' + coords1 + ' +' + coords2 + ' @@\n'] - var op - // Escape the body of the patch with %xx notation. - for (var x = 0; x < this.diffs.length; x++) { - switch (this.diffs[x][0]) { - case DIFF_INSERT: - op = '+' - break - case DIFF_DELETE: - op = '-' - break - case DIFF_EQUAL: - op = ' ' - break - } - text[x + 1] = op + encodeURI(this.diffs[x][1]) + '\n' - } - return text.join('').replace(/%20/g, ' ') -} - -// Export these global variables so that they survive Google's JS compiler. -// In a browser, 'this' will be 'window'. -// Users of node.js should 'require' the uncompressed version since Google's -// JS compiler may break the following exports for non-browser environments. -this.diff_match_patch = diff_match_patch -this.DIFF_DELETE = DIFF_DELETE -this.DIFF_INSERT = DIFF_INSERT -this.DIFF_EQUAL = DIFF_EQUAL diff --git a/services/track-changes/buildscript.txt b/services/track-changes/buildscript.txt deleted file mode 100644 index 914500f7f4..0000000000 --- a/services/track-changes/buildscript.txt +++ /dev/null @@ -1,9 +0,0 @@ -track-changes ---dependencies=mongo,redis,s3 ---docker-repos=gcr.io/overleaf-ops ---env-add=AWS_BUCKET=bucket ---env-pass-through= ---esmock-loader=False ---node-version=18.18.0 ---public-repo=True ---script-version=4.4.0 diff --git a/services/track-changes/config/settings.defaults.js b/services/track-changes/config/settings.defaults.js deleted file mode 100755 index faa8660b70..0000000000 --- a/services/track-changes/config/settings.defaults.js +++ /dev/null @@ -1,90 +0,0 @@ -const Path = require('path') -const TMP_DIR = - process.env.TMP_PATH || Path.resolve(Path.join(__dirname, '../../', 'tmp')) - -module.exports = { - mongo: { - options: { - useUnifiedTopology: - (process.env.MONGO_USE_UNIFIED_TOPOLOGY || 'true') === 'true', - }, - url: - process.env.MONGO_CONNECTION_STRING || - `mongodb://${process.env.MONGO_HOST || 'localhost'}/sharelatex`, - }, - - internal: { - trackchanges: { - port: 3015, - host: process.env.LISTEN_ADDRESS || 'localhost', - }, - }, - apis: { - documentupdater: { - url: `http://${ - process.env.DOCUMENT_UPDATER_HOST || - process.env.DOCUPDATER_HOST || - 'localhost' - }:3003`, - }, - docstore: { - url: `http://${process.env.DOCSTORE_HOST || 'localhost'}:3016`, - }, - web: { - url: `http://${ - process.env.WEB_API_HOST || process.env.WEB_HOST || 'localhost' - }:${process.env.WEB_API_PORT || process.env.WEB_PORT || 3000}`, - user: process.env.WEB_API_USER || 'sharelatex', - pass: process.env.WEB_API_PASSWORD || 'password', - }, - }, - redis: { - lock: { - host: process.env.REDIS_HOST || 'localhost', - port: process.env.REDIS_PORT || 6379, - password: process.env.REDIS_PASSWORD || '', - key_schema: { - historyLock({ doc_id: docId }) { - return `HistoryLock:{${docId}}` - }, - historyIndexLock({ project_id: projectId }) { - return `HistoryIndexLock:{${projectId}}` - }, - }, - }, - history: { - host: process.env.REDIS_HOST || 'localhost', - port: process.env.REDIS_PORT || 6379, - password: process.env.REDIS_PASSWORD || '', - key_schema: { - uncompressedHistoryOps({ doc_id: 
docId }) { - return `UncompressedHistoryOps:{${docId}}` - }, - docsWithHistoryOps({ project_id: projectId }) { - return `DocsWithHistoryOps:{${projectId}}` - }, - }, - }, - }, - - trackchanges: { - s3: { - key: process.env.AWS_ACCESS_KEY_ID, - secret: process.env.AWS_SECRET_ACCESS_KEY, - endpoint: process.env.AWS_S3_ENDPOINT, - pathStyle: process.env.AWS_S3_PATH_STYLE === 'true', - }, - stores: { - doc_history: process.env.AWS_BUCKET, - }, - continueOnError: process.env.TRACK_CHANGES_CONTINUE_ON_ERROR || false, - }, - - path: { - dumpFolder: Path.join(TMP_DIR, 'dumpFolder'), - }, - - sentry: { - dsn: process.env.SENTRY_DSN, - }, -} diff --git a/services/track-changes/docker-compose.ci.yml b/services/track-changes/docker-compose.ci.yml deleted file mode 100644 index f484d2894a..0000000000 --- a/services/track-changes/docker-compose.ci.yml +++ /dev/null @@ -1,74 +0,0 @@ -# This file was auto-generated, do not edit it directly. -# Instead run bin/update_build_scripts from -# https://github.com/overleaf/internal/ - -version: "2.3" - -services: - test_unit: - image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER - user: node - command: npm run test:unit:_run - environment: - NODE_ENV: test - NODE_OPTIONS: "--unhandled-rejections=strict" - - - test_acceptance: - build: . - image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER - environment: - ELASTIC_SEARCH_DSN: es:9200 - REDIS_HOST: redis - QUEUES_REDIS_HOST: redis - ANALYTICS_QUEUES_REDIS_HOST: redis - MONGO_HOST: mongo - POSTGRES_HOST: postgres - AWS_S3_ENDPOINT: http://s3:9090 - AWS_S3_PATH_STYLE: 'true' - AWS_ACCESS_KEY_ID: fake - AWS_SECRET_ACCESS_KEY: fake - MOCHA_GREP: ${MOCHA_GREP} - NODE_ENV: test - NODE_OPTIONS: "--unhandled-rejections=strict" - AWS_BUCKET: bucket - depends_on: - mongo: - condition: service_healthy - redis: - condition: service_healthy - s3: - condition: service_healthy - user: node - command: npm run test:acceptance:_run - - - tar: - build: . - image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER - volumes: - - ./:/tmp/build/ - command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs . - user: root - redis: - image: redis - healthcheck: - test: ping="$$(redis-cli ping)" && [ "$$ping" = 'PONG' ] - interval: 1s - retries: 20 - - mongo: - image: mongo:5.0.17 - command: --replSet overleaf - healthcheck: - test: "mongo --quiet localhost/test --eval 'quit(db.runCommand({ ping: 1 }).ok ? 0 : 1)'" - interval: 1s - retries: 20 - s3: - image: adobe/s3mock:2.4.14 - environment: - - initialBuckets=fake_user_files,fake_template_files,fake_public_files,bucket - healthcheck: - test: wget --quiet --output-document=/dev/null http://localhost:9090 - interval: 1s - retries: 20 diff --git a/services/track-changes/docker-compose.yml b/services/track-changes/docker-compose.yml deleted file mode 100644 index 9913424da9..0000000000 --- a/services/track-changes/docker-compose.yml +++ /dev/null @@ -1,77 +0,0 @@ -# This file was auto-generated, do not edit it directly. 
-# Instead run bin/update_build_scripts from -# https://github.com/overleaf/internal/ - -version: "2.3" - -services: - test_unit: - image: node:18.18.0 - volumes: - - .:/overleaf/services/track-changes - - ../../node_modules:/overleaf/node_modules - - ../../libraries:/overleaf/libraries - working_dir: /overleaf/services/track-changes - environment: - MOCHA_GREP: ${MOCHA_GREP} - NODE_ENV: test - NODE_OPTIONS: "--unhandled-rejections=strict" - command: npm run --silent test:unit - user: node - - test_acceptance: - image: node:18.18.0 - volumes: - - .:/overleaf/services/track-changes - - ../../node_modules:/overleaf/node_modules - - ../../libraries:/overleaf/libraries - working_dir: /overleaf/services/track-changes - environment: - ELASTIC_SEARCH_DSN: es:9200 - REDIS_HOST: redis - QUEUES_REDIS_HOST: redis - ANALYTICS_QUEUES_REDIS_HOST: redis - MONGO_HOST: mongo - POSTGRES_HOST: postgres - AWS_S3_ENDPOINT: http://s3:9090 - AWS_S3_PATH_STYLE: 'true' - AWS_ACCESS_KEY_ID: fake - AWS_SECRET_ACCESS_KEY: fake - MOCHA_GREP: ${MOCHA_GREP} - LOG_LEVEL: ERROR - NODE_ENV: test - NODE_OPTIONS: "--unhandled-rejections=strict" - AWS_BUCKET: bucket - user: node - depends_on: - mongo: - condition: service_healthy - redis: - condition: service_healthy - s3: - condition: service_healthy - command: npm run --silent test:acceptance - - redis: - image: redis - healthcheck: - test: ping=$$(redis-cli ping) && [ "$$ping" = 'PONG' ] - interval: 1s - retries: 20 - - mongo: - image: mongo:5.0.17 - command: --replSet overleaf - healthcheck: - test: "mongo --quiet localhost/test --eval 'quit(db.runCommand({ ping: 1 }).ok ? 0 : 1)'" - interval: 1s - retries: 20 - - s3: - image: adobe/s3mock:2.4.14 - environment: - - initialBuckets=fake_user_files,fake_template_files,fake_public_files,bucket - healthcheck: - test: wget --quiet --output-document=/dev/null http://localhost:9090 - interval: 1s - retries: 20 diff --git a/services/track-changes/pack.sh b/services/track-changes/pack.sh deleted file mode 100755 index c2aad785c7..0000000000 --- a/services/track-changes/pack.sh +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/bash -x - -# find all the docHistories with unpacked ops and pack them - -# need to keep track of docs already done - -HOST=${1:-"localhost:3015"} -T=${2:-10} - -echo packing all docHistory on $HOST with delay of $T -for n in $(seq 5 -1 1) ; do - echo starting in $n seconds - sleep 1 -done - -while docs=$(curl "$HOST/doc/list?limit=1000&doc_id=$last_doc"); do - if [ -z "$docs" ] ; then break ; fi - for d in $docs ; do - echo "packing $d" - curl -X POST "$HOST/doc/$d/pack" - sleep $T - last_doc=$d - done -done diff --git a/services/track-changes/package.json b/services/track-changes/package.json deleted file mode 100644 index 64d221d971..0000000000 --- a/services/track-changes/package.json +++ /dev/null @@ -1,54 +0,0 @@ -{ - "name": "@overleaf/track-changes", - "description": "An API for saving and compressing individual document updates into a browsable history", - "private": true, - "main": "app.js", - "scripts": { - "start": "node $NODE_APP_OPTIONS app.js", - "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js", - "test:acceptance": "npm run test:acceptance:_run -- --grep=$MOCHA_GREP", - "test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js", - "test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP", - "nodemon": "node --watch $NODE_APP_OPTIONS app.js", - "lint": "eslint --max-warnings 0 --format unix .", - "format": "prettier --list-different 
$PWD/'**/*.js'", - "format:fix": "prettier --write $PWD/'**/*.js'", - "lint:fix": "eslint --fix ." - }, - "dependencies": { - "@overleaf/logger": "*", - "@overleaf/metrics": "*", - "@overleaf/o-error": "*", - "@overleaf/redis-wrapper": "*", - "@overleaf/settings": "*", - "async": "^3.2.2", - "aws-sdk": "^2.643.0", - "body-parser": "^1.19.0", - "bson": "^1.1.5", - "bunyan": "^1.8.15", - "byline": "^5.0.0", - "express": "^4.18.2", - "heap": "^0.2.6", - "JSONStream": "^1.3.5", - "line-reader": "^0.4.0", - "lodash": "^4.17.21", - "mongo-uri": "^0.1.2", - "mongodb": "^3.6.0", - "redis": "~0.10.1", - "request": "~2.88.2", - "requestretry": "^7.1.0", - "s3-streams": "^0.4.0", - "underscore": "~1.13.1", - "yazl": "^2.5.1" - }, - "devDependencies": { - "chai": "^4.3.6", - "chai-as-promised": "^7.1.1", - "cli": "^1.0.1", - "memorystream": "0.3.1", - "mocha": "^10.2.0", - "sandboxed-module": "~2.0.3", - "sinon": "~9.0.1", - "timekeeper": "2.2.0" - } -} diff --git a/services/track-changes/scripts/flush_all.js b/services/track-changes/scripts/flush_all.js deleted file mode 100644 index d2280666e6..0000000000 --- a/services/track-changes/scripts/flush_all.js +++ /dev/null @@ -1,27 +0,0 @@ -const UpdatesManager = require('../app/js/UpdatesManager') -const { waitForDb } = require('../app/js/mongodb') - -async function main() { - await waitForDb() - return new Promise((resolve, reject) => { - const limit = -1 - console.log('Flushing all updates') - UpdatesManager.flushAll(limit, err => { - if (err) { - reject(err) - } else { - resolve() - } - }) - }) -} - -main() - .then(() => { - console.log('Done flushing all updates') - process.exit(0) - }) - .catch(error => { - console.error('There was an error flushing updates', { error }) - process.exit(1) - }) diff --git a/services/track-changes/test/acceptance/deps/Dockerfile.s3mock b/services/track-changes/test/acceptance/deps/Dockerfile.s3mock deleted file mode 100644 index 6509092bd8..0000000000 --- a/services/track-changes/test/acceptance/deps/Dockerfile.s3mock +++ /dev/null @@ -1,4 +0,0 @@ -FROM adobe/s3mock:2.4.14 -RUN apk add --update --no-cache curl -COPY healthcheck.sh /healthcheck.sh -HEALTHCHECK --interval=1s --timeout=1s --retries=30 CMD /healthcheck.sh http://localhost:9090 diff --git a/services/track-changes/test/acceptance/deps/healthcheck.sh b/services/track-changes/test/acceptance/deps/healthcheck.sh deleted file mode 100644 index cd19cea637..0000000000 --- a/services/track-changes/test/acceptance/deps/healthcheck.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/bin/sh - -# health check to allow 404 status code as valid -STATUSCODE=$(curl --silent --output /dev/null --write-out "%{http_code}" $1) -# will be 000 on non-http error (e.g. connection failure) -if test $STATUSCODE -ge 500 || test $STATUSCODE -lt 200; then - exit 1 -fi -exit 0 diff --git a/services/track-changes/test/acceptance/js/AppendingUpdatesTests.js b/services/track-changes/test/acceptance/js/AppendingUpdatesTests.js deleted file mode 100644 index bd73105f2c..0000000000 --- a/services/track-changes/test/acceptance/js/AppendingUpdatesTests.js +++ /dev/null @@ -1,587 +0,0 @@ -/* eslint-disable - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. 
-/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -const sinon = require('sinon') -const { expect } = require('chai') -const { ObjectId } = require('../../../app/js/mongodb') -const Settings = require('@overleaf/settings') -const request = require('request') -const rclient = require('redis').createClient(Settings.redis.history) // Only works locally for now - -const TrackChangesApp = require('./helpers/TrackChangesApp') -const TrackChangesClient = require('./helpers/TrackChangesClient') -const MockWebApi = require('./helpers/MockWebApi') - -describe('Appending doc ops to the history', function () { - before(function (done) { - return TrackChangesApp.ensureRunning(done) - }) - - describe('when the history does not exist yet', function () { - before(function (done) { - this.project_id = ObjectId().toString() - this.doc_id = ObjectId().toString() - this.user_id = ObjectId().toString() - MockWebApi.projects[this.project_id] = { features: { versioning: false } } - TrackChangesClient.pushRawUpdates( - this.project_id, - this.doc_id, - [ - { - op: [{ i: 'f', p: 3 }], - meta: { ts: Date.now(), user_id: this.user_id }, - v: 3, - }, - { - op: [{ i: 'o', p: 4 }], - meta: { ts: Date.now(), user_id: this.user_id }, - v: 4, - }, - { - op: [{ i: 'o', p: 5 }], - meta: { ts: Date.now(), user_id: this.user_id }, - v: 5, - }, - ], - error => { - if (error != null) { - throw error - } - return TrackChangesClient.flushAndGetCompressedUpdates( - this.project_id, - this.doc_id, - (error, updates) => { - this.updates = updates - if (error != null) { - throw error - } - return done() - } - ) - } - ) - return null - }) - - it('should insert the compressed op into mongo', function () { - return expect(this.updates[0].pack[0].op).to.deep.equal([ - { - p: 3, - i: 'foo', - }, - ]) - }) - - it('should insert the correct version number into mongo', function () { - return expect(this.updates[0].v).to.equal(5) - }) - - it('should store the doc id', function () { - return expect(this.updates[0].doc_id.toString()).to.equal(this.doc_id) - }) - - it('should store the project id', function () { - return expect(this.updates[0].project_id.toString()).to.equal( - this.project_id - ) - }) - - return it('should clear the doc from the DocsWithHistoryOps set', function (done) { - rclient.sismember( - `DocsWithHistoryOps:${this.project_id}`, - this.doc_id, - (error, member) => { - if (error) return done(error) - member.should.equal(0) - return done() - } - ) - return null - }) - }) - - describe('when the history has already been started', function () { - beforeEach(function (done) { - this.project_id = ObjectId().toString() - this.doc_id = ObjectId().toString() - this.user_id = ObjectId().toString() - MockWebApi.projects[this.project_id] = { features: { versioning: false } } - TrackChangesClient.pushRawUpdates( - this.project_id, - this.doc_id, - [ - { - op: [{ i: 'f', p: 3 }], - meta: { ts: Date.now(), user_id: this.user_id }, - v: 3, - }, - { - op: [{ i: 'o', p: 4 }], - meta: { ts: Date.now(), user_id: this.user_id }, - v: 4, - }, - { - op: [{ i: 'o', p: 5 }], - meta: { ts: Date.now(), user_id: this.user_id }, - v: 5, - }, - ], - error => { - if (error != null) { - throw error - } - return TrackChangesClient.flushAndGetCompressedUpdates( - this.project_id, - this.doc_id, - (error, updates) => { - if (error != null) { - throw error 
- } - return done() - } - ) - } - ) - return null - }) - - describe('when the updates are recent and from the same user', function () { - beforeEach(function (done) { - TrackChangesClient.pushRawUpdates( - this.project_id, - this.doc_id, - [ - { - op: [{ i: 'b', p: 6 }], - meta: { ts: Date.now(), user_id: this.user_id }, - v: 6, - }, - { - op: [{ i: 'a', p: 7 }], - meta: { ts: Date.now(), user_id: this.user_id }, - v: 7, - }, - { - op: [{ i: 'r', p: 8 }], - meta: { ts: Date.now(), user_id: this.user_id }, - v: 8, - }, - ], - error => { - if (error != null) { - throw error - } - return TrackChangesClient.flushAndGetCompressedUpdates( - this.project_id, - this.doc_id, - (error, updates) => { - this.updates = updates - if (error != null) { - throw error - } - return done() - } - ) - } - ) - return null - }) - - it('should combine all the updates into one pack', function () { - return expect(this.updates[0].pack[1].op).to.deep.equal([ - { - p: 6, - i: 'bar', - }, - ]) - }) - - return it('should insert the correct version number into mongo', function () { - return expect(this.updates[0].v_end).to.equal(8) - }) - }) - - return describe('when the updates are far apart', function () { - beforeEach(function (done) { - const oneDay = 24 * 60 * 60 * 1000 - TrackChangesClient.pushRawUpdates( - this.project_id, - this.doc_id, - [ - { - op: [{ i: 'b', p: 6 }], - meta: { ts: Date.now() + oneDay, user_id: this.user_id }, - v: 6, - }, - { - op: [{ i: 'a', p: 7 }], - meta: { ts: Date.now() + oneDay, user_id: this.user_id }, - v: 7, - }, - { - op: [{ i: 'r', p: 8 }], - meta: { ts: Date.now() + oneDay, user_id: this.user_id }, - v: 8, - }, - ], - error => { - if (error != null) { - throw error - } - return TrackChangesClient.flushAndGetCompressedUpdates( - this.project_id, - this.doc_id, - (error, updates) => { - this.updates = updates - if (error != null) { - throw error - } - return done() - } - ) - } - ) - return null - }) - - return it('should combine the updates into one pack', function () { - expect(this.updates[0].pack[0].op).to.deep.equal([ - { - p: 3, - i: 'foo', - }, - ]) - return expect(this.updates[0].pack[1].op).to.deep.equal([ - { - p: 6, - i: 'bar', - }, - ]) - }) - }) - }) - - describe('when the updates need processing in batches', function () { - before(function (done) { - this.project_id = ObjectId().toString() - this.doc_id = ObjectId().toString() - this.user_id = ObjectId().toString() - MockWebApi.projects[this.project_id] = { features: { versioning: false } } - const updates = [] - this.expectedOp = [{ p: 0, i: '' }] - for (let i = 0; i <= 250; i++) { - updates.push({ - op: [{ i: 'a', p: 0 }], - meta: { ts: Date.now(), user_id: this.user_id }, - v: i, - }) - this.expectedOp[0].i = `a${this.expectedOp[0].i}` - } - - TrackChangesClient.pushRawUpdates( - this.project_id, - this.doc_id, - updates, - error => { - if (error != null) { - throw error - } - return TrackChangesClient.flushAndGetCompressedUpdates( - this.project_id, - this.doc_id, - (error, updates1) => { - this.updates = updates1 - if (error != null) { - throw error - } - return done() - } - ) - } - ) - return null - }) - - it('should concat the compressed op into mongo', function () { - return expect(this.updates[0].pack.length).to.deep.equal(3) - }) // batch size is 100 - - return it('should insert the correct version number into mongo', function () { - return expect(this.updates[0].v_end).to.equal(250) - }) - }) - - describe('when there are multiple ops in each update', function () { - before(function (done) { - this.project_id 
= ObjectId().toString() - this.doc_id = ObjectId().toString() - this.user_id = ObjectId().toString() - MockWebApi.projects[this.project_id] = { features: { versioning: false } } - const oneDay = 24 * 60 * 60 * 1000 - TrackChangesClient.pushRawUpdates( - this.project_id, - this.doc_id, - [ - { - op: [ - { i: 'f', p: 3 }, - { i: 'o', p: 4 }, - { i: 'o', p: 5 }, - ], - meta: { ts: Date.now(), user_id: this.user_id }, - v: 3, - }, - { - op: [ - { i: 'b', p: 6 }, - { i: 'a', p: 7 }, - { i: 'r', p: 8 }, - ], - meta: { ts: Date.now() + oneDay, user_id: this.user_id }, - v: 4, - }, - ], - error => { - if (error != null) { - throw error - } - return TrackChangesClient.flushAndGetCompressedUpdates( - this.project_id, - this.doc_id, - (error, updates) => { - this.updates = updates - if (error != null) { - throw error - } - return done() - } - ) - } - ) - return null - }) - - it('should insert the compressed ops into mongo', function () { - expect(this.updates[0].pack[0].op).to.deep.equal([ - { - p: 3, - i: 'foo', - }, - ]) - return expect(this.updates[0].pack[1].op).to.deep.equal([ - { - p: 6, - i: 'bar', - }, - ]) - }) - - return it('should insert the correct version numbers into mongo', function () { - expect(this.updates[0].pack[0].v).to.equal(3) - return expect(this.updates[0].pack[1].v).to.equal(4) - }) - }) - - describe('when there is a no-op update', function () { - before(function (done) { - this.project_id = ObjectId().toString() - this.doc_id = ObjectId().toString() - this.user_id = ObjectId().toString() - MockWebApi.projects[this.project_id] = { features: { versioning: false } } - const oneDay = 24 * 60 * 60 * 1000 - TrackChangesClient.pushRawUpdates( - this.project_id, - this.doc_id, - [ - { - op: [], - meta: { ts: Date.now(), user_id: this.user_id }, - v: 3, - }, - { - op: [{ i: 'foo', p: 3 }], - meta: { ts: Date.now() + oneDay, user_id: this.user_id }, - v: 4, - }, - ], - error => { - if (error != null) { - throw error - } - return TrackChangesClient.flushAndGetCompressedUpdates( - this.project_id, - this.doc_id, - (error, updates) => { - this.updates = updates - if (error != null) { - throw error - } - return done() - } - ) - } - ) - return null - }) - - it('should insert the compressed no-op into mongo', function () { - return expect(this.updates[0].pack[0].op).to.deep.equal([]) - }) - - it('should insert the compressed next update into mongo', function () { - return expect(this.updates[0].pack[1].op).to.deep.equal([ - { - p: 3, - i: 'foo', - }, - ]) - }) - - return it('should insert the correct version numbers into mongo', function () { - expect(this.updates[0].pack[0].v).to.equal(3) - return expect(this.updates[0].pack[1].v).to.equal(4) - }) - }) - - describe('when there is a comment update', function () { - before(function (done) { - this.project_id = ObjectId().toString() - this.doc_id = ObjectId().toString() - this.user_id = ObjectId().toString() - MockWebApi.projects[this.project_id] = { features: { versioning: false } } - TrackChangesClient.pushRawUpdates( - this.project_id, - this.doc_id, - [ - { - op: [ - { c: 'foo', p: 3 }, - { d: 'bar', p: 6 }, - ], - meta: { ts: Date.now(), user_id: this.user_id }, - v: 3, - }, - ], - error => { - if (error != null) { - throw error - } - return TrackChangesClient.flushAndGetCompressedUpdates( - this.project_id, - this.doc_id, - (error, updates) => { - this.updates = updates - if (error != null) { - throw error - } - return done() - } - ) - } - ) - return null - }) - - it('should ignore the comment op', function () { - return 
expect(this.updates[0].pack[0].op).to.deep.equal([ - { d: 'bar', p: 6 }, - ]) - }) - - return it('should insert the correct version numbers into mongo', function () { - return expect(this.updates[0].pack[0].v).to.equal(3) - }) - }) - - describe('when the project has versioning enabled', function () { - before(function (done) { - this.project_id = ObjectId().toString() - this.doc_id = ObjectId().toString() - this.user_id = ObjectId().toString() - MockWebApi.projects[this.project_id] = { features: { versioning: true } } - - TrackChangesClient.pushRawUpdates( - this.project_id, - this.doc_id, - [ - { - op: [{ i: 'f', p: 3 }], - meta: { ts: Date.now(), user_id: this.user_id }, - v: 3, - }, - ], - error => { - if (error != null) { - throw error - } - return TrackChangesClient.flushAndGetCompressedUpdates( - this.project_id, - this.doc_id, - (error, updates) => { - this.updates = updates - if (error != null) { - throw error - } - return done() - } - ) - } - ) - return null - }) - - return it('should not add a expiresAt entry in the update in mongo', function () { - return expect(this.updates[0].expiresAt).to.be.undefined - }) - }) - - return describe('when the project does not have versioning enabled', function () { - before(function (done) { - this.project_id = ObjectId().toString() - this.doc_id = ObjectId().toString() - this.user_id = ObjectId().toString() - MockWebApi.projects[this.project_id] = { features: { versioning: false } } - - TrackChangesClient.pushRawUpdates( - this.project_id, - this.doc_id, - [ - { - op: [{ i: 'f', p: 3 }], - meta: { ts: Date.now(), user_id: this.user_id }, - v: 3, - }, - ], - error => { - if (error != null) { - throw error - } - return TrackChangesClient.flushAndGetCompressedUpdates( - this.project_id, - this.doc_id, - (error, updates) => { - this.updates = updates - if (error != null) { - throw error - } - return done() - } - ) - } - ) - return null - }) - - return it('should add a expiresAt entry in the update in mongo', function () { - return expect(this.updates[0].expiresAt).to.exist - }) - }) -}) diff --git a/services/track-changes/test/acceptance/js/ArchivingUpdatesTests.js b/services/track-changes/test/acceptance/js/ArchivingUpdatesTests.js deleted file mode 100644 index cfbb4a29bc..0000000000 --- a/services/track-changes/test/acceptance/js/ArchivingUpdatesTests.js +++ /dev/null @@ -1,319 +0,0 @@ -/* eslint-disable - no-undef, - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. 
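The tests in this file round-trip compressed packs between mongo and S3. A minimal sketch of the S3 object layout they assume — the key format matches the getS3Doc helper in TrackChangesClient further down, the bucket comes from AWS_BUCKET, and the ids here are illustrative:

// One S3 object per pack of compressed ops, grouped by project and doc.
function s3PackKey(projectId, docId, packId) {
  return `${projectId}/changes-${docId}/pack-${packId}`
}

// e.g. s3PackKey('p1', 'd1', 'abc123') -> 'p1/changes-d1/pack-abc123'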
-/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * DS103: Rewrite code to no longer use __guard__ - * DS202: Simplify dynamic range loops - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -const sinon = require('sinon') -const { expect } = require('chai') -const { db, ObjectId } = require('../../../app/js/mongodb') -const Settings = require('@overleaf/settings') -const request = require('request') -const rclient = require('redis').createClient(Settings.redis.history) // Only works locally for now - -const TrackChangesApp = require('./helpers/TrackChangesApp') -const TrackChangesClient = require('./helpers/TrackChangesClient') -const MockDocStoreApi = require('./helpers/MockDocStoreApi') -const MockWebApi = require('./helpers/MockWebApi') - -describe('Archiving updates', function () { - before(function (done) { - if ( - __guard__( - __guard__( - Settings != null ? Settings.trackchanges : undefined, - x1 => x1.s3 - ), - x => x.key.length - ) < 1 - ) { - const message = new Error('s3 keys not setup, this test setup will fail') - return done(message) - } - - return TrackChangesClient.waitForS3(done) - }) - - before(function (done) { - this.now = Date.now() - this.to = this.now - this.user_id = ObjectId().toString() - this.user_id_2 = ObjectId().toString() - this.doc_id = ObjectId().toString() - this.project_id = ObjectId().toString() - - this.minutes = 60 * 1000 - this.hours = 60 * this.minutes - - MockWebApi.projects[this.project_id] = { - features: { - versioning: true, - }, - } - sinon.spy(MockWebApi, 'getProjectDetails') - - MockWebApi.users[this.user_id] = this.user = { - email: 'user@sharelatex.com', - first_name: 'Leo', - last_name: 'Lion', - id: this.user_id, - } - sinon.spy(MockWebApi, 'getUserInfo') - - MockDocStoreApi.docs[this.doc_id] = this.doc = { - _id: this.doc_id, - project_id: this.project_id, - } - sinon.spy(MockDocStoreApi, 'getAllDoc') - - this.updates = [] - for ( - let i = 0, end = 512 + 10, asc = end >= 0; - asc ? i <= end : i >= end; - asc ? 
i++ : i-- - ) { - this.updates.push({ - op: [{ i: 'a', p: 0 }], - meta: { ts: this.now + (i - 2048) * this.hours, user_id: this.user_id }, - v: 2 * i + 1, - }) - this.updates.push({ - op: [{ i: 'b', p: 0 }], - meta: { - ts: this.now + (i - 2048) * this.hours + 10 * this.minutes, - user_id: this.user_id_2, - }, - v: 2 * i + 2, - }) - } - TrackChangesApp.ensureRunning(() => { - return TrackChangesClient.pushRawUpdates( - this.project_id, - this.doc_id, - this.updates, - error => { - if (error != null) { - throw error - } - return TrackChangesClient.flushDoc( - this.project_id, - this.doc_id, - error => { - if (error != null) { - throw error - } - return done() - } - ) - } - ) - }) - return null - }) - - after(function (done) { - MockWebApi.getUserInfo.restore() - return db.docHistory.deleteMany( - { project_id: ObjectId(this.project_id) }, - () => { - return db.docHistoryIndex.remove( - { project_id: ObjectId(this.project_id) }, - () => { - return TrackChangesClient.removeS3Doc( - this.project_id, - this.doc_id, - done - ) - } - ) - } - ) - }) - - function testExportFeature() { - describe('exporting the project', function () { - before('fetch export', function (done) { - TrackChangesClient.exportProject( - this.project_id, - (error, updates, userIds) => { - if (error) { - return done(error) - } - this.exportedUpdates = updates - this.exportedUserIds = userIds - done() - } - ) - }) - - it('should include all the imported updates, with ids, sorted by timestamp', function () { - // Add a safe guard for an empty array matching an empty export. - expect(this.updates).to.have.length(1024 + 22) - - const expectedExportedUpdates = this.updates - .slice() - .reverse() - .map(update => { - // clone object, updates are created once in before handler - const exportedUpdate = Object.assign({}, update) - exportedUpdate.meta = Object.assign({}, update.meta) - - exportedUpdate.doc_id = this.doc_id - exportedUpdate.project_id = this.project_id - - // This is for merged updates, which does not apply here. 
- exportedUpdate.meta.start_ts = exportedUpdate.meta.end_ts = - exportedUpdate.meta.ts - delete exportedUpdate.meta.ts - return exportedUpdate - }) - expect(this.exportedUpdates).to.deep.equal(expectedExportedUpdates) - expect(this.exportedUserIds).to.deep.equal([ - this.user_id, - this.user_id_2, - ]) - }) - }) - } - - describe("before archiving a doc's updates", function () { - testExportFeature() - }) - - describe("archiving a doc's updates", function () { - before(function (done) { - TrackChangesClient.pushDocHistory(this.project_id, this.doc_id, error => { - if (error != null) { - throw error - } - return done() - }) - return null - }) - - it('should have one cached pack', function (done) { - return db.docHistory.count( - { doc_id: ObjectId(this.doc_id), expiresAt: { $exists: true } }, - (error, count) => { - if (error != null) { - throw error - } - count.should.equal(1) - return done() - } - ) - }) - - it('should have one remaining pack after cache is expired', function (done) { - return db.docHistory.deleteMany( - { - doc_id: ObjectId(this.doc_id), - expiresAt: { $exists: true }, - }, - (err, result) => { - if (err) return done(err) - return db.docHistory.count( - { doc_id: ObjectId(this.doc_id) }, - (error, count) => { - if (error != null) { - throw error - } - count.should.equal(1) - return done() - } - ) - } - ) - }) - - it('should have a docHistoryIndex entry marked as inS3', function (done) { - return db.docHistoryIndex.findOne( - { _id: ObjectId(this.doc_id) }, - (error, index) => { - if (error != null) { - throw error - } - index.packs[0].inS3.should.equal(true) - return done() - } - ) - }) - - it('should have a docHistoryIndex entry with the last version', function (done) { - return db.docHistoryIndex.findOne( - { _id: ObjectId(this.doc_id) }, - (error, index) => { - if (error != null) { - throw error - } - index.packs[0].v_end.should.equal(1024) - return done() - } - ) - }) - - it('should store 1024 doc changes in S3 in one pack', function (done) { - return db.docHistoryIndex.findOne( - { _id: ObjectId(this.doc_id) }, - (error, index) => { - if (error != null) { - throw error - } - const packId = index.packs[0]._id - return TrackChangesClient.getS3Doc( - this.project_id, - this.doc_id, - packId, - (error, doc) => { - if (error) return done(error) - doc.n.should.equal(1024) - doc.pack.length.should.equal(1024) - return done() - } - ) - } - ) - }) - - testExportFeature() - }) - - return describe("unarchiving a doc's updates", function () { - before(function (done) { - TrackChangesClient.pullDocHistory(this.project_id, this.doc_id, error => { - if (error != null) { - throw error - } - return done() - }) - return null - }) - - return it('should restore both packs', function (done) { - return db.docHistory.count( - { doc_id: ObjectId(this.doc_id) }, - (error, count) => { - if (error != null) { - throw error - } - count.should.equal(2) - return done() - } - ) - }) - }) -}) - -function __guard__(value, transform) { - return typeof value !== 'undefined' && value !== null - ? 
transform(value) - : undefined -} diff --git a/services/track-changes/test/acceptance/js/ExportProjectTests.js b/services/track-changes/test/acceptance/js/ExportProjectTests.js deleted file mode 100644 index b6ca106a60..0000000000 --- a/services/track-changes/test/acceptance/js/ExportProjectTests.js +++ /dev/null @@ -1,34 +0,0 @@ -const { expect } = require('chai') -const { ObjectId } = require('../../../app/js/mongodb') - -const TrackChangesApp = require('./helpers/TrackChangesApp') -const TrackChangesClient = require('./helpers/TrackChangesClient') - -describe('ExportProject', function () { - before('start app', function (done) { - TrackChangesApp.ensureRunning(done) - }) - - describe('when there are no updates', function () { - before('fetch export', function (done) { - TrackChangesClient.exportProject( - ObjectId(), - (error, updates, userIds) => { - if (error) { - return done(error) - } - this.exportedUpdates = updates - this.exportedUserIds = userIds - done() - } - ) - }) - - it('should export an empty array', function () { - expect(this.exportedUpdates).to.deep.equal([]) - expect(this.exportedUserIds).to.deep.equal([]) - }) - }) - - // see ArchivingUpdatesTests for tests with data in mongo/s3 -}) diff --git a/services/track-changes/test/acceptance/js/FlushingUpdatesTests.js b/services/track-changes/test/acceptance/js/FlushingUpdatesTests.js deleted file mode 100644 index 71b454a292..0000000000 --- a/services/track-changes/test/acceptance/js/FlushingUpdatesTests.js +++ /dev/null @@ -1,277 +0,0 @@ -/* eslint-disable - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -const sinon = require('sinon') -const { expect } = require('chai') -const { ObjectId } = require('../../../app/js/mongodb') -const Settings = require('@overleaf/settings') -const request = require('request') -const rclient = require('redis').createClient(Settings.redis.history) // Only works locally for now - -const TrackChangesApp = require('./helpers/TrackChangesApp') -const TrackChangesClient = require('./helpers/TrackChangesClient') -const MockWebApi = require('./helpers/MockWebApi') - -describe('Flushing updates', function () { - before(function (done) { - return TrackChangesApp.ensureRunning(done) - }) - - describe("flushing a doc's updates", function () { - before(function (done) { - this.project_id = ObjectId().toString() - this.doc_id = ObjectId().toString() - this.user_id = ObjectId().toString() - MockWebApi.projects[this.project_id] = { features: { versioning: true } } - - TrackChangesClient.pushRawUpdates( - this.project_id, - this.doc_id, - [ - { - op: [{ i: 'f', p: 3 }], - meta: { ts: Date.now(), user_id: this.user_id }, - v: 3, - }, - ], - error => { - if (error != null) { - throw error - } - return TrackChangesClient.flushDoc( - this.project_id, - this.doc_id, - error => { - if (error != null) { - throw error - } - return done() - } - ) - } - ) - return null - }) - - return it('should flush the op into mongo', function (done) { - TrackChangesClient.getCompressedUpdates(this.doc_id, (error, updates) => { - if (error) return done(error) - expect(updates[0].pack[0].op).to.deep.equal([ - { - p: 3, - i: 'f', - }, - ]) - return done() - }) - return null - }) - }) - - return describe("flushing 
a project's updates", function () { - describe('with versioning enabled', function () { - before(function (done) { - this.project_id = ObjectId().toString() - this.doc_id = ObjectId().toString() - this.user_id = ObjectId().toString() - - this.weeks = 7 * 24 * 60 * 60 * 1000 - - MockWebApi.projects[this.project_id] = { - features: { - versioning: true, - }, - } - - TrackChangesClient.pushRawUpdates( - this.project_id, - this.doc_id, - [ - { - op: [{ i: 'g', p: 2 }], - meta: { ts: Date.now() - 2 * this.weeks, user_id: this.user_id }, - v: 2, - }, - { - op: [{ i: 'f', p: 3 }], - meta: { ts: Date.now(), user_id: this.user_id }, - v: 3, - }, - ], - error => { - if (error != null) { - throw error - } - return TrackChangesClient.flushProject(this.project_id, error => { - if (error != null) { - throw error - } - return done() - }) - } - ) - return null - }) - - it('should not mark the updates for deletion', function (done) { - TrackChangesClient.getCompressedUpdates( - this.doc_id, - (error, updates) => { - if (error) return done(error) - expect(updates[0].expiresAt).to.not.exist - return done() - } - ) - return null - }) - - return it('should preserve history forever', function (done) { - TrackChangesClient.getProjectMetaData( - this.project_id, - (error, project) => { - if (error) return done(error) - expect(project.preserveHistory).to.equal(true) - return done() - } - ) - return null - }) - }) - - describe('without versioning enabled', function () { - before(function (done) { - this.project_id = ObjectId().toString() - this.doc_id = ObjectId().toString() - this.user_id = ObjectId().toString() - - this.weeks = 7 * 24 * 60 * 60 * 1000 - - MockWebApi.projects[this.project_id] = { - features: { - versioning: false, - }, - } - - TrackChangesClient.pushRawUpdates( - this.project_id, - this.doc_id, - [ - { - op: [{ i: 'g', p: 2 }], - meta: { ts: Date.now() - 2 * this.weeks, user_id: this.user_id }, - v: 2, - }, - { - op: [{ i: 'f', p: 3 }], - meta: { ts: Date.now(), user_id: this.user_id }, - v: 3, - }, - ], - error => { - if (error != null) { - throw error - } - return TrackChangesClient.flushProject(this.project_id, error => { - if (error != null) { - throw error - } - return done() - }) - } - ) - return null - }) - - return it('should mark the updates for deletion', function (done) { - TrackChangesClient.getCompressedUpdates( - this.doc_id, - (error, updates) => { - if (error) return done(error) - expect(updates[0].expiresAt).to.exist - return done() - } - ) - return null - }) - }) - - return describe('without versioning enabled but with preserveHistory set to true', function () { - before(function (done) { - this.project_id = ObjectId().toString() - this.doc_id = ObjectId().toString() - this.user_id = ObjectId().toString() - - this.weeks = 7 * 24 * 60 * 60 * 1000 - - MockWebApi.projects[this.project_id] = { - features: { - versioning: false, - }, - } - - TrackChangesClient.setPreserveHistoryForProject( - this.project_id, - error => { - if (error != null) { - throw error - } - return TrackChangesClient.pushRawUpdates( - this.project_id, - this.doc_id, - [ - { - op: [{ i: 'g', p: 2 }], - meta: { - ts: Date.now() - 2 * this.weeks, - user_id: this.user_id, - }, - v: 2, - }, - { - op: [{ i: 'f', p: 3 }], - meta: { ts: Date.now(), user_id: this.user_id }, - v: 3, - }, - ], - error => { - if (error != null) { - throw error - } - return TrackChangesClient.flushProject( - this.project_id, - error => { - if (error != null) { - throw error - } - return done() - } - ) - } - ) - } - ) - return null - }) - - 
return it('should not mark the updates for deletion', function (done) { - TrackChangesClient.getCompressedUpdates( - this.doc_id, - (error, updates) => { - if (error) return done(error) - expect(updates[0].expiresAt).to.not.exist - return done() - } - ) - return null - }) - }) - }) -}) diff --git a/services/track-changes/test/acceptance/js/GettingADiffTests.js b/services/track-changes/test/acceptance/js/GettingADiffTests.js deleted file mode 100644 index f2de3d7d7f..0000000000 --- a/services/track-changes/test/acceptance/js/GettingADiffTests.js +++ /dev/null @@ -1,127 +0,0 @@ -/* eslint-disable - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -const sinon = require('sinon') -const { expect } = require('chai') -const { ObjectId } = require('../../../app/js/mongodb') -const Settings = require('@overleaf/settings') - -const TrackChangesApp = require('./helpers/TrackChangesApp') -const TrackChangesClient = require('./helpers/TrackChangesClient') -const MockDocUpdaterApi = require('./helpers/MockDocUpdaterApi') -const MockWebApi = require('./helpers/MockWebApi') - -describe('Getting a diff', function () { - beforeEach(function (done) { - sinon.spy(MockDocUpdaterApi, 'getDoc') - - this.now = Date.now() - this.from = this.now - 100000000 - this.to = this.now - this.user_id = ObjectId().toString() - this.doc_id = ObjectId().toString() - this.project_id = ObjectId().toString() - MockWebApi.projects[this.project_id] = { features: { versioning: true } } - - MockWebApi.users[this.user_id] = this.user = { - email: 'user@sharelatex.com', - first_name: 'Leo', - last_name: 'Lion', - id: this.user_id, - } - sinon.spy(MockWebApi, 'getUserInfo') - - const twoMinutes = 2 * 60 * 1000 - - this.updates = [ - { - op: [{ i: 'one ', p: 0 }], - meta: { ts: this.from - twoMinutes, user_id: this.user_id }, - v: 3, - }, - { - op: [{ i: 'two ', p: 4 }], - meta: { ts: this.from + twoMinutes, user_id: this.user_id }, - v: (this.fromVersion = 4), - }, - { - op: [{ i: 'three ', p: 8 }], - meta: { ts: this.to - twoMinutes, user_id: this.user_id }, - v: (this.toVersion = 5), - }, - { - op: [{ i: 'four', p: 14 }], - meta: { ts: this.to + twoMinutes, user_id: this.user_id }, - v: 6, - }, - ] - this.lines = ['one two three four'] - this.expected_diff = [ - { u: 'one ' }, - { - i: 'two three ', - meta: { - start_ts: this.from + twoMinutes, - end_ts: this.to - twoMinutes, - user: this.user, - }, - }, - ] - - MockDocUpdaterApi.docs[this.doc_id] = { - lines: this.lines, - version: 7, - } - TrackChangesApp.ensureRunning(() => { - return TrackChangesClient.pushRawUpdates( - this.project_id, - this.doc_id, - this.updates, - error => { - if (error != null) { - throw error - } - return TrackChangesClient.getDiff( - this.project_id, - this.doc_id, - this.fromVersion, - this.toVersion, - (error, diff) => { - if (error != null) { - throw error - } - this.diff = diff.diff - return done() - } - ) - } - ) - }) - return null - }) - - afterEach(function () { - MockDocUpdaterApi.getDoc.restore() - MockWebApi.getUserInfo.restore() - return null - }) - - it('should return the diff', function () { - return expect(this.diff).to.deep.equal(this.expected_diff) - }) - - return it('should get the doc from the doc updater', function 
() {
-    MockDocUpdaterApi.getDoc
-      .calledWith(this.project_id, this.doc_id)
-      .should.equal(true)
-    return null
-  })
-})
diff --git a/services/track-changes/test/acceptance/js/GettingUpdatesTests.js b/services/track-changes/test/acceptance/js/GettingUpdatesTests.js
deleted file mode 100644
index d3fce21171..0000000000
--- a/services/track-changes/test/acceptance/js/GettingUpdatesTests.js
+++ /dev/null
@@ -1,185 +0,0 @@
-/* eslint-disable
-    chai-friendly/no-unused-expressions,
-    no-unused-vars,
-*/
-// TODO: This file was created by bulk-decaffeinate.
-// Fix any style issues and re-enable lint.
-/*
- * decaffeinate suggestions:
- * DS102: Remove unnecessary code created because of implicit returns
- * DS207: Consider shorter variations of null checks
- * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
- */
-const sinon = require('sinon')
-const { expect } = require('chai')
-const { ObjectId } = require('../../../app/js/mongodb')
-const Settings = require('@overleaf/settings')
-
-const TrackChangesApp = require('./helpers/TrackChangesApp')
-const TrackChangesClient = require('./helpers/TrackChangesClient')
-const MockWebApi = require('./helpers/MockWebApi')
-
-describe('Getting updates', function () {
-  before(function (done) {
-    this.now = Date.now()
-    this.to = this.now
-    this.user_id = ObjectId().toString()
-    this.deleted_user_id = 'deleted_user'
-    this.doc_id = ObjectId().toString()
-    this.project_id = ObjectId().toString()
-
-    this.minutes = 60 * 1000
-    this.hours = 60 * this.minutes
-
-    MockWebApi.projects[this.project_id] = {
-      features: {
-        versioning: true,
-      },
-    }
-
-    MockWebApi.users[this.user_id] = this.user = {
-      email: 'user@sharelatex.com',
-      first_name: 'Leo',
-      last_name: 'Lion',
-      id: this.user_id,
-    }
-    sinon.spy(MockWebApi, 'getUserInfo')
-
-    this.updates = []
-    for (let i = 0; i <= 9; i++) {
-      this.updates.push({
-        op: [{ i: 'a', p: 0 }],
-        meta: {
-          ts: this.now - (9 - i) * this.hours - 2 * this.minutes,
-          user_id: this.user_id,
-        },
-        v: 2 * i + 1,
-      })
-      this.updates.push({
-        op: [{ i: 'b', p: 0 }],
-        meta: { ts: this.now - (9 - i) * this.hours, user_id: this.user_id },
-        v: 2 * i + 2,
-      })
-    }
-    this.updates[0].meta.user_id = this.deleted_user_id
-
-    TrackChangesApp.ensureRunning(() => {
-      return TrackChangesClient.pushRawUpdates(
-        this.project_id,
-        this.doc_id,
-        this.updates,
-        error => {
-          if (error != null) {
-            throw error
-          }
-          return done()
-        }
-      )
-    })
-    return null
-  })
-
-  after(function () {
-    MockWebApi.getUserInfo.restore()
-    return null
-  })
-
-  describe('getting updates up to the limit', function () {
-    before(function (done) {
-      TrackChangesClient.getUpdates(
-        this.project_id,
-        { before: this.to + 1, min_count: 3 },
-        (error, body) => {
-          if (error != null) {
-            throw error
-          }
-          this.updates = body.updates
-          return done()
-        }
-      )
-      return null
-    })
-
-    it('should fetch the user details from the web api', function () {
-      return MockWebApi.getUserInfo.calledWith(this.user_id).should.equal(true)
-    })
-
-    return it('should return at least the min_count number of summarized updates', function () {
-      const docs1 = {}
-      docs1[this.doc_id] = { toV: 20, fromV: 19 }
-      const docs2 = {}
-      docs2[this.doc_id] = { toV: 18, fromV: 17 }
-      const docs3 = {}
-      docs3[this.doc_id] = { toV: 16, fromV: 15 }
-      return expect(this.updates.slice(0, 3)).to.deep.equal([
-        {
-          docs: docs1,
-          meta: {
-            start_ts: this.to - 2 * this.minutes,
-            end_ts: this.to,
-            users: [this.user],
-          },
-        },
-        {
-          docs: docs2,
-          meta: {
-            start_ts: this.to -
1 * this.hours - 2 * this.minutes, - end_ts: this.to - 1 * this.hours, - users: [this.user], - }, - }, - { - docs: docs3, - meta: { - start_ts: this.to - 2 * this.hours - 2 * this.minutes, - end_ts: this.to - 2 * this.hours, - users: [this.user], - }, - }, - ]) - }) - }) - - return describe('getting updates beyond the end of the database', function () { - before(function (done) { - TrackChangesClient.getUpdates( - this.project_id, - { before: this.to - 8 * this.hours + 1, min_count: 30 }, - (error, body) => { - if (error != null) { - throw error - } - this.updates = body.updates - return done() - } - ) - return null - }) - - return it('should return as many updates as it can', function () { - const docs1 = {} - docs1[this.doc_id] = { toV: 4, fromV: 3 } - const docs2 = {} - docs2[this.doc_id] = { toV: 2, fromV: 1 } - return expect(this.updates).to.deep.equal([ - { - docs: docs1, - meta: { - start_ts: this.to - 8 * this.hours - 2 * this.minutes, - end_ts: this.to - 8 * this.hours, - users: [this.user], - }, - }, - { - docs: docs2, - meta: { - start_ts: this.to - 9 * this.hours - 2 * this.minutes, - end_ts: this.to - 9 * this.hours, - users: [this.user, null], - }, - }, - ]) - }) - }) -}) diff --git a/services/track-changes/test/acceptance/js/LockManagerTests.js b/services/track-changes/test/acceptance/js/LockManagerTests.js deleted file mode 100644 index 4577e0280b..0000000000 --- a/services/track-changes/test/acceptance/js/LockManagerTests.js +++ /dev/null @@ -1,64 +0,0 @@ -/* eslint-disable - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -const sinon = require('sinon') -const { expect } = require('chai') -const Settings = require('@overleaf/settings') -const LockManager = require('../../../app/js/LockManager') -const rclient = require('redis').createClient(Settings.redis.history) // Only works locally for now -const TrackChangesApp = require('./helpers/TrackChangesApp') - -describe('Locking document', function () { - before(function (done) { - TrackChangesApp.ensureRunning(done) - return null - }) - - return describe('when the lock has expired in redis', function () { - before(function (done) { - LockManager.LOCK_TTL = 1 // second - LockManager.runWithLock( - 'doc123', - releaseA => { - // we create a lock A and allow it to expire in redis - return setTimeout( - () => - // now we create a new lock B and try to release A - LockManager.runWithLock( - 'doc123', - releaseB => { - return releaseA() - }, // try to release lock A to see if it wipes out lock B - () => {} - ), - - // we never release lock B so nothing should happen here - 1500 - ) - }, // enough time to wait until the lock has expired - err => { - // we get here after trying to release lock A - expect(err).to.exist - done() - } - ) - return null - }) - - return it('the new lock should not be removed by the expired locker', function (done) { - LockManager.checkLock('doc123', (err, isFree) => { - if (err) return done(err) - expect(isFree).to.equal(false) - return done() - }) - return null - }) - }) -}) diff --git a/services/track-changes/test/acceptance/js/RestoringVersions.js b/services/track-changes/test/acceptance/js/RestoringVersions.js deleted file mode 100644 index 312d92bef9..0000000000 --- a/services/track-changes/test/acceptance/js/RestoringVersions.js 
+++ /dev/null @@ -1,116 +0,0 @@ -/* eslint-disable - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -const sinon = require('sinon') -const { expect } = require('chai') -const { ObjectId } = require('../../../app/js/mongodb') -const Settings = require('@overleaf/settings') - -const TrackChangesApp = require('./helpers/TrackChangesApp') -const TrackChangesClient = require('./helpers/TrackChangesClient') -const MockDocUpdaterApi = require('./helpers/MockDocUpdaterApi') -const MockWebApi = require('./helpers/MockWebApi') - -describe('Restoring a version', function () { - before(function (done) { - sinon.spy(MockDocUpdaterApi, 'setDoc') - - this.now = Date.now() - this.user_id = ObjectId().toString() - this.doc_id = ObjectId().toString() - this.project_id = ObjectId().toString() - MockWebApi.projects[this.project_id] = { features: { versioning: true } } - - const minutes = 60 * 1000 - - this.updates = [ - { - op: [{ i: 'one ', p: 0 }], - meta: { ts: this.now - 6 * minutes, user_id: this.user_id }, - v: 3, - }, - { - op: [{ i: 'two ', p: 4 }], - meta: { ts: this.now - 4 * minutes, user_id: this.user_id }, - v: 4, - }, - { - op: [{ i: 'three ', p: 8 }], - meta: { ts: this.now - 2 * minutes, user_id: this.user_id }, - v: 5, - }, - { - op: [{ i: 'four', p: 14 }], - meta: { ts: this.now, user_id: this.user_id }, - v: 6, - }, - ] - this.lines = ['one two three four'] - this.restored_lines = ['one two '] - this.beforeVersion = 5 - - MockWebApi.users[this.user_id] = this.user = { - email: 'user@sharelatex.com', - first_name: 'Leo', - last_name: 'Lion', - id: this.user_id, - } - - MockDocUpdaterApi.docs[this.doc_id] = { - lines: this.lines, - version: 7, - } - - TrackChangesApp.ensureRunning(() => { - return TrackChangesClient.pushRawUpdates( - this.project_id, - this.doc_id, - this.updates, - error => { - if (error != null) { - throw error - } - return TrackChangesClient.restoreDoc( - this.project_id, - this.doc_id, - this.beforeVersion, - this.user_id, - error => { - if (error != null) { - throw error - } - return done() - } - ) - } - ) - }) - return null - }) - - after(function () { - MockDocUpdaterApi.setDoc.restore() - return null - }) - - return it('should set the doc in the doc updater', function () { - MockDocUpdaterApi.setDoc - .calledWith( - this.project_id, - this.doc_id, - this.restored_lines, - this.user_id, - true - ) - .should.equal(true) - return null - }) -}) diff --git a/services/track-changes/test/acceptance/js/helpers/MockDocStoreApi.js b/services/track-changes/test/acceptance/js/helpers/MockDocStoreApi.js deleted file mode 100644 index b28a8fcb20..0000000000 --- a/services/track-changes/test/acceptance/js/helpers/MockDocStoreApi.js +++ /dev/null @@ -1,50 +0,0 @@ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. 
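MockDocStoreApi below follows the same pattern as the other acceptance-test helpers: an in-memory fixture object fronted by a tiny Express app on a fixed port (3016 for the docstore, matching settings.defaults.js). The same pattern in isolation, as a sketch — the function name and port 3017 are made up for illustration:

// A generic mock service: tests seed `fixtures`, the app serves it back.
function startMockService(port, fixtures) {
  const mockApp = require('express')()
  mockApp.get('/project/:project_id/doc', (req, res) => {
    // like the real mocks, respond 404 when nothing has been seeded
    if (fixtures == null) return res.sendStatus(404)
    res.json(fixtures)
  })
  return mockApp.listen(port)
}

// e.g. startMockService(3017, { doc1: { lines: ['hello world'] } })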
-/*
- * decaffeinate suggestions:
- * DS102: Remove unnecessary code created because of implicit returns
- * DS207: Consider shorter variations of null checks
- * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
- */
-let MockDocStoreApi
-const express = require('express')
-const app = express()
-
-module.exports = MockDocStoreApi = {
-  docs: {},
-
-  getAllDoc(projectId, callback) {
-    if (callback == null) {
-      callback = function () {}
-    }
-    return callback(null, this.docs)
-  },
-
-  run() {
-    app.get('/project/:project_id/doc', (req, res, next) => {
-      return this.getAllDoc(req.params.project_id, (error, docs) => {
-        if (error != null) {
-          res.sendStatus(500)
-        }
-        if (docs == null) {
-          return res.sendStatus(404)
-        } else {
-          return res.send(JSON.stringify(docs))
-        }
-      })
-    })
-
-    return app
-      .listen(3016, error => {
-        if (error != null) {
-          throw error
-        }
-      })
-      .on('error', error => {
-        console.error('error starting MockDocStoreApi:', error.message)
-        return process.exit(1)
-      })
-  },
-}
-
-MockDocStoreApi.run()
diff --git a/services/track-changes/test/acceptance/js/helpers/MockDocUpdaterApi.js b/services/track-changes/test/acceptance/js/helpers/MockDocUpdaterApi.js
deleted file mode 100644
index 8c83d7d332..0000000000
--- a/services/track-changes/test/acceptance/js/helpers/MockDocUpdaterApi.js
+++ /dev/null
@@ -1,87 +0,0 @@
-// TODO: This file was created by bulk-decaffeinate.
-// Fix any style issues and re-enable lint.
-/*
- * decaffeinate suggestions:
- * DS102: Remove unnecessary code created because of implicit returns
- * DS207: Consider shorter variations of null checks
- * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
- */
-let MockDocUpdaterApi
-const express = require('express')
-const bodyParser = require('body-parser')
-const app = express()
-app.use(bodyParser.json())
-
-module.exports = MockDocUpdaterApi = {
-  docs: {},
-
-  getDoc(projectId, docId, callback) {
-    if (callback == null) {
-      callback = function () {}
-    }
-    return callback(null, this.docs[docId])
-  },
-
-  setDoc(projectId, docId, lines, userId, undoing, callback) {
-    if (callback == null) {
-      callback = function () {}
-    }
-    if (!this.docs[docId]) {
-      this.docs[docId] = {}
-    }
-    this.docs[docId].lines = lines
-    return callback()
-  },
-
-  run() {
-    app.get('/project/:project_id/doc/:doc_id', (req, res, next) => {
-      return this.getDoc(
-        req.params.project_id,
-        req.params.doc_id,
-        (error, doc) => {
-          if (error != null) {
-            res.sendStatus(500)
-          }
-          if (doc == null) {
-            return res.sendStatus(404)
-          } else {
-            return res.send(JSON.stringify(doc))
-          }
-        }
-      )
-    })
-
-    app.post('/project/:project_id/doc/:doc_id', (req, res, next) => {
-      return this.setDoc(
-        req.params.project_id,
-        req.params.doc_id,
-        req.body.lines,
-        req.body.user_id,
-        req.body.undoing,
-        (error, doc) => {
-          if (error != null) {
-            return res.sendStatus(500)
-          } else {
-            return res.sendStatus(204)
-          }
-        }
-      )
-    })
-
-    return app
-      .listen(3003, error => {
-        if (error != null) {
-          throw error
-        }
-      })
-      .on('error', error => {
-        console.error('error starting MockDocUpdaterApi:', error.message)
-        return process.exit(1)
-      })
-  },
-}
-
-MockDocUpdaterApi.run()
diff --git a/services/track-changes/test/acceptance/js/helpers/MockWebApi.js b/services/track-changes/test/acceptance/js/helpers/MockWebApi.js
deleted file mode 100644
index 2b7db026e1..0000000000
---
a/services/track-changes/test/acceptance/js/helpers/MockWebApi.js +++ /dev/null @@ -1,72 +0,0 @@ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -let MockWebApi -const express = require('express') -const app = express() - -module.exports = MockWebApi = { - users: {}, - - projects: {}, - - getUserInfo(userId, callback) { - if (callback == null) { - callback = function () {} - } - return callback(null, this.users[userId] || null) - }, - - getProjectDetails(projectId, callback) { - if (callback == null) { - callback = function () {} - } - return callback(null, this.projects[projectId]) - }, - - run() { - app.get('/user/:user_id/personal_info', (req, res, next) => { - return this.getUserInfo(req.params.user_id, (error, user) => { - if (error != null) { - res.sendStatus(500) - } - if (user == null) { - return res.sendStatus(404) - } else { - return res.send(JSON.stringify(user)) - } - }) - }) - - app.get('/project/:project_id/details', (req, res, next) => { - return this.getProjectDetails(req.params.project_id, (error, project) => { - if (error != null) { - res.sendStatus(500) - } - if (project == null) { - return res.sendStatus(404) - } else { - return res.send(JSON.stringify(project)) - } - }) - }) - - return app - .listen(3000, error => { - if (error != null) { - throw error - } - }) - .on('error', error => { - console.error('error starting MockWebApiServer:', error.message) - return process.exit(1) - }) - }, -} - -MockWebApi.run() diff --git a/services/track-changes/test/acceptance/js/helpers/TrackChangesApp.js b/services/track-changes/test/acceptance/js/helpers/TrackChangesApp.js deleted file mode 100644 index 0658e68687..0000000000 --- a/services/track-changes/test/acceptance/js/helpers/TrackChangesApp.js +++ /dev/null @@ -1,64 +0,0 @@ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS101: Remove unnecessary use of Array.from - * DS102: Remove unnecessary code created because of implicit returns - * DS103: Rewrite code to no longer use __guard__ - * DS205: Consider reworking code to avoid use of IIFEs - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -const app = require('../../../../app') -const { waitForDb } = require('../../../../app/js/mongodb') -const logger = require('@overleaf/logger') -const Settings = require('@overleaf/settings') - -module.exports = { - running: false, - initing: false, - callbacks: [], - ensureRunning(callback) { - if (callback == null) { - callback = function () {} - } - if (this.running) { - return callback() - } else if (this.initing) { - return this.callbacks.push(callback) - } - this.initing = true - this.callbacks.push(callback) - waitForDb().then(() => { - return app.listen( - __guard__( - Settings.internal != null - ? 
Settings.internal.trackchanges - : undefined, - x => x.port - ), - 'localhost', - error => { - if (error != null) { - throw error - } - this.running = true - logger.info('track changes running in dev mode') - - return (() => { - const result = [] - for (callback of Array.from(this.callbacks)) { - result.push(callback()) - } - return result - })() - } - ) - }) - }, -} -function __guard__(value, transform) { - return typeof value !== 'undefined' && value !== null - ? transform(value) - : undefined -} diff --git a/services/track-changes/test/acceptance/js/helpers/TrackChangesClient.js b/services/track-changes/test/acceptance/js/helpers/TrackChangesClient.js deleted file mode 100644 index ad68d88a1e..0000000000 --- a/services/track-changes/test/acceptance/js/helpers/TrackChangesClient.js +++ /dev/null @@ -1,302 +0,0 @@ -/* eslint-disable - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS101: Remove unnecessary use of Array.from - * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -let TrackChangesClient -const async = require('async') -const zlib = require('zlib') -const request = require('request') -const Settings = require('@overleaf/settings') -const rclient = require('@overleaf/redis-wrapper').createClient( - Settings.redis.history -) // Only works locally for now -const Keys = Settings.redis.history.key_schema -const { db, ObjectId } = require('../../../../app/js/mongodb') - -const aws = require('aws-sdk') -const s3 = new aws.S3({ - accessKeyId: Settings.trackchanges.s3.key, - secretAccessKey: Settings.trackchanges.s3.secret, - endpoint: Settings.trackchanges.s3.endpoint, - s3ForcePathStyle: Settings.trackchanges.s3.pathStyle, -}) -const S3_BUCKET = Settings.trackchanges.stores.doc_history - -module.exports = TrackChangesClient = { - flushAndGetCompressedUpdates(projectId, docId, callback) { - if (callback == null) { - callback = function () {} - } - return TrackChangesClient.flushDoc(projectId, docId, error => { - if (error != null) { - return callback(error) - } - return TrackChangesClient.getCompressedUpdates(docId, callback) - }) - }, - - flushDoc(projectId, docId, callback) { - if (callback == null) { - callback = function () {} - } - return request.post( - { - url: `http://localhost:3015/project/${projectId}/doc/${docId}/flush`, - }, - (error, response, body) => { - response.statusCode.should.equal(204) - return callback(error) - } - ) - }, - - flushProject(projectId, callback) { - if (callback == null) { - callback = function () {} - } - return request.post( - { - url: `http://localhost:3015/project/${projectId}/flush`, - }, - (error, response, body) => { - response.statusCode.should.equal(204) - return callback(error) - } - ) - }, - - getCompressedUpdates(docId, callback) { - if (callback == null) { - callback = function () {} - } - return db.docHistory - .find({ doc_id: ObjectId(docId) }) - .sort({ 'meta.end_ts': 1 }) - .toArray(callback) - }, - - getProjectMetaData(projectId, callback) { - if (callback == null) { - callback = function () {} - } - return db.projectHistoryMetaData.findOne( - { - project_id: ObjectId(projectId), - }, - callback - ) - }, - - setPreserveHistoryForProject(projectId, callback) { - if (callback == null) { - callback = function () {} - } - return 
db.projectHistoryMetaData.updateOne( - { - project_id: ObjectId(projectId), - }, - { - $set: { preserveHistory: true }, - }, - { - upsert: true, - }, - callback - ) - }, - - pushRawUpdates(projectId, docId, updates, callback) { - if (callback == null) { - callback = function () {} - } - return rclient.sadd( - Keys.docsWithHistoryOps({ project_id: projectId }), - docId, - error => { - if (error != null) { - return callback(error) - } - return rclient.rpush( - Keys.uncompressedHistoryOps({ doc_id: docId }), - ...Array.from(Array.from(updates).map(u => JSON.stringify(u))), - callback - ) - } - ) - }, - - getDiff(projectId, docId, from, to, callback) { - if (callback == null) { - callback = function () {} - } - return request.get( - { - url: `http://localhost:3015/project/${projectId}/doc/${docId}/diff?from=${from}&to=${to}`, - }, - (error, response, body) => { - if (error) return callback(error) - response.statusCode.should.equal(200) - return callback(null, JSON.parse(body)) - } - ) - }, - - getUpdates(projectId, options, callback) { - if (callback == null) { - callback = function () {} - } - return request.get( - { - url: `http://localhost:3015/project/${projectId}/updates?before=${options.before}&min_count=${options.min_count}`, - }, - (error, response, body) => { - if (error) return callback(error) - response.statusCode.should.equal(200) - return callback(null, JSON.parse(body)) - } - ) - }, - - exportProject(projectId, callback) { - request.get( - { url: `http://localhost:3015/project/${projectId}/export`, json: true }, - (error, response, updates) => { - if (error) return callback(error) - response.statusCode.should.equal(200) - callback(null, updates, JSON.parse(response.trailers['x-user-ids'])) - } - ) - }, - - restoreDoc(projectId, docId, version, userId, callback) { - if (callback == null) { - callback = function () {} - } - return request.post( - { - url: `http://localhost:3015/project/${projectId}/doc/${docId}/version/${version}/restore`, - headers: { - 'X-User-Id': userId, - }, - }, - (error, response, body) => { - if (error) return callback(error) - response.statusCode.should.equal(204) - return callback(null) - } - ) - }, - - pushDocHistory(projectId, docId, callback) { - if (callback == null) { - callback = function () {} - } - return request.post( - { - url: `http://localhost:3015/project/${projectId}/doc/${docId}/push`, - }, - (error, response, body) => { - response.statusCode.should.equal(204) - return callback(error) - } - ) - }, - - pullDocHistory(projectId, docId, callback) { - if (callback == null) { - callback = function () {} - } - return request.post( - { - url: `http://localhost:3015/project/${projectId}/doc/${docId}/pull`, - }, - (error, response, body) => { - response.statusCode.should.equal(204) - return callback(error) - } - ) - }, - - waitForS3(done, retries) { - if (retries == null) { - retries = 42 - } - if (!Settings.trackchanges.s3.endpoint) { - return done() - } - - return request.get(`${Settings.trackchanges.s3.endpoint}/`, (err, res) => { - if (res && res.statusCode < 500) { - return done() - } - - if (retries === 0) { - return done(err || new Error(`s3 returned ${res.statusCode}`)) - } - - return setTimeout( - () => TrackChangesClient.waitForS3(done, --retries), - 1000 - ) - }) - }, - - getS3Doc(projectId, docId, packId, callback) { - if (callback == null) { - callback = function () {} - } - const params = { - Bucket: S3_BUCKET, - Key: `${projectId}/changes-${docId}/pack-${packId}`, - } - - return s3.getObject(params, (error, data) => { - if (error != 
null) { - return callback(error) - } - const body = data.Body - if (body == null) { - return callback(new Error('empty response from s3')) - } - return zlib.gunzip(body, (err, result) => { - if (err != null) { - return callback(err) - } - return callback(null, JSON.parse(result.toString())) - }) - }) - }, - - removeS3Doc(projectId, docId, callback) { - if (callback == null) { - callback = function () {} - } - let params = { - Bucket: S3_BUCKET, - Prefix: `${projectId}/changes-${docId}`, - } - - return s3.listObjects(params, (error, data) => { - if (error != null) { - return callback(error) - } - - params = { - Bucket: S3_BUCKET, - Delete: { - Objects: data.Contents.map(s3object => ({ Key: s3object.Key })), - }, - } - - return s3.deleteObjects(params, callback) - }) - }, -} diff --git a/services/track-changes/test/setup.js b/services/track-changes/test/setup.js deleted file mode 100644 index e95a300d30..0000000000 --- a/services/track-changes/test/setup.js +++ /dev/null @@ -1,21 +0,0 @@ -const chai = require('chai') -const SandboxedModule = require('sandboxed-module') - -// Chai configuration -chai.should() - -// SandboxedModule configuration -SandboxedModule.configure({ - requires: { - '@overleaf/logger': { - debug() {}, - log() {}, - info() {}, - warn() {}, - err() {}, - error() {}, - fatal() {}, - }, - }, - globals: { Buffer, JSON, console, process }, -}) diff --git a/services/track-changes/test/unit/js/DiffGenerator/DiffGeneratorTests.js b/services/track-changes/test/unit/js/DiffGenerator/DiffGeneratorTests.js deleted file mode 100644 index 2b67220de9..0000000000 --- a/services/track-changes/test/unit/js/DiffGenerator/DiffGeneratorTests.js +++ /dev/null @@ -1,456 +0,0 @@ -/* eslint-disable - no-return-assign, - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. 
-/* - * decaffeinate suggestions: - * DS101: Remove unnecessary use of Array.from - * DS102: Remove unnecessary code created because of implicit returns - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -const sinon = require('sinon') -const { expect } = require('chai') -const modulePath = '../../../../app/js/DiffGenerator.js' -const SandboxedModule = require('sandboxed-module') - -describe('DiffGenerator', function () { - beforeEach(function () { - this.DiffGenerator = SandboxedModule.require(modulePath, {}) - this.ts = Date.now() - this.user_id = 'mock-user-id' - this.user_id_2 = 'mock-user-id-2' - return (this.meta = { - start_ts: this.ts, - end_ts: this.ts, - user_id: this.user_id, - }) - }) - - describe('rewindOp', function () { - describe('rewinding an insert', function () { - return it('should undo the insert', function () { - const content = 'hello world' - const rewoundContent = this.DiffGenerator.rewindOp(content, { - p: 6, - i: 'wo', - }) - return rewoundContent.should.equal('hello rld') - }) - }) - - describe('rewinding a delete', function () { - return it('should undo the delete', function () { - const content = 'hello rld' - const rewoundContent = this.DiffGenerator.rewindOp(content, { - p: 6, - d: 'wo', - }) - return rewoundContent.should.equal('hello world') - }) - }) - - describe('with an inconsistent update', function () { - return it('should throw an error', function () { - const content = 'hello world' - return expect(() => { - return this.DiffGenerator.rewindOp(content, { p: 6, i: 'foo' }) - }).to.throw(this.DiffGenerator.ConsistencyError) - }) - }) - - return describe('with an update which is beyond the length of the content', function () { - return it('should undo the insert as if it were at the end of the content', function () { - const content = 'foobar' - const rewoundContent = this.DiffGenerator.rewindOp(content, { - p: 4, - i: 'bar', - }) - return rewoundContent.should.equal('foo') - }) - }) - }) - - describe('rewindUpdate', function () { - return it('should rewind ops in reverse', function () { - const content = 'aaabbbccc' - const update = { - op: [ - { p: 3, i: 'bbb' }, - { p: 6, i: 'ccc' }, - ], - } - const rewoundContent = this.DiffGenerator.rewindUpdate(content, update) - return rewoundContent.should.equal('aaa') - }) - }) - - describe('rewindUpdates', function () { - return it('should rewind updates in reverse', function () { - const content = 'aaabbbccc' - const updates = [ - { op: [{ p: 3, i: 'bbb' }] }, - { op: [{ p: 6, i: 'ccc' }] }, - ] - const rewoundContent = this.DiffGenerator.rewindUpdates(content, updates) - return rewoundContent.should.equal('aaa') - }) - }) - - describe('buildDiff', function () { - beforeEach(function () { - this.diff = [{ u: 'mock-diff' }] - this.content = 'Hello world' - this.updates = [ - { i: 'mock-update-1' }, - { i: 'mock-update-2' }, - { i: 'mock-update-3' }, - ] - this.DiffGenerator.applyUpdateToDiff = sinon.stub().returns(this.diff) - this.DiffGenerator.compressDiff = sinon.stub().returns(this.diff) - return (this.result = this.DiffGenerator.buildDiff( - this.content, - this.updates - )) - }) - - it('should return the diff', function () { - return this.result.should.deep.equal(this.diff) - }) - - it('should build the content into an initial diff', function () { - return this.DiffGenerator.applyUpdateToDiff - .calledWith( - [ - { - u: this.content, - }, - ], - this.updates[0] - ) - .should.equal(true) - }) - - it('should apply each update', function () { - return 
Array.from(this.updates).map(update => - this.DiffGenerator.applyUpdateToDiff - .calledWith(sinon.match.any, update) - .should.equal(true) - ) - }) - - return it('should compress the diff', function () { - return this.DiffGenerator.compressDiff - .calledWith(this.diff) - .should.equal(true) - }) - }) - - describe('compressDiff', function () { - describe('with adjacent inserts with the same user_id', function () { - return it('should create one update with combined meta data and min/max timestamps', function () { - const diff = this.DiffGenerator.compressDiff([ - { - i: 'foo', - meta: { start_ts: 10, end_ts: 20, user: { id: this.user_id } }, - }, - { - i: 'bar', - meta: { start_ts: 5, end_ts: 15, user: { id: this.user_id } }, - }, - ]) - return expect(diff).to.deep.equal([ - { - i: 'foobar', - meta: { start_ts: 5, end_ts: 20, user: { id: this.user_id } }, - }, - ]) - }) - }) - - describe('with adjacent inserts with different user_ids', function () { - return it('should leave the inserts unchanged', function () { - const input = [ - { - i: 'foo', - meta: { start_ts: 10, end_ts: 20, user: { id: this.user_id } }, - }, - { - i: 'bar', - meta: { start_ts: 5, end_ts: 15, user: { id: this.user_id_2 } }, - }, - ] - const output = this.DiffGenerator.compressDiff(input) - return expect(output).to.deep.equal(input) - }) - }) - - describe('with adjacent deletes with the same user_id', function () { - return it('should create one update with combined meta data and min/max timestamps', function () { - const diff = this.DiffGenerator.compressDiff([ - { - d: 'foo', - meta: { start_ts: 10, end_ts: 20, user: { id: this.user_id } }, - }, - { - d: 'bar', - meta: { start_ts: 5, end_ts: 15, user: { id: this.user_id } }, - }, - ]) - return expect(diff).to.deep.equal([ - { - d: 'foobar', - meta: { start_ts: 5, end_ts: 20, user: { id: this.user_id } }, - }, - ]) - }) - }) - - return describe('with adjacent deletes with different user_ids', function () { - return it('should leave the deletes unchanged', function () { - const input = [ - { - d: 'foo', - meta: { start_ts: 10, end_ts: 20, user: { id: this.user_id } }, - }, - { - d: 'bar', - meta: { start_ts: 5, end_ts: 15, user: { id: this.user_id_2 } }, - }, - ] - const output = this.DiffGenerator.compressDiff(input) - return expect(output).to.deep.equal(input) - }) - }) - }) - - return describe('applyUpdateToDiff', function () { - describe('an insert', function () { - it('should insert into the middle of (u)nchanged text', function () { - const diff = this.DiffGenerator.applyUpdateToDiff([{ u: 'foobar' }], { - op: [{ p: 3, i: 'baz' }], - meta: this.meta, - }) - return expect(diff).to.deep.equal([ - { u: 'foo' }, - { i: 'baz', meta: this.meta }, - { u: 'bar' }, - ]) - }) - - it('should insert into the start of (u)nchanged text', function () { - const diff = this.DiffGenerator.applyUpdateToDiff([{ u: 'foobar' }], { - op: [{ p: 0, i: 'baz' }], - meta: this.meta, - }) - return expect(diff).to.deep.equal([ - { i: 'baz', meta: this.meta }, - { u: 'foobar' }, - ]) - }) - - it('should insert into the end of (u)nchanged text', function () { - const diff = this.DiffGenerator.applyUpdateToDiff([{ u: 'foobar' }], { - op: [{ p: 6, i: 'baz' }], - meta: this.meta, - }) - return expect(diff).to.deep.equal([ - { u: 'foobar' }, - { i: 'baz', meta: this.meta }, - ]) - }) - - it('should insert into the middle of (i)nserted text', function () { - const diff = this.DiffGenerator.applyUpdateToDiff( - [{ i: 'foobar', meta: this.meta }], - { op: [{ p: 3, i: 'baz' }], meta: this.meta } - ) -
return expect(diff).to.deep.equal([ - { i: 'foo', meta: this.meta }, - { i: 'baz', meta: this.meta }, - { i: 'bar', meta: this.meta }, - ]) - }) - - return it('should not count deletes in the running length total', function () { - const diff = this.DiffGenerator.applyUpdateToDiff( - [{ d: 'deleted', meta: this.meta }, { u: 'foobar' }], - { op: [{ p: 3, i: 'baz' }], meta: this.meta } - ) - return expect(diff).to.deep.equal([ - { d: 'deleted', meta: this.meta }, - { u: 'foo' }, - { i: 'baz', meta: this.meta }, - { u: 'bar' }, - ]) - }) - }) - - return describe('a delete', function () { - describe('deleting unchanged text', function () { - it('should delete from the middle of (u)nchanged text', function () { - const diff = this.DiffGenerator.applyUpdateToDiff( - [{ u: 'foobazbar' }], - { op: [{ p: 3, d: 'baz' }], meta: this.meta } - ) - return expect(diff).to.deep.equal([ - { u: 'foo' }, - { d: 'baz', meta: this.meta }, - { u: 'bar' }, - ]) - }) - - it('should delete from the start of (u)nchanged text', function () { - const diff = this.DiffGenerator.applyUpdateToDiff( - [{ u: 'foobazbar' }], - { op: [{ p: 0, d: 'foo' }], meta: this.meta } - ) - return expect(diff).to.deep.equal([ - { d: 'foo', meta: this.meta }, - { u: 'bazbar' }, - ]) - }) - - it('should delete from the end of (u)nchanged text', function () { - const diff = this.DiffGenerator.applyUpdateToDiff( - [{ u: 'foobazbar' }], - { op: [{ p: 6, d: 'bar' }], meta: this.meta } - ) - return expect(diff).to.deep.equal([ - { u: 'foobaz' }, - { d: 'bar', meta: this.meta }, - ]) - }) - - return it('should delete across multiple (u)nchanged text parts', function () { - const diff = this.DiffGenerator.applyUpdateToDiff( - [{ u: 'foo' }, { u: 'baz' }, { u: 'bar' }], - { op: [{ p: 2, d: 'obazb' }], meta: this.meta } - ) - return expect(diff).to.deep.equal([ - { u: 'fo' }, - { d: 'o', meta: this.meta }, - { d: 'baz', meta: this.meta }, - { d: 'b', meta: this.meta }, - { u: 'ar' }, - ]) - }) - }) - - describe('deleting inserts', function () { - it('should delete from the middle of (i)nserted text', function () { - const diff = this.DiffGenerator.applyUpdateToDiff( - [{ i: 'foobazbar', meta: this.meta }], - { op: [{ p: 3, d: 'baz' }], meta: this.meta } - ) - return expect(diff).to.deep.equal([ - { i: 'foo', meta: this.meta }, - { i: 'bar', meta: this.meta }, - ]) - }) - - it('should delete from the start of (i)nserted text', function () { - const diff = this.DiffGenerator.applyUpdateToDiff( - [{ i: 'foobazbar', meta: this.meta }], - { op: [{ p: 0, d: 'foo' }], meta: this.meta } - ) - return expect(diff).to.deep.equal([{ i: 'bazbar', meta: this.meta }]) - }) - - it('should delete from the end of (i)nserted text', function () { - const diff = this.DiffGenerator.applyUpdateToDiff( - [{ i: 'foobazbar', meta: this.meta }], - { op: [{ p: 6, d: 'bar' }], meta: this.meta } - ) - return expect(diff).to.deep.equal([{ i: 'foobaz', meta: this.meta }]) - }) - - return it('should delete across multiple (u)nchanged and (i)nserted text parts', function () { - const diff = this.DiffGenerator.applyUpdateToDiff( - [{ u: 'foo' }, { i: 'baz', meta: this.meta }, { u: 'bar' }], - { op: [{ p: 2, d: 'obazb' }], meta: this.meta } - ) - return expect(diff).to.deep.equal([ - { u: 'fo' }, - { d: 'o', meta: this.meta }, - { d: 'b', meta: this.meta }, - { u: 'ar' }, - ]) - }) - }) - - describe('deleting over existing deletes', function () { - return it('should delete across multiple (u)nchanged and (d)eleted text parts', function () { - const diff =
this.DiffGenerator.applyUpdateToDiff( - [{ u: 'foo' }, { d: 'baz', meta: this.meta }, { u: 'bar' }], - { op: [{ p: 2, d: 'ob' }], meta: this.meta } - ) - return expect(diff).to.deep.equal([ - { u: 'fo' }, - { d: 'o', meta: this.meta }, - { d: 'baz', meta: this.meta }, - { d: 'b', meta: this.meta }, - { u: 'ar' }, - ]) - }) - }) - - describe("deleting when the text doesn't match", function () { - it('should throw an error when deleting from the middle of (u)nchanged text', function () { - return expect(() => - this.DiffGenerator.applyUpdateToDiff([{ u: 'foobazbar' }], { - op: [{ p: 3, d: 'xxx' }], - meta: this.meta, - }) - ).to.throw(this.DiffGenerator.ConsistencyError) - }) - - it('should throw an error when deleting from the start of (u)nchanged text', function () { - return expect(() => - this.DiffGenerator.applyUpdateToDiff([{ u: 'foobazbar' }], { - op: [{ p: 0, d: 'xxx' }], - meta: this.meta, - }) - ).to.throw(this.DiffGenerator.ConsistencyError) - }) - - return it('should throw an error when deleting from the end of (u)nchanged text', function () { - return expect(() => - this.DiffGenerator.applyUpdateToDiff([{ u: 'foobazbar' }], { - op: [{ p: 6, d: 'xxx' }], - meta: this.meta, - }) - ).to.throw(this.DiffGenerator.ConsistencyError) - }) - }) - - describe('when the last update in the existing diff is a delete', function () { - return it('should insert the new update before the delete', function () { - const diff = this.DiffGenerator.applyUpdateToDiff( - [{ u: 'foo' }, { d: 'bar', meta: this.meta }], - { op: [{ p: 3, i: 'baz' }], meta: this.meta } - ) - return expect(diff).to.deep.equal([ - { u: 'foo' }, - { i: 'baz', meta: this.meta }, - { d: 'bar', meta: this.meta }, - ]) - }) - }) - - return describe('when the only update in the existing diff is a delete', function () { - return it('should insert the new update after the delete', function () { - const diff = this.DiffGenerator.applyUpdateToDiff( - [{ d: 'bar', meta: this.meta }], - { op: [{ p: 0, i: 'baz' }], meta: this.meta } - ) - return expect(diff).to.deep.equal([ - { d: 'bar', meta: this.meta }, - { i: 'baz', meta: this.meta }, - ]) - }) - }) - }) - }) -}) diff --git a/services/track-changes/test/unit/js/DiffManager/DiffManagerTests.js b/services/track-changes/test/unit/js/DiffManager/DiffManagerTests.js deleted file mode 100644 index 5cc2977921..0000000000 --- a/services/track-changes/test/unit/js/DiffManager/DiffManagerTests.js +++ /dev/null @@ -1,443 +0,0 @@ -/* eslint-disable - no-return-assign, - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. 
-/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -const sinon = require('sinon') -const { expect } = require('chai') -const modulePath = '../../../../app/js/DiffManager.js' -const SandboxedModule = require('sandboxed-module') - -describe('DiffManager', function () { - beforeEach(function () { - this.DiffManager = SandboxedModule.require(modulePath, { - requires: { - './UpdatesManager': (this.UpdatesManager = {}), - './DocumentUpdaterManager': (this.DocumentUpdaterManager = {}), - './DiffGenerator': (this.DiffGenerator = {}), - }, - }) - this.callback = sinon.stub() - this.from = new Date() - this.to = new Date(Date.now() + 10000) - this.project_id = 'mock-project-id' - return (this.doc_id = 'mock-doc-id') - }) - - describe('getLatestDocAndUpdates', function () { - beforeEach(function () { - this.content = 'hello world' - this.version = 42 - this.updates = ['mock-update-1', 'mock-update-2'] - - this.DocumentUpdaterManager.getDocument = sinon - .stub() - .callsArgWith(2, null, this.content, this.version) - return (this.UpdatesManager.getDocUpdatesWithUserInfo = sinon - .stub() - .callsArgWith(3, null, this.updates)) - }) - - describe('with a fromVersion', function () { - beforeEach(function () { - return this.DiffManager.getLatestDocAndUpdates( - this.project_id, - this.doc_id, - this.from, - this.callback - ) - }) - - it('should get the latest version of the doc', function () { - return this.DocumentUpdaterManager.getDocument - .calledWith(this.project_id, this.doc_id) - .should.equal(true) - }) - - it('should get the latest updates', function () { - return this.UpdatesManager.getDocUpdatesWithUserInfo - .calledWith(this.project_id, this.doc_id, { from: this.from }) - .should.equal(true) - }) - - return it('should call the callback with the content, version and updates', function () { - return this.callback - .calledWith(null, this.content, this.version, this.updates) - .should.equal(true) - }) - }) - - return describe('with no fromVersion', function () { - beforeEach(function () { - return this.DiffManager.getLatestDocAndUpdates( - this.project_id, - this.doc_id, - null, - this.callback - ) - }) - - it('should get the latest version of the doc', function () { - return this.DocumentUpdaterManager.getDocument - .calledWith(this.project_id, this.doc_id) - .should.equal(true) - }) - - it('should not get the latest updates', function () { - return this.UpdatesManager.getDocUpdatesWithUserInfo.called.should.equal( - false - ) - }) - - return it('should call the callback with the content, version and blank updates', function () { - return this.callback - .calledWith(null, this.content, this.version, []) - .should.equal(true) - }) - }) - }) - - describe('getDiff', function () { - beforeEach(function () { - this.content = 'hello world' - // Op versions are the version they were applied to, so doc is always one version - // ahead. - this.version = 43 - this.updates = [ - { - op: 'mock-4', - v: 42, - meta: { start_ts: new Date(this.to.getTime() + 20) }, - }, - { - op: 'mock-3', - v: 41, - meta: { start_ts: new Date(this.to.getTime() + 10) }, - }, - { - op: 'mock-2', - v: 40, - meta: { start_ts: new Date(this.to.getTime() - 10) }, - }, - { - op: 'mock-1', - v: 39, - meta: { start_ts: new Date(this.to.getTime() - 20) }, - }, - ] - this.fromVersion = 39 - this.toVersion = 40 - this.diffed_updates = this.updates.slice(2) - this.rewound_content =
'rewound-content' - return (this.diff = [{ u: 'mock-diff' }]) - }) - - describe('with matching versions', function () { - beforeEach(function () { - this.DiffManager.getDocumentBeforeVersion = sinon - .stub() - .callsArgWith(3, null, this.rewound_content, this.updates) - this.DiffGenerator.buildDiff = sinon.stub().returns(this.diff) - return this.DiffManager.getDiff( - this.project_id, - this.doc_id, - this.fromVersion, - this.toVersion, - this.callback - ) - }) - - it('should get the latest doc and version with all recent updates', function () { - return this.DiffManager.getDocumentBeforeVersion - .calledWith(this.project_id, this.doc_id, this.fromVersion) - .should.equal(true) - }) - - it('should generate the diff', function () { - return this.DiffGenerator.buildDiff - .calledWith( - this.rewound_content, - this.diffed_updates.slice().reverse() - ) - .should.equal(true) - }) - - return it('should call the callback with the diff', function () { - return this.callback.calledWith(null, this.diff).should.equal(true) - }) - }) - - describe('when the updates are inconsistent', function () { - beforeEach(function () { - this.DiffManager.getLatestDocAndUpdates = sinon - .stub() - .callsArgWith(3, null, this.content, this.version, this.updates) - this.DiffGenerator.buildDiff = sinon - .stub() - .throws((this.error = new Error('inconsistent!'))) - this.DiffGenerator.rewindUpdates = sinon.stub() - this.DiffManager.getDiff( - this.project_id, - this.doc_id, - this.fromVersion, - this.toVersion, - this.callback - ) - }) - - it('should call the callback with an error', function () { - this.callback.calledWith(sinon.match(Error)).should.equal(true) - const errorObj = this.callback.args[0][0] - expect(errorObj.message).to.include('inconsistent!') - }) - }) - }) - - describe('getDocumentBeforeVersion', function () { - beforeEach(function () { - this.DiffManager._tryGetDocumentBeforeVersion = sinon.stub() - this.document = 'mock-documents' - return (this.rewound_updates = 'mock-rewound-updates') - }) - - describe('successfully', function () { - beforeEach(function () { - this.DiffManager._tryGetDocumentBeforeVersion.yields( - null, - this.document, - this.rewound_updates - ) - return this.DiffManager.getDocumentBeforeVersion( - this.project_id, - this.doc_id, - this.version, - this.callback - ) - }) - - it('should call _tryGetDocumentBeforeVersion', function () { - return this.DiffManager._tryGetDocumentBeforeVersion - .calledWith(this.project_id, this.doc_id, this.version) - .should.equal(true) - }) - - return it('should call the callback with the response', function () { - return this.callback - .calledWith(null, this.document, this.rewound_updates) - .should.equal(true) - }) - }) - - describe('with a retry needed', function () { - beforeEach(function () { - let retried = false - this.DiffManager._tryGetDocumentBeforeVersion = ( - projectId, - docId, - version, - callback - ) => { - if (!retried) { - retried = true - const error = new Error() - error.retry = true - return callback(error) - } else { - return callback(null, this.document, this.rewound_updates) - } - } - sinon.spy(this.DiffManager, '_tryGetDocumentBeforeVersion') - return this.DiffManager.getDocumentBeforeVersion( - this.project_id, - this.doc_id, - this.version, - this.callback - ) - }) - - it('should call _tryGetDocumentBeforeVersion twice', function () { - return this.DiffManager._tryGetDocumentBeforeVersion.calledTwice.should.equal( - true - ) - }) - - return it('should call the callback with the response', function () { - return
this.callback - .calledWith(null, this.document, this.rewound_updates) - .should.equal(true) - }) - }) - - describe('with a non-retriable error', function () { - beforeEach(function () { - this.error = new Error('oops') - this.DiffManager._tryGetDocumentBeforeVersion.yields(this.error) - return this.DiffManager.getDocumentBeforeVersion( - this.project_id, - this.doc_id, - this.version, - this.callback - ) - }) - - it('should call _tryGetDocumentBeforeVersion once', function () { - return this.DiffManager._tryGetDocumentBeforeVersion.calledOnce.should.equal( - true - ) - }) - - return it('should call the callback with the error', function () { - return this.callback.calledWith(this.error).should.equal(true) - }) - }) - - return describe('when retry limit is matched', function () { - beforeEach(function () { - this.error = new Error('oops') - this.error.retry = true - this.DiffManager._tryGetDocumentBeforeVersion.yields(this.error) - return this.DiffManager.getDocumentBeforeVersion( - this.project_id, - this.doc_id, - this.version, - this.callback - ) - }) - - it('should call _tryGetDocumentBeforeVersion three times (max retries)', function () { - return this.DiffManager._tryGetDocumentBeforeVersion.calledThrice.should.equal( - true - ) - }) - - return it('should call the callback with the error', function () { - return this.callback.calledWith(this.error).should.equal(true) - }) - }) - }) - - return describe('_tryGetDocumentBeforeVersion', function () { - beforeEach(function () { - this.content = 'hello world' - // Op versions are the version they were applied to, so doc is always one version - // ahead. - this.version = 43 - this.updates = [ - { - op: 'mock-4', - v: 42, - meta: { start_ts: new Date(this.to.getTime() + 20) }, - }, - { - op: 'mock-3', - v: 41, - meta: { start_ts: new Date(this.to.getTime() + 10) }, - }, - { - op: 'mock-2', - v: 40, - meta: { start_ts: new Date(this.to.getTime() - 10) }, - }, - { - op: 'mock-1', - v: 39, - meta: { start_ts: new Date(this.to.getTime() - 20) }, - }, - ] - this.fromVersion = 39 - this.rewound_content = 'rewound-content' - return (this.diff = [{ u: 'mock-diff' }]) - }) - - describe('with matching versions', function () { - beforeEach(function () { - this.DiffManager.getLatestDocAndUpdates = sinon - .stub() - .callsArgWith(3, null, this.content, this.version, this.updates) - this.DiffGenerator.rewindUpdates = sinon.spy((content, updates) => { - // the rewindUpdates method reverses the 'updates' array - updates.reverse() - return this.rewound_content - }) - this.rewindUpdatesWithArgs = this.DiffGenerator.rewindUpdates.withArgs( - this.content, - this.updates.slice().reverse() - ) - return this.DiffManager._tryGetDocumentBeforeVersion( - this.project_id, - this.doc_id, - this.fromVersion, - this.callback - ) - }) - - it('should get the latest doc and version with all recent updates', function () { - return this.DiffManager.getLatestDocAndUpdates - .calledWith(this.project_id, this.doc_id, this.fromVersion) - .should.equal(true) - }) - - it('should rewind the diff', function () { - return sinon.assert.calledOnce(this.rewindUpdatesWithArgs) - }) - - return it('should call the callback with the rewound document and updates', function () { - return this.callback - .calledWith(null, this.rewound_content, this.updates) - .should.equal(true) - }) - }) - - describe('with mismatching versions', function () { - beforeEach(function () { - this.version = 50 - this.updates = [ - { op: 'mock-1', v: 40 }, - { op: 'mock-1', v: 39 }, - ] -
this.DiffManager.getLatestDocAndUpdates = sinon - .stub() - .callsArgWith(3, null, this.content, this.version, this.updates) - return this.DiffManager._tryGetDocumentBeforeVersion( - this.project_id, - this.doc_id, - this.fromVersion, - this.callback - ) - }) - - return it('should call the callback with an error with retry = true set', function () { - this.callback.calledOnce.should.equal(true) - const error = this.callback.args[0][0] - return expect(error.retry).to.equal(true) - }) - }) - - return describe('when the updates are inconsistent', function () { - beforeEach(function () { - this.DiffManager.getLatestDocAndUpdates = sinon - .stub() - .callsArgWith(3, null, this.content, this.version, this.updates) - this.DiffGenerator.rewindUpdates = sinon - .stub() - .throws((this.error = new Error('inconsistent!'))) - return this.DiffManager.getDocumentBeforeVersion( - this.project_id, - this.doc_id, - this.fromVersion, - this.callback - ) - }) - - return it('should call the callback with an error', function () { - return this.callback.calledWith(this.error).should.equal(true) - }) - }) - }) -}) diff --git a/services/track-changes/test/unit/js/DocArchive/MongoAWS.js b/services/track-changes/test/unit/js/DocArchive/MongoAWS.js deleted file mode 100644 index 1fcced3d43..0000000000 --- a/services/track-changes/test/unit/js/DocArchive/MongoAWS.js +++ /dev/null @@ -1,110 +0,0 @@ -/* eslint-disable - no-return-assign, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -const sinon = require('sinon') -const modulePath = '../../../../app/js/MongoAWS.js' -const SandboxedModule = require('sandboxed-module') -const { ObjectId } = require('mongodb') -const MemoryStream = require('memorystream') -const zlib = require('zlib') - -describe('MongoAWS', function () { - beforeEach(function () { - this.MongoAWS = SandboxedModule.require(modulePath, { - singleOnly: true, - requires: { - '@overleaf/settings': (this.settings = { - trackchanges: { - s3: { - secret: 's3-secret', - key: 's3-key', - }, - stores: { - doc_history: 's3-bucket', - }, - }, - }), - child_process: (this.child_process = {}), - 'mongo-uri': (this.mongouri = {}), - 'aws-sdk': (this.awssdk = {}), - fs: (this.fs = {}), - 's3-streams': (this.S3S = {}), - './mongodb': { db: (this.db = {}), ObjectId }, - JSONStream: (this.JSONStream = {}), - 'readline-stream': (this.readline = sinon.stub()), - '@overleaf/metrics': { inc() {} }, - }, - }) - - this.project_id = ObjectId().toString() - this.doc_id = ObjectId().toString() - this.pack_id = ObjectId() - this.update = { v: 123 } - return (this.callback = sinon.stub()) - }) - - describe('archivePack', function () { - beforeEach(function (done) { - this.awssdk.config = { update: sinon.stub() } - this.awssdk.S3 = sinon.stub() - this.S3S.WriteStream = () => MemoryStream.createWriteStream() - this.db.docHistory = {} - this.db.docHistory.findOne = sinon - .stub() - .callsArgWith(1, null, { pack: 'hello' }) - - return this.MongoAWS.archivePack( - this.project_id, - this.doc_id, - this.pack_id, - (err, result) => { - if (err) return done(err) - this.callback() - return done() - } - ) - }) - - return it('should call the callback', function () { - return this.callback.called.should.equal(true) - }) - }) - - return describe('unArchivePack', function () { - 
beforeEach(function (done) { - return zlib.gzip('{"pack":"123"}', (err, zbuf) => { - if (err) return done(err) - this.awssdk.config = { update: sinon.stub() } - this.awssdk.S3 = sinon.stub() - this.S3S.ReadStream = () => - MemoryStream.createReadStream(zbuf, { readable: true }) - this.db.docHistory = {} - this.db.docHistory.insertOne = sinon - .stub() - .yields(null, { insertedId: ObjectId() }) - - return this.MongoAWS.unArchivePack( - this.project_id, - this.doc_id, - this.pack_id, - (err, result) => { - if (err) return done(err) - this.callback() - return done() - } - ) - }) - }) - - return it('should call db.docHistory.insert', function () { - return this.db.docHistory.insertOne.called.should.equal(true) - }) - }) -}) diff --git a/services/track-changes/test/unit/js/DocumentUpdaterManager/DocumentUpdaterManagerTests.js b/services/track-changes/test/unit/js/DocumentUpdaterManager/DocumentUpdaterManagerTests.js deleted file mode 100644 index 1f6fae8651..0000000000 --- a/services/track-changes/test/unit/js/DocumentUpdaterManager/DocumentUpdaterManagerTests.js +++ /dev/null @@ -1,218 +0,0 @@ -/* eslint-disable - no-return-assign, - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -const sinon = require('sinon') -const { expect } = require('chai') -const modulePath = '../../../../app/js/DocumentUpdaterManager.js' -const SandboxedModule = require('sandboxed-module') - -describe('DocumentUpdaterManager', function () { - beforeEach(function () { - this.DocumentUpdaterManager = SandboxedModule.require(modulePath, { - requires: { - request: (this.request = {}), - '@overleaf/settings': (this.settings = { - apis: { documentupdater: { url: 'http://example.com' } }, - }), - }, - }) - this.callback = sinon.stub() - this.lines = ['one', 'two', 'three'] - return (this.version = 42) - }) - - describe('getDocument', function () { - describe('successfully', function () { - beforeEach(function () { - this.body = JSON.stringify({ - lines: this.lines, - version: this.version, - ops: [], - }) - this.request.get = sinon - .stub() - .callsArgWith(1, null, { statusCode: 200 }, this.body) - return this.DocumentUpdaterManager.getDocument( - this.project_id, - this.doc_id, - this.callback - ) - }) - - it('should get the document from the document updater', function () { - const url = `${this.settings.apis.documentupdater.url}/project/${this.project_id}/doc/${this.doc_id}` - return this.request.get.calledWith(url).should.equal(true) - }) - - return it('should call the callback with the content and version', function () { - return this.callback - .calledWith(null, this.lines.join('\n'), this.version) - .should.equal(true) - }) - }) - - describe('when the document updater API returns an error', function () { - beforeEach(function () { - this.request.get = sinon - .stub() - .callsArgWith( - 1, - (this.error = new Error('something went wrong')), - null, - null - ) - return this.DocumentUpdaterManager.getDocument( - this.project_id, - this.doc_id, - this.callback - ) - }) - - return it('should return an error to the callback', function () { - return this.callback.calledWith(this.error).should.equal(true) - }) - }) - - describe('when the document updater returns not found', function () { - beforeEach(function () { - this.request.get = sinon - .stub() - 
.callsArgWith(1, null, { statusCode: 404 }, '') - return this.DocumentUpdaterManager.getDocument( - this.project_id, - this.doc_id, - this.callback - ) - }) - - it('should return the callback with a "not found" error', function () { - return this.callback - .calledWith(sinon.match.has('message', 'doc not found')) - .should.equal(true) - }) - }) - - return describe('when the document updater returns a failure error code', function () { - beforeEach(function () { - this.request.get = sinon - .stub() - .callsArgWith(1, null, { statusCode: 500 }, '') - return this.DocumentUpdaterManager.getDocument( - this.project_id, - this.doc_id, - this.callback - ) - }) - - return it('should return the callback with an error', function () { - return this.callback - .calledWith( - sinon.match.has( - 'message', - 'doc updater returned a non-success status code: 500' - ) - ) - .should.equal(true) - }) - }) - }) - - return describe('setDocument', function () { - beforeEach(function () { - this.content = 'mock content' - return (this.user_id = 'user-id-123') - }) - - describe('successfully', function () { - beforeEach(function () { - this.request.post = sinon - .stub() - .callsArgWith(1, null, { statusCode: 200 }) - return this.DocumentUpdaterManager.setDocument( - this.project_id, - this.doc_id, - this.content, - this.user_id, - this.callback - ) - }) - - it('should set the document in the document updater', function () { - const url = `${this.settings.apis.documentupdater.url}/project/${this.project_id}/doc/${this.doc_id}` - return this.request.post - .calledWith({ - url, - json: { - lines: this.content.split('\n'), - source: 'restore', - user_id: this.user_id, - undoing: true, - }, - }) - .should.equal(true) - }) - - return it('should call the callback', function () { - return this.callback.calledWith(null).should.equal(true) - }) - }) - - describe('when the document updater API returns an error', function () { - beforeEach(function () { - this.request.post = sinon - .stub() - .callsArgWith( - 1, - (this.error = new Error('something went wrong')), - null, - null - ) - return this.DocumentUpdaterManager.setDocument( - this.project_id, - this.doc_id, - this.content, - this.user_id, - this.callback - ) - }) - - return it('should return an error to the callback', function () { - return this.callback.calledWith(this.error).should.equal(true) - }) - }) - - return describe('when the document updater returns a failure error code', function () { - beforeEach(function () { - this.request.post = sinon - .stub() - .callsArgWith(1, null, { statusCode: 500 }, '') - return this.DocumentUpdaterManager.setDocument( - this.project_id, - this.doc_id, - this.content, - this.user_id, - this.callback - ) - }) - - return it('should return the callback with an error', function () { - return this.callback - .calledWith( - sinon.match.has( - 'message', - 'doc updater returned a non-success status code: 500' - ) - ) - .should.equal(true) - }) - }) - }) -}) diff --git a/services/track-changes/test/unit/js/HttpController/HttpControllerTests.js b/services/track-changes/test/unit/js/HttpController/HttpControllerTests.js deleted file mode 100644 index ff28975b30..0000000000 --- a/services/track-changes/test/unit/js/HttpController/HttpControllerTests.js +++ /dev/null @@ -1,200 +0,0 @@ -/* eslint-disable - no-return-assign, - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. 
-/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -const sinon = require('sinon') -const { expect } = require('chai') -const modulePath = '../../../../app/js/HttpController.js' -const SandboxedModule = require('sandboxed-module') - -describe('HttpController', function () { - beforeEach(function () { - this.HttpController = SandboxedModule.require(modulePath, { - singleOnly: true, - requires: { - './UpdatesManager': (this.UpdatesManager = {}), - './DiffManager': (this.DiffManager = {}), - './RestoreManager': (this.RestoreManager = {}), - './PackManager': (this.PackManager = {}), - './DocArchiveManager': (this.DocArchiveManager = {}), - './HealthChecker': (this.HealthChecker = {}), - './ZipManager': (this.ZipManager = {}), - }, - }) - this.doc_id = 'doc-id-123' - this.project_id = 'project-id-123' - this.next = sinon.stub() - this.user_id = 'mock-user-123' - return (this.now = Date.now()) - }) - - describe('flushDoc', function () { - beforeEach(function () { - this.req = { - params: { - doc_id: this.doc_id, - project_id: this.project_id, - }, - } - this.res = { sendStatus: sinon.stub() } - this.UpdatesManager.processUncompressedUpdatesWithLock = sinon - .stub() - .callsArg(2) - return this.HttpController.flushDoc(this.req, this.res, this.next) - }) - - it('should process the updates', function () { - return this.UpdatesManager.processUncompressedUpdatesWithLock - .calledWith(this.project_id, this.doc_id) - .should.equal(true) - }) - - return it('should return a success code', function () { - return this.res.sendStatus.calledWith(204).should.equal(true) - }) - }) - - describe('flushProject', function () { - beforeEach(function () { - this.req = { - params: { - project_id: this.project_id, - }, - } - this.res = { sendStatus: sinon.stub() } - this.UpdatesManager.processUncompressedUpdatesForProject = sinon - .stub() - .callsArg(1) - return this.HttpController.flushProject(this.req, this.res, this.next) - }) - - it('should process the updates', function () { - return this.UpdatesManager.processUncompressedUpdatesForProject - .calledWith(this.project_id) - .should.equal(true) - }) - - return it('should return a success code', function () { - return this.res.sendStatus.calledWith(204).should.equal(true) - }) - }) - - describe('getDiff', function () { - beforeEach(function () { - this.from = 42 - this.to = 45 - this.req = { - params: { - doc_id: this.doc_id, - project_id: this.project_id, - }, - query: { - from: this.from.toString(), - to: this.to.toString(), - }, - } - this.res = { json: sinon.stub() } - this.diff = [{ u: 'mock-diff' }] - this.DiffManager.getDiff = sinon.stub().callsArgWith(4, null, this.diff) - return this.HttpController.getDiff(this.req, this.res, this.next) - }) - - it('should get the diff', function () { - return this.DiffManager.getDiff - .calledWith( - this.project_id, - this.doc_id, - parseInt(this.from, 10), - parseInt(this.to, 10) - ) - .should.equal(true) - }) - - return it('should return the diff', function () { - return this.res.json.calledWith({ diff: this.diff }).should.equal(true) - }) - }) - - describe('getUpdates', function () { - beforeEach(function () { - this.before = Date.now() - this.nextBeforeTimestamp = this.before - 100 - this.min_count = 10 - this.req = { - params: { - project_id: this.project_id, - }, - query: { - before: this.before.toString(), - min_count: this.min_count.toString(), - }, - } - this.res 
= { json: sinon.stub() } - this.updates = ['mock-summarized-updates'] - this.UpdatesManager.getSummarizedProjectUpdates = sinon - .stub() - .callsArgWith(2, null, this.updates, this.nextBeforeTimestamp) - return this.HttpController.getUpdates(this.req, this.res, this.next) - }) - - it('should get the updates', function () { - return this.UpdatesManager.getSummarizedProjectUpdates - .calledWith(this.project_id, { - before: this.before, - min_count: this.min_count, - }) - .should.equal(true) - }) - - return it('should return the formatted updates', function () { - return this.res.json - .calledWith({ - updates: this.updates, - nextBeforeTimestamp: this.nextBeforeTimestamp, - }) - .should.equal(true) - }) - }) - - return describe('RestoreManager', function () { - beforeEach(function () { - this.version = '42' - this.req = { - params: { - doc_id: this.doc_id, - project_id: this.project_id, - version: this.version, - }, - headers: { - 'x-user-id': this.user_id, - }, - } - this.res = { sendStatus: sinon.stub() } - - this.RestoreManager.restoreToBeforeVersion = sinon.stub().callsArg(4) - return this.HttpController.restore(this.req, this.res, this.next) - }) - - it('should restore the document', function () { - return this.RestoreManager.restoreToBeforeVersion - .calledWith( - this.project_id, - this.doc_id, - parseInt(this.version, 10), - this.user_id - ) - .should.equal(true) - }) - - return it('should return a success code', function () { - return this.res.sendStatus.calledWith(204).should.equal(true) - }) - }) -}) diff --git a/services/track-changes/test/unit/js/LockManager/LockManagerTests.js b/services/track-changes/test/unit/js/LockManager/LockManagerTests.js deleted file mode 100644 index d99b3fcb83..0000000000 --- a/services/track-changes/test/unit/js/LockManager/LockManagerTests.js +++ /dev/null @@ -1,315 +0,0 @@ -/* eslint-disable - mocha/no-nested-tests, - no-return-assign, - no-undef, - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. 
-/* - * decaffeinate suggestions: - * DS101: Remove unnecessary use of Array.from - * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -const sinon = require('sinon') -const { expect } = require('chai') -const modulePath = '../../../../app/js/LockManager.js' -const SandboxedModule = require('sandboxed-module') - -describe('LockManager', function () { - beforeEach(function () { - this.Settings = { - redis: { - lock: {}, - }, - } - this.LockManager = SandboxedModule.require(modulePath, { - requires: { - '@overleaf/redis-wrapper': { - createClient: () => { - return (this.rclient = { auth: sinon.stub() }) - }, - }, - '@overleaf/settings': this.Settings, - }, - }) - - this.key = 'lock-key' - return (this.callback = sinon.stub()) - }) - - describe('checkLock', function () { - describe('when the lock is taken', function () { - beforeEach(function () { - this.rclient.exists = sinon.stub().callsArgWith(1, null, '1') - return this.LockManager.checkLock(this.key, this.callback) - }) - - it('should check the lock in redis', function () { - return this.rclient.exists.calledWith(this.key).should.equal(true) - }) - - return it('should return the callback with false', function () { - return this.callback.calledWith(null, false).should.equal(true) - }) - }) - - return describe('when the lock is free', function () { - beforeEach(function () { - this.rclient.exists = sinon.stub().callsArgWith(1, null, '0') - return this.LockManager.checkLock(this.key, this.callback) - }) - - return it('should return the callback with true', function () { - return this.callback.calledWith(null, true).should.equal(true) - }) - }) - }) - - describe('tryLock', function () { - describe('when the lock is taken', function () { - beforeEach(function () { - this.rclient.set = sinon.stub().callsArgWith(5, null, null) - this.LockManager.randomLock = sinon - .stub() - .returns('locked-random-value') - return this.LockManager.tryLock(this.key, this.callback) - }) - - it('should check the lock in redis', function () { - return this.rclient.set - .calledWith( - this.key, - 'locked-random-value', - 'EX', - this.LockManager.LOCK_TTL, - 'NX' - ) - .should.equal(true) - }) - - return it('should return the callback with false', function () { - return this.callback.calledWith(null, false).should.equal(true) - }) - }) - - return describe('when the lock is free', function () { - beforeEach(function () { - this.rclient.set = sinon.stub().callsArgWith(5, null, 'OK') - return this.LockManager.tryLock(this.key, this.callback) - }) - - return it('should return the callback with true', function () { - return this.callback.calledWith(null, true).should.equal(true) - }) - }) - }) - - describe('deleteLock', function () { - - beforeEach(function () { - this.rclient.del = sinon.stub().callsArg(1) - return this.LockManager.deleteLock(this.key, this.callback) - }) - - it('should delete the lock in redis', function () { - return this.rclient.del.calledWith(this.key).should.equal(true) - }) - - return it('should call the callback', function () { - return this.callback.called.should.equal(true) - }) - - }) - - describe('getLock', function () { - describe('when the lock is not taken', function () { - beforeEach(function (done) { - this.LockManager.tryLock = sinon.stub().callsArgWith(1, null, true) - return this.LockManager.getLock(this.key, (...args) => { -
this.callback(...Array.from(args || [])) - return done() - }) - }) - - it('should try to get the lock', function () { - return this.LockManager.tryLock.calledWith(this.key).should.equal(true) - }) - - it('should only need to try once', function () { - return this.LockManager.tryLock.callCount.should.equal(1) - }) - - return it('should return the callback', function () { - return this.callback.calledWith(null).should.equal(true) - }) - }) - - describe('when the lock is initially set', function () { - beforeEach(function (done) { - const startTime = Date.now() - this.LockManager.LOCK_TEST_INTERVAL = 5 - this.LockManager.tryLock = function (docId, callback) { - if (callback == null) { - callback = function () {} - } - if (Date.now() - startTime < 100) { - return callback(null, false) - } else { - return callback(null, true) - } - } - sinon.spy(this.LockManager, 'tryLock') - - return this.LockManager.getLock(this.key, (...args) => { - this.callback(...Array.from(args || [])) - return done() - }) - }) - - it('should call tryLock multiple times until free', function () { - return (this.LockManager.tryLock.callCount > 1).should.equal(true) - }) - - return it('should return the callback', function () { - return this.callback.calledWith(null).should.equal(true) - }) - }) - - return describe('when the lock times out', function () { - beforeEach(function (done) { - const time = Date.now() - this.LockManager.MAX_LOCK_WAIT_TIME = 5 - this.LockManager.tryLock = sinon.stub().callsArgWith(1, null, false) - return this.LockManager.getLock(this.key, (...args) => { - this.callback(...Array.from(args || [])) - return done() - }) - }) - - return it('should return the callback with an error', function () { - return this.callback - .calledWith(sinon.match.instanceOf(Error)) - .should.equal(true) - }) - }) - }) - - return describe('runWithLock', function () { - describe('with successful run', function () { - beforeEach(function () { - this.runner = function (releaseLock) { - if (releaseLock == null) { - releaseLock = function () {} - } - return releaseLock() - } - sinon.spy(this, 'runner') - this.LockManager.getLock = sinon.stub().callsArg(1) - this.LockManager.releaseLock = sinon.stub().callsArg(2) - return this.LockManager.runWithLock( - this.key, - this.runner, - this.callback - ) - }) - - it('should get the lock', function () { - return this.LockManager.getLock.calledWith(this.key).should.equal(true) - }) - - it('should run the passed function', function () { - return this.runner.called.should.equal(true) - }) - - it('should release the lock', function () { - return this.LockManager.releaseLock - .calledWith(this.key) - .should.equal(true) - }) - - return it('should call the callback', function () { - return this.callback.called.should.equal(true) - }) - }) - - describe('when the runner function returns an error', function () { - beforeEach(function () { - this.error = new Error('oops') - this.runner = releaseLock => { - if (releaseLock == null) { - releaseLock = function () {} - } - return releaseLock(this.error) - } - sinon.spy(this, 'runner') - this.LockManager.getLock = sinon.stub().callsArg(1) - this.LockManager.releaseLock = sinon.stub().callsArg(2) - return this.LockManager.runWithLock( - this.key, - this.runner, - this.callback - ) - }) - - it('should release the lock', function () { - return this.LockManager.releaseLock - .calledWith(this.key) - .should.equal(true) - }) - - return it('should call the callback with the error', function () { - return 
this.callback.calledWith(this.error).should.equal(true) - }) - }) - - return describe('releaseLock', function () { - describe('when the lock is current', function () { - beforeEach(function () { - this.rclient.eval = sinon.stub().yields(null, 1) - return this.LockManager.releaseLock( - this.key, - this.lockValue, - this.callback - ) - }) - - it('should clear the data from redis', function () { - return this.rclient.eval - .calledWith( - this.LockManager.unlockScript, - 1, - this.key, - this.lockValue - ) - .should.equal(true) - }) - - return it('should call the callback', function () { - return this.callback.called.should.equal(true) - }) - }) - - return describe('when the lock has expired', function () { - beforeEach(function () { - this.rclient.eval = sinon.stub().yields(null, 0) - return this.LockManager.releaseLock( - this.key, - this.lockValue, - this.callback - ) - }) - - return it('should return an error if the lock has expired', function () { - return this.callback - .calledWith( - sinon.match.has('message', 'tried to release timed out lock') - ) - .should.equal(true) - }) - }) - }) - }) -}) diff --git a/services/track-changes/test/unit/js/MongoManager/MongoManagerTests.js b/services/track-changes/test/unit/js/MongoManager/MongoManagerTests.js deleted file mode 100644 index 6e560e7ab8..0000000000 --- a/services/track-changes/test/unit/js/MongoManager/MongoManagerTests.js +++ /dev/null @@ -1,239 +0,0 @@ -/* eslint-disable - no-return-assign, - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -const sinon = require('sinon') -const { expect } = require('chai') -const modulePath = '../../../../app/js/MongoManager.js' -const packModulePath = '../../../../app/js/PackManager.js' -const SandboxedModule = require('sandboxed-module') -const { ObjectId } = require('mongodb') -const tk = require('timekeeper') - -describe('MongoManager', function () { - beforeEach(function () { - tk.freeze(new Date()) - this.MongoManager = SandboxedModule.require(modulePath, { - requires: { - './mongodb': { db: (this.db = {}), ObjectId }, - './PackManager': (this.PackManager = {}), - '@overleaf/metrics': { timeAsyncMethod() {} }, - }, - }) - this.callback = sinon.stub() - this.doc_id = ObjectId().toString() - return (this.project_id = ObjectId().toString()) - }) - - afterEach(function () { - return tk.reset() - }) - - describe('getLastCompressedUpdate', function () { - beforeEach(function () { - this.update = 'mock-update' - this.db.docHistory = {} - this.db.docHistory.find = sinon.stub().returns(this.db.docHistory) - this.db.docHistory.findOne = sinon.stub().returns(this.db.docHistory) - this.db.docHistory.sort = sinon.stub().returns(this.db.docHistory) - this.db.docHistory.limit = sinon.stub().returns(this.db.docHistory) - this.db.docHistory.toArray = sinon - .stub() - .callsArgWith(0, null, [this.update]) - - return this.MongoManager.getLastCompressedUpdate( - this.doc_id, - this.callback - ) - }) - - it('should find the updates for the doc', function () { - return this.db.docHistory.find - .calledWith({ doc_id: ObjectId(this.doc_id) }) - .should.equal(true) - }) - - it('should limit to one result', function () { - return this.db.docHistory.limit.calledWith(1).should.equal(true) - }) - - it('should sort in descending version order', function 
() { - return this.db.docHistory.sort.calledWith({ v: -1 }).should.equal(true) - }) - - return it('should call the callback with the update', function () { - return this.callback.calledWith(null, this.update).should.equal(true) - }) - }) - - describe('peekLastCompressedUpdate', function () { - describe('when there is no last update', function () { - beforeEach(function () { - this.PackManager.getLastPackFromIndex = sinon - .stub() - .callsArgWith(1, null, null) - this.MongoManager.getLastCompressedUpdate = sinon - .stub() - .callsArgWith(1, null, null) - return this.MongoManager.peekLastCompressedUpdate( - this.doc_id, - this.callback - ) - }) - - it('should get the last update', function () { - return this.MongoManager.getLastCompressedUpdate - .calledWith(this.doc_id) - .should.equal(true) - }) - - return it('should call the callback with no update', function () { - return this.callback.calledWith(null, null).should.equal(true) - }) - }) - - describe('when there is an update', function () { - beforeEach(function () { - this.update = { _id: Object() } - this.MongoManager.getLastCompressedUpdate = sinon - .stub() - .callsArgWith(1, null, this.update) - return this.MongoManager.peekLastCompressedUpdate( - this.doc_id, - this.callback - ) - }) - - it('should get the last update', function () { - return this.MongoManager.getLastCompressedUpdate - .calledWith(this.doc_id) - .should.equal(true) - }) - - return it('should call the callback with the update', function () { - return this.callback.calledWith(null, this.update).should.equal(true) - }) - }) - - return describe('when there is a last update in S3', function () { - beforeEach(function () { - this.update = { _id: Object(), v: 12345, v_end: 12345, inS3: true } - this.PackManager.getLastPackFromIndex = sinon - .stub() - .callsArgWith(1, null, this.update) - this.MongoManager.getLastCompressedUpdate = sinon - .stub() - .callsArgWith(1, null) - return this.MongoManager.peekLastCompressedUpdate( - this.doc_id, - this.callback - ) - }) - - it('should get the last update', function () { - return this.MongoManager.getLastCompressedUpdate - .calledWith(this.doc_id) - .should.equal(true) - }) - - return it('should call the callback with a null update and the correct version', function () { - return this.callback - .calledWith(null, null, this.update.v_end) - .should.equal(true) - }) - }) - }) - - describe('backportProjectId', function () { - beforeEach(function () { - this.db.docHistory = { updateMany: sinon.stub().yields() } - return this.MongoManager.backportProjectId( - this.project_id, - this.doc_id, - this.callback - ) - }) - - it("should insert the project_id into all entries for the doc_id which don't have it set", function () { - return this.db.docHistory.updateMany - .calledWith( - { - doc_id: ObjectId(this.doc_id), - project_id: { $exists: false }, - }, - { - $set: { project_id: ObjectId(this.project_id) }, - } - ) - .should.equal(true) - }) - - return it('should call the callback', function () { - return this.callback.called.should.equal(true) - }) - }) - - describe('getProjectMetaData', function () { - beforeEach(function () { - this.metadata = { mock: 'metadata' } - this.db.projectHistoryMetaData = { - findOne: sinon.stub().callsArgWith(1, null, this.metadata), - } - return this.MongoManager.getProjectMetaData( - this.project_id, - this.callback - ) - }) - - it('should look up the meta data in the db', function () { - return this.db.projectHistoryMetaData.findOne - .calledWith({ project_id: ObjectId(this.project_id) }) -
.should.equal(true) - }) - - return it('should return the metadata', function () { - return this.callback.calledWith(null, this.metadata).should.equal(true) - }) - }) - - return describe('setProjectMetaData', function () { - beforeEach(function () { - this.metadata = { mock: 'metadata' } - this.db.projectHistoryMetaData = { - updateOne: sinon.stub().yields(), - } - return this.MongoManager.setProjectMetaData( - this.project_id, - this.metadata, - this.callback - ) - }) - - it('should upsert the metadata into the DB', function () { - return this.db.projectHistoryMetaData.updateOne - .calledWith( - { - project_id: ObjectId(this.project_id), - }, - { - $set: this.metadata, - }, - { - upsert: true, - } - ) - .should.equal(true) - }) - - return it('should call the callback', function () { - return this.callback.called.should.equal(true) - }) - }) -}) diff --git a/services/track-changes/test/unit/js/PackManager/PackManagerTests.js b/services/track-changes/test/unit/js/PackManager/PackManagerTests.js deleted file mode 100644 index 4f538f2109..0000000000 --- a/services/track-changes/test/unit/js/PackManager/PackManagerTests.js +++ /dev/null @@ -1,704 +0,0 @@ -/* eslint-disable - mocha/no-identical-title, - no-return-assign, - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -const sinon = require('sinon') -const { assert, expect } = require('chai') -const modulePath = '../../../../app/js/PackManager.js' -const SandboxedModule = require('sandboxed-module') -const { ObjectId } = require('mongodb') -const _ = require('underscore') - -const tk = require('timekeeper') - -describe('PackManager', function () { - beforeEach(function () { - tk.freeze(new Date()) - this.PackManager = SandboxedModule.require(modulePath, { - requires: { - bson: require('bson'), - './mongodb': { db: (this.db = {}), ObjectId }, - './LockManager': {}, - './MongoAWS': {}, - '@overleaf/metrics': { inc() {} }, - './ProjectIterator': require('../../../../app/js/ProjectIterator.js'), // Cache for speed - '@overleaf/settings': { - redis: { lock: { key_schema: {} } }, - }, - }, - }) - this.callback = sinon.stub() - this.doc_id = ObjectId().toString() - this.project_id = ObjectId().toString() - return (this.PackManager.MAX_COUNT = 512) - }) - - afterEach(function () { - return tk.reset() - }) - - describe('insertCompressedUpdates', function () { - beforeEach(function () { - this.lastUpdate = { - _id: '12345', - pack: [ - { op: 'op-1', meta: 'meta-1', v: 1 }, - { op: 'op-2', meta: 'meta-2', v: 2 }, - ], - n: 2, - sz: 100, - } - this.newUpdates = [ - { op: 'op-3', meta: 'meta-3', v: 3 }, - { op: 'op-4', meta: 'meta-4', v: 4 }, - ] - return (this.db.docHistory = { - insertOne: sinon.stub().yields(), - insert: sinon.stub().callsArg(1), - updateOne: sinon.stub().yields(), - findAndModify: sinon.stub().callsArg(1), - }) - }) - - describe('with no last update', function () { - beforeEach(function () { - this.PackManager.insertUpdatesIntoNewPack = sinon.stub().callsArg(4) - return this.PackManager.insertCompressedUpdates( - this.project_id, - this.doc_id, - null, - this.newUpdates, - true, - this.callback - ) - }) - - describe('for a small update', function () { - it('should insert the update into a new pack', function () { - return this.PackManager.insertUpdatesIntoNewPack - 
.calledWith(this.project_id, this.doc_id, this.newUpdates, true) - .should.equal(true) - }) - - return it('should call the callback', function () { - return this.callback.called.should.equal(true) - }) - }) - - return describe('for many small updates', function () { - beforeEach(function () { - this.newUpdates = __range__(0, 2048, true).map(i => ({ - op: `op-${i}`, - meta: `meta-${i}`, - v: i, - })) - return this.PackManager.insertCompressedUpdates( - this.project_id, - this.doc_id, - null, - this.newUpdates, - false, - this.callback - ) - }) - - it('should insert the initial updates as a new pack', function () { - return this.PackManager.insertUpdatesIntoNewPack - .calledWith( - this.project_id, - this.doc_id, - this.newUpdates.slice(0, 512), - false - ) - .should.equal(true) - }) - - it('should insert the first set of remaining updates as a new pack', function () { - return this.PackManager.insertUpdatesIntoNewPack - .calledWith( - this.project_id, - this.doc_id, - this.newUpdates.slice(512, 1024), - false - ) - .should.equal(true) - }) - - it('should insert the second set of remaining updates as a new pack', function () { - return this.PackManager.insertUpdatesIntoNewPack - .calledWith( - this.project_id, - this.doc_id, - this.newUpdates.slice(1024, 1536), - false - ) - .should.equal(true) - }) - - it('should insert the third set of remaining updates as a new pack', function () { - return this.PackManager.insertUpdatesIntoNewPack - .calledWith( - this.project_id, - this.doc_id, - this.newUpdates.slice(1536, 2048), - false - ) - .should.equal(true) - }) - - it('should insert the final set of remaining updates as a new pack', function () { - return this.PackManager.insertUpdatesIntoNewPack - .calledWith( - this.project_id, - this.doc_id, - this.newUpdates.slice(2048, 2049), - false - ) - .should.equal(true) - }) - - return it('should call the callback', function () { - return this.callback.called.should.equal(true) - }) - }) - }) - - describe('with an existing pack as the last update', function () { - beforeEach(function () { - this.PackManager.appendUpdatesToExistingPack = sinon.stub().callsArg(5) - this.PackManager.insertUpdatesIntoNewPack = sinon.stub().callsArg(4) - return this.PackManager.insertCompressedUpdates( - this.project_id, - this.doc_id, - this.lastUpdate, - this.newUpdates, - false, - this.callback - ) - }) - - describe('for a small update', function () { - it('should append the update to the existing pack', function () { - return this.PackManager.appendUpdatesToExistingPack - .calledWith( - this.project_id, - this.doc_id, - this.lastUpdate, - this.newUpdates, - false - ) - .should.equal(true) - }) - it('should not insert any new packs', function () { - return this.PackManager.insertUpdatesIntoNewPack.called.should.equal( - false - ) - }) - return it('should call the callback', function () { - return this.callback.called.should.equal(true) - }) - }) - - describe('for many small updates', function () { - beforeEach(function () { - this.newUpdates = __range__(0, 2048, true).map(i => ({ - op: `op-${i}`, - meta: `meta-${i}`, - v: i, - })) - return this.PackManager.insertCompressedUpdates( - this.project_id, - this.doc_id, - this.lastUpdate, - this.newUpdates, - false, - this.callback - ) - }) - - it('should append the initial updates to the existing pack', function () { - return this.PackManager.appendUpdatesToExistingPack - .calledWith( - this.project_id, - this.doc_id, - this.lastUpdate, - this.newUpdates.slice(0, 510), - false - ) - .should.equal(true) - }) - - 
it('should insert the first set of remaining updates as a new pack', function () { - return this.PackManager.insertUpdatesIntoNewPack - .calledWith( - this.project_id, - this.doc_id, - this.newUpdates.slice(510, 1022), - false - ) - .should.equal(true) - }) - - it('should insert the second set of remaining updates as a new pack', function () { - return this.PackManager.insertUpdatesIntoNewPack - .calledWith( - this.project_id, - this.doc_id, - this.newUpdates.slice(1022, 1534), - false - ) - .should.equal(true) - }) - - it('should insert the third set of remaining updates as a new pack', function () { - return this.PackManager.insertUpdatesIntoNewPack - .calledWith( - this.project_id, - this.doc_id, - this.newUpdates.slice(1534, 2046), - false - ) - .should.equal(true) - }) - - it('should insert the final set of remaining updates as a new pack', function () { - return this.PackManager.insertUpdatesIntoNewPack - .calledWith( - this.project_id, - this.doc_id, - this.newUpdates.slice(2046, 2049), - false - ) - .should.equal(true) - }) - - return it('should call the callback', function () { - return this.callback.called.should.equal(true) - }) - }) - - return describe('for many big updates', function () { - beforeEach(function () { - const longString = __range__( - 0, - 0.75 * this.PackManager.MAX_SIZE, - true - ) - .map(j => 'a') - .join('') - this.newUpdates = [0, 1, 2, 3, 4].map(i => ({ - op: `op-${i}-${longString}`, - meta: `meta-${i}`, - v: i, - })) - return this.PackManager.insertCompressedUpdates( - this.project_id, - this.doc_id, - this.lastUpdate, - this.newUpdates, - false, - this.callback - ) - }) - - it('should append the initial updates to the existing pack', function () { - return this.PackManager.appendUpdatesToExistingPack - .calledWith( - this.project_id, - this.doc_id, - this.lastUpdate, - this.newUpdates.slice(0, 1), - false - ) - .should.equal(true) - }) - - it('should insert the first set of remaining updates as a new pack', function () { - return this.PackManager.insertUpdatesIntoNewPack - .calledWith( - this.project_id, - this.doc_id, - this.newUpdates.slice(1, 2), - false - ) - .should.equal(true) - }) - - it('should insert the second set of remaining updates as a new pack', function () { - return this.PackManager.insertUpdatesIntoNewPack - .calledWith( - this.project_id, - this.doc_id, - this.newUpdates.slice(2, 3), - false - ) - .should.equal(true) - }) - - it('should insert the third set of remaining updates as a new pack', function () { - return this.PackManager.insertUpdatesIntoNewPack - .calledWith( - this.project_id, - this.doc_id, - this.newUpdates.slice(3, 4), - false - ) - .should.equal(true) - }) - - it('should insert the final set of remaining updates as a new pack', function () { - return this.PackManager.insertUpdatesIntoNewPack - .calledWith( - this.project_id, - this.doc_id, - this.newUpdates.slice(4, 5), - false - ) - .should.equal(true) - }) - - return it('should call the callback', function () { - return this.callback.called.should.equal(true) - }) - }) - }) - - describe('flushCompressedUpdates', function () { - return describe('when there is no previous update', function () { - beforeEach(function () { - return this.PackManager.flushCompressedUpdates( - this.project_id, - this.doc_id, - null, - this.newUpdates, - true, - this.callback - ) - }) - - return describe('for a small update that will expire', function () { - it('should insert the update into mongo', function () { - return this.db.docHistory.insertOne - .calledWithMatch({ - pack: this.newUpdates, - 
project_id: ObjectId(this.project_id), - doc_id: ObjectId(this.doc_id), - n: this.newUpdates.length, - v: this.newUpdates[0].v, - v_end: this.newUpdates[this.newUpdates.length - 1].v, - }) - .should.equal(true) - }) - - it('should set an expiry time in the future', function () { - return this.db.docHistory.insertOne - .calledWithMatch({ - expiresAt: new Date(Date.now() + 7 * 24 * 3600 * 1000), - }) - .should.equal(true) - }) - - return it('should call the callback', function () { - return this.callback.called.should.equal(true) - }) - }) - }) - }) - - describe('when there is a recent previous update in mongo that expires', function () { - beforeEach(function () { - this.lastUpdate = { - _id: '12345', - pack: [ - { op: 'op-1', meta: 'meta-1', v: 1 }, - { op: 'op-2', meta: 'meta-2', v: 2 }, - ], - n: 2, - sz: 100, - meta: { start_ts: Date.now() - 6 * 3600 * 1000 }, - expiresAt: new Date(Date.now()), - } - - return this.PackManager.flushCompressedUpdates( - this.project_id, - this.doc_id, - this.lastUpdate, - this.newUpdates, - true, - this.callback - ) - }) - - return describe('for a small update that will expire', function () { - it('should append the update in mongo', function () { - return this.db.docHistory.updateOne - .calledWithMatch( - { _id: this.lastUpdate._id }, - { - $push: { pack: { $each: this.newUpdates } }, - $set: { v_end: this.newUpdates[this.newUpdates.length - 1].v }, - } - ) - .should.equal(true) - }) - - it('should set an expiry time in the future', function () { - return this.db.docHistory.updateOne - .calledWithMatch(sinon.match.any, { - $set: { expiresAt: new Date(Date.now() + 7 * 24 * 3600 * 1000) }, - }) - .should.equal(true) - }) - - return it('should call the callback', function () { - return this.callback.called.should.equal(true) - }) - }) - }) - - describe('when there is a recent previous update in mongo and the new updates do not expire', function () { - beforeEach(function () { - this.PackManager.updateIndex = sinon.stub().callsArg(2) - - this.lastUpdate = { - _id: '12345', - pack: [ - { op: 'op-1', meta: 'meta-1', v: 1 }, - { op: 'op-2', meta: 'meta-2', v: 2 }, - ], - n: 2, - sz: 100, - meta: { start_ts: Date.now() - 6 * 3600 * 1000 }, - expiresAt: new Date(Date.now()), - } - - return this.PackManager.flushCompressedUpdates( - this.project_id, - this.doc_id, - this.lastUpdate, - this.newUpdates, - false, - this.callback - ) - }) - - return describe('for a small update that will not expire', function () { - it('should insert the update into mongo', function () { - return this.db.docHistory.insertOne - .calledWithMatch({ - pack: this.newUpdates, - project_id: ObjectId(this.project_id), - doc_id: ObjectId(this.doc_id), - n: this.newUpdates.length, - v: this.newUpdates[0].v, - v_end: this.newUpdates[this.newUpdates.length - 1].v, - }) - .should.equal(true) - }) - - it('should not set any expiry time', function () { - return this.db.docHistory.insertOne - .neverCalledWithMatch(sinon.match.has('expiresAt')) - .should.equal(true) - }) - - return it('should call the callback', function () { - return this.callback.called.should.equal(true) - }) - }) - }) - - return describe('when there is an old previous update in mongo', function () { - beforeEach(function () { - this.lastUpdate = { - _id: '12345', - pack: [ - { op: 'op-1', meta: 'meta-1', v: 1 }, - { op: 'op-2', meta: 'meta-2', v: 2 }, - ], - n: 2, - sz: 100, - meta: { start_ts: Date.now() - 30 * 24 * 3600 * 1000 }, - expiresAt: new Date(Date.now() - 30 * 24 * 3600 * 1000), - } - - return this.PackManager.flushCompressedUpdates( - 
this.project_id, - this.doc_id, - this.lastUpdate, - this.newUpdates, - true, - this.callback - ) - }) - - return describe('for a small update that will expire', function () { - it('should insert the update into mongo', function () { - return this.db.docHistory.insertOne - .calledWithMatch({ - pack: this.newUpdates, - project_id: ObjectId(this.project_id), - doc_id: ObjectId(this.doc_id), - n: this.newUpdates.length, - v: this.newUpdates[0].v, - v_end: this.newUpdates[this.newUpdates.length - 1].v, - }) - .should.equal(true) - }) - - it('should set an expiry time in the future', function () { - return this.db.docHistory.insertOne - .calledWithMatch({ - expiresAt: new Date(Date.now() + 7 * 24 * 3600 * 1000), - }) - .should.equal(true) - }) - - return it('should call the callback', function () { - return this.callback.called.should.equal(true) - }) - }) - }) - }) - - describe('getOpsByVersionRange', function () {}) - - describe('loadPacksByVersionRange', function () {}) - - describe('fetchPacksIfNeeded', function () {}) - - describe('makeProjectIterator', function () {}) - - describe('getPackById', function () {}) - - describe('increaseTTL', function () {}) - - describe('getIndex', function () {}) - - describe('getPackFromIndex', function () {}) - // getLastPackFromIndex: - // getIndexWithKeys - // initialiseIndex - // updateIndex - // findCompletedPacks - // findUnindexedPacks - // insertPacksIntoIndexWithLock - // _insertPacksIntoIndex - // archivePack - // checkArchivedPack - // processOldPack - // updateIndexIfNeeded - // findUnarchivedPacks - - return describe('checkArchiveNotInProgress', function () { - describe('when an archive is in progress', function () { - beforeEach(function () { - this.db.docHistoryIndex = { - findOne: sinon.stub().callsArgWith(2, null, { inS3: false }), - } - return this.PackManager.checkArchiveNotInProgress( - this.project_id, - this.doc_id, - this.pack_id, - this.callback - ) - }) - it('should call the callback', function () { - return this.callback.called.should.equal(true) - }) - return it('should return an error', function () { - return this.callback - .calledWith(sinon.match.has('message')) - .should.equal(true) - }) - }) - - describe('when an archive is completed', function () { - beforeEach(function () { - this.db.docHistoryIndex = { - findOne: sinon.stub().callsArgWith(2, null, { inS3: true }), - } - return this.PackManager.checkArchiveNotInProgress( - this.project_id, - this.doc_id, - this.pack_id, - this.callback - ) - }) - it('should call the callback', function () { - return this.callback.called.should.equal(true) - }) - return it('should return an error', function () { - return this.callback - .calledWith(sinon.match.has('message')) - .should.equal(true) - }) - }) - - return describe('when the archive has not started or completed', function () { - beforeEach(function () { - this.db.docHistoryIndex = { - findOne: sinon.stub().callsArgWith(2, null, {}), - } - return this.PackManager.checkArchiveNotInProgress( - this.project_id, - this.doc_id, - this.pack_id, - this.callback - ) - }) - it('should call the callback with no error', function () { - return this.callback.called.should.equal(true) - }) - return it('should return with no error', function () { - return (typeof this.callback.lastCall.args[0]).should.equal('undefined') - }) - }) - }) -}) - -// describe "setTTLOnArchivedPack", -> -// beforeEach -> -// @pack_id = "somepackid" -// @onedayinms = 86400000 -// @db.docHistory = -// findAndModify : sinon.stub().callsArgWith(1) - -// it "should set 
expires to 1 day", (done)-> -// #@PackManager._getOneDayInFutureWithRandomDelay = sinon.stub().returns(@onedayinms) -// @PackManager.setTTLOnArchivedPack @project_id, @doc_id, @pack_id, => -// args = @db.docHistory.findAndModify.args[0][0] -// args.query._id.should.equal @pack_id -// args.update['$set'].expiresAt.should.equal @onedayinms -// done() - -// describe "_getOneDayInFutureWithRandomDelay", -> -// beforeEach -> -// @onedayinms = 86400000 -// @thirtyMins = 1000 * 60 * 30 - -// it "should give 1 day + 30 mins random time", (done)-> -// loops = 10000 -// while --loops > 0 -// randomDelay = @PackManager._getOneDayInFutureWithRandomDelay() - new Date(Date.now() + @onedayinms) -// randomDelay.should.be.above(0) -// randomDelay.should.be.below(@thirtyMins + 1) -// done() - -function __range__(left, right, inclusive) { - const range = [] - const ascending = left < right - const end = !inclusive ? right : ascending ? right + 1 : right - 1 - for (let i = left; ascending ? i < end : i > end; ascending ? i++ : i--) { - range.push(i) - } - return range -} diff --git a/services/track-changes/test/unit/js/RedisManager/RedisManagerTests.js b/services/track-changes/test/unit/js/RedisManager/RedisManagerTests.js deleted file mode 100644 index a67159e09f..0000000000 --- a/services/track-changes/test/unit/js/RedisManager/RedisManagerTests.js +++ /dev/null @@ -1,163 +0,0 @@ -/* eslint-disable - no-return-assign, - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS101: Remove unnecessary use of Array.from - * DS102: Remove unnecessary code created because of implicit returns - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -const sinon = require('sinon') -const { expect } = require('chai') -const modulePath = '../../../../app/js/RedisManager.js' -const SandboxedModule = require('sandboxed-module') - -describe('RedisManager', function () { - beforeEach(function () { - this.RedisManager = SandboxedModule.require(modulePath, { - requires: { - '@overleaf/redis-wrapper': { - createClient: () => { - return (this.rclient = { - auth: sinon.stub(), - multi: () => this.rclient, - }) - }, - }, - '@overleaf/settings': { - redis: { - history: { - key_schema: { - uncompressedHistoryOps({ doc_id: docId }) { - return `UncompressedHistoryOps:${docId}` - }, - docsWithHistoryOps({ project_id: projectId }) { - return `DocsWithHistoryOps:${projectId}` - }, - }, - }, - }, - }, - }, - }) - this.doc_id = 'doc-id-123' - this.project_id = 'project-id-123' - this.batchSize = 100 - return (this.callback = sinon.stub()) - }) - - describe('getOldestDocUpdates', function () { - beforeEach(function () { - this.rawUpdates = [ - { v: 42, op: 'mock-op-42' }, - { v: 45, op: 'mock-op-45' }, - ] - this.jsonUpdates = Array.from(this.rawUpdates).map(update => - JSON.stringify(update) - ) - this.rclient.lrange = sinon.stub().callsArgWith(3, null, this.jsonUpdates) - return this.RedisManager.getOldestDocUpdates( - this.doc_id, - this.batchSize, - this.callback - ) - }) - - it('should read the updates from redis', function () { - return this.rclient.lrange - .calledWith( - `UncompressedHistoryOps:${this.doc_id}`, - 0, - this.batchSize - 1 - ) - .should.equal(true) - }) - - it('should call the callback with the unparsed ops', function () { - return this.callback.calledWith(null, this.jsonUpdates).should.equal(true) - }) - - describe('expandDocUpdates', function () { - beforeEach(function () 
{ - return this.RedisManager.expandDocUpdates( - this.jsonUpdates, - this.callback - ) - }) - - return it('should call the callback with the parsed ops', function () { - return this.callback - .calledWith(null, this.rawUpdates) - .should.equal(true) - }) - }) - - return describe('deleteAppliedDocUpdates', function () { - beforeEach(function () { - this.rclient.lrem = sinon.stub() - this.rclient.srem = sinon.stub() - this.rclient.exec = sinon.stub().callsArgWith(0) - return this.RedisManager.deleteAppliedDocUpdates( - this.project_id, - this.doc_id, - this.jsonUpdates, - this.callback - ) - }) - - it('should delete the first update from redis', function () { - return this.rclient.lrem - .calledWith( - `UncompressedHistoryOps:${this.doc_id}`, - 1, - this.jsonUpdates[0] - ) - .should.equal(true) - }) - - it('should delete the second update from redis', function () { - return this.rclient.lrem - .calledWith( - `UncompressedHistoryOps:${this.doc_id}`, - 1, - this.jsonUpdates[1] - ) - .should.equal(true) - }) - - it('should delete the doc from the set of docs with history ops', function () { - return this.rclient.srem - .calledWith(`DocsWithHistoryOps:${this.project_id}`, this.doc_id) - .should.equal(true) - }) - - return it('should call the callback ', function () { - return this.callback.called.should.equal(true) - }) - }) - }) - - return describe('getDocIdsWithHistoryOps', function () { - beforeEach(function () { - this.doc_ids = ['mock-id-1', 'mock-id-2'] - this.rclient.smembers = sinon.stub().callsArgWith(1, null, this.doc_ids) - return this.RedisManager.getDocIdsWithHistoryOps( - this.project_id, - this.callback - ) - }) - - it('should read the doc_ids from redis', function () { - return this.rclient.smembers - .calledWith(`DocsWithHistoryOps:${this.project_id}`) - .should.equal(true) - }) - - return it('should call the callback with the doc_ids', function () { - return this.callback.calledWith(null, this.doc_ids).should.equal(true) - }) - }) -}) diff --git a/services/track-changes/test/unit/js/RestoreManager/RestoreManagerTests.js b/services/track-changes/test/unit/js/RestoreManager/RestoreManagerTests.js deleted file mode 100644 index 9047946405..0000000000 --- a/services/track-changes/test/unit/js/RestoreManager/RestoreManagerTests.js +++ /dev/null @@ -1,64 +0,0 @@ -/* eslint-disable - no-return-assign, - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. 
-/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -const sinon = require('sinon') -const { expect } = require('chai') -const modulePath = '../../../../app/js/RestoreManager.js' -const SandboxedModule = require('sandboxed-module') - -describe('RestoreManager', function () { - beforeEach(function () { - this.RestoreManager = SandboxedModule.require(modulePath, { - requires: { - './DocumentUpdaterManager': (this.DocumentUpdaterManager = {}), - './DiffManager': (this.DiffManager = {}), - }, - }) - this.callback = sinon.stub() - this.project_id = 'mock-project-id' - this.doc_id = 'mock-doc-id' - this.user_id = 'mock-user-id' - return (this.version = 42) - }) - - return describe('restoreToBeforeVersion', function () { - beforeEach(function () { - this.content = 'mock content' - this.DocumentUpdaterManager.setDocument = sinon.stub().callsArg(4) - this.DiffManager.getDocumentBeforeVersion = sinon - .stub() - .callsArgWith(3, null, this.content) - return this.RestoreManager.restoreToBeforeVersion( - this.project_id, - this.doc_id, - this.version, - this.user_id, - this.callback - ) - }) - - it('should get the content before the requested version', function () { - return this.DiffManager.getDocumentBeforeVersion - .calledWith(this.project_id, this.doc_id, this.version) - .should.equal(true) - }) - - it('should set the document in the document updater', function () { - return this.DocumentUpdaterManager.setDocument - .calledWith(this.project_id, this.doc_id, this.content, this.user_id) - .should.equal(true) - }) - - return it('should call the callback', function () { - return this.callback.called.should.equal(true) - }) - }) -}) diff --git a/services/track-changes/test/unit/js/UpdateCompressor/UpdateCompressorTests.js b/services/track-changes/test/unit/js/UpdateCompressor/UpdateCompressorTests.js deleted file mode 100644 index cd7c6c4b76..0000000000 --- a/services/track-changes/test/unit/js/UpdateCompressor/UpdateCompressorTests.js +++ /dev/null @@ -1,848 +0,0 @@ -/* eslint-disable - no-return-assign, - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. 
-/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -const sinon = require('sinon') -const { expect } = require('chai') -const modulePath = '../../../../app/js/UpdateCompressor.js' -const SandboxedModule = require('sandboxed-module') - -const bigstring = __range__(0, 2 * 1024 * 1024, true) - .map(i => 'a') - .join('') -const mediumstring = __range__(0, 1024 * 1024, true) - .map(j => 'a') - .join('') - -describe('UpdateCompressor', function () { - beforeEach(function () { - this.UpdateCompressor = SandboxedModule.require(modulePath, { - requires: { - '../lib/diff_match_patch': require('../../../../app/lib/diff_match_patch'), - }, - }) - this.user_id = 'user-id-1' - this.other_user_id = 'user-id-2' - this.ts1 = Date.now() - return (this.ts2 = Date.now() + 1000) - }) - - describe('convertToSingleOpUpdates', function () { - it('should split grouped updates into individual updates', function () { - return expect( - this.UpdateCompressor.convertToSingleOpUpdates([ - { - op: [ - (this.op1 = { p: 0, i: 'Foo' }), - (this.op2 = { p: 6, i: 'bar' }), - ], - meta: { ts: this.ts1, user_id: this.user_id }, - v: 42, - }, - { - op: [(this.op3 = { p: 10, i: 'baz' })], - meta: { ts: this.ts2, user_id: this.other_user_id }, - v: 43, - }, - ]) - ).to.deep.equal([ - { - op: this.op1, - meta: { start_ts: this.ts1, end_ts: this.ts1, user_id: this.user_id }, - v: 42, - }, - { - op: this.op2, - meta: { start_ts: this.ts1, end_ts: this.ts1, user_id: this.user_id }, - v: 42, - }, - { - op: this.op3, - meta: { - start_ts: this.ts2, - end_ts: this.ts2, - user_id: this.other_user_id, - }, - v: 43, - }, - ]) - }) - - it('should return no-op updates when the op list is empty', function () { - return expect( - this.UpdateCompressor.convertToSingleOpUpdates([ - { - op: [], - meta: { ts: this.ts1, user_id: this.user_id }, - v: 42, - }, - ]) - ).to.deep.equal([ - { - op: this.UpdateCompressor.NOOP, - meta: { start_ts: this.ts1, end_ts: this.ts1, user_id: this.user_id }, - v: 42, - }, - ]) - }) - - return it('should ignore comment ops', function () { - return expect( - this.UpdateCompressor.convertToSingleOpUpdates([ - { - op: [ - (this.op1 = { p: 0, i: 'Foo' }), - (this.op2 = { p: 9, c: 'baz' }), - (this.op3 = { p: 6, i: 'bar' }), - ], - meta: { ts: this.ts1, user_id: this.user_id }, - v: 42, - }, - ]) - ).to.deep.equal([ - { - op: this.op1, - meta: { start_ts: this.ts1, end_ts: this.ts1, user_id: this.user_id }, - v: 42, - }, - { - op: this.op3, - meta: { start_ts: this.ts1, end_ts: this.ts1, user_id: this.user_id }, - v: 42, - }, - ]) - }) - }) - - describe('concatUpdatesWithSameVersion', function () { - it('should concat updates with the same version', function () { - return expect( - this.UpdateCompressor.concatUpdatesWithSameVersion([ - { - op: (this.op1 = { p: 0, i: 'Foo' }), - meta: { - start_ts: this.ts1, - end_ts: this.ts1, - user_id: this.user_id, - }, - v: 42, - }, - { - op: (this.op2 = { p: 6, i: 'bar' }), - meta: { - start_ts: this.ts1, - end_ts: this.ts1, - user_id: this.user_id, - }, - v: 42, - }, - { - op: (this.op3 = { p: 10, i: 'baz' }), - meta: { - start_ts: this.ts2, - end_ts: this.ts2, - user_id: this.other_user_id, - }, - v: 43, - }, - ]) - ).to.deep.equal([ - { - op: [this.op1, this.op2], - meta: { start_ts: this.ts1, end_ts: this.ts1, user_id: this.user_id }, - v: 42, - }, - { - op: [this.op3], - meta: { - start_ts: this.ts2, - end_ts: this.ts2, - 
user_id: this.other_user_id, - }, - v: 43, - }, - ]) - }) - - return it('should turn a noop into an empty op', function () { - return expect( - this.UpdateCompressor.concatUpdatesWithSameVersion([ - { - op: this.UpdateCompressor.NOOP, - meta: { - start_ts: this.ts1, - end_ts: this.ts1, - user_id: this.user_id, - }, - v: 42, - }, - ]) - ).to.deep.equal([ - { - op: [], - meta: { start_ts: this.ts1, end_ts: this.ts1, user_id: this.user_id }, - v: 42, - }, - ]) - }) - }) - - describe('compress', function () { - describe('insert - insert', function () { - it('should append one insert to the other', function () { - return expect( - this.UpdateCompressor.compressUpdates([ - { - op: { p: 3, i: 'foo' }, - meta: { ts: this.ts1, user_id: this.user_id }, - v: 42, - }, - { - op: { p: 6, i: 'bar' }, - meta: { ts: this.ts2, user_id: this.user_id }, - v: 43, - }, - ]) - ).to.deep.equal([ - { - op: { p: 3, i: 'foobar' }, - meta: { - start_ts: this.ts1, - end_ts: this.ts2, - user_id: this.user_id, - }, - v: 43, - }, - ]) - }) - - it('should insert one insert inside the other', function () { - return expect( - this.UpdateCompressor.compressUpdates([ - { - op: { p: 3, i: 'foo' }, - meta: { ts: this.ts1, user_id: this.user_id }, - v: 42, - }, - { - op: { p: 5, i: 'bar' }, - meta: { ts: this.ts2, user_id: this.user_id }, - v: 43, - }, - ]) - ).to.deep.equal([ - { - op: { p: 3, i: 'fobaro' }, - meta: { - start_ts: this.ts1, - end_ts: this.ts2, - user_id: this.user_id, - }, - v: 43, - }, - ]) - }) - - it('should not append separated inserts', function () { - return expect( - this.UpdateCompressor.compressUpdates([ - { - op: { p: 3, i: 'foo' }, - meta: { ts: this.ts1, user_id: this.user_id }, - v: 42, - }, - { - op: { p: 9, i: 'bar' }, - meta: { ts: this.ts2, user_id: this.user_id }, - v: 43, - }, - ]) - ).to.deep.equal([ - { - op: { p: 3, i: 'foo' }, - meta: { - start_ts: this.ts1, - end_ts: this.ts1, - user_id: this.user_id, - }, - v: 42, - }, - { - op: { p: 9, i: 'bar' }, - meta: { - start_ts: this.ts2, - end_ts: this.ts2, - user_id: this.user_id, - }, - v: 43, - }, - ]) - }) - - it('should not append inserts that are too big (second op)', function () { - return expect( - this.UpdateCompressor.compressUpdates([ - { - op: { p: 3, i: 'foo' }, - meta: { ts: this.ts1, user_id: this.user_id }, - v: 42, - }, - { - op: { p: 6, i: bigstring }, - meta: { ts: this.ts2, user_id: this.user_id }, - v: 43, - }, - ]) - ).to.deep.equal([ - { - op: { p: 3, i: 'foo' }, - meta: { - start_ts: this.ts1, - end_ts: this.ts1, - user_id: this.user_id, - }, - v: 42, - }, - { - op: { p: 6, i: bigstring }, - meta: { - start_ts: this.ts2, - end_ts: this.ts2, - user_id: this.user_id, - }, - v: 43, - }, - ]) - }) - - it('should not append inserts that are too big (first op)', function () { - return expect( - this.UpdateCompressor.compressUpdates([ - { - op: { p: 3, i: bigstring }, - meta: { ts: this.ts1, user_id: this.user_id }, - v: 42, - }, - { - op: { p: 3 + bigstring.length, i: 'bar' }, - meta: { ts: this.ts2, user_id: this.user_id }, - v: 43, - }, - ]) - ).to.deep.equal([ - { - op: { p: 3, i: bigstring }, - meta: { - start_ts: this.ts1, - end_ts: this.ts1, - user_id: this.user_id, - }, - v: 42, - }, - { - op: { p: 3 + bigstring.length, i: 'bar' }, - meta: { - start_ts: this.ts2, - end_ts: this.ts2, - user_id: this.user_id, - }, - v: 43, - }, - ]) - }) - - return it('should not append inserts that are too big (first and second op)', function () { - return expect( - this.UpdateCompressor.compressUpdates([ - { - op: { p: 3, i: mediumstring 
}, - meta: { ts: this.ts1, user_id: this.user_id }, - v: 42, - }, - { - op: { p: 3 + mediumstring.length, i: mediumstring }, - meta: { ts: this.ts2, user_id: this.user_id }, - v: 43, - }, - ]) - ).to.deep.equal([ - { - op: { p: 3, i: mediumstring }, - meta: { - start_ts: this.ts1, - end_ts: this.ts1, - user_id: this.user_id, - }, - v: 42, - }, - { - op: { p: 3 + mediumstring.length, i: mediumstring }, - meta: { - start_ts: this.ts2, - end_ts: this.ts2, - user_id: this.user_id, - }, - v: 43, - }, - ]) - }) - }) - - describe('delete - delete', function () { - it('should append one delete to the other', function () { - return expect( - this.UpdateCompressor.compressUpdates([ - { - op: { p: 3, d: 'foo' }, - meta: { ts: this.ts1, user_id: this.user_id }, - v: 42, - }, - { - op: { p: 3, d: 'bar' }, - meta: { ts: this.ts2, user_id: this.user_id }, - v: 43, - }, - ]) - ).to.deep.equal([ - { - op: { p: 3, d: 'foobar' }, - meta: { - start_ts: this.ts1, - end_ts: this.ts2, - user_id: this.user_id, - }, - v: 43, - }, - ]) - }) - - it('should insert one delete inside the other', function () { - return expect( - this.UpdateCompressor.compressUpdates([ - { - op: { p: 3, d: 'foo' }, - meta: { ts: this.ts1, user_id: this.user_id }, - v: 42, - }, - { - op: { p: 1, d: 'bar' }, - meta: { ts: this.ts2, user_id: this.user_id }, - v: 43, - }, - ]) - ).to.deep.equal([ - { - op: { p: 1, d: 'bafoor' }, - meta: { - start_ts: this.ts1, - end_ts: this.ts2, - user_id: this.user_id, - }, - v: 43, - }, - ]) - }) - - return it('should not append separated deletes', function () { - return expect( - this.UpdateCompressor.compressUpdates([ - { - op: { p: 3, d: 'foo' }, - meta: { ts: this.ts1, user_id: this.user_id }, - v: 42, - }, - { - op: { p: 9, d: 'bar' }, - meta: { ts: this.ts2, user_id: this.user_id }, - v: 43, - }, - ]) - ).to.deep.equal([ - { - op: { p: 3, d: 'foo' }, - meta: { - start_ts: this.ts1, - end_ts: this.ts1, - user_id: this.user_id, - }, - v: 42, - }, - { - op: { p: 9, d: 'bar' }, - meta: { - start_ts: this.ts2, - end_ts: this.ts2, - user_id: this.user_id, - }, - v: 43, - }, - ]) - }) - }) - - describe('insert - delete', function () { - it('should undo a previous insert', function () { - return expect( - this.UpdateCompressor.compressUpdates([ - { - op: { p: 3, i: 'foo' }, - meta: { ts: this.ts1, user_id: this.user_id }, - v: 42, - }, - { - op: { p: 5, d: 'o' }, - meta: { ts: this.ts2, user_id: this.user_id }, - v: 43, - }, - ]) - ).to.deep.equal([ - { - op: { p: 3, i: 'fo' }, - meta: { - start_ts: this.ts1, - end_ts: this.ts2, - user_id: this.user_id, - }, - v: 43, - }, - ]) - }) - - it('should remove part of an insert from the middle', function () { - return expect( - this.UpdateCompressor.compressUpdates([ - { - op: { p: 3, i: 'fobaro' }, - meta: { ts: this.ts1, user_id: this.user_id }, - v: 42, - }, - { - op: { p: 5, d: 'bar' }, - meta: { ts: this.ts2, user_id: this.user_id }, - v: 43, - }, - ]) - ).to.deep.equal([ - { - op: { p: 3, i: 'foo' }, - meta: { - start_ts: this.ts1, - end_ts: this.ts2, - user_id: this.user_id, - }, - v: 43, - }, - ]) - }) - - it('should cancel out two opposite updates', function () { - return expect( - this.UpdateCompressor.compressUpdates([ - { - op: { p: 3, i: 'foo' }, - meta: { ts: this.ts1, user_id: this.user_id }, - v: 42, - }, - { - op: { p: 3, d: 'foo' }, - meta: { ts: this.ts2, user_id: this.user_id }, - v: 43, - }, - ]) - ).to.deep.equal([ - { - op: { p: 3, i: '' }, - meta: { - start_ts: this.ts1, - end_ts: this.ts2, - user_id: this.user_id, - }, - v: 43, - }, - ]) - 
}) - - it('should not combine separated updates', function () { - return expect( - this.UpdateCompressor.compressUpdates([ - { - op: { p: 3, i: 'foo' }, - meta: { ts: this.ts1, user_id: this.user_id }, - v: 42, - }, - { - op: { p: 9, d: 'bar' }, - meta: { ts: this.ts2, user_id: this.user_id }, - v: 43, - }, - ]) - ).to.deep.equal([ - { - op: { p: 3, i: 'foo' }, - meta: { - start_ts: this.ts1, - end_ts: this.ts1, - user_id: this.user_id, - }, - v: 42, - }, - { - op: { p: 9, d: 'bar' }, - meta: { - start_ts: this.ts2, - end_ts: this.ts2, - user_id: this.user_id, - }, - v: 43, - }, - ]) - }) - - return it('should not combine updates with overlap beyond the end', function () { - return expect( - this.UpdateCompressor.compressUpdates([ - { - op: { p: 3, i: 'foobar' }, - meta: { ts: this.ts1, user_id: this.user_id }, - v: 42, - }, - { - op: { p: 6, d: 'bardle' }, - meta: { ts: this.ts2, user_id: this.user_id }, - v: 43, - }, - ]) - ).to.deep.equal([ - { - op: { p: 3, i: 'foobar' }, - meta: { - start_ts: this.ts1, - end_ts: this.ts1, - user_id: this.user_id, - }, - v: 42, - }, - { - op: { p: 6, d: 'bardle' }, - meta: { - start_ts: this.ts2, - end_ts: this.ts2, - user_id: this.user_id, - }, - v: 43, - }, - ]) - }) - }) - - describe('delete - insert', function () { - it('should do a diff of the content', function () { - return expect( - this.UpdateCompressor.compressUpdates([ - { - op: { p: 3, d: 'one two three four five six seven eight' }, - meta: { ts: this.ts1, user_id: this.user_id }, - v: 42, - }, - { - op: { p: 3, i: 'one 2 three four five six seven eight' }, - meta: { ts: this.ts2, user_id: this.user_id }, - v: 43, - }, - ]) - ).to.deep.equal([ - { - op: { p: 7, d: 'two' }, - meta: { - start_ts: this.ts1, - end_ts: this.ts2, - user_id: this.user_id, - }, - v: 43, - }, - { - op: { p: 7, i: '2' }, - meta: { - start_ts: this.ts1, - end_ts: this.ts2, - user_id: this.user_id, - }, - v: 43, - }, - ]) - }) - - return it('should return a no-op if the delete and insert are the same', function () { - return expect( - this.UpdateCompressor.compressUpdates([ - { - op: { p: 3, d: 'one two three four five six seven eight' }, - meta: { ts: this.ts1, user_id: this.user_id }, - v: 42, - }, - { - op: { p: 3, i: 'one two three four five six seven eight' }, - meta: { ts: this.ts2, user_id: this.user_id }, - v: 43, - }, - ]) - ).to.deep.equal([ - { - op: { p: 3, i: '' }, - meta: { - start_ts: this.ts1, - end_ts: this.ts2, - user_id: this.user_id, - }, - v: 43, - }, - ]) - }) - }) - - describe('noop - insert', function () { - return it('should leave them untouched', function () { - return expect( - this.UpdateCompressor.compressUpdates([ - { - op: this.UpdateCompressor.NOOP, - meta: { ts: this.ts1, user_id: this.user_id }, - v: 42, - }, - { - op: { p: 6, i: 'bar' }, - meta: { ts: this.ts1, user_id: this.user_id }, - v: 43, - }, - ]) - ).to.deep.equal([ - { - op: this.UpdateCompressor.NOOP, - meta: { - start_ts: this.ts1, - end_ts: this.ts1, - user_id: this.user_id, - }, - v: 42, - }, - { - op: { p: 6, i: 'bar' }, - meta: { - start_ts: this.ts1, - end_ts: this.ts1, - user_id: this.user_id, - }, - v: 43, - }, - ]) - }) - }) - - return describe('noop - delete', function () { - return it('should leave them untouched', function () { - return expect( - this.UpdateCompressor.compressUpdates([ - { - op: this.UpdateCompressor.NOOP, - meta: { ts: this.ts1, user_id: this.user_id }, - v: 42, - }, - { - op: { p: 6, d: 'bar' }, - meta: { ts: this.ts1, user_id: this.user_id }, - v: 43, - }, - ]) - ).to.deep.equal([ - { - op: 
this.UpdateCompressor.NOOP, - meta: { - start_ts: this.ts1, - end_ts: this.ts1, - user_id: this.user_id, - }, - v: 42, - }, - { - op: { p: 6, d: 'bar' }, - meta: { - start_ts: this.ts1, - end_ts: this.ts1, - user_id: this.user_id, - }, - v: 43, - }, - ]) - }) - }) - }) - - return describe('compressRawUpdates', function () { - return describe('merging in-place with an array op', function () { - return it('should not change the existing last updates', function () { - return expect( - this.UpdateCompressor.compressRawUpdates( - { - op: [ - { p: 1000, d: 'hello' }, - { p: 1000, i: 'HELLO()' }, - ], - meta: { - start_ts: this.ts1, - end_ts: this.ts1, - user_id: this.user_id, - }, - v: 42, - }, - [ - { - op: [{ p: 1006, i: 'WORLD' }], - meta: { ts: this.ts2, user_id: this.user_id }, - v: 43, - }, - ] - ) - ).to.deep.equal([ - { - op: [ - { p: 1000, d: 'hello' }, - { p: 1000, i: 'HELLO()' }, - ], - meta: { - start_ts: this.ts1, - end_ts: this.ts1, - user_id: this.user_id, - }, - v: 42, - }, - { - op: [{ p: 1006, i: 'WORLD' }], - meta: { - start_ts: this.ts2, - end_ts: this.ts2, - user_id: this.user_id, - }, - v: 43, - }, - ]) - }) - }) - }) -}) - -function __range__(left, right, inclusive) { - const range = [] - const ascending = left < right - const end = !inclusive ? right : ascending ? right + 1 : right - 1 - for (let i = left; ascending ? i < end : i > end; ascending ? i++ : i--) { - range.push(i) - } - return range -} diff --git a/services/track-changes/test/unit/js/UpdateTrimmer/UpdateTrimmerTests.js b/services/track-changes/test/unit/js/UpdateTrimmer/UpdateTrimmerTests.js deleted file mode 100644 index a49554d9bb..0000000000 --- a/services/track-changes/test/unit/js/UpdateTrimmer/UpdateTrimmerTests.js +++ /dev/null @@ -1,182 +0,0 @@ -/* eslint-disable - no-return-assign, - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. 
-/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -const sinon = require('sinon') -const { expect } = require('chai') -const modulePath = '../../../../app/js/UpdateTrimmer.js' -const SandboxedModule = require('sandboxed-module') -const tk = require('timekeeper') - -describe('UpdateTrimmer', function () { - beforeEach(function () { - this.now = new Date() - tk.freeze(this.now) - - this.UpdateTrimmer = SandboxedModule.require(modulePath, { - requires: { - './WebApiManager': (this.WebApiManager = {}), - './MongoManager': (this.MongoManager = {}), - }, - }) - - this.callback = sinon.stub() - return (this.project_id = 'mock-project-id') - }) - - afterEach(function () { - return tk.reset() - }) - - return describe('shouldTrimUpdates', function () { - beforeEach(function () { - this.metadata = {} - this.details = { features: {} } - this.MongoManager.getProjectMetaData = sinon - .stub() - .callsArgWith(1, null, this.metadata) - this.MongoManager.setProjectMetaData = sinon.stub().callsArgWith(2) - this.MongoManager.upgradeHistory = sinon.stub().callsArgWith(1) - return (this.WebApiManager.getProjectDetails = sinon - .stub() - .callsArgWith(1, null, this.details)) - }) - - describe('with preserveHistory set in the project meta data', function () { - beforeEach(function () { - this.metadata.preserveHistory = true - return this.UpdateTrimmer.shouldTrimUpdates( - this.project_id, - this.callback - ) - }) - - it('should look up the meta data', function () { - return this.MongoManager.getProjectMetaData - .calledWith(this.project_id) - .should.equal(true) - }) - - it('should not look up the project details', function () { - return this.WebApiManager.getProjectDetails.called.should.equal(false) - }) - - return it('should return false', function () { - return this.callback.calledWith(null, false).should.equal(true) - }) - }) - - describe('without preserveHistory set in the project meta data', function () { - beforeEach(function () { - return (this.metadata.preserveHistory = false) - }) - - describe('when the project has the versioning feature', function () { - beforeEach(function () { - this.details.features.versioning = true - return this.UpdateTrimmer.shouldTrimUpdates( - this.project_id, - this.callback - ) - }) - - it('should look up the meta data', function () { - return this.MongoManager.getProjectMetaData - .calledWith(this.project_id) - .should.equal(true) - }) - - it('should look up the project details', function () { - return this.WebApiManager.getProjectDetails - .calledWith(this.project_id) - .should.equal(true) - }) - - it('should insert preserveHistory into the metadata', function () { - return this.MongoManager.setProjectMetaData - .calledWith(this.project_id, { preserveHistory: true }) - .should.equal(true) - }) - - it('should upgrade any existing history', function () { - return this.MongoManager.upgradeHistory - .calledWith(this.project_id) - .should.equal(true) - }) - - return it('should return false', function () { - return this.callback.calledWith(null, false).should.equal(true) - }) - }) - - return describe('when the project does not have the versioning feature', function () { - beforeEach(function () { - this.details.features.versioning = false - return this.UpdateTrimmer.shouldTrimUpdates( - this.project_id, - this.callback - ) - }) - - return it('should return true', function () { - return this.callback.calledWith(null, 
true).should.equal(true) - }) - }) - }) - - return describe('without any meta data', function () { - beforeEach(function () { - return (this.MongoManager.getProjectMetaData = sinon - .stub() - .callsArgWith(1, null, null)) - }) - - describe('when the project has the versioning feature', function () { - beforeEach(function () { - this.details.features.versioning = true - return this.UpdateTrimmer.shouldTrimUpdates( - this.project_id, - this.callback - ) - }) - - it('should insert preserveHistory into the metadata', function () { - return this.MongoManager.setProjectMetaData - .calledWith(this.project_id, { preserveHistory: true }) - .should.equal(true) - }) - - it('should upgrade any existing history', function () { - return this.MongoManager.upgradeHistory - .calledWith(this.project_id) - .should.equal(true) - }) - - return it('should return false', function () { - return this.callback.calledWith(null, false).should.equal(true) - }) - }) - - return describe('when the project does not have the versioning feature', function () { - beforeEach(function () { - this.details.features.versioning = false - return this.UpdateTrimmer.shouldTrimUpdates( - this.project_id, - this.callback - ) - }) - - return it('should return true', function () { - return this.callback.calledWith(null, true).should.equal(true) - }) - }) - }) - }) -}) diff --git a/services/track-changes/test/unit/js/UpdatesManager/UpdatesManagerTests.js b/services/track-changes/test/unit/js/UpdatesManager/UpdatesManagerTests.js deleted file mode 100644 index 09bb213dca..0000000000 --- a/services/track-changes/test/unit/js/UpdatesManager/UpdatesManagerTests.js +++ /dev/null @@ -1,1333 +0,0 @@ -/* eslint-disable - no-return-assign, - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. 
-/* - * decaffeinate suggestions: - * DS101: Remove unnecessary use of Array.from - * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -const sinon = require('sinon') -const { expect } = require('chai') -const { ObjectId } = require('mongodb') -const modulePath = '../../../../app/js/UpdatesManager.js' -const SandboxedModule = require('sandboxed-module') - -describe('UpdatesManager', function () { - beforeEach(function () { - this.UpdatesManager = SandboxedModule.require(modulePath, { - singleOnly: true, - requires: { - './UpdateCompressor': (this.UpdateCompressor = {}), - './MongoManager': (this.MongoManager = {}), - './PackManager': (this.PackManager = {}), - './RedisManager': (this.RedisManager = {}), - './LockManager': (this.LockManager = {}), - './WebApiManager': (this.WebApiManager = {}), - './UpdateTrimmer': (this.UpdateTrimmer = {}), - './DocArchiveManager': (this.DocArchiveManager = {}), - '@overleaf/settings': { - redis: { - lock: { - key_schema: { - historyLock({ doc_id: docId }) { - return `HistoryLock:${docId}` - }, - }, - }, - }, - }, - }, - }) - this.doc_id = 'doc-id-123' - this.project_id = 'project-id-123' - this.callback = sinon.stub() - return (this.temporary = 'temp-mock') - }) - - describe('compressAndSaveRawUpdates', function () { - describe('when there are no raw ops', function () { - beforeEach(function () { - this.MongoManager.peekLastCompressedUpdate = sinon.stub() - return this.UpdatesManager.compressAndSaveRawUpdates( - this.project_id, - this.doc_id, - [], - this.temporary, - this.callback - ) - }) - - it('should not need to access the database', function () { - return this.MongoManager.peekLastCompressedUpdate.called.should.equal( - false - ) - }) - - return it('should call the callback', function () { - return this.callback.called.should.equal(true) - }) - }) - - describe('when there is no compressed history to begin with', function () { - beforeEach(function () { - this.rawUpdates = [ - { v: 12, op: 'mock-op-12' }, - { v: 13, op: 'mock-op-13' }, - ] - this.compressedUpdates = [{ v: 13, op: 'compressed-op-12' }] - - this.MongoManager.peekLastCompressedUpdate = sinon - .stub() - .callsArgWith(1, null, null) - this.PackManager.insertCompressedUpdates = sinon.stub().callsArg(5) - this.UpdateCompressor.compressRawUpdates = sinon - .stub() - .returns(this.compressedUpdates) - return this.UpdatesManager.compressAndSaveRawUpdates( - this.project_id, - this.doc_id, - this.rawUpdates, - this.temporary, - this.callback - ) - }) - - it('should look at the last compressed op', function () { - return this.MongoManager.peekLastCompressedUpdate - .calledWith(this.doc_id) - .should.equal(true) - }) - - it('should save the compressed ops as a pack', function () { - return this.PackManager.insertCompressedUpdates - .calledWith( - this.project_id, - this.doc_id, - null, - this.compressedUpdates, - this.temporary - ) - .should.equal(true) - }) - - return it('should call the callback', function () { - return this.callback.called.should.equal(true) - }) - }) - - describe('when the raw ops need appending to existing history', function () { - beforeEach(function () { - this.lastCompressedUpdate = { v: 11, op: 'compressed-op-11' } - this.compressedUpdates = [ - { v: 12, op: 'compressed-op-11+12' }, - { v: 13, op: 'compressed-op-12' }, - ] - - this.MongoManager.peekLastCompressedUpdate = sinon - .stub() - .callsArgWith( 
- 1, - null, - this.lastCompressedUpdate, - this.lastCompressedUpdate.v - ) - this.PackManager.insertCompressedUpdates = sinon.stub().callsArg(5) - return (this.UpdateCompressor.compressRawUpdates = sinon - .stub() - .returns(this.compressedUpdates)) - }) - - describe('when the raw ops start where the existing history ends', function () { - beforeEach(function () { - this.rawUpdates = [ - { v: 12, op: 'mock-op-12' }, - { v: 13, op: 'mock-op-13' }, - ] - return this.UpdatesManager.compressAndSaveRawUpdates( - this.project_id, - this.doc_id, - this.rawUpdates, - this.temporary, - this.callback - ) - }) - - it('should look at the last compressed op', function () { - return this.MongoManager.peekLastCompressedUpdate - .calledWith(this.doc_id) - .should.equal(true) - }) - - it('should compress the raw ops', function () { - return this.UpdateCompressor.compressRawUpdates - .calledWith(null, this.rawUpdates) - .should.equal(true) - }) - - it('should save the new compressed ops into a pack', function () { - return this.PackManager.insertCompressedUpdates - .calledWith( - this.project_id, - this.doc_id, - this.lastCompressedUpdate, - this.compressedUpdates, - this.temporary - ) - .should.equal(true) - }) - - return it('should call the callback', function () { - return this.callback.called.should.equal(true) - }) - }) - - describe('when the raw ops start where the existing history ends and the history is in a pack', function () { - beforeEach(function () { - this.lastCompressedUpdate = { - pack: [{ v: 11, op: 'compressed-op-11' }], - v: 11, - } - this.rawUpdates = [ - { v: 12, op: 'mock-op-12' }, - { v: 13, op: 'mock-op-13' }, - ] - this.MongoManager.peekLastCompressedUpdate = sinon - .stub() - .callsArgWith( - 1, - null, - this.lastCompressedUpdate, - this.lastCompressedUpdate.v - ) - return this.UpdatesManager.compressAndSaveRawUpdates( - this.project_id, - this.doc_id, - this.rawUpdates, - this.temporary, - this.callback - ) - }) - - it('should look at the last compressed op', function () { - return this.MongoManager.peekLastCompressedUpdate - .calledWith(this.doc_id) - .should.equal(true) - }) - - it('should compress the raw ops', function () { - return this.UpdateCompressor.compressRawUpdates - .calledWith(null, this.rawUpdates) - .should.equal(true) - }) - - it('should save the new compressed ops into a pack', function () { - return this.PackManager.insertCompressedUpdates - .calledWith( - this.project_id, - this.doc_id, - this.lastCompressedUpdate, - this.compressedUpdates, - this.temporary - ) - .should.equal(true) - }) - - return it('should call the callback', function () { - return this.callback.called.should.equal(true) - }) - }) - - describe('when some raw ops are passed that have already been compressed', function () { - beforeEach(function () { - this.rawUpdates = [ - { v: 10, op: 'mock-op-10' }, - { v: 11, op: 'mock-op-11' }, - { v: 12, op: 'mock-op-12' }, - { v: 13, op: 'mock-op-13' }, - ] - - return this.UpdatesManager.compressAndSaveRawUpdates( - this.project_id, - this.doc_id, - this.rawUpdates, - this.temporary, - this.callback - ) - }) - - return it('should only compress the more recent raw ops', function () { - return this.UpdateCompressor.compressRawUpdates - .calledWith(null, this.rawUpdates.slice(-2)) - .should.equal(true) - }) - }) - - describe('when the raw ops do not follow from the last compressed op version', function () { - beforeEach(function () { - this.rawUpdates = [{ v: 13, op: 'mock-op-13' }] - return this.UpdatesManager.compressAndSaveRawUpdates( - 
this.project_id, - this.doc_id, - this.rawUpdates, - this.temporary, - this.callback - ) - }) - - it('should call the callback with an error', function () { - return this.callback - .calledWith( - sinon.match.has( - 'message', - 'Tried to apply raw op at version 13 to last compressed update with version 11 from unknown time' - ) - ) - .should.equal(true) - }) - - return it('should not insert any update into mongo', function () { - return this.PackManager.insertCompressedUpdates.called.should.equal( - false - ) - }) - }) - - return describe('when the raw ops are out of order', function () { - beforeEach(function () { - this.rawUpdates = [ - { v: 13, op: 'mock-op-13' }, - { v: 12, op: 'mock-op-12' }, - ] - return this.UpdatesManager.compressAndSaveRawUpdates( - this.project_id, - this.doc_id, - this.rawUpdates, - this.temporary, - this.callback - ) - }) - - it('should call the callback with an error', function () { - return this.callback - .calledWith(sinon.match.has('message')) - .should.equal(true) - }) - - return it('should not insert any update into mongo', function () { - return this.PackManager.insertCompressedUpdates.called.should.equal( - false - ) - }) - }) - }) - - return describe('when the raw ops need appending to existing history which is in S3', function () { - beforeEach(function () { - this.lastCompressedUpdate = null - this.lastVersion = 11 - this.compressedUpdates = [{ v: 13, op: 'compressed-op-12' }] - - this.MongoManager.peekLastCompressedUpdate = sinon - .stub() - .callsArgWith(1, null, null, this.lastVersion) - this.PackManager.insertCompressedUpdates = sinon.stub().callsArg(5) - return (this.UpdateCompressor.compressRawUpdates = sinon - .stub() - .returns(this.compressedUpdates)) - }) - - return describe('when the raw ops start where the existing history ends', function () { - beforeEach(function () { - this.rawUpdates = [ - { v: 12, op: 'mock-op-12' }, - { v: 13, op: 'mock-op-13' }, - ] - return this.UpdatesManager.compressAndSaveRawUpdates( - this.project_id, - this.doc_id, - this.rawUpdates, - this.temporary, - this.callback - ) - }) - - it('should try to look at the last compressed op', function () { - return this.MongoManager.peekLastCompressedUpdate - .calledWith(this.doc_id) - .should.equal(true) - }) - - it('should compress the last compressed op and the raw ops', function () { - return this.UpdateCompressor.compressRawUpdates - .calledWith(this.lastCompressedUpdate, this.rawUpdates) - .should.equal(true) - }) - - it('should save the compressed ops', function () { - return this.PackManager.insertCompressedUpdates - .calledWith( - this.project_id, - this.doc_id, - null, - this.compressedUpdates, - this.temporary - ) - .should.equal(true) - }) - - return it('should call the callback', function () { - return this.callback.called.should.equal(true) - }) - }) - }) - }) - - describe('processUncompressedUpdates', function () { - beforeEach(function () { - this.UpdatesManager.compressAndSaveRawUpdates = sinon - .stub() - .callsArgWith(4) - this.RedisManager.deleteAppliedDocUpdates = sinon.stub().callsArg(3) - this.MongoManager.backportProjectId = sinon.stub().callsArg(2) - return (this.UpdateTrimmer.shouldTrimUpdates = sinon - .stub() - .callsArgWith(1, null, (this.temporary = 'temp mock'))) - }) - - describe('when there is fewer than one batch to send', function () { - beforeEach(function () { - this.updates = ['mock-update'] - this.RedisManager.getOldestDocUpdates = sinon - .stub() - .callsArgWith(2, null, this.updates) - this.RedisManager.expandDocUpdates = sinon - 
.stub()
-          .callsArgWith(1, null, this.updates)
-        return this.UpdatesManager.processUncompressedUpdates(
-          this.project_id,
-          this.doc_id,
-          this.temporary,
-          this.callback
-        )
-      })
-
-      it('should get the oldest updates', function () {
-        return this.RedisManager.getOldestDocUpdates
-          .calledWith(this.doc_id, this.UpdatesManager.REDIS_READ_BATCH_SIZE)
-          .should.equal(true)
-      })
-
-      it('should compress and save the updates', function () {
-        return this.UpdatesManager.compressAndSaveRawUpdates
-          .calledWith(
-            this.project_id,
-            this.doc_id,
-            this.updates,
-            this.temporary
-          )
-          .should.equal(true)
-      })
-
-      it('should delete the batch of uncompressed updates that was just processed', function () {
-        return this.RedisManager.deleteAppliedDocUpdates
-          .calledWith(this.project_id, this.doc_id, this.updates)
-          .should.equal(true)
-      })
-
-      return it('should call the callback', function () {
-        return this.callback.called.should.equal(true)
-      })
-    })
-
-    return describe('when there are multiple batches to send', function () {
-      beforeEach(function (done) {
-        this.UpdatesManager.REDIS_READ_BATCH_SIZE = 2
-        this.updates = [
-          'mock-update-0',
-          'mock-update-1',
-          'mock-update-2',
-          'mock-update-3',
-          'mock-update-4',
-        ]
-        this.redisArray = this.updates.slice()
-        this.RedisManager.getOldestDocUpdates = (
-          docId,
-          batchSize,
-          callback
-        ) => {
-          if (callback == null) {
-            callback = function () {}
-          }
-          const updates = this.redisArray.slice(0, batchSize)
-          this.redisArray = this.redisArray.slice(batchSize)
-          return callback(null, updates)
-        }
-        sinon.spy(this.RedisManager, 'getOldestDocUpdates')
-        this.RedisManager.expandDocUpdates = (jsonUpdates, callback) => {
-          return callback(null, jsonUpdates)
-        }
-        sinon.spy(this.RedisManager, 'expandDocUpdates')
-        return this.UpdatesManager.processUncompressedUpdates(
-          this.project_id,
-          this.doc_id,
-          this.temporary,
-          (...args) => {
-            this.callback(...Array.from(args || []))
-            return done()
-          }
-        )
-      })
-
-      it('should get the oldest updates in three batches', function () {
-        return this.RedisManager.getOldestDocUpdates.callCount.should.equal(3)
-      })
-
-      it('should compress and save the updates in batches', function () {
-        this.UpdatesManager.compressAndSaveRawUpdates
-          .calledWith(
-            this.project_id,
-            this.doc_id,
-            this.updates.slice(0, 2),
-            this.temporary
-          )
-          .should.equal(true)
-        this.UpdatesManager.compressAndSaveRawUpdates
-          .calledWith(
-            this.project_id,
-            this.doc_id,
-            this.updates.slice(2, 4),
-            this.temporary
-          )
-          .should.equal(true)
-        return this.UpdatesManager.compressAndSaveRawUpdates
-          .calledWith(
-            this.project_id,
-            this.doc_id,
-            this.updates.slice(4, 5),
-            this.temporary
-          )
-          .should.equal(true)
-      })
-
-      it('should delete the batches of uncompressed updates', function () {
-        return this.RedisManager.deleteAppliedDocUpdates.callCount.should.equal(
-          3
-        )
-      })
-
-      return it('should call the callback', function () {
-        return this.callback.called.should.equal(true)
-      })
-    })
-  })
-
-  describe('processUncompressedUpdatesWithLock', function () {
-    beforeEach(function () {
-      this.UpdateTrimmer.shouldTrimUpdates = sinon
-        .stub()
-        .callsArgWith(1, null, (this.temporary = 'temp mock'))
-      this.MongoManager.backportProjectId = sinon.stub().callsArg(2)
-      this.UpdatesManager._processUncompressedUpdates = sinon.stub().callsArg(3)
-      this.LockManager.runWithLock = sinon.stub().callsArg(2)
-      return this.UpdatesManager.processUncompressedUpdatesWithLock(
-        this.project_id,
-        this.doc_id,
-        this.callback
-      )
-    })
-
-    it('should check if the updates are temporary', function () {
-      return this.UpdateTrimmer.shouldTrimUpdates
-        .calledWith(this.project_id)
-        .should.equal(true)
-    })
-
-    it('should backport the project id', function () {
-      return this.MongoManager.backportProjectId
-        .calledWith(this.project_id, this.doc_id)
-        .should.equal(true)
-    })
-
-    it('should run processUncompressedUpdates with the lock', function () {
-      return this.LockManager.runWithLock
-        .calledWith(`HistoryLock:${this.doc_id}`)
-        .should.equal(true)
-    })
-
-    return it('should call the callback', function () {
-      return this.callback.called.should.equal(true)
-    })
-  })
-
-  describe('getDocUpdates', function () {
-    beforeEach(function () {
-      this.updates = ['mock-updates']
-      this.options = { to: 'mock-to', limit: 'mock-limit' }
-      this.PackManager.getOpsByVersionRange = sinon
-        .stub()
-        .callsArgWith(4, null, this.updates)
-      this.UpdatesManager.processUncompressedUpdatesWithLock = sinon
-        .stub()
-        .callsArg(2)
-      return this.UpdatesManager.getDocUpdates(
-        this.project_id,
-        this.doc_id,
-        this.options,
-        this.callback
-      )
-    })
-
-    it('should process outstanding updates', function () {
-      return this.UpdatesManager.processUncompressedUpdatesWithLock
-        .calledWith(this.project_id, this.doc_id)
-        .should.equal(true)
-    })
-
-    it('should get the updates from the database', function () {
-      return this.PackManager.getOpsByVersionRange
-        .calledWith(
-          this.project_id,
-          this.doc_id,
-          this.options.from,
-          this.options.to
-        )
-        .should.equal(true)
-    })
-
-    return it('should return the updates', function () {
-      return this.callback.calledWith(null, this.updates).should.equal(true)
-    })
-  })
-
-  describe('getDocUpdatesWithUserInfo', function () {
-    beforeEach(function () {
-      this.updates = ['mock-updates']
-      this.options = { to: 'mock-to', limit: 'mock-limit' }
-      this.updatesWithUserInfo = ['updates-with-user-info']
-      this.UpdatesManager.getDocUpdates = sinon
-        .stub()
-        .callsArgWith(3, null, this.updates)
-      this.UpdatesManager.fillUserInfo = sinon
-        .stub()
-        .callsArgWith(1, null, this.updatesWithUserInfo)
-      return this.UpdatesManager.getDocUpdatesWithUserInfo(
-        this.project_id,
-        this.doc_id,
-        this.options,
-        this.callback
-      )
-    })
-
-    it('should get the updates', function () {
-      return this.UpdatesManager.getDocUpdates
-        .calledWith(this.project_id, this.doc_id, this.options)
-        .should.equal(true)
-    })
-
-    it('should fill the updates with the user info', function () {
-      return this.UpdatesManager.fillUserInfo
-        .calledWith(this.updates)
-        .should.equal(true)
-    })
-
-    return it('should return the updates with the filled details', function () {
-      return this.callback
-        .calledWith(null, this.updatesWithUserInfo)
-        .should.equal(true)
-    })
-  })
-
-  describe('processUncompressedUpdatesForProject', function () {
-    beforeEach(function (done) {
-      this.doc_ids = ['mock-id-1', 'mock-id-2']
-      this.UpdateTrimmer.shouldTrimUpdates = sinon
-        .stub()
-        .callsArgWith(1, null, (this.temporary = 'temp mock'))
-      this.MongoManager.backportProjectId = sinon.stub().callsArg(2)
-      this.UpdatesManager._processUncompressedUpdatesForDocWithLock = sinon
-        .stub()
-        .callsArg(3)
-      this.RedisManager.getDocIdsWithHistoryOps = sinon
-        .stub()
-        .callsArgWith(1, null, this.doc_ids)
-      return this.UpdatesManager.processUncompressedUpdatesForProject(
-        this.project_id,
-        () => {
-          this.callback()
-          return done()
-        }
-      )
-    })
-
-    it('should get all the docs with history ops', function () {
-      return this.RedisManager.getDocIdsWithHistoryOps
-        .calledWith(this.project_id)
-        .should.equal(true)
-    })
-
-    it('should process the doc ops for each doc_id', function () {
-      return Array.from(this.doc_ids).map(docId =>
-        this.UpdatesManager._processUncompressedUpdatesForDocWithLock
-          .calledWith(this.project_id, docId, this.temporary)
-          .should.equal(true)
-      )
-    })
-
-    return it('should call the callback', function () {
-      return this.callback.called.should.equal(true)
-    })
-  })
-
-  describe('getSummarizedProjectUpdates', function () {
-    beforeEach(function () {
-      this.updates = [
-        {
-          doc_id: 123,
-          v: 456,
-          op: 'mock-updates',
-          meta: { user_id: 123, start_ts: 1233, end_ts: 1234 },
-        },
-      ]
-      this.options = { before: 'mock-before', limit: 'mock-limit' }
-      this.summarizedUpdates = [
-        {
-          meta: { user_ids: [123], start_ts: 1233, end_ts: 1234 },
-          docs: { 123: { fromV: 456, toV: 456 } },
-        },
-      ]
-      this.updatesWithUserInfo = ['updates-with-user-info']
-      this.done_state = false
-      this.iterator = {
-        next: cb => {
-          this.done_state = true
-          return cb(null, this.updates)
-        },
-        done: () => {
-          return this.done_state
-        },
-      }
-      this.PackManager.makeProjectIterator = sinon
-        .stub()
-        .callsArgWith(2, null, this.iterator)
-      this.UpdatesManager.processUncompressedUpdatesForProject = sinon
-        .stub()
-        .callsArg(1)
-      this.UpdatesManager.fillSummarizedUserInfo = sinon
-        .stub()
-        .callsArgWith(1, null, this.updatesWithUserInfo)
-      return this.UpdatesManager.getSummarizedProjectUpdates(
-        this.project_id,
-        this.options,
-        this.callback
-      )
-    })
-
-    it('should process any outstanding updates', function () {
-      return this.UpdatesManager.processUncompressedUpdatesForProject
-        .calledWith(this.project_id)
-        .should.equal(true)
-    })
-
-    it('should get the updates', function () {
-      return this.PackManager.makeProjectIterator
-        .calledWith(this.project_id, this.options.before)
-        .should.equal(true)
-    })
-
-    it('should fill the updates with the user info', function () {
-      return this.UpdatesManager.fillSummarizedUserInfo
-        .calledWith(this.summarizedUpdates)
-        .should.equal(true)
-    })
-
-    return it('should return the updates with the filled details', function () {
-      return this.callback
-        .calledWith(null, this.updatesWithUserInfo)
-        .should.equal(true)
-    })
-  })
-
-  // describe "_extendBatchOfSummarizedUpdates", ->
-  //   beforeEach ->
-  //     @before = Date.now()
-  //     @min_count = 2
-  //     @existingSummarizedUpdates = ["summarized-updates-3"]
-  //     @summarizedUpdates = ["summarized-updates-3", "summarized-update-2", "summarized-update-1"]
-
-  //   describe "when there are updates to get", ->
-  //     beforeEach ->
-  //       @updates = [
-  //         {op: "mock-op-1", meta: end_ts: @before - 10},
-  //         {op: "mock-op-1", meta: end_ts: @nextBeforeTimestamp = @before - 20}
-  //       ]
-  //       @existingSummarizedUpdates = ["summarized-updates-3"]
-  //       @summarizedUpdates = ["summarized-updates-3", "summarized-update-2", "summarized-update-1"]
-  //       @UpdatesManager._summarizeUpdates = sinon.stub().returns(@summarizedUpdates)
-  //       @UpdatesManager.getProjectUpdatesWithUserInfo = sinon.stub().callsArgWith(2, null, @updates)
-  //       @UpdatesManager._extendBatchOfSummarizedUpdates @project_id, @existingSummarizedUpdates, @before, @min_count, @callback

-  //     it "should get the updates", ->
-  //       @UpdatesManager.getProjectUpdatesWithUserInfo
-  //         .calledWith(@project_id, { before: @before, limit: 3 * @min_count })
-  //         .should.equal true

-  //     it "should summarize the updates", ->
-  //       @UpdatesManager._summarizeUpdates
-  //         .calledWith(@updates, @existingSummarizedUpdates)
-  //         .should.equal true

-  //     it "should call the callback with the summarized updates
and the next before timestamp", -> - // @callback.calledWith(null, @summarizedUpdates, @nextBeforeTimestamp).should.equal true - - // describe "when there are no more updates", -> - // beforeEach -> - // @updates = [] - // @UpdatesManager._summarizeUpdates = sinon.stub().returns(@summarizedUpdates) - // @UpdatesManager.getProjectUpdatesWithUserInfo = sinon.stub().callsArgWith(2, null, @updates) - // @UpdatesManager._extendBatchOfSummarizedUpdates @project_id, @existingSummarizedUpdates, @before, @min_count, @callback - - // it "should call the callback with the summarized updates and null for nextBeforeTimestamp", -> - // @callback.calledWith(null, @summarizedUpdates, null).should.equal true - - // describe "getSummarizedProjectUpdates", -> - // describe "when one batch of updates is enough to meet the limit", -> - // beforeEach -> - // @before = Date.now() - // @min_count = 2 - // @updates = ["summarized-updates-3", "summarized-updates-2"] - // @nextBeforeTimestamp = @before - 100 - // @UpdatesManager._extendBatchOfSummarizedUpdates = sinon.stub().callsArgWith(4, null, @updates, @nextBeforeTimestamp) - // @UpdatesManager.getSummarizedProjectUpdates @project_id, { before: @before, min_count: @min_count }, @callback - - // it "should get the batch of summarized updates", -> - // @UpdatesManager._extendBatchOfSummarizedUpdates - // .calledWith(@project_id, [], @before, @min_count) - // .should.equal true - - // it "should call the callback with the updates", -> - // @callback.calledWith(null, @updates, @nextBeforeTimestamp).should.equal true - - // describe "when multiple batches are needed to meet the limit", -> - // beforeEach -> - // @before = Date.now() - // @min_count = 4 - // @firstBatch = [{ toV: 6, fromV: 6 }, { toV: 5, fromV: 5 }] - // @nextBeforeTimestamp = @before - 100 - // @secondBatch = [{ toV: 4, fromV: 4 }, { toV: 3, fromV: 3 }] - // @nextNextBeforeTimestamp = @before - 200 - // @UpdatesManager._extendBatchOfSummarizedUpdates = (project_id, existingUpdates, before, desiredLength, callback) => - // if existingUpdates.length == 0 - // callback null, @firstBatch, @nextBeforeTimestamp - // else - // callback null, @firstBatch.concat(@secondBatch), @nextNextBeforeTimestamp - // sinon.spy @UpdatesManager, "_extendBatchOfSummarizedUpdates" - // @UpdatesManager.getSummarizedProjectUpdates @project_id, { before: @before, min_count: @min_count }, @callback - - // it "should get the first batch of summarized updates", -> - // @UpdatesManager._extendBatchOfSummarizedUpdates - // .calledWith(@project_id, [], @before, @min_count) - // .should.equal true - - // it "should get the second batch of summarized updates", -> - // @UpdatesManager._extendBatchOfSummarizedUpdates - // .calledWith(@project_id, @firstBatch, @nextBeforeTimestamp, @min_count) - // .should.equal true - - // it "should call the callback with all the updates", -> - // @callback.calledWith(null, @firstBatch.concat(@secondBatch), @nextNextBeforeTimestamp).should.equal true - - // describe "when the end of the database is hit", -> - // beforeEach -> - // @before = Date.now() - // @min_count = 4 - // @updates = [{ toV: 6, fromV: 6 }, { toV: 5, fromV: 5 }] - // @UpdatesManager._extendBatchOfSummarizedUpdates = sinon.stub().callsArgWith(4, null, @updates, null) - // @UpdatesManager.getSummarizedProjectUpdates @project_id, { before: @before, min_count: @min_count }, @callback - - // it "should get the batch of summarized updates", -> - // @UpdatesManager._extendBatchOfSummarizedUpdates - // .calledWith(@project_id, [], @before, 
@min_count) - // .should.equal true - - // it "should call the callback with the updates", -> - // @callback.calledWith(null, @updates, null).should.equal true - - describe('fillUserInfo', function () { - describe('with valid users', function () { - beforeEach(function (done) { - this.user_id_1 = ObjectId().toString() - this.user_id_2 = ObjectId().toString() - this.updates = [ - { - meta: { - user_id: this.user_id_1, - }, - op: 'mock-op-1', - }, - { - meta: { - user_id: this.user_id_1, - }, - op: 'mock-op-2', - }, - { - meta: { - user_id: this.user_id_2, - }, - op: 'mock-op-3', - }, - ] - this.user_info = {} - this.user_info[this.user_id_1] = { email: 'user1@sharelatex.com' } - this.user_info[this.user_id_2] = { email: 'user2@sharelatex.com' } - - this.WebApiManager.getUserInfo = (userId, callback) => { - if (callback == null) { - callback = function () {} - } - return callback(null, this.user_info[userId]) - } - sinon.spy(this.WebApiManager, 'getUserInfo') - - return this.UpdatesManager.fillUserInfo( - this.updates, - (error, results) => { - if (error) return done(error) - this.results = results - return done() - } - ) - }) - - it('should only call getUserInfo once for each user_id', function () { - this.WebApiManager.getUserInfo.calledTwice.should.equal(true) - this.WebApiManager.getUserInfo - .calledWith(this.user_id_1) - .should.equal(true) - return this.WebApiManager.getUserInfo - .calledWith(this.user_id_2) - .should.equal(true) - }) - - return it('should return the updates with the user info filled', function () { - return expect(this.results).to.deep.equal([ - { - meta: { - user: { - email: 'user1@sharelatex.com', - }, - }, - op: 'mock-op-1', - }, - { - meta: { - user: { - email: 'user1@sharelatex.com', - }, - }, - op: 'mock-op-2', - }, - { - meta: { - user: { - email: 'user2@sharelatex.com', - }, - }, - op: 'mock-op-3', - }, - ]) - }) - }) - - return describe('with invalid user ids', function () { - beforeEach(function (done) { - this.updates = [ - { - meta: { - user_id: null, - }, - op: 'mock-op-1', - }, - { - meta: { - user_id: 'anonymous-user', - }, - op: 'mock-op-2', - }, - ] - this.WebApiManager.getUserInfo = (userId, callback) => { - if (callback == null) { - callback = function () {} - } - return callback(null, this.user_info[userId]) - } - sinon.spy(this.WebApiManager, 'getUserInfo') - - return this.UpdatesManager.fillUserInfo( - this.updates, - (error, results) => { - if (error) return done(error) - this.results = results - return done() - } - ) - }) - - it('should not call getUserInfo', function () { - return this.WebApiManager.getUserInfo.called.should.equal(false) - }) - - return it('should return the updates without the user info filled', function () { - return expect(this.results).to.deep.equal([ - { - meta: {}, - op: 'mock-op-1', - }, - { - meta: {}, - op: 'mock-op-2', - }, - ]) - }) - }) - }) - - return describe('_summarizeUpdates', function () { - beforeEach(function () { - this.now = Date.now() - this.user_1 = { id: 'mock-user-1' } - return (this.user_2 = { id: 'mock-user-2' }) - }) - - it('should concat updates that are close in time', function () { - const result = this.UpdatesManager._summarizeUpdates([ - { - doc_id: 'doc-id-1', - meta: { - user_id: this.user_1.id, - start_ts: this.now + 20, - end_ts: this.now + 30, - }, - v: 5, - }, - { - doc_id: 'doc-id-1', - meta: { - user_id: this.user_2.id, - start_ts: this.now, - end_ts: this.now + 10, - }, - v: 4, - }, - ]) - - return expect(result).to.deep.equal([ - { - docs: { - 'doc-id-1': { - fromV: 4, - toV: 5, - 
}, - }, - meta: { - user_ids: [this.user_1.id, this.user_2.id], - start_ts: this.now, - end_ts: this.now + 30, - }, - }, - ]) - }) - - it('should leave updates that are far apart in time', function () { - const oneDay = 1000 * 60 * 60 * 24 - const result = this.UpdatesManager._summarizeUpdates([ - { - doc_id: 'doc-id-1', - meta: { - user_id: this.user_2.id, - start_ts: this.now + oneDay, - end_ts: this.now + oneDay + 10, - }, - v: 5, - }, - { - doc_id: 'doc-id-1', - meta: { - user_id: this.user_1.id, - start_ts: this.now, - end_ts: this.now + 10, - }, - v: 4, - }, - ]) - return expect(result).to.deep.equal([ - { - docs: { - 'doc-id-1': { - fromV: 5, - toV: 5, - }, - }, - meta: { - user_ids: [this.user_2.id], - start_ts: this.now + oneDay, - end_ts: this.now + oneDay + 10, - }, - }, - { - docs: { - 'doc-id-1': { - fromV: 4, - toV: 4, - }, - }, - meta: { - user_ids: [this.user_1.id], - start_ts: this.now, - end_ts: this.now + 10, - }, - }, - ]) - }) - - it('should concat onto existing summarized updates', function () { - const result = this.UpdatesManager._summarizeUpdates( - [ - { - doc_id: 'doc-id-2', - meta: { - user_id: this.user_1.id, - start_ts: this.now + 20, - end_ts: this.now + 30, - }, - v: 5, - }, - { - doc_id: 'doc-id-2', - meta: { - user_id: this.user_2.id, - start_ts: this.now, - end_ts: this.now + 10, - }, - v: 4, - }, - ], - [ - { - docs: { - 'doc-id-1': { - fromV: 6, - toV: 8, - }, - }, - meta: { - user_ids: [this.user_1.id], - start_ts: this.now + 40, - end_ts: this.now + 50, - }, - }, - ] - ) - return expect(result).to.deep.equal([ - { - docs: { - 'doc-id-1': { - toV: 8, - fromV: 6, - }, - 'doc-id-2': { - toV: 5, - fromV: 4, - }, - }, - meta: { - user_ids: [this.user_1.id, this.user_2.id], - start_ts: this.now, - end_ts: this.now + 50, - }, - }, - ]) - }) - - it('should include null user values', function () { - const result = this.UpdatesManager._summarizeUpdates([ - { - doc_id: 'doc-id-1', - meta: { - user_id: this.user_1.id, - start_ts: this.now + 20, - end_ts: this.now + 30, - }, - v: 5, - }, - { - doc_id: 'doc-id-1', - meta: { - user_id: null, - start_ts: this.now, - end_ts: this.now + 10, - }, - v: 4, - }, - ]) - return expect(result).to.deep.equal([ - { - docs: { - 'doc-id-1': { - fromV: 4, - toV: 5, - }, - }, - meta: { - user_ids: [this.user_1.id, null], - start_ts: this.now, - end_ts: this.now + 30, - }, - }, - ]) - }) - - it('should include null user values, when the null is earlier in the updates list', function () { - const result = this.UpdatesManager._summarizeUpdates([ - { - doc_id: 'doc-id-1', - meta: { - user_id: null, - start_ts: this.now, - end_ts: this.now + 10, - }, - v: 4, - }, - { - doc_id: 'doc-id-1', - meta: { - user_id: this.user_1.id, - start_ts: this.now + 20, - end_ts: this.now + 30, - }, - v: 5, - }, - ]) - return expect(result).to.deep.equal([ - { - docs: { - 'doc-id-1': { - fromV: 4, - toV: 5, - }, - }, - meta: { - user_ids: [null, this.user_1.id], - start_ts: this.now, - end_ts: this.now + 30, - }, - }, - ]) - }) - - it('should roll several null user values into one', function () { - const result = this.UpdatesManager._summarizeUpdates([ - { - doc_id: 'doc-id-1', - meta: { - user_id: this.user_1.id, - start_ts: this.now + 20, - end_ts: this.now + 30, - }, - v: 5, - }, - { - doc_id: 'doc-id-1', - meta: { - user_id: null, - start_ts: this.now, - end_ts: this.now + 10, - }, - v: 4, - }, - { - doc_id: 'doc-id-1', - meta: { - user_id: null, - start_ts: this.now + 2, - end_ts: this.now + 4, - }, - v: 4, - }, - ]) - return 
expect(result).to.deep.equal([
-        {
-          docs: {
-            'doc-id-1': {
-              fromV: 4,
-              toV: 5,
-            },
-          },
-          meta: {
-            user_ids: [this.user_1.id, null],
-            start_ts: this.now,
-            end_ts: this.now + 30,
-          },
-        },
-      ])
-    })
-
-    return it('should split updates before a big delete', function () {
-      const result = this.UpdatesManager._summarizeUpdates([
-        {
-          doc_id: 'doc-id-1',
-          op: [{ d: 'this is a long long long long long delete', p: 34 }],
-          meta: {
-            user_id: this.user_1.id,
-            start_ts: this.now + 20,
-            end_ts: this.now + 30,
-          },
-          v: 5,
-        },
-        {
-          doc_id: 'doc-id-1',
-          meta: {
-            user_id: this.user_2.id,
-            start_ts: this.now,
-            end_ts: this.now + 10,
-          },
-          v: 4,
-        },
-      ])
-
-      return expect(result).to.deep.equal([
-        {
-          docs: {
-            'doc-id-1': {
-              fromV: 5,
-              toV: 5,
-            },
-          },
-          meta: {
-            user_ids: [this.user_1.id],
-            start_ts: this.now + 20,
-            end_ts: this.now + 30,
-          },
-        },
-        {
-          docs: {
-            'doc-id-1': {
-              fromV: 4,
-              toV: 4,
-            },
-          },
-          meta: {
-            user_ids: [this.user_2.id],
-            start_ts: this.now,
-            end_ts: this.now + 10,
-          },
-        },
-      ])
-    })
-  })
-})
diff --git a/services/track-changes/test/unit/js/Util/PackUtilsTests.js b/services/track-changes/test/unit/js/Util/PackUtilsTests.js
deleted file mode 100644
index 5d247e9039..0000000000
--- a/services/track-changes/test/unit/js/Util/PackUtilsTests.js
+++ /dev/null
@@ -1,55 +0,0 @@
-const { expect } = require('chai')
-const { ObjectId } = require('mongodb')
-const { packsAreDuplicated } = require('../../../../app/js/util/PackUtils')
-
-const examplePack = {
-  v: 12,
-  meta: {
-    user_id: '525e6018b53de7a920002545',
-    start_ts: 1399130007228,
-    end_ts: 1399130007228,
-  },
-  op: [
-    {
-      p: 2372,
-      d: 'Test for a Subsection',
-    },
-    {
-      p: 2372,
-      i: 'Reviews and review terminology',
-    },
-  ],
-}
-
-const objectId1 = ObjectId('53650ba27e62ca78520d9814')
-const objectId2 = ObjectId('0b5a814a27e678520d92c536')
-
-describe('PackUtils', function () {
-  describe('packsAreDuplicated()', function () {
-    it('returns `false` when any of the packs is undefined', function () {
-      const pack = { ...examplePack, _id: objectId1 }
-      expect(packsAreDuplicated(pack, undefined)).to.be.false
-      expect(packsAreDuplicated(undefined, pack)).to.be.false
-      expect(packsAreDuplicated(undefined, undefined)).to.be.false
-    })
-
-    it('returns `true` for identical packs with same `_id`', function () {
-      const pack1 = { ...examplePack, _id: objectId1 }
-      const pack2 = { ...examplePack, _id: objectId1 }
-      expect(packsAreDuplicated(pack1, pack2)).to.be.true
-    })
-
-    it('returns `true` for identical packs with different `_id`', function () {
-      const pack1 = { ...examplePack, _id: objectId1 }
-      const pack2 = { ...examplePack, _id: objectId2 }
-      expect(packsAreDuplicated(pack1, pack2)).to.be.true
-    })
-
-    it('returns `false` for packs with different nested properties', function () {
-      const pack1 = { ...examplePack, _id: objectId1 }
-      const pack2 = { ...examplePack, _id: 1 }
-      pack2.op = [...pack2.op, { p: 2800, i: 'char' }]
-      expect(packsAreDuplicated(pack1, pack2)).to.be.false
-    })
-  })
-})
diff --git a/services/track-changes/test/unit/js/WebApiManager/WebApiManagerTests.js b/services/track-changes/test/unit/js/WebApiManager/WebApiManagerTests.js
deleted file mode 100644
index 9caba5185e..0000000000
--- a/services/track-changes/test/unit/js/WebApiManager/WebApiManagerTests.js
+++ /dev/null
@@ -1,208 +0,0 @@
-/* eslint-disable
-    no-return-assign,
-    no-unused-vars,
-*/
-// TODO: This file was created by bulk-decaffeinate.
-// Fix any style issues and re-enable lint.
-/*
- * decaffeinate suggestions:
- * DS102: Remove unnecessary code created because of implicit returns
- * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
- */
-const sinon = require('sinon')
-const { expect } = require('chai')
-const modulePath = '../../../../app/js/WebApiManager.js'
-const SandboxedModule = require('sandboxed-module')
-
-describe('WebApiManager', function () {
-  beforeEach(function () {
-    this.WebApiManager = SandboxedModule.require(modulePath, {
-      requires: {
-        requestretry: (this.request = {}),
-        '@overleaf/settings': (this.settings = {
-          apis: {
-            web: {
-              url: 'http://example.com',
-              user: 'sharelatex',
-              pass: 'password',
-            },
-          },
-        }),
-      },
-    })
-    this.callback = sinon.stub()
-    this.user_id = 'mock-user-id'
-    this.project_id = 'mock-project-id'
-    this.user_info = {
-      email: 'leo@sharelatex.com',
-      id: this.user_id,
-      first_name: 'Leo',
-      last_name: 'Lion',
-      extra_param: 'blah',
-    }
-    return (this.project = { features: 'mock-features' })
-  })
-
-  describe('getUserInfo', function () {
-    describe('successfully', function () {
-      beforeEach(function () {
-        this.body = JSON.stringify(this.user_info)
-        this.request.get = sinon
-          .stub()
-          .callsArgWith(1, null, { statusCode: 200 }, this.body)
-        return this.WebApiManager.getUserInfo(this.user_id, this.callback)
-      })
-
-      it('should get the user from the web api', function () {
-        return this.request.get
-          .calledWithMatch({
-            url: `${this.settings.apis.web.url}/user/${this.user_id}/personal_info`,
-            auth: {
-              user: this.settings.apis.web.user,
-              pass: this.settings.apis.web.pass,
-              sendImmediately: true,
-            },
-          })
-          .should.equal(true)
-      })
-
-      return it('should call the callback with only the email, id and names', function () {
-        return this.callback
-          .calledWith(null, {
-            id: this.user_id,
-            email: this.user_info.email,
-            first_name: this.user_info.first_name,
-            last_name: this.user_info.last_name,
-          })
-          .should.equal(true)
-      })
-    })
-
-    describe('when the web API returns an error', function () {
-      beforeEach(function () {
-        this.request.get = sinon
-          .stub()
-          .callsArgWith(
-            1,
-            (this.error = new Error('something went wrong')),
-            null,
-            null
-          )
-        return this.WebApiManager.getUserInfo(this.user_id, this.callback)
-      })
-
-      return it('should return an error to the callback', function () {
-        return this.callback.calledWith(this.error).should.equal(true)
-      })
-    })
-
-    describe('when the web returns a failure error code', function () {
-      beforeEach(function () {
-        this.request.get = sinon
-          .stub()
-          .callsArgWith(1, null, { statusCode: 500, attempts: 42 }, '')
-        return this.WebApiManager.getUserInfo(this.user_id, this.callback)
-      })
-
-      return it('should return the callback with an error', function () {
-        return this.callback
-          .calledWith(
-            sinon.match.has(
-              'message',
-              'web returned a non-success status code: 500 (attempts: 42)'
-            )
-          )
-          .should.equal(true)
-      })
-    })
-
-    return describe('when the user cannot be found', function () {
-      beforeEach(function () {
-        this.request.get = sinon
-          .stub()
-          .callsArgWith(1, null, { statusCode: 404 }, 'nothing')
-        return this.WebApiManager.getUserInfo(this.user_id, this.callback)
-      })
-
-      return it('should return a null value', function () {
-        return this.callback.calledWith(null, null).should.equal(true)
-      })
    -    })
-  })
-
-  return describe('getProjectDetails', function () {
-    describe('successfully', function () {
-      beforeEach(function () {
-        this.body = JSON.stringify(this.project)
-        this.request.get = sinon
-          .stub()
-
.callsArgWith(1, null, { statusCode: 200 }, this.body) - return this.WebApiManager.getProjectDetails( - this.project_id, - this.callback - ) - }) - - it('should get the project from the web api', function () { - return this.request.get - .calledWithMatch({ - url: `${this.settings.apis.web.url}/project/${this.project_id}/details`, - auth: { - user: this.settings.apis.web.user, - pass: this.settings.apis.web.pass, - sendImmediately: true, - }, - }) - .should.equal(true) - }) - - return it('should call the callback with the project', function () { - return this.callback.calledWith(null, this.project).should.equal(true) - }) - }) - - describe('when the web API returns an error', function () { - beforeEach(function () { - this.request.get = sinon - .stub() - .callsArgWith( - 1, - (this.error = new Error('something went wrong')), - null, - null - ) - return this.WebApiManager.getProjectDetails( - this.project_id, - this.callback - ) - }) - - return it('should return an error to the callback', function () { - return this.callback.calledWith(this.error).should.equal(true) - }) - }) - - return describe('when the web returns a failure error code', function () { - beforeEach(function () { - this.request.get = sinon - .stub() - .callsArgWith(1, null, { statusCode: 500, attempts: 42 }, '') - return this.WebApiManager.getProjectDetails( - this.project_id, - this.callback - ) - }) - - return it('should return the callback with an error', function () { - return this.callback - .calledWith( - sinon.match.has( - 'message', - 'web returned a non-success status code: 500 (attempts: 42)' - ) - ) - .should.equal(true) - }) - }) - }) -})