Merge pull request #13327 from overleaf/msm-web-track-changes-cleanup

[web] Cleanup track-changes from web

GitOrigin-RevId: 8cef709ec5e91e4ffe8cd06826038ed84f36ef67
Miguel Serrano 2023-06-07 13:10:02 +02:00 committed by Copybot
parent 0b864be76f
commit 7e6613a1a0
38 changed files with 44 additions and 4955 deletions

package-lock.json (generated)

@@ -379,6 +379,7 @@
"version": "3.0.0"
},
"libraries/stream-utils": {
"name": "@overleaf/stream-utils",
"version": "0.1.0",
"license": "AGPL-3.0-only",
"devDependencies": {
@@ -14620,18 +14621,6 @@
"node": "*"
}
},
"node_modules/binary": {
"version": "0.3.0",
"resolved": "https://registry.npmjs.org/binary/-/binary-0.3.0.tgz",
"integrity": "sha1-n2BVO8XOjDOG87VTz/R0Yq3sqnk=",
"dependencies": {
"buffers": "~0.1.1",
"chainsaw": "~0.1.0"
},
"engines": {
"node": "*"
}
},
"node_modules/binary-extensions": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz",
@@ -15036,14 +15025,6 @@
"resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz",
"integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ=="
},
"node_modules/buffer-indexof-polyfill": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/buffer-indexof-polyfill/-/buffer-indexof-polyfill-1.0.2.tgz",
"integrity": "sha512-I7wzHwA3t1/lwXQh+A5PbNvJxgfo5r3xulgpYDB5zckTu/Z9oUK9biouBKQUjEqzaz3HnAT6TYoovmE+GqSf7A==",
"engines": {
"node": ">=0.10"
}
},
"node_modules/buffer-writer": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/buffer-writer/-/buffer-writer-2.0.0.tgz",
@@ -15611,17 +15592,6 @@
"node": ">=4"
}
},
"node_modules/chainsaw": {
"version": "0.1.0",
"resolved": "https://registry.npmjs.org/chainsaw/-/chainsaw-0.1.0.tgz",
"integrity": "sha1-XqtQsor+WAdNDVgpE4iCi15fvJg=",
"dependencies": {
"traverse": ">=0.3.0 <0.4"
},
"engines": {
"node": "*"
}
},
"node_modules/chalk": {
"version": "2.4.2",
"resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz",
@@ -19247,41 +19217,6 @@
"node": ">=0.10"
}
},
"node_modules/duplexer2": {
"version": "0.1.4",
"resolved": "https://registry.npmjs.org/duplexer2/-/duplexer2-0.1.4.tgz",
"integrity": "sha1-ixLauHjA1p4+eJEFFmKjL8a93ME=",
"dependencies": {
"readable-stream": "^2.0.2"
}
},
"node_modules/duplexer2/node_modules/isarray": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz",
"integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE="
},
"node_modules/duplexer2/node_modules/readable-stream": {
"version": "2.3.7",
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz",
"integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==",
"dependencies": {
"core-util-is": "~1.0.0",
"inherits": "~2.0.3",
"isarray": "~1.0.0",
"process-nextick-args": "~2.0.0",
"safe-buffer": "~5.1.1",
"string_decoder": "~1.1.1",
"util-deprecate": "~1.0.1"
}
},
"node_modules/duplexer2/node_modules/string_decoder": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz",
"integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==",
"dependencies": {
"safe-buffer": "~5.1.0"
}
},
"node_modules/duplexify": {
"version": "3.7.1",
"resolved": "https://registry.npmjs.org/duplexify/-/duplexify-3.7.1.tgz",
@@ -22682,20 +22617,6 @@
"node": "^8.16.0 || ^10.6.0 || >=11.0.0"
}
},
"node_modules/fstream": {
"version": "1.0.12",
"resolved": "https://registry.npmjs.org/fstream/-/fstream-1.0.12.tgz",
"integrity": "sha512-WvJ193OHa0GHPEL+AycEJgxvBEwyfRkN1vhjca23OaPVMCaLCXTd5qAu82AjTcgP1UJmytkOKb63Ypde7raDIg==",
"dependencies": {
"graceful-fs": "^4.1.2",
"inherits": "~2.0.0",
"mkdirp": ">=0.5 0",
"rimraf": "2"
},
"engines": {
"node": ">=0.6"
}
},
"node_modules/function-bind": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz",
@@ -27720,11 +27641,6 @@
"node": ">=0.10.0"
}
},
"node_modules/listenercount": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/listenercount/-/listenercount-1.0.1.tgz",
"integrity": "sha1-hMinKrWcRyUyFIDJdeZQg0LnCTc="
},
"node_modules/listr2": {
"version": "3.14.0",
"resolved": "https://registry.npmjs.org/listr2/-/listr2-3.14.0.tgz",
@@ -35128,11 +35044,6 @@
"node": ">=0.10.0"
}
},
"node_modules/setimmediate": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz",
"integrity": "sha1-KQy7Iy4waULX1+qbg3Mqt4VvgoU="
},
"node_modules/setprototypeof": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz",
@@ -37594,14 +37505,6 @@
"punycode": "^2.1.0"
}
},
"node_modules/traverse": {
"version": "0.3.9",
"resolved": "https://registry.npmjs.org/traverse/-/traverse-0.3.9.tgz",
"integrity": "sha1-cXuPIgzAu3tE5AUUwisui7xw2Lk=",
"engines": {
"node": "*"
}
},
"node_modules/ts-dedent": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/ts-dedent/-/ts-dedent-2.2.0.tgz",
@@ -38059,55 +37962,6 @@
"node": ">=8"
}
},
"node_modules/unzipper": {
"version": "0.10.11",
"resolved": "https://registry.npmjs.org/unzipper/-/unzipper-0.10.11.tgz",
"integrity": "sha512-+BrAq2oFqWod5IESRjL3S8baohbevGcVA+teAIOYWM3pDVdseogqbzhhvvmiyQrUNKFUnDMtELW3X8ykbyDCJw==",
"dependencies": {
"big-integer": "^1.6.17",
"binary": "~0.3.0",
"bluebird": "~3.4.1",
"buffer-indexof-polyfill": "~1.0.0",
"duplexer2": "~0.1.4",
"fstream": "^1.0.12",
"graceful-fs": "^4.2.2",
"listenercount": "~1.0.1",
"readable-stream": "~2.3.6",
"setimmediate": "~1.0.4"
}
},
"node_modules/unzipper/node_modules/bluebird": {
"version": "3.4.7",
"resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.4.7.tgz",
"integrity": "sha1-9y12C+Cbf3bQjtj66Ysomo0F+rM="
},
"node_modules/unzipper/node_modules/isarray": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz",
"integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE="
},
"node_modules/unzipper/node_modules/readable-stream": {
"version": "2.3.7",
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz",
"integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==",
"dependencies": {
"core-util-is": "~1.0.0",
"inherits": "~2.0.3",
"isarray": "~1.0.0",
"process-nextick-args": "~2.0.0",
"safe-buffer": "~5.1.1",
"string_decoder": "~1.1.1",
"util-deprecate": "~1.0.1"
}
},
"node_modules/unzipper/node_modules/string_decoder": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz",
"integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==",
"dependencies": {
"safe-buffer": "~5.1.0"
}
},
"node_modules/update-browserslist-db": {
"version": "1.0.11",
"resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.11.tgz",
@@ -41792,7 +41646,6 @@
"scroll-into-view-if-needed": "^2.2.25",
"tsscmp": "^1.0.6",
"underscore": "^1.13.1",
"unzipper": "^0.10.11",
"utf-8-validate": "^5.0.2",
"uuid": "^3.0.1",
"valid-data-url": "^2.0.0",
@@ -50588,7 +50441,6 @@
"tsscmp": "^1.0.6",
"typescript": "^4.5.5",
"underscore": "^1.13.1",
"unzipper": "^0.10.11",
"utf-8-validate": "^5.0.2",
"uuid": "^3.0.1",
"val-loader": "^5.0.1",
@@ -56100,15 +55952,6 @@
"resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.2.tgz",
"integrity": "sha512-GAcQvbpsM0pUb0zw1EI0KhQEZ+lRwR5fYaAp3vPOYuP7aDvGy6cVN6XHLauvF8SOga2y0dcLcjt3iQDTSEliyw=="
},
"binary": {
"version": "0.3.0",
"resolved": "https://registry.npmjs.org/binary/-/binary-0.3.0.tgz",
"integrity": "sha1-n2BVO8XOjDOG87VTz/R0Yq3sqnk=",
"requires": {
"buffers": "~0.1.1",
"chainsaw": "~0.1.0"
}
},
"binary-extensions": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz",
@@ -56444,11 +56287,6 @@
"resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz",
"integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ=="
},
"buffer-indexof-polyfill": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/buffer-indexof-polyfill/-/buffer-indexof-polyfill-1.0.2.tgz",
"integrity": "sha512-I7wzHwA3t1/lwXQh+A5PbNvJxgfo5r3xulgpYDB5zckTu/Z9oUK9biouBKQUjEqzaz3HnAT6TYoovmE+GqSf7A=="
},
"buffer-writer": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/buffer-writer/-/buffer-writer-2.0.0.tgz",
@@ -56881,14 +56719,6 @@
"superagent": "^3.7.0"
}
},
"chainsaw": {
"version": "0.1.0",
"resolved": "https://registry.npmjs.org/chainsaw/-/chainsaw-0.1.0.tgz",
"integrity": "sha1-XqtQsor+WAdNDVgpE4iCi15fvJg=",
"requires": {
"traverse": ">=0.3.0 <0.4"
}
},
"chalk": {
"version": "2.4.2",
"resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz",
@@ -59543,43 +59373,6 @@
"nan": "^2.14.0"
}
},
"duplexer2": {
"version": "0.1.4",
"resolved": "https://registry.npmjs.org/duplexer2/-/duplexer2-0.1.4.tgz",
"integrity": "sha1-ixLauHjA1p4+eJEFFmKjL8a93ME=",
"requires": {
"readable-stream": "^2.0.2"
},
"dependencies": {
"isarray": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz",
"integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE="
},
"readable-stream": {
"version": "2.3.7",
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz",
"integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==",
"requires": {
"core-util-is": "~1.0.0",
"inherits": "~2.0.3",
"isarray": "~1.0.0",
"process-nextick-args": "~2.0.0",
"safe-buffer": "~5.1.1",
"string_decoder": "~1.1.1",
"util-deprecate": "~1.0.1"
}
},
"string_decoder": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz",
"integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==",
"requires": {
"safe-buffer": "~5.1.0"
}
}
}
},
"duplexify": {
"version": "3.7.1",
"resolved": "https://registry.npmjs.org/duplexify/-/duplexify-3.7.1.tgz",
@@ -62206,17 +61999,6 @@
"integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==",
"optional": true
},
"fstream": {
"version": "1.0.12",
"resolved": "https://registry.npmjs.org/fstream/-/fstream-1.0.12.tgz",
"integrity": "sha512-WvJ193OHa0GHPEL+AycEJgxvBEwyfRkN1vhjca23OaPVMCaLCXTd5qAu82AjTcgP1UJmytkOKb63Ypde7raDIg==",
"requires": {
"graceful-fs": "^4.1.2",
"inherits": "~2.0.0",
"mkdirp": ">=0.5 0",
"rimraf": "2"
}
},
"function-bind": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz",
@@ -66088,11 +65870,6 @@
"repeat-string": "^1.5.2"
}
},
"listenercount": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/listenercount/-/listenercount-1.0.1.tgz",
"integrity": "sha1-hMinKrWcRyUyFIDJdeZQg0LnCTc="
},
"listr2": {
"version": "3.14.0",
"resolved": "https://registry.npmjs.org/listr2/-/listr2-3.14.0.tgz",
@@ -72108,11 +71885,6 @@
"to-object-path": "^0.3.0"
}
},
"setimmediate": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz",
"integrity": "sha1-KQy7Iy4waULX1+qbg3Mqt4VvgoU="
},
"setprototypeof": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz",
@@ -74077,11 +73849,6 @@
"punycode": "^2.1.0"
}
},
"traverse": {
"version": "0.3.9",
"resolved": "https://registry.npmjs.org/traverse/-/traverse-0.3.9.tgz",
"integrity": "sha1-cXuPIgzAu3tE5AUUwisui7xw2Lk="
},
"ts-dedent": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/ts-dedent/-/ts-dedent-2.2.0.tgz",
@@ -74423,57 +74190,6 @@
"resolved": "https://registry.npmjs.org/untildify/-/untildify-4.0.0.tgz",
"integrity": "sha512-KK8xQ1mkzZeg9inewmFVDNkg3l5LUhoq9kN6iWYB/CC9YMG8HA+c1Q8HwDe6dEX7kErrEVNVBO3fWsVq5iDgtw=="
},
"unzipper": {
"version": "0.10.11",
"resolved": "https://registry.npmjs.org/unzipper/-/unzipper-0.10.11.tgz",
"integrity": "sha512-+BrAq2oFqWod5IESRjL3S8baohbevGcVA+teAIOYWM3pDVdseogqbzhhvvmiyQrUNKFUnDMtELW3X8ykbyDCJw==",
"requires": {
"big-integer": "^1.6.17",
"binary": "~0.3.0",
"bluebird": "~3.4.1",
"buffer-indexof-polyfill": "~1.0.0",
"duplexer2": "~0.1.4",
"fstream": "^1.0.12",
"graceful-fs": "^4.2.2",
"listenercount": "~1.0.1",
"readable-stream": "~2.3.6",
"setimmediate": "~1.0.4"
},
"dependencies": {
"bluebird": {
"version": "3.4.7",
"resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.4.7.tgz",
"integrity": "sha1-9y12C+Cbf3bQjtj66Ysomo0F+rM="
},
"isarray": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz",
"integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE="
},
"readable-stream": {
"version": "2.3.7",
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz",
"integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==",
"requires": {
"core-util-is": "~1.0.0",
"inherits": "~2.0.3",
"isarray": "~1.0.0",
"process-nextick-args": "~2.0.0",
"safe-buffer": "~5.1.1",
"string_decoder": "~1.1.1",
"util-deprecate": "~1.0.1"
}
},
"string_decoder": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz",
"integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==",
"requires": {
"safe-buffer": "~5.1.0"
}
}
}
},
"update-browserslist-db": {
"version": "1.0.11",
"resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.11.tgz",


@@ -88,9 +88,6 @@ cypress/videos/
cypress/downloads/
cypress/results/
# Test fixture zip
!modules/history-migration/test/unit/src/data/track-changes-project.zip
# Ace themes for conversion
frontend/js/features/source-editor/themes/ace/


@@ -51,21 +51,11 @@ function getDocument(req, res, next) {
plainTextResponse(res, lines.join('\n'))
} else {
const projectHistoryId = _.get(project, 'overleaf.history.id')
const projectHistoryDisplay = _.get(
project,
'overleaf.history.display'
)
const sendToBothHistorySystems = _.get(
project,
'overleaf.history.allowDowngrade'
)
// if project has been switched but has 'allowDowngrade' set
// then leave projectHistoryType undefined to (temporarily)
// continue sending updates to both SL and full project history
const projectHistoryType =
projectHistoryDisplay && !sendToBothHistorySystems
? 'project-history'
: undefined // for backwards compatibility, don't send anything if the project is still on track-changes
// all projects are now migrated to Full Project History; the field is kept
// for API compatibility
const projectHistoryType = 'project-history'
res.json({
lines,
version,


@@ -16,35 +16,9 @@ const { prepareZipAttachment } = require('../../infrastructure/Response')
const Features = require('../../infrastructure/Features')
module.exports = HistoryController = {
selectHistoryApi(req, res, next) {
const { Project_id: projectId } = req.params
// find out which type of history service this project uses
ProjectDetailsHandler.getDetails(projectId, function (err, project) {
if (err) {
return next(err)
}
const history = project.overleaf && project.overleaf.history
if (history && history.id && history.display) {
req.useProjectHistory = true
} else {
req.useProjectHistory = false
}
next()
})
},
ensureProjectHistoryEnabled(req, res, next) {
if (req.useProjectHistory) {
next()
} else {
res.sendStatus(404)
}
},
proxyToHistoryApi(req, res, next) {
const userId = SessionManager.getLoggedInUserId(req.session)
const url =
HistoryController.buildHistoryServiceUrl(req.useProjectHistory) + req.url
const url = settings.apis.project_history.url + req.url
const getReq = request({
url,
@@ -65,8 +39,7 @@ module.exports = HistoryController = {
proxyToHistoryApiAndInjectUserDetails(req, res, next) {
const userId = SessionManager.getLoggedInUserId(req.session)
const url =
HistoryController.buildHistoryServiceUrl(req.useProjectHistory) + req.url
const url = settings.apis.project_history.url + req.url
HistoryController._makeRequest(
{
url,
@@ -90,16 +63,6 @@
)
},
buildHistoryServiceUrl(useProjectHistory) {
// choose a history service, either document-level (trackchanges)
// or project-level (project_history)
if (useProjectHistory) {
return settings.apis.project_history.url
} else {
return settings.apis.trackchanges.url
}
},
resyncProjectHistory(req, res, next) {
// increase timeout to 6 minutes
res.setTimeout(6 * 60 * 1000)
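
With the trackchanges fallback removed, both proxy helpers build the target URL the same way. A minimal sketch of the resulting middleware, assuming the request, settings and SessionManager references already in scope in this file (header forwarding and user-details injection elided):

proxyToHistoryApi(req, res, next) {
  const userId = SessionManager.getLoggedInUserId(req.session) // forwarded to the history service; wiring elided
  const url = settings.apis.project_history.url + req.url
  const getReq = request({ url, method: req.method })
  getReq.pipe(res)
  getReq.on('error', err => next(err))
},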


@@ -5,14 +5,6 @@ const OError = require('@overleaf/o-error')
const UserGetter = require('../User/UserGetter')
async function initializeProject(projectId) {
if (
!(
settings.apis.project_history &&
settings.apis.project_history.initializeHistoryForNewProjects
)
) {
return null
}
const response = await fetch(`${settings.apis.project_history.url}/project`, {
method: 'POST',
headers: {


@@ -1064,10 +1064,6 @@ const ProjectController = {
editorThemes: THEME_LIST,
legacyEditorThemes: LEGACY_THEME_LIST,
maxDocLength: Settings.max_doc_length,
useV2History:
project.overleaf &&
project.overleaf.history &&
Boolean(project.overleaf.history.display),
brandVariation,
allowedImageNames,
gitBridgePublicBaseUrl: Settings.gitBridgePublicBaseUrl,


@@ -169,15 +169,10 @@ async function _createBlankProject(
}
}
// only display full project history when the project has the overleaf history id attribute
// (to allow scripted creation of projects without full project history)
const historyId = project.overleaf.history.id
if (
Settings.apis.project_history.displayHistoryForNewProjects &&
historyId != null
) {
project.overleaf.history.display = true
}
// All projects are initialised with Full Project History. This property is
// still set for backwards compatibility: since SP 4.0, Server Pro requires
// all projects to have it set to `true`.
project.overleaf.history.display = true
if (Settings.currentImageName) {
// avoid clobbering any imageName already set in attributes (e.g. importedImageName)


@@ -35,73 +35,6 @@ const ProjectHistoryHandler = {
})
},
unsetHistory(projectId, callback) {
Project.updateOne(
{ _id: projectId },
{ $unset: { 'overleaf.history': true } },
callback
)
},
upgradeHistory(projectId, allowDowngrade, callback) {
// project must have an overleaf.history.id before allowing display of new history
Project.updateOne(
{ _id: projectId, 'overleaf.history.id': { $exists: true } },
{
'overleaf.history.display': true,
'overleaf.history.upgradedAt': new Date(),
'overleaf.history.allowDowngrade': allowDowngrade,
},
function (err, result) {
if (err) {
return callback(err)
}
// return an error if overleaf.history.id wasn't present
if (result.matchedCount === 0) {
return callback(new Error('history not upgraded'))
}
callback()
}
)
},
downgradeHistory(projectId, callback) {
Project.updateOne(
{ _id: projectId, 'overleaf.history.upgradedAt': { $exists: true } },
{
'overleaf.history.display': false,
$unset: { 'overleaf.history.upgradedAt': 1 },
},
function (err, result) {
if (err) {
return callback(err)
}
if (result.matchedCount === 0) {
return callback(new Error('history not downgraded'))
}
callback()
}
)
},
setMigrationArchiveFlag(projectId, callback) {
Project.updateOne(
{ _id: projectId, version: { $exists: true } },
{
'overleaf.history.zipFileArchivedInProject': true,
},
function (err, result) {
if (err) {
return callback(err)
}
if (result.matchedCount === 0) {
return callback(new Error('migration flag not set'))
}
callback()
}
)
},
ensureHistoryExistsForProject(projectId, callback) {
// We can only set a history id for a project that doesn't have one. The
// history id is cached in the project history service, and changing an


@@ -711,34 +711,29 @@ function initialize(webRouter, privateApiRouter, publicApiRouter) {
'/project/:Project_id/updates',
AuthorizationMiddleware.blockRestrictedUserFromProject,
AuthorizationMiddleware.ensureUserCanReadProject,
HistoryController.selectHistoryApi,
HistoryController.proxyToHistoryApiAndInjectUserDetails
)
webRouter.get(
'/project/:Project_id/doc/:doc_id/diff',
AuthorizationMiddleware.blockRestrictedUserFromProject,
AuthorizationMiddleware.ensureUserCanReadProject,
HistoryController.selectHistoryApi,
HistoryController.proxyToHistoryApi
)
webRouter.get(
'/project/:Project_id/diff',
AuthorizationMiddleware.blockRestrictedUserFromProject,
AuthorizationMiddleware.ensureUserCanReadProject,
HistoryController.selectHistoryApi,
HistoryController.proxyToHistoryApiAndInjectUserDetails
)
webRouter.get(
'/project/:Project_id/filetree/diff',
AuthorizationMiddleware.blockRestrictedUserFromProject,
AuthorizationMiddleware.ensureUserCanReadProject,
HistoryController.selectHistoryApi,
HistoryController.proxyToHistoryApi
)
webRouter.post(
'/project/:Project_id/doc/:doc_id/version/:version_id/restore',
AuthorizationMiddleware.ensureUserCanWriteProjectContent,
HistoryController.selectHistoryApi,
HistoryController.proxyToHistoryApi
)
webRouter.post(
@@ -768,22 +763,16 @@ function initialize(webRouter, privateApiRouter, publicApiRouter) {
'/project/:Project_id/labels',
AuthorizationMiddleware.blockRestrictedUserFromProject,
AuthorizationMiddleware.ensureUserCanReadProject,
HistoryController.selectHistoryApi,
HistoryController.ensureProjectHistoryEnabled,
HistoryController.getLabels
)
webRouter.post(
'/project/:Project_id/labels',
AuthorizationMiddleware.ensureUserCanWriteProjectContent,
HistoryController.selectHistoryApi,
HistoryController.ensureProjectHistoryEnabled,
HistoryController.createLabel
)
webRouter.delete(
'/project/:Project_id/labels/:label_id',
AuthorizationMiddleware.ensureUserCanWriteProjectContent,
HistoryController.selectHistoryApi,
HistoryController.ensureProjectHistoryEnabled,
HistoryController.deleteLabel
)


@@ -1,4 +1,3 @@
meta(name="ol-useV2History" data-type="boolean" content=useV2History)
meta(name="ol-project_id" content=project_id)
meta(name="ol-projectName" content=projectName)
meta(name="ol-userSettings" data-type="json" content=userSettings)


@@ -186,9 +186,6 @@ module.exports = {
url: `http://${process.env.SPELLING_HOST || 'localhost'}:3005`,
host: process.env.SPELLING_HOST,
},
trackchanges: {
url: `http://${process.env.TRACK_CHANGES_HOST || 'localhost'}:3015`,
},
docstore: {
url: `http://${process.env.DOCSTORE_HOST || 'localhost'}:3016`,
pubUrl: `http://${process.env.DOCSTORE_HOST || 'localhost'}:3016`,
@@ -207,8 +204,6 @@
},
project_history: {
sendProjectStructureOps: true,
initializeHistoryForNewProjects: true,
displayHistoryForNewProjects: true,
url: `http://${process.env.PROJECT_HISTORY_HOST || 'localhost'}:3054`,
},
realTime: {
@@ -810,12 +805,7 @@
oauth2Server: [],
},
moduleImportSequence: [
'launchpad',
'server-ce-scripts',
'user-activate',
'history-migration',
],
moduleImportSequence: ['launchpad', 'server-ce-scripts', 'user-activate'],
csp: {
enabled: process.env.CSP_ENABLED === 'true',


@@ -22,7 +22,6 @@ import LoadingManager from './ide/LoadingManager'
import ConnectionManager from './ide/connection/ConnectionManager'
import EditorManager from './ide/editor/EditorManager'
import OnlineUsersManager from './ide/online-users/OnlineUsersManager'
import HistoryManager from './ide/history/HistoryManager'
import HistoryV2Manager from './ide/history/HistoryV2Manager'
import PermissionsManager from './ide/permissions/PermissionsManager'
import BinaryFilesManager from './ide/binary-files/BinaryFilesManager'
@@ -213,11 +212,7 @@
eventTracking
)
ide.onlineUsersManager = new OnlineUsersManager(ide, $scope)
if (window.data.useV2History) {
ide.historyManager = new HistoryV2Manager(ide, $scope, localStorage)
} else {
ide.historyManager = new HistoryManager(ide, $scope)
}
ide.historyManager = new HistoryV2Manager(ide, $scope, localStorage)
ide.permissionsManager = new PermissionsManager(ide, $scope)
ide.binaryFilesManager = new BinaryFilesManager(ide, $scope)
ide.metadataManager = new MetadataManager(ide, $scope, metadata)


@@ -1,394 +0,0 @@
const { ObjectId } = require('mongodb')
const {
db,
READ_PREFERENCE_SECONDARY,
} = require('../../../../app/src/infrastructure/mongodb')
const Settings = require('@overleaf/settings')
const ProjectHistoryHandler = require('../../../../app/src/Features/Project/ProjectHistoryHandler')
const HistoryManager = require('../../../../app/src/Features/History/HistoryManager')
const ProjectHistoryController = require('./ProjectHistoryController')
const ProjectEntityHandler = require('../../../../app/src/Features/Project/ProjectEntityHandler')
const ProjectEntityUpdateHandler = require('../../../../app/src/Features/Project/ProjectEntityUpdateHandler')
const DocumentUpdaterHandler = require('../../../../app/src/Features/DocumentUpdater/DocumentUpdaterHandler')
// Timestamp of the 'Enable history for SL in background' release
const ID_WHEN_FULL_PROJECT_HISTORY_ENABLED =
Settings.apis.project_history?.idWhenFullProjectHistoryEnabled // was '5a8d8a370000000000000000'
const DATETIME_WHEN_FULL_PROJECT_HISTORY_ENABLED =
ID_WHEN_FULL_PROJECT_HISTORY_ENABLED
? new ObjectId(ID_WHEN_FULL_PROJECT_HISTORY_ENABLED).getTimestamp()
: null
async function countProjects(query = {}) {
const count = await db.projects.countDocuments(query)
return count
}
async function countDocHistory(query = {}) {
const count = await db.docHistory.countDocuments(query)
return count
}
async function findProjects(query = {}, projection = {}) {
const projects = await db.projects.find(query).project(projection).toArray()
return projects
}
async function determineProjectHistoryType(project) {
if (project.overleaf && project.overleaf.history) {
if (project.overleaf.history.upgradeFailed) {
return 'UpgradeFailed'
}
if (project.overleaf.history.conversionFailed) {
return 'ConversionFailed'
}
}
if (
project.overleaf &&
project.overleaf.history &&
project.overleaf.history.id
) {
if (project.overleaf.history.display) {
// v2: full project history, do nothing
return 'V2'
} else {
if (projectCreatedAfterFullProjectHistoryEnabled(project)) {
// IF project initialised after full project history enabled for all projects
// THEN project history should contain all information we need, without intervention
return 'V1WithoutConversion'
} else {
// ELSE SL history may predate full project history
// THEN delete full project history and convert their SL history to full project history
// --
// TODO: how to verify this, can get rough start date of SL history, but not full project history
const preserveHistory = await shouldPreserveHistory(project)
const anyDocHistory = await anyDocHistoryExists(project)
const anyDocHistoryIndex = await anyDocHistoryIndexExists(project)
if (preserveHistory) {
if (anyDocHistory || anyDocHistoryIndex) {
// if SL history exists that we need to preserve, then we must convert
return 'V1WithConversion'
} else {
// otherwise just upgrade without conversion
return 'V1WithoutConversion'
}
} else {
// if preserveHistory false, then max 7 days of SL history
// but v1 already record to both histories, so safe to upgrade
return 'V1WithoutConversion'
}
}
}
} else {
const preserveHistory = await shouldPreserveHistory(project)
const anyDocHistory = await anyDocHistoryExists(project)
const anyDocHistoryIndex = await anyDocHistoryIndexExists(project)
if (anyDocHistory || anyDocHistoryIndex) {
// IF there is SL history ->
if (preserveHistory) {
// that needs to be preserved:
// THEN initialise full project history and convert SL history to full project history
return 'NoneWithConversion'
} else {
return 'NoneWithTemporaryHistory'
}
} else {
// ELSE there is not any SL history ->
// THEN initialise full project history and sync with current content
return 'NoneWithoutConversion'
}
}
}
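// Summary of the decision tree above:
//   overleaf.history.upgradeFailed            -> 'UpgradeFailed'
//   overleaf.history.conversionFailed         -> 'ConversionFailed'
//   history id + display flag                 -> 'V2' (already on full project history)
//   history id, created after the cutover     -> 'V1WithoutConversion'
//   history id, older, SL history to preserve -> 'V1WithConversion'
//   no history id, SL history to preserve     -> 'NoneWithConversion'
//   no history id, SL history, not preserved  -> 'NoneWithTemporaryHistory'
//   no history id, no SL history              -> 'NoneWithoutConversion'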
async function upgradeProject(project, options = {}) {
const historyType = await determineProjectHistoryType(project)
if (historyType === 'V2') {
return { historyType, upgraded: true }
}
const upgradeFn = getUpgradeFunctionForType(historyType)
if (!upgradeFn) {
return { error: 'unsupported history type' }
}
if (options.forceClean) {
try {
const projectId = project._id
// delete any existing history stored in the mongo backend
await HistoryManager.promises.deleteProject(projectId, projectId)
// unset overleaf.history.id to prevent the migration script from failing on checks
await db.projects.updateOne(
{ _id: projectId },
{ $unset: { 'overleaf.history.id': '' } }
)
} catch (err) {
// failed to delete existing history, but we can try to continue
}
}
const result = await upgradeFn(project, options)
result.historyType = historyType
return result
}
// Do upgrades/conversion:
function getUpgradeFunctionForType(historyType) {
return UpgradeFunctionMapping[historyType]
}
const UpgradeFunctionMapping = {
NoneWithoutConversion: doUpgradeForNoneWithoutConversion,
UpgradeFailed: doUpgradeForNoneWithoutConversion,
ConversionFailed: doUpgradeForNoneWithConversion,
V1WithoutConversion: doUpgradeForV1WithoutConversion,
V1WithConversion: doUpgradeForV1WithConversion,
NoneWithConversion: doUpgradeForNoneWithConversion,
NoneWithTemporaryHistory: doUpgradeForNoneWithConversion,
}
async function doUpgradeForV1WithoutConversion(project) {
await db.projects.updateOne(
{ _id: project._id },
{
$set: {
'overleaf.history.display': true,
'overleaf.history.upgradedAt': new Date(),
'overleaf.history.upgradeReason': `v1-without-sl-history`,
},
}
)
return { upgraded: true }
}
async function doUpgradeForV1WithConversion(project) {
const result = {}
const projectId = project._id
// migrateProjectHistory expects project id as a string
const projectIdString = project._id.toString()
try {
// We treat these essentially as None projects, the V1 history is irrelevant,
// so we will delete it, and do a conversion as if we're a None project
await ProjectHistoryController.deleteProjectHistory(projectIdString)
await ProjectHistoryController.migrateProjectHistory(projectIdString)
} catch (err) {
// if migrateProjectHistory fails, it cleans up by deleting
// the history and unsetting the history id
// therefore a failed project will still look like a 'None with conversion' project
result.error = err
await db.projects.updateOne(
{ _id: projectId },
{
$set: {
'overleaf.history.conversionFailed': true,
},
}
)
return result
}
await db.projects.updateOne(
{ _id: projectId },
{
$set: {
'overleaf.history.upgradeReason': `v1-with-conversion`,
},
$unset: {
'overleaf.history.upgradeFailed': true,
'overleaf.history.conversionFailed': true,
},
}
)
result.upgraded = true
return result
}
async function doUpgradeForNoneWithoutConversion(project) {
const result = {}
const projectId = project._id
try {
// Logic originally from ProjectHistoryHandler.ensureHistoryExistsForProject
// However sends a force resync project to project history instead
// of a resync request to doc-updater
let historyId = await ProjectHistoryHandler.promises.getHistoryId(projectId)
if (historyId == null) {
historyId = await HistoryManager.promises.initializeProject(projectId)
if (historyId != null) {
await ProjectHistoryHandler.promises.setHistoryId(projectId, historyId)
}
}
// tell document updater to clear the docs, they will be reloaded with any new history id
await DocumentUpdaterHandler.promises.flushProjectToMongoAndDelete(
projectId
)
// now resync the project
await HistoryManager.promises.resyncProject(projectId, {
force: true,
origin: { kind: 'history-migration' },
})
await HistoryManager.promises.flushProject(projectId)
} catch (err) {
result.error = err
await db.projects.updateOne(
{ _id: project._id },
{
$set: {
'overleaf.history.upgradeFailed': true,
},
}
)
return result
}
await db.projects.updateOne(
{ _id: project._id },
{
$set: {
'overleaf.history.display': true,
'overleaf.history.upgradedAt': new Date(),
'overleaf.history.upgradeReason': `none-without-conversion`,
},
}
)
result.upgraded = true
return result
}
async function doUpgradeForNoneWithConversion(project, options = {}) {
const result = {}
const projectId = project._id
// migrateProjectHistory expects project id as a string
const projectIdString = project._id.toString()
try {
if (options.convertLargeDocsToFile) {
result.convertedDocCount = await convertLargeDocsToFile(
projectId,
options.userId
)
}
await ProjectHistoryController.migrateProjectHistory(
projectIdString,
options.migrationOptions
)
} catch (err) {
// if migrateProjectHistory fails, it cleans up by deleting
// the history and unsetting the history id
// therefore a failed project will still look like a 'None with conversion' project
result.error = err
// We set a failed flag so future runs of the script don't automatically retry
await db.projects.updateOne(
{ _id: projectId },
{
$set: {
'overleaf.history.conversionFailed': true,
},
}
)
return result
}
await db.projects.updateOne(
{ _id: projectId },
{
$set: {
'overleaf.history.upgradeReason':
`none-with-conversion` + (options.reason ? `/${options.reason}` : ``),
},
$unset: {
'overleaf.history.upgradeFailed': true,
'overleaf.history.conversionFailed': true,
},
}
)
result.upgraded = true
return result
}
// Util
function projectCreatedAfterFullProjectHistoryEnabled(project) {
if (DATETIME_WHEN_FULL_PROJECT_HISTORY_ENABLED == null) {
return false
} else {
return (
project._id.getTimestamp() >= DATETIME_WHEN_FULL_PROJECT_HISTORY_ENABLED
)
}
}
async function shouldPreserveHistory(project) {
return await db.projectHistoryMetaData.findOne(
{
$and: [
{ project_id: { $eq: project._id } },
{ preserveHistory: { $eq: true } },
],
},
{ readPreference: READ_PREFERENCE_SECONDARY }
)
}
async function anyDocHistoryExists(project) {
return await db.docHistory.findOne(
{ project_id: { $eq: project._id } },
{
projection: { _id: 1 },
readPreference: READ_PREFERENCE_SECONDARY,
}
)
}
async function anyDocHistoryIndexExists(project) {
return await db.docHistoryIndex.findOne(
{ project_id: { $eq: project._id } },
{
projection: { _id: 1 },
readPreference: READ_PREFERENCE_SECONDARY,
}
)
}
async function convertLargeDocsToFile(projectId, userId) {
const docs = await ProjectEntityHandler.promises.getAllDocs(projectId)
let convertedDocCount = 0
for (const doc of Object.values(docs)) {
const sizeBound = JSON.stringify(doc.lines).length // serialized length: a safe upper bound on character count
if (docIsTooLarge(sizeBound, doc.lines, Settings.max_doc_length)) {
await ProjectEntityUpdateHandler.promises.convertDocToFile(
projectId,
doc._id,
userId,
null
)
convertedDocCount++
}
}
return convertedDocCount
}
// check whether the total size of the document in characters exceeds the
// maxDocLength.
//
// Copied from document-updater:
// https://github.com/overleaf/internal/blob/74adfbebda5f3c2c37d9937f0db5c4106ecde492/services/document-updater/app/js/Limits.js#L18
function docIsTooLarge(estimatedSize, lines, maxDocLength) {
if (estimatedSize <= maxDocLength) {
return false // definitely under the limit, no need to calculate the total size
}
// calculate the total size, bailing out early if the size limit is reached
let size = 0
for (const line of lines) {
size += line.length + 1 // include the newline
if (size > maxDocLength) return true
}
// since we didn't hit the limit in the loop, the document is within the allowed length
return false
}
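// Quick illustration with hypothetical values: JSON.stringify(lines).length is
// always >= the raw character count, so a serialized size under the limit
// proves the document fits without summing every line.
//   docIsTooLarge(17, ['hello', 'world'], 100) // false: 17 <= 100, early exit
//   docIsTooLarge(17, ['hello', 'world'], 10)  // true: 6 + 6 = 12 > 10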
module.exports = {
countProjects,
countDocHistory,
findProjects,
determineProjectHistoryType,
getUpgradeFunctionForType,
upgradeProject,
convertLargeDocsToFile,
anyDocHistoryExists,
anyDocHistoryIndexExists,
doUpgradeForNoneWithConversion,
}


@@ -1 +0,0 @@
module.exports = {}


@@ -1,346 +0,0 @@
const sinon = require('sinon')
const nock = require('nock')
const { expect } = require('chai')
const fs = require('fs')
const path = require('path')
const SandboxedModule = require('sandboxed-module')
const { ObjectId } = require('mongodb')
const unzipper = require('unzipper')
const modulePath = '../../../app/src/ProjectHistoryController'
describe('ProjectHistoryController', function () {
const projectId = ObjectId('611bd20c5d76a3c1bd0c7c13')
const deletedFileId = ObjectId('60f6e92c6c14d84fb7a71ae1')
const historyId = 123
let clock
const now = new Date(Date.UTC(2021, 1, 1, 0, 0)).getTime()
before(async function () {
clock = sinon.useFakeTimers({
now,
shouldAdvanceTime: true,
})
})
after(function () {
// clock.runAll()
clock.restore()
})
beforeEach(function () {
this.db = {
users: {
countDocuments: sinon.stub().yields(),
},
}
this.project = {
_id: ObjectId('611bd20c5d76a3c1bd0c7c13'),
name: 'My Test Project',
rootDoc_id: ObjectId('611bd20c5d76a3c1bd0c7c15'),
rootFolder: [
{
_id: ObjectId('611bd20c5d76a3c1bd0c7c12'),
name: 'rootFolder',
folders: [
{
_id: ObjectId('611bd242e64281c13303d6b5'),
name: 'a folder',
folders: [
{
_id: ObjectId('611bd247e64281c13303d6b7'),
name: 'a subfolder',
folders: [],
fileRefs: [],
docs: [
{
_id: ObjectId('611bd24ee64281c13303d6b9'),
name: 'a renamed file in a subfolder.tex',
},
],
},
],
fileRefs: [],
docs: [],
},
{
_id: ObjectId('611bd34ee64281c13303d6be'),
name: 'images',
folders: [],
fileRefs: [
{
_id: ObjectId('611bd2bce64281c13303d6bb'),
name: 'overleaf-white.svg',
linkedFileData: {
provider: 'url',
url: 'https://cdn.overleaf.com/img/ol-brand/overleaf-white.svg',
},
created: '2021-08-17T15:16:12.753Z',
},
],
docs: [],
},
],
fileRefs: [
{
_id: ObjectId('611bd20c5d76a3c1bd0c7c19'),
name: 'universe.jpg',
linkedFileData: null,
created: '2021-08-17T15:13:16.400Z',
},
],
docs: [
{
_id: ObjectId('611bd20c5d76a3c1bd0c7c15'),
name: 'main.tex',
},
{
_id: ObjectId('611bd20c5d76a3c1bd0c7c17'),
name: 'references.bib',
},
],
},
],
compiler: 'pdflatex',
description: '',
deletedDocs: [],
members: [],
invites: [],
owner: {
_id: ObjectId('611572e24bff88527f61dccd'),
first_name: 'Test',
last_name: 'User',
email: 'test@example.com',
privileges: 'owner',
signUpDate: '2021-08-12T19:13:38.462Z',
},
features: {},
}
this.multi = {
del: sinon.stub(),
rpush: sinon.stub(),
exec: sinon.stub().yields(null, 1),
}
const { docs, folders } = this.project.rootFolder[0]
const allDocs = [...docs]
const processFolders = folders => {
for (const folder of folders) {
for (const doc of folder.docs) {
allDocs.push(doc)
}
if (folder.folders) {
processFolders(folder.folders)
}
}
}
processFolders(folders)
allDocs.forEach(doc => {
doc.lines = [`this is the contents of ${doc.name}`]
})
// handle Doc.find().lean().cursor()
this.findDocs = sinon.stub().returns({
lean: sinon.stub().returns({
cursor: sinon.stub().returns(allDocs),
}),
})
// handle await Doc.findOne().lean() - single result, no cursor required
this.findOneDoc = sinon.stub().callsFake(id => {
const result = allDocs.find(doc => {
return doc._id.toString() === id.toString()
})
return { lean: sinon.stub().resolves(result) }
})
this.deletedFiles = [
{
_id: deletedFileId,
name: 'testing.tex',
deletedAt: new Date(),
},
]
// handle DeletedFile.find().lean().cursor()
this.findDeletedFiles = sinon.stub().returns({
lean: sinon
.stub()
.returns({ cursor: sinon.stub().returns(this.deletedFiles) }),
})
this.ProjectGetter = {
promises: {
getProject: sinon.stub().resolves(this.project),
},
}
this.FileStoreHandler = {
_buildUrl: (projectId, fileId) =>
`http://filestore.test/${projectId}/${fileId}`,
}
this.ProjectHistoryHandler = {
promises: {
setHistoryId: sinon.stub(),
upgradeHistory: sinon.stub(),
},
}
this.ProjectEntityUpdateHandler = {
promises: {
resyncProjectHistory: sinon.stub(),
},
}
this.DocumentUpdaterHandler = {
promises: {
flushProjectToMongoAndDelete: sinon.stub(),
},
}
this.HistoryManager = {
promises: {
resyncProject: sinon.stub(),
flushProject: sinon.stub(),
initializeProject: sinon.stub().resolves(historyId),
},
}
this.settings = {
redis: {
project_history_migration: {
key_schema: {
projectHistoryOps({ projectId }) {
return `ProjectHistory:Ops:{${projectId}}` // NOTE: the extra braces are intentional
},
},
},
},
apis: {
documentupdater: {
url: 'http://document-updater',
},
trackchanges: {
url: 'http://track-changes',
},
project_history: {
url: 'http://project-history',
},
},
path: {
projectHistories: 'data/projectHistories',
},
}
this.ProjectHistoryController = SandboxedModule.require(modulePath, {
requires: {
'../../../../app/src/Features/Project/ProjectGetter':
this.ProjectGetter,
'../../../../app/src/Features/FileStore/FileStoreHandler':
this.FileStoreHandler,
'../../../../app/src/Features/Project/ProjectHistoryHandler':
this.ProjectHistoryHandler,
'../../../../app/src/Features/Project/ProjectUpdateHandler':
this.ProjectUpdateHandler,
'../../../../app/src/Features/Project/ProjectEntityUpdateHandler':
this.ProjectEntityUpdateHandler,
'../../../../app/src/Features/History/HistoryManager':
this.HistoryManager,
'../../../../app/src/Features/DocumentUpdater/DocumentUpdaterHandler':
this.DocumentUpdaterHandler,
'../../../../app/src/models/Doc': {
Doc: {
find: this.findDocs,
findOne: this.findOneDoc,
},
},
'../../../../app/src/models/DeletedFile': {
DeletedFile: {
find: this.findDeletedFiles,
},
},
'../../../../app/src/infrastructure/mongodb': {
db: this.db,
},
'../../../../app/src/infrastructure/Mongoose': {
Schema: {
ObjectId: sinon.stub(),
Types: {
Mixed: sinon.stub(),
},
},
},
'../../../../app/src/infrastructure/RedisWrapper': {
client: () => ({
multi: () => this.multi,
llen: sinon.stub().resolves(0),
}),
},
unzipper: {
Open: {
file: () =>
unzipper.Open.file(
path.join(__dirname, 'data/track-changes-project.zip')
),
},
},
'@overleaf/settings': this.settings,
},
})
})
afterEach(function () {
nock.cleanAll()
})
it('migrates a project history', async function () {
const readStream = fs.createReadStream(
path.join(__dirname, 'data/track-changes-project.zip')
)
nock(this.settings.apis.trackchanges.url)
.get(`/project/${projectId}/zip`)
.reply(200, readStream)
nock(this.settings.apis.project_history.url)
.post(`/project`)
.reply(200, { project: { id: historyId } })
await this.ProjectHistoryController.migrateProjectHistory(
projectId.toString(),
5
)
expect(this.multi.exec).to.have.been.calledOnce
expect(this.ProjectHistoryHandler.promises.setHistoryId).to.have.been
.calledOnce
// expect(this.ProjectEntityUpdateHandler.promises.resyncProjectHistory).to
// .have.been.calledOnce
expect(this.HistoryManager.promises.flushProject).to.have.been.calledTwice
expect(this.multi.rpush).to.have.callCount(12)
const args = this.multi.rpush.args
const snapshotPath = path.join(
__dirname,
'data/migrate-project-history.snapshot.json'
)
// const snapshot = JSON.stringify(args, null, 2)
// await fs.promises.writeFile(snapshotPath, snapshot)
const json = await fs.promises.readFile(snapshotPath, 'utf-8')
const expected = JSON.parse(json)
expect(args).to.deep.equal(expected)
})
})


@@ -1,50 +0,0 @@
[
[
"ProjectHistory:Ops:{611bd20c5d76a3c1bd0c7c13}",
"{\"file\":\"60f6e92c6c14d84fb7a71ae1\",\"pathname\":\"/_deleted/60f6e92c6c14d84fb7a71ae1/testing.tex\",\"meta\":{\"user_id\":null,\"ts\":\"2021-07-20T15:18:04.000Z\",\"origin\":{\"kind\":\"history-migration\"}},\"projectHistoryId\":123,\"url\":\"http://filestore.test/611bd20c5d76a3c1bd0c7c13/60f6e92c6c14d84fb7a71ae1\"}"
],
[
"ProjectHistory:Ops:{611bd20c5d76a3c1bd0c7c13}",
"{\"file\":\"60f6e92c6c14d84fb7a71ae1\",\"pathname\":\"/_deleted/60f6e92c6c14d84fb7a71ae1/testing.tex\",\"new_pathname\":\"\",\"meta\":{\"user_id\":null,\"ts\":\"2021-02-01T00:00:00.000Z\",\"origin\":{\"kind\":\"history-migration\"}},\"projectHistoryId\":123}"
],
[
"ProjectHistory:Ops:{611bd20c5d76a3c1bd0c7c13}",
"{\"doc\":\"611bd20c5d76a3c1bd0c7c15\",\"pathname\":\"/main.tex\",\"meta\":{\"user_id\":null,\"ts\":\"2021-08-17T15:13:16.000Z\",\"origin\":{\"kind\":\"history-migration\"}},\"projectHistoryId\":123,\"docLines\":\"\\\\documentclass{article}\\n\\\\usepackage[utf8]{inputenc}\\n\\n\\\\title{My Test Project}\\n\\\\author{alf.eaton+dev }\\n\\\\date{7 2021}\\n\\n\\\\usepackage{natbib}\\n\\\\usepackage{graphicx}\\n\\n\\\\begin{document}\\n\\n\\\\maketitle\\n\\n\\\\section{Introduction}\\nThere is a theory which states that if ever anyone discovers exactly what the Universe is for and why it is here, it will instantly disappear and be replaced by something even more bizarre and inexplicable.\\nThere is another theory which states that this has already happened.\\n\\n\\\\begin{figure}[h!]\\n\\\\centering\\n\\\\includegraphics[scale=1.7]{universe}\\n\\\\caption{The Universe}\\n\\\\label{fig:universe}\\n\\\\end{figure}\\n\\n\\\\section{Conclusion}\\n``I always thought something was fundamentally wrong with the universe'' \\\\citep{adams1995hitchhiker}\\n\\n\\\\bibliographystyle{plain}\\n\\\\bibliography{references}\\n\\\\end{document}\\n\"}"
],
[
"ProjectHistory:Ops:{611bd20c5d76a3c1bd0c7c13}",
"{\"doc\":\"611bd20c5d76a3c1bd0c7c17\",\"pathname\":\"/references.bib\",\"meta\":{\"user_id\":null,\"ts\":\"2021-08-17T15:13:16.000Z\",\"origin\":{\"kind\":\"history-migration\"}},\"projectHistoryId\":123,\"docLines\":\"this is the contents of references.bib\"}"
],
[
"ProjectHistory:Ops:{611bd20c5d76a3c1bd0c7c13}",
"{\"file\":\"611bd20c5d76a3c1bd0c7c19\",\"pathname\":\"/universe.jpg\",\"meta\":{\"user_id\":null,\"ts\":\"2021-08-17T15:13:16.000Z\",\"origin\":{\"kind\":\"history-migration\"}},\"projectHistoryId\":123,\"url\":\"http://filestore.test/611bd20c5d76a3c1bd0c7c13/611bd20c5d76a3c1bd0c7c19\"}"
],
[
"ProjectHistory:Ops:{611bd20c5d76a3c1bd0c7c13}",
"{\"doc\":\"611bd20c5d76a3c1bd0c7c15\",\"op\":[{\"p\":487,\"i\":\"\\n\\nAdding some text here.\"}],\"v\":1,\"lastV\":0,\"meta\":{\"user_id\":\"611572e24bff88527f61dccd\",\"ts\":1629213228148,\"pathname\":\"/main.tex\",\"doc_length\":805,\"origin\":{\"kind\":\"history-migration\"}},\"projectHistoryId\":123}"
],
[
"ProjectHistory:Ops:{611bd20c5d76a3c1bd0c7c13}",
"{\"doc\":\"611bd20c5d76a3c1bd0c7c15\",\"op\":[{\"p\":678,\"d\":\" something\"}],\"v\":2,\"lastV\":1,\"meta\":{\"user_id\":\"611572e24bff88527f61dccd\",\"ts\":1629213235181,\"pathname\":\"/main.tex\",\"doc_length\":829,\"origin\":{\"kind\":\"history-migration\"}},\"projectHistoryId\":123}"
],
[
"ProjectHistory:Ops:{611bd20c5d76a3c1bd0c7c13}",
"{\"doc\":\"611bd20c5d76a3c1bd0c7c15\",\"op\":[{\"d\":\" \",\"p\":722},{\"i\":\"\\n\",\"p\":722}],\"v\":3,\"lastV\":2,\"meta\":{\"user_id\":\"611572e24bff88527f61dccd\",\"ts\":1629213239472,\"pathname\":\"/main.tex\",\"doc_length\":819,\"origin\":{\"kind\":\"history-migration\"}},\"projectHistoryId\":123}"
],
[
"ProjectHistory:Ops:{611bd20c5d76a3c1bd0c7c13}",
"{\"doc\":\"611bd20c5d76a3c1bd0c7c15\",\"op\":[{\"p\":750,\"i\":\"\\n\\nAdding some text after deleting some text.\"}],\"v\":7,\"lastV\":6,\"meta\":{\"user_id\":\"611572e24bff88527f61dccd\",\"ts\":1629213241498,\"pathname\":\"/main.tex\",\"doc_length\":819,\"origin\":{\"kind\":\"history-migration\"}},\"projectHistoryId\":123}"
],
[
"ProjectHistory:Ops:{611bd20c5d76a3c1bd0c7c13}",
"{\"doc\":\"611bd24ee64281c13303d6b9\",\"pathname\":\"/a folder/a subfolder/a renamed file in a subfolder.tex\",\"meta\":{\"user_id\":null,\"ts\":\"2021-08-17T15:14:22.000Z\",\"origin\":{\"kind\":\"history-migration\"}},\"projectHistoryId\":123,\"docLines\":\"\"}"
],
[
"ProjectHistory:Ops:{611bd20c5d76a3c1bd0c7c13}",
"{\"doc\":\"611bd24ee64281c13303d6b9\",\"op\":[{\"p\":0,\"i\":\"Adding some content to the file in the subfolder.\"}],\"v\":2,\"lastV\":1,\"meta\":{\"user_id\":\"611572e24bff88527f61dccd\",\"ts\":1629213266076,\"pathname\":\"/a folder/a subfolder/a renamed file in a subfolder.tex\",\"doc_length\":0,\"origin\":{\"kind\":\"history-migration\"}},\"projectHistoryId\":123}"
],
[
"ProjectHistory:Ops:{611bd20c5d76a3c1bd0c7c13}",
"{\"file\":\"611bd2bce64281c13303d6bb\",\"pathname\":\"/images/overleaf-white.svg\",\"meta\":{\"user_id\":null,\"ts\":\"2021-08-17T15:16:12.000Z\",\"origin\":{\"kind\":\"history-migration\"}},\"projectHistoryId\":123,\"url\":\"http://filestore.test/611bd20c5d76a3c1bd0c7c13/611bd2bce64281c13303d6bb\"}"
]
]


@@ -243,7 +243,6 @@
"scroll-into-view-if-needed": "^2.2.25",
"tsscmp": "^1.0.6",
"underscore": "^1.13.1",
"unzipper": "^0.10.11",
"utf-8-validate": "^5.0.2",
"uuid": "^3.0.1",
"valid-data-url": "^2.0.0",


@@ -147,7 +147,6 @@ async function deleteDocFromRedis(projectId, docId) {
`UnflushedTime:{${docId}}`,
`Pathname:{${docId}}`,
`ProjectHistoryId:{${docId}}`,
`ProjectHistoryType:{${docId}}`,
`PendingUpdates:{${docId}}`,
`lastUpdatedAt:{${docId}}`,
`lastUpdatedBy:{${docId}}`


@@ -1,60 +0,0 @@
const { waitForDb, db } = require('../../app/src/infrastructure/mongodb')
async function main() {
await checkAllProjectsAreMigrated()
await setAllowDowngradeToFalse()
await deleteHistoryCollections()
console.log('Legacy history data cleaned up successfully')
process.exit(0)
}
async function checkAllProjectsAreMigrated() {
console.log('checking all projects are migrated to Full Project History')
const count = await db.projects.countDocuments({
'overleaf.history.display': { $ne: true },
})
if (count === 0) {
console.log('All projects are migrated to Full Project History')
} else {
console.error(
`There are ${count} projects that are not migrated to Full Project History.` +
` Please complete the migration before running this script again.`
)
process.exit(1)
}
}
async function setAllowDowngradeToFalse() {
console.log('unsetting `allowDowngrade` flag in all projects')
await db.projects.updateMany(
{
'overleaf.history.id': { $exists: true },
'overleaf.history.allowDowngrade': true,
},
{ $unset: { 'overleaf.history.allowDowngrade': 1 } }
)
console.log('unsetting `allowDowngrade` flag in all projects - Done')
}
async function deleteHistoryCollections() {
console.log('removing `docHistory` data')
await db.docHistory.deleteMany({})
console.log('removing `docHistory` data - Done')
console.log('removing `docHistoryIndex` data')
await db.docHistoryIndex.deleteMany({})
console.log('removing `docHistoryIndex` data - Done')
console.log('removing `projectHistoryMetaData` data')
await db.projectHistoryMetaData.deleteMany({})
console.log('removing `projectHistoryMetaData` data - Done')
}
waitForDb()
.then(main)
.catch(err => {
console.error(err)
process.exit(1)
})
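
The deletions above are irreversible, which is why the migration check runs first. A hypothetical invocation (the script's actual path is not shown in this diff):

node clean_up_legacy_history.js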


@@ -1,84 +0,0 @@
const VERBOSE_LOGGING = process.env.VERBOSE_LOGGING === 'true'
const VERBOSE_PROJECT_NAMES = process.env.VERBOSE_PROJECT_NAMES === 'true'
const WRITE_CONCURRENCY = parseInt(process.env.WRITE_CONCURRENCY, 10) || 50
const BATCH_SIZE = parseInt(process.env.BATCH_SIZE, 10) || 500
const USE_QUERY_HINT = process.env.USE_QUERY_HINT !== 'false'
// persist fallback in order to keep batchedUpdate in sync
process.env.BATCH_SIZE = BATCH_SIZE
// raise mongo timeout to 1hr if otherwise unspecified
process.env.MONGO_SOCKET_TIMEOUT =
parseInt(process.env.MONGO_SOCKET_TIMEOUT, 10) || 3600000
const { promiseMapWithLimit } = require('../../app/src/util/promises')
const { batchedUpdate } = require('../helpers/batchedUpdate')
const {
determineProjectHistoryType,
countProjects,
} = require('../../modules/history-migration/app/src/HistoryUpgradeHelper')
const COUNT = {
V2: 0,
V1WithoutConversion: 0,
V1WithConversion: 0,
NoneWithoutConversion: 0,
NoneWithConversion: 0,
NoneWithTemporaryHistory: 0,
UpgradeFailed: 0,
ConversionFailed: 0,
MigratedProjects: 0,
TotalProjects: 0,
}
async function processBatch(projects) {
await promiseMapWithLimit(WRITE_CONCURRENCY, projects, processProject)
console.log(COUNT)
}
async function processProject(project) {
const historyType = await determineProjectHistoryType(project)
if (VERBOSE_LOGGING) {
console.log(
`project ${
project[VERBOSE_PROJECT_NAMES ? 'name' : '_id']
} is type ${historyType}`
)
}
COUNT[historyType] += 1
}
async function main() {
const projection = {
_id: 1,
overleaf: 1,
}
const options = {}
if (USE_QUERY_HINT) {
options.hint = { _id: 1 }
}
if (VERBOSE_PROJECT_NAMES) {
projection.name = 1
}
await batchedUpdate(
'projects',
{ 'overleaf.history.display': { $ne: true } },
processBatch,
projection,
options
)
COUNT.MigratedProjects = await countProjects({
'overleaf.history.display': true,
})
COUNT.TotalProjects = await countProjects()
console.log('Final')
console.log(COUNT)
}
main()
.then(() => {
console.error('Done.')
process.exit(0)
})
.catch(error => {
console.error({ error })
process.exit(1)
})
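
A hypothetical invocation (the script's actual name is not shown in this diff), driven by the environment variables read at the top of the file:

VERBOSE_LOGGING=true VERBOSE_PROJECT_NAMES=true node count_history_types.js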


@@ -1,328 +0,0 @@
// Script to debug the track-changes history of the documents in a project.
// Usage:
// node debug_history.js --project-id=<project_id>
// node debug_history.js --all # to check all unmigrated projects
//
// Example output:
// $ node scripts/debug_history.js --project-id=63ff3adc06177192f18a6b38
// Using default settings from /overleaf/services/track-changes/config/settings.defaults.js
// Set UV_THREADPOOL_SIZE=16
// project 63ff3adc06177192f18a6b38 docId 63ff3adc06177192f18a6b3d OK
// project 63ff3adc06177192f18a6b38 docId 63ff3b08de41e3b0989c1720 FAILED
// {"action":"rewinding","version":7,"meta":{"start_ts":1677671465447,"end_ts":1677671465447,"user_id":"632ae106f9a6dd002505765b"},
// "ops":[{"action":"rewindOp","contentLength":24,"op":{"p":32,"d":6},"errors":[{"message":"invalid offset rewinding delete,
// truncating to content length","op":{"p":32,"d":6},"contentLength":24}]}],"status":"failed"}
/* eslint-disable camelcase */
const TrackChangesMongoDb = require('../../../track-changes/app/js/mongodb')
const { waitForDb } = require('../../app/src/infrastructure/mongodb')
const {
findProjects,
} = require('../../modules/history-migration/app/src/HistoryUpgradeHelper')
const PackManager = require('../../../track-changes/app/js/PackManager')
const {
packsAreDuplicated,
} = require('../../../track-changes/app/js/util/PackUtils')
const {
ConsistencyError,
} = require('../../../track-changes/app/js/DiffGenerator')
const DocumentUpdaterManager = require('../../../track-changes/app/js/DocumentUpdaterManager')
const DocstoreManager = require('../../../track-changes/app/js/DocstoreManager')
const Errors = require('../../../track-changes/app/js/Errors')
const minimist = require('minimist')
const util = require('util')
const logger = require('@overleaf/logger')
logger.initialize('debug-history')
// disable logging to stdout from internal modules
logger.logger.streams = []
const options = {
boolean: ['all', 'verbose', 'raw', 'help'],
string: ['project-id'],
alias: {
'project-id': 'p',
verbose: 'v',
raw: 'r',
help: 'h',
all: 'a',
},
default: {},
}
const argv = minimist(process.argv.slice(2), options)
function usage() {
console.log(
`Usage: ${process.argv[1]} [--project-id=<project_id> | --all] [--verbose] [--raw]`
)
process.exit(1)
}
// look in docstore or docupdater for the latest version of the document
async function getLatestContent(projectId, docId, lastUpdateVersion) {
const [docstoreContent, docstoreVersion] =
await DocstoreManager.promises.peekDocument(projectId, docId)
// if docstore is out of date, check for a newer version in docupdater
// and return that instead
if (docstoreVersion <= lastUpdateVersion) {
const [docupdaterContent, docupdaterVersion] =
await DocumentUpdaterManager.promises.peekDocument(projectId, docId)
if (docupdaterVersion > docstoreVersion) {
return [docupdaterContent, docupdaterVersion]
}
}
return [docstoreContent, docstoreVersion]
}
// This class is used to write a record of all the operations that have been applied to a document
class LogAppliedOps {
constructor() {
this.result = []
}
// used to log the initial state of the document
start(action, latestContent, version) {
this.result.push({
action,
latestContentLength: latestContent.length,
latestContent: argv.raw ? latestContent : undefined,
version,
})
}
// used to log a new document update
update(action, update) {
this._finalize()
this.opResults = []
this.currentResult = {
action,
version: update.v,
meta: update.meta,
ops: this.opResults,
}
this.result.push(this.currentResult)
}
// used to log an operation that has been applied to the document
op(action, content, op) {
this.currentOp = {
action,
contentLength: content.length,
content: argv.raw ? content : undefined,
op: this._filterOp(op),
}
this.opResults.push(this.currentOp)
}
// used to log an error that occurred while applying an operation
opError(message, content, op, err) {
this.currentOp.errors = this.currentOp.errors || []
this.currentOp.errors.push({
message,
op: this._filterOp(op),
contentLength: content.length,
content: argv.raw ? content : undefined,
err,
})
}
// sets the status of the current update to 'success' or 'failed'
// depending on whether any errors were logged
_finalize() {
if (!this.currentResult) {
return
}
const errors = this.opResults.some(op => op.errors)
this.currentResult.status = errors ? 'failed' : 'success'
}
// returns the final result of the log
end() {
this._finalize()
return this.result
}
// Returns a new object with the same keys as op, but with the i and d
// fields replaced by their lengths when present. This is used to filter
// out the contents of the i and d fields of an operation, to redact
// document content.
_filterOp(op) {
const newOp = {}
for (const key of Object.keys(op)) {
if (!argv.raw && (key === 'i' || key === 'd')) {
newOp[key] = op[key].length
} else {
newOp[key] = op[key]
}
}
return newOp
}
}
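// Illustrative sketch: the intended call sequence for LogAppliedOps, with
// hypothetical content and ops. start() records the initial state, each
// update()/op() pair records one applied operation, and end() returns the
// accumulated record. Without --raw, _filterOp redacts `i`/`d` to lengths.
function exampleLogUsage() {
  const log = new LogAppliedOps()
  log.start('load-doc', 'hello world', 42)
  log.update('rewinding', { v: 41, meta: {}, op: [{ i: ' world', p: 5 }] })
  log.op('rewindOp', 'hello world', { i: ' world', p: 5 })
  // the logged op is filtered to { i: 6, p: 5 } unless --raw was passed
  return log.end() // last entry gains status: 'success' (no errors logged)
}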
// This is the rewindOp function from track-changes, modified to log
// the operation and any errors.
function rewindOp(content, op, log) {
if (op.i != null) {
// ShareJS will accept an op where p > content.length when applied,
// and it applies as though p == content.length. However, the op is
// passed to us with the original p > content.length. Detect if that
// is the case with this op, and shift p back appropriately to match
// ShareJS if so.
let { p } = op
const maxP = content.length - op.i.length
if (p > maxP) {
log.opError(
'invalid offset rewinding insert, truncating to content length',
content,
op
)
p = maxP
}
const textToBeRemoved = content.slice(p, p + op.i.length)
if (op.i !== textToBeRemoved) {
log.opError(
'inserted content does not match text to be removed',
content,
op
)
throw new ConsistencyError(
`Inserted content, '${op.i}', does not match text to be removed, '${textToBeRemoved}'`
)
}
return content.slice(0, p) + content.slice(p + op.i.length)
} else if (op.d != null) {
if (op.p > content.length) {
log.opError(
'invalid offset rewinding delete, truncating to content length',
content,
op
)
}
return content.slice(0, op.p) + op.d + content.slice(op.p)
} else {
return content
}
}
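// Illustrative sketch: rewinding undoes an update, so an insert op is
// removed from the content and a delete op is re-inserted. exampleRewind
// is a hypothetical helper with a no-op logger, just to show the semantics.
function exampleRewind() {
  const noopLog = { opError: () => {} }
  // undo an insert: 'hello world' minus { i: ' world', p: 5 } => 'hello'
  const undidInsert = rewindOp('hello world', { i: ' world', p: 5 }, noopLog)
  // undo a delete: 'hello' plus { d: ' world', p: 5 } => 'hello world'
  const undidDelete = rewindOp('hello', { d: ' world', p: 5 }, noopLog)
  return [undidInsert, undidDelete] // ['hello', 'hello world']
}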
// This is the rewindDoc function from track-changes, modified to log all
// operations that are applied to the document.
async function rewindDoc(projectId, docId) {
const log = new LogAppliedOps()
// Prepare to rewind content
const docIterator = await PackManager.promises.makeDocIterator(docId)
const getUpdate = util.promisify(docIterator.next).bind(docIterator)
const lastUpdate = await getUpdate()
if (!lastUpdate) {
return null
}
const lastUpdateVersion = lastUpdate.v
let latestContent
let version
try {
;[latestContent, version] = await getLatestContent(
projectId,
docId,
lastUpdateVersion
)
} catch (err) {
if (err instanceof Errors.NotFoundError) {
// Doc not found in docstore. We can't build its history
return null
} else {
throw err
}
}
log.start('load-doc', latestContent, version)
let content = latestContent
let update = lastUpdate
let previousUpdate = null
while (update) {
if (packsAreDuplicated(update, previousUpdate)) {
previousUpdate = update
update = await getUpdate()
continue
}
log.update('rewinding', update)
for (let i = update.op.length - 1; i >= 0; i--) {
const op = update.op[i]
if (op.broken === true) {
log.op('skipped', content, op)
continue
}
try {
log.op('rewindOp', content, op)
content = rewindOp(content, op, log)
} catch (e) {
if (e instanceof ConsistencyError && i === update.op.length - 1) {
// catch known case where the last op in an array has been
// merged into a later op
op.broken = true
log.opError('marking broken', content, op)
} else {
log.opError('failed', content, op, e)
}
}
}
previousUpdate = update
update = await getUpdate()
}
return log.end()
}
async function main() {
// Get a list of projects to migrate
let projectIds = []
if (argv.all) {
const projectsToMigrate = await findProjects(
{ 'overleaf.history.display': { $ne: true } },
{ _id: 1, overleaf: 1 }
)
projectIds = projectsToMigrate.map(p => p._id.toString())
console.log('Unmigrated projects', projectIds.length)
} else if (argv['project-id']) {
projectIds = [argv['project-id']]
} else {
usage()
}
let errorCount = 0
for (const projectId of projectIds) {
const docIds = await PackManager.promises.findAllDocsInProject(projectId)
if (!docIds.length) {
console.log('No docs found for project', projectId)
}
let projectErrorCount = 0
for (const docId of docIds) {
const result = await rewindDoc(projectId, docId)
if (!result) {
// rewindDoc returned null: no history for this doc, or doc not found
continue
}
const failed = result.filter(r => r.status === 'failed')
errorCount += failed.length
if (argv.verbose) {
console.log(JSON.stringify({ projectId, docId, result }, null, 2))
} else if (failed.length > 0) {
console.log('project', projectId, 'docId', docId, 'FAILED')
for (const f of failed) {
console.log(JSON.stringify(f))
}
projectErrorCount += failed.length
}
}
if (projectErrorCount === 0 && !argv.verbose) {
console.log('project', projectId, 'docs', docIds.length, 'OK')
}
}
process.exit(errorCount > 0 ? 1 : 0)
}
waitForDb()
.then(TrackChangesMongoDb.waitForDb)
.then(main)
.catch(err => {
console.error(err)
process.exit(1)
})

View file

@ -1,81 +0,0 @@
const VERBOSE_LOGGING = process.env.VERBOSE_LOGGING === 'true'
const DRY_RUN = process.env.DRY_RUN !== 'false'
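// raise mongo timeout to 1hr if otherwise unspecified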
process.env.MONGO_SOCKET_TIMEOUT =
parseInt(process.env.MONGO_SOCKET_TIMEOUT, 10) || 3600000
const PROJECT_ID = process.env.PROJECT_ID
const { ObjectId } = require('mongodb')
const {
db,
waitForDb,
READ_PREFERENCE_SECONDARY,
} = require('../../app/src/infrastructure/mongodb')
const ProjectHistoryHandler = require('../../app/src/Features/Project/ProjectHistoryHandler')
console.log({
DRY_RUN,
VERBOSE_LOGGING,
PROJECT_ID,
})
let INTERRUPT = false
async function processProject(project) {
if (INTERRUPT) {
return
}
if (!(await shouldPreserveHistory(project))) {
console.log(
`project ${project._id} does not have preserveHistory:true, skipping`
)
return
}
if (!DRY_RUN) {
await ProjectHistoryHandler.promises.downgradeHistory(project._id)
}
if (VERBOSE_LOGGING) {
console.log(`project ${project._id} downgraded to track-changes`)
}
}
async function shouldPreserveHistory(project) {
return await db.projectHistoryMetaData.findOne(
{
$and: [
{ project_id: { $eq: project._id } },
{ preserveHistory: { $eq: true } },
],
},
{ readPreference: READ_PREFERENCE_SECONDARY }
)
}
async function main() {
if (PROJECT_ID) {
await waitForDb()
const project = await db.projects.findOne({ _id: ObjectId(PROJECT_ID) })
await processProject(project)
} else {
console.log('The PROJECT_ID environment variable is required.')
process.exit(1)
}
}
// Downgrading history is not atomic; if we quit mid-operation
// then history could get into a broken state.
// Instead, skip any unprocessed projects and exit() at the end of the batch.
process.on('SIGINT', function () {
console.log('Caught SIGINT, waiting for in-process downgrades to complete')
INTERRUPT = true
})
main()
.then(() => {
console.error('Done.')
process.exit(0)
})
.catch(error => {
console.error({ error })
process.exit(1)
})
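// Illustrative sketch: the cooperative-interrupt pattern used above, in
// isolation. SIGINT only flips a flag; the batch loop checks the flag
// between items, so the item currently in flight always finishes cleanly.
async function exampleInterruptibleBatch(items, processItem) {
  let interrupted = false
  process.on('SIGINT', () => {
    interrupted = true
  })
  for (const item of items) {
    if (interrupted) {
      break // skip remaining items and let the caller exit() after the batch
    }
    await processItem(item)
  }
  return interrupted
}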

View file

@ -1,287 +0,0 @@
// raise mongo timeout to 1hr if otherwise unspecified
process.env.MONGO_SOCKET_TIMEOUT =
parseInt(process.env.MONGO_SOCKET_TIMEOUT, 10) || 3600000
const fs = require('fs')
if (fs.existsSync('/etc/container_environment.json')) {
try {
const envData = JSON.parse(
fs.readFileSync('/etc/container_environment.json', 'utf8')
)
for (const [key, value] of Object.entries(envData)) {
process.env[key] = value
}
} catch (err) {
console.error(
'cannot read /etc/container_environment.json; the script needs to be run as root',
err
)
process.exit(1)
}
}
const VERSION = '0.9.0-cli'
const {
countProjects,
countDocHistory,
upgradeProject,
findProjects,
} = require('../../modules/history-migration/app/src/HistoryUpgradeHelper')
const { waitForDb } = require('../../app/src/infrastructure/mongodb')
const minimist = require('minimist')
const util = require('util')
const pLimit = require('p-limit')
const logger = require('@overleaf/logger')
logger.initialize('history-migration')
// disable logging to stdout from internal modules
logger.logger.streams = []
const DEFAULT_OUTPUT_FILE = `history-migration-${new Date()
.toISOString()
.replace(/[:.]/g, '_')}.log`
const argv = minimist(process.argv.slice(2), {
boolean: [
'verbose',
'fix-invalid-characters',
'convert-large-docs-to-file',
'import-broken-history-as-zip',
'force-upgrade-on-failure',
'dry-run',
'use-query-hint',
'retry-failed',
'archive-on-failure',
'force-clean',
],
string: ['output', 'user-id'],
alias: {
verbose: 'v',
output: 'o',
'dry-run': 'd',
concurrency: 'j',
'use-query-hint': 'q',
'retry-failed': 'r',
'archive-on-failure': 'a',
},
default: {
output: DEFAULT_OUTPUT_FILE,
concurrency: 1,
'batch-size': 100,
'max-upgrades-to-attempt': false,
'max-failures': 50,
},
})
let INTERRUPT = false
async function findProjectsToMigrate() {
console.log('History Migration Statistics')
// Show statistics about the number of projects to migrate
const migratedProjects = await countProjects({
'overleaf.history.display': true,
})
const totalProjects = await countProjects()
console.log('Migrated Projects : ', migratedProjects)
console.log('Total Projects : ', totalProjects)
console.log('Remaining Projects : ', totalProjects - migratedProjects)
if (migratedProjects === totalProjects) {
console.log('All projects have been migrated')
process.exit(0)
}
// Get a list of projects to migrate
const projectsToMigrate = await findProjects(
{ 'overleaf.history.display': { $ne: true } },
{ _id: 1, overleaf: 1 }
)
// Show statistics for docHistory collection
const docHistoryWithoutProjectId = await countDocHistory({
project_id: { $exists: false },
})
if (docHistoryWithoutProjectId > 0) {
console.log(
`WARNING: docHistory collection contains ${docHistoryWithoutProjectId} records without project_id`
)
process.exit(1)
}
return projectsToMigrate
}
function createProgressBar() {
const startTime = new Date()
return function progressBar(current, total, msg) {
const barLength = 20
const percentage = Math.floor((current / total) * 100)
const bar = '='.repeat(percentage / (100 / barLength))
const empty = ' '.repeat(barLength - bar.length)
const elapsed = new Date() - startTime
// convert elapsed time to hours, minutes, seconds
const ss = Math.floor((elapsed / 1000) % 60)
.toString()
.padStart(2, '0')
const mm = Math.floor((elapsed / (1000 * 60)) % 60)
.toString()
.padStart(2, '0')
const hh = Math.floor(elapsed / (1000 * 60 * 60))
.toString()
.padStart(2, '0')
process.stdout.write(
`\r${hh}:${mm}:${ss} |${bar}${empty}| ${percentage}% (${current}/${total}) ${msg}`
)
}
}
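// Illustrative sketch: driving the progress bar above from a plain loop.
// The real script repaints it from a 500ms timer instead, so the bar keeps
// updating even while a slow migration job is in flight.
async function exampleProgress() {
  const progressBar = createProgressBar()
  const total = 200
  for (let current = 1; current <= total; current++) {
    await new Promise(resolve => setTimeout(resolve, 10)) // simulated work
    progressBar(current, total, 'demo')
  }
  process.stdout.write('\n')
}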
async function migrateProjects(projectsToMigrate) {
let projectsMigrated = 0
let projectsFailed = 0
console.log('Starting migration...')
if (argv.concurrency > 1) {
console.log(`Using ${argv.concurrency} concurrent migrations`)
}
// send log output for each migration to a file
const output = fs.createWriteStream(argv.output, { flags: 'a' })
console.log(`Writing log output to ${process.cwd()}/${argv.output}`)
const logger = new console.Console({ stdout: output })
function logJson(obj) {
logger.log(JSON.stringify(obj))
}
// limit the number of concurrent migrations
const limit = pLimit(argv.concurrency)
const jobs = []
// throttle progress reporting to 2x per second
const progressBar = createProgressBar()
let i = 0
const N = projectsToMigrate.length
const progressBarTimer = setInterval(() => {
if (INTERRUPT) {
return // don't update the progress bar if we're shutting down
}
progressBar(
i,
N,
`Migrated: ${projectsMigrated}, Failed: ${projectsFailed}`
)
}, 500)
const options = {
migrationOptions: {
archiveOnFailure: argv['import-broken-history-as-zip'],
fixInvalidCharacters: argv['fix-invalid-characters'],
forceNewHistoryOnFailure: argv['force-upgrade-on-failure'],
},
convertLargeDocsToFile: argv['convert-large-docs-to-file'],
userId: argv['user-id'],
reason: VERSION,
forceClean: argv['force-clean'],
}
async function _migrateProject(project) {
if (INTERRUPT) {
return // don't start any new jobs if we're shutting down
}
const startTime = new Date()
try {
const result = await upgradeProject(project._id, options)
i++
if (INTERRUPT && limit.activeCount > 1) {
// an interrupt was requested while this job was running
// report that we're waiting for the remaining jobs to finish
console.log(
`Waiting for remaining ${
limit.activeCount - 1
} active jobs to finish\r`
)
}
if (result.error) {
// failed to migrate this project
logJson({
project_id: project._id,
result,
stack: result.error.stack,
startTime,
endTime: new Date(),
})
projectsFailed++
} else {
// successfully migrated this project
logJson({
project_id: project._id,
result,
startTime,
endTime: new Date(),
})
projectsMigrated++
}
} catch (err) {
// unexpected error from the migration
projectsFailed++
logJson({
project_id: project._id,
exception: util.inspect(err),
startTime,
endTime: new Date(),
})
}
}
for (const project of projectsToMigrate) {
jobs.push(limit(_migrateProject, project))
}
// wait for all the queued jobs to complete
await Promise.all(jobs)
clearInterval(progressBarTimer)
progressBar(i, N, `Migrated: ${projectsMigrated}, Failed: ${projectsFailed}`)
process.stdout.write('\n')
return { projectsMigrated, projectsFailed }
}
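// Illustrative sketch: the p-limit pattern used above, reduced to its core.
// Every job is queued up front, but at most `concurrency` of them run at
// once; Promise.all resolves when the whole queue has drained.
async function examplePLimit(projectIds, migrateOne, concurrency = 4) {
  const limit = pLimit(concurrency)
  const jobs = projectIds.map(id => limit(() => migrateOne(id)))
  return await Promise.all(jobs)
}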
async function main() {
const projectsToMigrate = await findProjectsToMigrate()
if (argv['dry-run']) {
console.log('Dry run, exiting')
process.exit(0)
}
const { projectsMigrated, projectsFailed } = await migrateProjects(
projectsToMigrate
)
console.log('Projects migrated: ', projectsMigrated)
console.log('Projects failed: ', projectsFailed)
if (projectsFailed > 0) {
console.log('------------------------------------------------------')
console.log(`Log output written to ${process.cwd()}/${argv.output}`)
console.log(
'Please check the log for errors. Attach the content of the file when contacting support.'
)
console.log('------------------------------------------------------')
}
if (INTERRUPT) {
console.log('Migration interrupted, please run again to continue.')
} else if (projectsFailed === 0) {
console.log(`All projects migrated successfully.`)
}
console.log('Done.')
process.exit(projectsFailed > 0 ? 1 : 0)
}
// Upgrading history is not atomic; if we quit mid-initialisation
// then history could get into a broken state.
// Instead, skip any unprocessed projects and exit() at the end of the batch.
process.on('SIGINT', function () {
console.log(
'\nCaught SIGINT, waiting for all in-progress upgrades to complete'
)
INTERRUPT = true
})
waitForDb()
.then(main)
.catch(err => {
console.error(err)
process.exit(1)
})

View file

@ -1,111 +0,0 @@
const DRY_RUN = process.env.DRY_RUN !== 'false'
const PROJECT_ID = process.env.PROJECT_ID
const VERBOSE_LOGGING = process.env.VERBOSE_LOGGING === 'true'
const VERBOSE_PROJECT_NAMES = process.env.VERBOSE_PROJECT_NAMES === 'true'
const WRITE_CONCURRENCY = parseInt(process.env.WRITE_CONCURRENCY, 10) || 50
const BATCH_SIZE = parseInt(process.env.BATCH_SIZE, 10) || 500
// persist the fallback in order to keep batchedUpdate in sync
process.env.BATCH_SIZE = BATCH_SIZE
process.env.VERBOSE_LOGGING = VERBOSE_LOGGING
const { ObjectId } = require('mongodb')
const { db, waitForDb } = require('../../app/src/infrastructure/mongodb')
const { batchedUpdate } = require('../helpers/batchedUpdate')
const { promiseMapWithLimit } = require('../../app/src/util/promises')
const count = {
projects: 0,
projectsWithIncorrectRevDocs: 0,
totalIncorrectRevDocs: 0,
totalNanRevDocs: 0,
totalNullRevDocs: 0,
totalUndefinedRevDocs: 0,
convertedRevs: 0,
}
async function main() {
const projection = {
_id: 1,
}
if (VERBOSE_PROJECT_NAMES) {
projection.name = 1
}
const options = {}
if (PROJECT_ID) {
const project = await db.projects.findOne({ _id: ObjectId(PROJECT_ID) })
await processProject(project)
} else {
await batchedUpdate(
'projects',
{ 'overleaf.history.display': { $ne: true } },
processBatch,
projection,
options
)
}
console.log('Final')
}
async function processBatch(projects) {
await promiseMapWithLimit(WRITE_CONCURRENCY, projects, processProject)
}
async function processProject(project) {
count.projects++
const docs = await db.docs
.find(
{
project_id: project._id,
$or: [{ rev: null }, { rev: NaN }],
},
{ _id: 1, rev: 1 }
)
.toArray()
if (!docs || docs.length <= 0) {
return
}
if (VERBOSE_LOGGING) {
console.log(
`Found ${docs.length} incorrect doc.rev for project ${
project[VERBOSE_PROJECT_NAMES ? 'name' : '_id']
}`
)
}
count.projectsWithIncorrectRevDocs++
count.totalIncorrectRevDocs += docs.length
for (const doc of docs) {
if (doc.rev === undefined) {
count.totalUndefinedRevDocs++
} else if (doc.rev === null) {
count.totalNullRevDocs++
} else if (isNaN(doc.rev)) {
count.totalNanRevDocs++
} else {
console.error(`unknown 'rev' value: ${doc.rev}`)
}
if (!DRY_RUN) {
console.log(`fixing rev of doc ${doc._id} from '${doc.rev}' to 0`)
await db.docs.updateOne({ _id: doc._id }, { $set: { rev: 0 } })
count.convertedRevs++
}
}
}
waitForDb()
.then(main)
.then(() => {
console.log(count)
process.exit(0)
})
.catch(err => {
console.log('Something went wrong!', err)
process.exit(1)
})

View file

@ -1,133 +0,0 @@
const { promisify } = require('util')
const { ObjectId } = require('mongodb')
const {
db,
waitForDb,
READ_PREFERENCE_SECONDARY,
} = require('../../app/src/infrastructure/mongodb')
const sleep = promisify(setTimeout)
const _ = require('lodash')
const NOW_IN_S = Date.now() / 1000
const ONE_WEEK_IN_S = 60 * 60 * 24 * 7
const TEN_SECONDS = 10 * 1000
function getSecondsFromObjectId(id) {
return id.getTimestamp().getTime() / 1000
}
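// Illustrative sketch: ObjectIds embed their creation time in the first
// 4 bytes, which is what lets the loop below page through projects in
// fixed time windows. exampleTimeWindow is a hypothetical helper.
function exampleTimeWindow(startId, windowInS) {
  const startS = getSecondsFromObjectId(startId)
  // createFromTime() zero-fills everything after the timestamp, so endId
  // sorts at the very start of its second and works as an exclusive bound
  const endId = ObjectId.createFromTime(startS + windowInS)
  return { query: { _id: { $gte: startId, $lt: endId } }, endId }
}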
async function main(options) {
if (!options) {
options = {}
}
_.defaults(options, {
projectId: process.env.PROJECT_ID,
dryRun: process.env.DRY_RUN !== 'false',
verboseLogging: process.env.VERBOSE_LOGGING === 'true',
firstProjectId: process.env.FIRST_PROJECT_ID
? ObjectId(process.env.FIRST_PROJECT_ID)
: ObjectId('4b3d3b3d0000000000000000'), // timestamped to 2010-01-01T00:01:01.000Z
incrementByS: parseInt(process.env.INCREMENT_BY_S, 10) || ONE_WEEK_IN_S,
batchSize: parseInt(process.env.BATCH_SIZE, 10) || 1000,
stopAtS: parseInt(process.env.STOP_AT_S, 10) || NOW_IN_S,
letUserDoubleCheckInputsFor:
parseInt(process.env.LET_USER_DOUBLE_CHECK_INPUTS_FOR, 10) || TEN_SECONDS,
})
if (options.projectId) {
await waitForDb()
const { modifiedCount } = await db.projects.updateOne(
{
_id: ObjectId(options.projectId),
'overleaf.history.allowDowngrade': true,
},
{ $unset: { 'overleaf.history.allowDowngrade': 1 } }
)
console.log(`modifiedCount: ${modifiedCount}`)
process.exit(0)
}
await letUserDoubleCheckInputs(options)
await waitForDb()
let startId = options.firstProjectId
let totalProcessed = 0
while (getSecondsFromObjectId(startId) <= options.stopAtS) {
let batchProcessed = 0
const end = getSecondsFromObjectId(startId) + options.incrementByS
let endId = ObjectId.createFromTime(end)
const query = {
_id: {
// include edge
$gte: startId,
// exclude edge
$lt: endId,
},
'overleaf.history.allowDowngrade': true,
}
const projects = await db.projects
.find(query, { readPreference: READ_PREFERENCE_SECONDARY })
.project({ _id: 1 })
.limit(options.batchSize)
.toArray()
if (projects.length) {
const projectIds = projects.map(project => project._id)
if (options.verboseLogging) {
console.log(
`Processing projects with ids: ${JSON.stringify(projectIds)}`
)
} else {
console.log(`Processing ${projects.length} projects`)
}
if (!options.dryRun) {
await db.projects.updateMany(
{ _id: { $in: projectIds } },
{ $unset: { 'overleaf.history.allowDowngrade': 1 } }
)
} else {
console.log(
`skipping update of ${projectIds.length} projects in dry-run mode`
)
}
totalProcessed += projectIds.length
batchProcessed += projectIds.length
if (projects.length === options.batchSize) {
endId = projects[projects.length - 1]._id
}
}
console.error(
`Processed ${batchProcessed} from ${startId} until ${endId} (${totalProcessed} processed in total)`
)
startId = endId
}
}
async function letUserDoubleCheckInputs(options) {
console.error('Options:', JSON.stringify(options, null, 2))
console.error(
'Waiting for you to double check inputs for',
options.letUserDoubleCheckInputsFor,
'ms'
)
await sleep(options.letUserDoubleCheckInputsFor)
}
module.exports = main
if (require.main === module) {
main()
.then(() => {
console.error('Done.')
process.exit(0)
})
.catch(error => {
console.error({ error })
process.exit(1)
})
}

View file

@ -1,208 +0,0 @@
const SCRIPT_VERSION = 4
const VERBOSE_LOGGING = process.env.VERBOSE_LOGGING === 'true'
const WRITE_CONCURRENCY = parseInt(process.env.WRITE_CONCURRENCY, 10) || 10
const BATCH_SIZE = parseInt(process.env.BATCH_SIZE, 10) || 100
const DRY_RUN = process.env.DRY_RUN !== 'false'
const USE_QUERY_HINT = process.env.USE_QUERY_HINT !== 'false'
const RETRY_FAILED = process.env.RETRY_FAILED === 'true'
const MAX_UPGRADES_TO_ATTEMPT =
parseInt(process.env.MAX_UPGRADES_TO_ATTEMPT, 10) || false
const MAX_FAILURES = parseInt(process.env.MAX_FAILURES, 10) || 50
const ARCHIVE_ON_FAILURE = process.env.ARCHIVE_ON_FAILURE === 'true'
const FIX_INVALID_CHARACTERS = process.env.FIX_INVALID_CHARACTERS === 'true'
const FORCE_NEW_HISTORY_ON_FAILURE =
process.env.FORCE_NEW_HISTORY_ON_FAILURE === 'true'
const IMPORT_ZIP_FILE_PATH = process.env.IMPORT_ZIP_FILE_PATH
const CUTOFF_DATE = process.env.CUTOFF_DATE
? new Date(process.env.CUTOFF_DATE)
: undefined
// persist the fallback in order to keep batchedUpdate in sync
process.env.BATCH_SIZE = BATCH_SIZE
// raise mongo timeout to 1hr if otherwise unspecified
process.env.MONGO_SOCKET_TIMEOUT =
parseInt(process.env.MONGO_SOCKET_TIMEOUT, 10) || 3600000
const PROJECT_ID = process.env.PROJECT_ID
// User id is required to move large documents to filestore
const USER_ID = process.env.USER_ID
const CONVERT_LARGE_DOCS_TO_FILE =
process.env.CONVERT_LARGE_DOCS_TO_FILE === 'true'
const { ObjectId } = require('mongodb')
const { db, waitForDb } = require('../../app/src/infrastructure/mongodb')
const { promiseMapWithLimit } = require('../../app/src/util/promises')
const { batchedUpdate } = require('../helpers/batchedUpdate')
const {
anyDocHistoryExists,
anyDocHistoryIndexExists,
doUpgradeForNoneWithConversion,
} = require('../../modules/history-migration/app/src/HistoryUpgradeHelper')
console.log({
DRY_RUN,
VERBOSE_LOGGING,
WRITE_CONCURRENCY,
BATCH_SIZE,
MAX_UPGRADES_TO_ATTEMPT,
MAX_FAILURES,
USE_QUERY_HINT,
RETRY_FAILED,
ARCHIVE_ON_FAILURE,
PROJECT_ID,
FIX_INVALID_CHARACTERS,
FORCE_NEW_HISTORY_ON_FAILURE,
CONVERT_LARGE_DOCS_TO_FILE,
USER_ID,
IMPORT_ZIP_FILE_PATH,
CUTOFF_DATE,
})
const RESULT = {
DRY_RUN,
attempted: 0,
projectsUpgraded: 0,
failed: 0,
continueFrom: null,
}
let INTERRUPT = false
async function processBatch(projects) {
if (projects.length && projects[0]._id) {
RESULT.continueFrom = projects[0]._id
}
await promiseMapWithLimit(WRITE_CONCURRENCY, projects, processProject)
console.log(RESULT)
if (INTERRUPT) {
// ctrl+c
console.log('Terminated by SIGINT')
process.exit(0)
}
if (RESULT.failed >= MAX_FAILURES) {
console.log(`MAX_FAILURES limit (${MAX_FAILURES}) reached. Stopping.`)
process.exit(0)
}
if (MAX_UPGRADES_TO_ATTEMPT && RESULT.attempted >= MAX_UPGRADES_TO_ATTEMPT) {
console.log(
`MAX_UPGRADES_TO_ATTEMPT limit (${MAX_UPGRADES_TO_ATTEMPT}) reached. Stopping.`
)
process.exit(0)
}
}
async function processProject(project) {
if (INTERRUPT) {
return
}
if (project.overleaf && project.overleaf.history) {
// projects we're upgrading like this should never have a history id
if (project.overleaf.history.id) {
return
}
if (
project.overleaf.history.conversionFailed ||
project.overleaf.history.upgradeFailed
) {
if (project.overleaf.history.zipFileArchivedInProject) {
return // always give up if we have uploaded the zipfile to the project
}
if (!RETRY_FAILED) {
// we don't want to attempt upgrade on projects
// that have been previously attempted and failed
return
}
}
}
if (RESULT.failed >= MAX_FAILURES) {
return
}
if (MAX_UPGRADES_TO_ATTEMPT && RESULT.attempted >= MAX_UPGRADES_TO_ATTEMPT) {
return
}
const anyDocHistoryOrIndex =
(await anyDocHistoryExists(project)) ||
(await anyDocHistoryIndexExists(project))
if (anyDocHistoryOrIndex) {
RESULT.attempted += 1
if (DRY_RUN) {
return
}
const result = await doUpgradeForNoneWithConversion(project, {
migrationOptions: {
archiveOnFailure: ARCHIVE_ON_FAILURE,
fixInvalidCharacters: FIX_INVALID_CHARACTERS,
forceNewHistoryOnFailure: FORCE_NEW_HISTORY_ON_FAILURE,
importZipFilePath: IMPORT_ZIP_FILE_PATH,
cutoffDate: CUTOFF_DATE,
},
convertLargeDocsToFile: CONVERT_LARGE_DOCS_TO_FILE,
userId: USER_ID,
reason: `${SCRIPT_VERSION}`,
})
if (result.convertedDocCount) {
console.log(
`project ${project._id} converted ${result.convertedDocCount} docs to filestore`
)
}
if (result.error) {
console.error(`project ${project._id} FAILED with error: `, result.error)
RESULT.failed += 1
} else if (result.upgraded) {
if (VERBOSE_LOGGING) {
console.log(
`project ${project._id} converted and upgraded to full project history`
)
}
RESULT.projectsUpgraded += 1
}
}
}
async function main() {
if (PROJECT_ID) {
await waitForDb()
const project = await db.projects.findOne({ _id: ObjectId(PROJECT_ID) })
await processProject(project)
} else {
const projection = {
_id: 1,
overleaf: 1,
}
const options = {}
if (USE_QUERY_HINT) {
options.hint = { _id: 1 }
}
await batchedUpdate(
'projects',
// we originally used
// 'overleaf.history.id': { $exists: false }
// but 'display: false' is indexed and contains all of the above;
// it can be faster to skip projects with a history id in code than to
// query on an unindexed field
{ 'overleaf.history.display': { $ne: true } },
processBatch,
projection,
options
)
}
console.log('Final')
console.log(RESULT)
}
// Upgrading history is not atomic; if we quit mid-initialisation
// then history could get into a broken state.
// Instead, skip any unprocessed projects and exit() at the end of the batch.
process.on('SIGINT', function () {
console.log('Caught SIGINT, waiting for in-process upgrades to complete')
INTERRUPT = true
})
main()
.then(() => {
console.error('Done.')
process.exit(0)
})
.catch(error => {
console.error({ error })
process.exit(1)
})

View file

@ -1,232 +0,0 @@
const SCRIPT_VERSION = 3
const VERBOSE_LOGGING = process.env.VERBOSE_LOGGING === 'true'
const WRITE_CONCURRENCY = parseInt(process.env.WRITE_CONCURRENCY, 10) || 10
const BATCH_SIZE = parseInt(process.env.BATCH_SIZE, 10) || 100
const DRY_RUN = process.env.DRY_RUN !== 'false'
const USE_QUERY_HINT = process.env.USE_QUERY_HINT !== 'false'
const RETRY_FAILED = process.env.RETRY_FAILED === 'true'
const MAX_UPGRADES_TO_ATTEMPT =
parseInt(process.env.MAX_UPGRADES_TO_ATTEMPT, 10) || false
const MAX_FAILURES = parseInt(process.env.MAX_FAILURES, 10) || 50
// persist the fallback in order to keep batchedUpdate in sync
process.env.BATCH_SIZE = BATCH_SIZE
// raise mongo timeout to 1hr if otherwise unspecified
process.env.MONGO_SOCKET_TIMEOUT =
parseInt(process.env.MONGO_SOCKET_TIMEOUT, 10) || 3600000
const {
db,
READ_PREFERENCE_SECONDARY,
} = require('../../app/src/infrastructure/mongodb')
const { promiseMapWithLimit } = require('../../app/src/util/promises')
const { batchedUpdate } = require('../helpers/batchedUpdate')
const ProjectHistoryHandler = require('../../app/src/Features/Project/ProjectHistoryHandler')
const HistoryManager = require('../../app/src/Features/History/HistoryManager')
console.log({
DRY_RUN,
VERBOSE_LOGGING,
WRITE_CONCURRENCY,
BATCH_SIZE,
MAX_UPGRADES_TO_ATTEMPT,
MAX_FAILURES,
USE_QUERY_HINT,
RETRY_FAILED,
})
const RESULT = {
DRY_RUN,
attempted: 0,
projectsUpgraded: 0,
failed: 0,
continueFrom: null,
}
let INTERRUPT = false
async function processBatch(projects) {
if (projects.length && projects[0]._id) {
RESULT.continueFrom = projects[0]._id
}
await promiseMapWithLimit(WRITE_CONCURRENCY, projects, processProject)
console.log(RESULT)
if (INTERRUPT) {
// ctrl+c
console.log('Terminated by SIGINT')
process.exit(0)
}
if (RESULT.failed >= MAX_FAILURES) {
console.log(`MAX_FAILURES limit (${MAX_FAILURES}) reached. Stopping.`)
process.exit(0)
}
if (MAX_UPGRADES_TO_ATTEMPT && RESULT.attempted >= MAX_UPGRADES_TO_ATTEMPT) {
console.log(
`MAX_UPGRADES_TO_ATTEMPT limit (${MAX_UPGRADES_TO_ATTEMPT}) reached. Stopping.`
)
process.exit(0)
}
}
async function processProject(project) {
if (INTERRUPT) {
return
}
// If upgradeFailed, skip unless we're explicitly retrying failed upgrades
if (
project.overleaf &&
project.overleaf.history &&
project.overleaf.history.upgradeFailed
) {
if (RETRY_FAILED) {
return await doUpgradeForNoneWithoutConversion(project)
} else {
return
}
}
// Skip any projects with a history ID, these are v1
if (
project.overleaf &&
project.overleaf.history &&
project.overleaf.history.id
) {
return
}
const anyDocHistory = await anyDocHistoryExists(project)
if (anyDocHistory) {
return
}
const anyDocHistoryIndex = await anyDocHistoryIndexExists(project)
if (anyDocHistoryIndex) {
return
}
await doUpgradeForNoneWithoutConversion(project)
}
async function doUpgradeForNoneWithoutConversion(project) {
if (RESULT.failed >= MAX_FAILURES) {
return
}
if (MAX_UPGRADES_TO_ATTEMPT && RESULT.attempted >= MAX_UPGRADES_TO_ATTEMPT) {
return
}
RESULT.attempted += 1
const projectId = project._id
if (!DRY_RUN) {
// ensureHistoryExistsForProject resyncs the project.
// The change to 'peek'ing the doc when resyncing should
// be rolled out before this script is used.
try {
// Logic originally from ProjectHistoryHandler.ensureHistoryExistsForProject,
// however this sends a force-resync request to project-history instead
// of a resync request to doc-updater
let historyId = await ProjectHistoryHandler.promises.getHistoryId(
projectId
)
if (historyId == null) {
historyId = await HistoryManager.promises.initializeProject(projectId)
if (historyId != null) {
await ProjectHistoryHandler.promises.setHistoryId(
projectId,
historyId
)
}
}
await HistoryManager.promises.resyncProject(projectId, {
force: true,
origin: { kind: 'history-migration' },
})
await HistoryManager.promises.flushProject(projectId)
} catch (err) {
RESULT.failed += 1
console.error(`project ${project._id} FAILED with error: `, err)
await db.projects.updateOne(
{ _id: project._id },
{
$set: {
'overleaf.history.upgradeFailed': true,
},
}
)
return
}
await db.projects.updateOne(
{ _id: project._id },
{
$set: {
'overleaf.history.display': true,
'overleaf.history.upgradedAt': new Date(),
'overleaf.history.upgradeReason': `none-without-sl-history/${SCRIPT_VERSION}`,
},
$unset: {
'overleaf.history.upgradeFailed': true,
},
}
)
}
if (VERBOSE_LOGGING) {
console.log(`project ${project._id} converted to full project history`)
}
RESULT.projectsUpgraded += 1
}
async function anyDocHistoryExists(project) {
return await db.docHistory.findOne(
{ project_id: { $eq: project._id } },
{
projection: { _id: 1 },
readPreference: READ_PREFERENCE_SECONDARY,
}
)
}
async function anyDocHistoryIndexExists(project) {
return await db.docHistoryIndex.findOne(
{ project_id: { $eq: project._id } },
{
projection: { _id: 1 },
readPreference: READ_PREFERENCE_SECONDARY,
}
)
}
async function main() {
const projection = {
_id: 1,
overleaf: 1,
}
const options = {}
if (USE_QUERY_HINT) {
options.hint = { _id: 1 }
}
await batchedUpdate(
'projects',
// we originally used
// 'overleaf.history.id': { $exists: false }
// but 'display: false' is indexed and contains all of the above,
// plus we want to be able to retry failed upgrades that have a history id
{ 'overleaf.history.display': { $ne: true } },
processBatch,
projection,
options
)
console.log('Final')
console.log(RESULT)
}
// Upgrading history is not atomic; if we quit mid-initialisation
// then history could get into a broken state.
// Instead, skip any unprocessed projects and exit() at the end of the batch.
process.on('SIGINT', function () {
console.log('Caught SIGINT, waiting for in-process upgrades to complete')
INTERRUPT = true
})
main()
.then(() => {
console.error('Done.')
process.exit(0)
})
.catch(error => {
console.error({ error })
process.exit(1)
})

View file

@ -1,44 +0,0 @@
const { ObjectId } = require('mongodb')
const {
db,
waitForDb,
READ_PREFERENCE_SECONDARY,
} = require('../../app/src/infrastructure/mongodb')
const {
upgradeProject,
} = require('../../modules/history-migration/app/src/HistoryUpgradeHelper')
async function processProject(project) {
const result = await upgradeProject(project)
console.log(result)
}
async function main() {
await waitForDb()
const args = process.argv.slice(2)
const projectId = args[0]
const query = { _id: ObjectId(projectId) }
const projection = {
_id: 1,
overleaf: 1,
}
const options = {
projection,
readPreference: READ_PREFERENCE_SECONDARY,
}
const project = await db.projects.findOne(query, options)
if (project) {
await processProject(project)
} else {
console.error(`project ${projectId} not found`)
}
}
main()
.then(() => {
process.exit(0)
})
.catch(error => {
console.error({ error })
process.exit(1)
})

View file

@ -1,243 +0,0 @@
const SCRIPT_VERSION = 1
const VERBOSE_LOGGING = process.env.VERBOSE_LOGGING === 'true'
const WRITE_CONCURRENCY = parseInt(process.env.WRITE_CONCURRENCY, 10) || 10
const BATCH_SIZE = parseInt(process.env.BATCH_SIZE, 10) || 100
const DRY_RUN = process.env.DRY_RUN !== 'false'
const USE_QUERY_HINT = process.env.USE_QUERY_HINT !== 'false'
const RETRY_FAILED = process.env.RETRY_FAILED === 'true'
const MAX_UPGRADES_TO_ATTEMPT =
parseInt(process.env.MAX_UPGRADES_TO_ATTEMPT, 10) || false
const MAX_FAILURES = parseInt(process.env.MAX_FAILURES, 10) || 50
// persist the fallback in order to keep batchedUpdate in sync
process.env.BATCH_SIZE = BATCH_SIZE
// raise mongo timeout to 1hr if otherwise unspecified
process.env.MONGO_SOCKET_TIMEOUT =
parseInt(process.env.MONGO_SOCKET_TIMEOUT, 10) || 3600000
const PROJECT_ID = process.env.PROJECT_ID
const { ObjectId } = require('mongodb')
const {
db,
waitForDb,
READ_PREFERENCE_SECONDARY,
} = require('../../app/src/infrastructure/mongodb')
const { promiseMapWithLimit } = require('../../app/src/util/promises')
const { batchedUpdate } = require('../helpers/batchedUpdate')
const ProjectHistoryController = require('../../modules/history-migration/app/src/ProjectHistoryController')
console.log({
DRY_RUN,
VERBOSE_LOGGING,
WRITE_CONCURRENCY,
BATCH_SIZE,
MAX_UPGRADES_TO_ATTEMPT,
MAX_FAILURES,
USE_QUERY_HINT,
RETRY_FAILED,
PROJECT_ID,
})
const RESULT = {
DRY_RUN,
attempted: 0,
projectsUpgraded: 0,
failed: 0,
continueFrom: null,
}
let INTERRUPT = false
async function processBatch(projects) {
if (projects.length && projects[0]._id) {
RESULT.continueFrom = projects[0]._id
}
await promiseMapWithLimit(WRITE_CONCURRENCY, projects, processProject)
console.log(RESULT)
if (INTERRUPT) {
// ctrl+c
console.log('Terminated by SIGINT')
process.exit(0)
}
if (RESULT.failed >= MAX_FAILURES) {
console.log(`MAX_FAILURES limit (${MAX_FAILURES}) reached. Stopping.`)
process.exit(0)
}
if (MAX_UPGRADES_TO_ATTEMPT && RESULT.attempted >= MAX_UPGRADES_TO_ATTEMPT) {
console.log(
`MAX_UPGRADES_TO_ATTEMPT limit (${MAX_UPGRADES_TO_ATTEMPT}) reached. Stopping.`
)
process.exit(0)
}
}
async function processProject(project) {
if (INTERRUPT) {
return
}
// skip safety check if we want to retry failed upgrades
if (!RETRY_FAILED) {
if (project.overleaf && project.overleaf.history) {
if (
project.overleaf.history.conversionFailed ||
project.overleaf.history.upgradeFailed
) {
// we don't want to attempt upgrade on projects
// that have been previously attempted and failed
return
}
}
}
const preserveHistory = await shouldPreserveHistory(project)
if (preserveHistory) {
const anyDocHistory = await anyDocHistoryExists(project)
if (anyDocHistory) {
return await doUpgradeForV1WithConversion(project)
}
const anyDocHistoryIndex = await anyDocHistoryIndexExists(project)
if (anyDocHistoryIndex) {
return await doUpgradeForV1WithConversion(project)
}
}
}
async function doUpgradeForV1WithConversion(project) {
if (RESULT.failed >= MAX_FAILURES) {
return
}
if (MAX_UPGRADES_TO_ATTEMPT && RESULT.attempted >= MAX_UPGRADES_TO_ATTEMPT) {
return
}
RESULT.attempted += 1
const projectId = project._id
// migrateProjectHistory expects project id as a string
const projectIdString = project._id.toString()
if (!DRY_RUN) {
try {
// We treat these essentially as None projects: the V1 history is irrelevant,
// so we delete it and do a conversion as if this were a None project
await ProjectHistoryController.deleteProjectHistory(projectIdString)
if (VERBOSE_LOGGING) {
console.log(
`project ${projectId} existing full project history deleted`
)
}
await ProjectHistoryController.migrateProjectHistory(projectIdString)
} catch (err) {
// if migrateProjectHistory fails, it cleans up by deleting
// the history and unsetting the history id
// therefore a failed project will still look like a 'None with conversion' project
RESULT.failed += 1
console.error(`project ${projectId} FAILED with error: `, err)
// We set a failed flag so future runs of the script don't automatically retry
await db.projects.updateOne(
{ _id: projectId },
{
$set: {
'overleaf.history.conversionFailed': true,
},
}
)
return
}
await db.projects.updateOne(
{ _id: projectId },
{
$set: {
'overleaf.history.upgradeReason': `v1-with-conversion/${SCRIPT_VERSION}`,
},
$unset: {
'overleaf.history.upgradeFailed': true,
'overleaf.history.conversionFailed': true,
},
}
)
}
if (VERBOSE_LOGGING) {
console.log(
`project ${projectId} converted and upgraded to full project history`
)
}
RESULT.projectsUpgraded += 1
}
async function shouldPreserveHistory(project) {
return await db.projectHistoryMetaData.findOne(
{
$and: [
{ project_id: { $eq: project._id } },
{ preserveHistory: { $eq: true } },
],
},
{ readPreference: READ_PREFERENCE_SECONDARY }
)
}
async function anyDocHistoryExists(project) {
return await db.docHistory.findOne(
{ project_id: { $eq: project._id } },
{
projection: { _id: 1 },
readPreference: READ_PREFERENCE_SECONDARY,
}
)
}
async function anyDocHistoryIndexExists(project) {
return await db.docHistoryIndex.findOne(
{ project_id: { $eq: project._id } },
{
projection: { _id: 1 },
readPreference: READ_PREFERENCE_SECONDARY,
}
)
}
async function main() {
if (PROJECT_ID) {
await waitForDb()
const project = await db.projects.findOne({ _id: ObjectId(PROJECT_ID) })
await processProject(project)
} else {
const projection = {
_id: 1,
overleaf: 1,
}
const options = {}
if (USE_QUERY_HINT) {
options.hint = { _id: 1 }
}
await batchedUpdate(
'projects',
{
$and: [
{ 'overleaf.history.display': { $ne: true } },
{ 'overleaf.history.id': { $exists: true } },
],
},
processBatch,
projection,
options
)
console.log('Final')
console.log(RESULT)
}
}
// Upgrading history is not atomic; if we quit mid-initialisation
// then history could get into a broken state.
// Instead, skip any unprocessed projects and exit() at the end of the batch.
process.on('SIGINT', function () {
console.log('Caught SIGINT, waiting for in-process upgrades to complete')
INTERRUPT = true
})
main()
.then(() => {
console.error('Done.')
process.exit(0)
})
.catch(error => {
console.error({ error })
process.exit(1)
})

View file

@ -1,165 +0,0 @@
const SCRIPT_VERSION = 2
const VERBOSE_LOGGING = process.env.VERBOSE_LOGGING === 'true'
const WRITE_CONCURRENCY = parseInt(process.env.WRITE_CONCURRENCY, 10) || 10
const BATCH_SIZE = parseInt(process.env.BATCH_SIZE, 10) || 100
const DRY_RUN = process.env.DRY_RUN !== 'false'
const USE_QUERY_HINT = process.env.USE_QUERY_HINT !== 'false'
// persist the fallback in order to keep batchedUpdate in sync
process.env.BATCH_SIZE = BATCH_SIZE
// raise mongo timeout to 1hr if otherwise unspecified
process.env.MONGO_SOCKET_TIMEOUT =
parseInt(process.env.MONGO_SOCKET_TIMEOUT, 10) || 3600000
const { ObjectId } = require('mongodb')
const {
db,
READ_PREFERENCE_SECONDARY,
} = require('../../app/src/infrastructure/mongodb')
const { promiseMapWithLimit } = require('../../app/src/util/promises')
const { batchedUpdate } = require('../helpers/batchedUpdate')
console.log({
DRY_RUN,
VERBOSE_LOGGING,
WRITE_CONCURRENCY,
BATCH_SIZE,
USE_QUERY_HINT,
})
const RESULT = {
DRY_RUN,
projectsUpgraded: 0,
}
const ID_WHEN_FULL_PROJECT_HISTORY_ENABLED = '5a8d8a370000000000000000'
const OBJECT_ID_WHEN_FULL_PROJECT_HISTORY_ENABLED = new ObjectId(
ID_WHEN_FULL_PROJECT_HISTORY_ENABLED
)
const DATETIME_WHEN_FULL_PROJECT_HISTORY_ENABLED =
OBJECT_ID_WHEN_FULL_PROJECT_HISTORY_ENABLED.getTimestamp()
// set a default BATCH_LAST_ID at our cutoff point if none is set;
// we still check against this cutoff point later, even if
// BATCH_LAST_ID is set to something problematic
if (!process.env.BATCH_LAST_ID) {
process.env.BATCH_LAST_ID = ID_WHEN_FULL_PROJECT_HISTORY_ENABLED
}
async function processBatch(projects) {
await promiseMapWithLimit(WRITE_CONCURRENCY, projects, processProject)
console.log(RESULT)
}
async function processProject(project) {
// safety check
if (
project.overleaf &&
project.overleaf.history &&
project.overleaf.history.upgradeFailed
) {
// a failed history upgrade might look like a v1 project, but history may be broken
return
}
if (!projectCreatedAfterFullProjectHistoryEnabled(project)) {
return
}
// if they have SL history, continue to send to both history systems (for now)
const anyDocHistory = await anyDocHistoryExists(project)
if (anyDocHistory) {
return await doUpgradeForV1WithoutConversion(project, true)
}
const anyDocHistoryIndex = await anyDocHistoryIndexExists(project)
if (anyDocHistoryIndex) {
return await doUpgradeForV1WithoutConversion(project, true)
}
// otherwise, with no SL history, there is nothing to 'downgrade' to
return await doUpgradeForV1WithoutConversion(project, false)
}
function projectCreatedAfterFullProjectHistoryEnabled(project) {
return (
project._id.getTimestamp() >= DATETIME_WHEN_FULL_PROJECT_HISTORY_ENABLED
)
}
async function doUpgradeForV1WithoutConversion(project, allowDowngrade) {
const setProperties = {
'overleaf.history.display': true,
'overleaf.history.upgradedAt': new Date(),
'overleaf.history.upgradeReason': `v1-after-fph/${SCRIPT_VERSION}`,
}
if (allowDowngrade) {
setProperties['overleaf.history.allowDowngrade'] = true
}
if (!DRY_RUN) {
await db.projects.updateOne(
{ _id: project._id },
{
$set: setProperties,
}
)
}
if (VERBOSE_LOGGING) {
console.log(
`project ${project._id} converted to full project history${
allowDowngrade ? ', with allowDowngrade' : ''
}`
)
}
RESULT.projectsUpgraded += 1
}
async function anyDocHistoryExists(project) {
return await db.docHistory.findOne(
{ project_id: { $eq: project._id } },
{
projection: { _id: 1 },
readPreference: READ_PREFERENCE_SECONDARY,
}
)
}
async function anyDocHistoryIndexExists(project) {
return await db.docHistoryIndex.findOne(
{ project_id: { $eq: project._id } },
{
projection: { _id: 1 },
readPreference: READ_PREFERENCE_SECONDARY,
}
)
}
async function main() {
const projection = {
_id: 1,
overleaf: 1,
}
const options = {}
if (USE_QUERY_HINT) {
options.hint = { _id: 1 }
}
await batchedUpdate(
'projects',
{
$and: [
{ 'overleaf.history.display': { $ne: true } },
{ 'overleaf.history.id': { $exists: true } },
],
},
processBatch,
projection,
options
)
console.log('Final')
console.log(RESULT)
}
main()
.then(() => {
console.error('Done.')
process.exit(0)
})
.catch(error => {
console.error({ error })
process.exit(1)
})

View file

@ -1,161 +0,0 @@
const SCRIPT_VERSION = 3
const VERBOSE_LOGGING = process.env.VERBOSE_LOGGING === 'true'
const WRITE_CONCURRENCY = parseInt(process.env.WRITE_CONCURRENCY, 10) || 10
const BATCH_SIZE = parseInt(process.env.BATCH_SIZE, 10) || 100
const DRY_RUN = process.env.DRY_RUN !== 'false'
const USE_QUERY_HINT = process.env.USE_QUERY_HINT !== 'false'
const UPGRADE_FAILED_WITH_EMPTY_HISTORY =
process.env.UPGRADE_FAILED_WITH_EMPTY_HISTORY === 'true'
// persist the fallback in order to keep batchedUpdate in sync
process.env.BATCH_SIZE = BATCH_SIZE
// raise mongo timeout to 1hr if otherwise unspecified
process.env.MONGO_SOCKET_TIMEOUT =
parseInt(process.env.MONGO_SOCKET_TIMEOUT, 10) || 3600000
const {
db,
READ_PREFERENCE_SECONDARY,
} = require('../../app/src/infrastructure/mongodb')
const { promiseMapWithLimit } = require('../../app/src/util/promises')
const { batchedUpdate } = require('../helpers/batchedUpdate')
console.log({
DRY_RUN,
VERBOSE_LOGGING,
WRITE_CONCURRENCY,
BATCH_SIZE,
USE_QUERY_HINT,
UPGRADE_FAILED_WITH_EMPTY_HISTORY,
})
const RESULT = {
DRY_RUN,
projectsUpgraded: 0,
}
async function processBatch(projects) {
await promiseMapWithLimit(WRITE_CONCURRENCY, projects, processProject)
console.log(RESULT)
}
async function processProject(project) {
// safety check: if a previous upgrade failed, only continue when the
// history is empty and UPGRADE_FAILED_WITH_EMPTY_HISTORY is set
const anyDocHistory = await anyDocHistoryExists(project)
const anyDocHistoryIndex = await anyDocHistoryIndexExists(project)
if (
project.overleaf &&
project.overleaf.history &&
project.overleaf.history.upgradeFailed
) {
const emptyHistory = !anyDocHistory && !anyDocHistoryIndex
if (emptyHistory && UPGRADE_FAILED_WITH_EMPTY_HISTORY) {
console.log(
`upgrading previously failed project ${project._id} with empty history`
)
} else {
// a failed history upgrade might look like a v1 project, but history may be broken
return
}
}
const preserveHistory = await shouldPreserveHistory(project)
if (preserveHistory) {
// if we need to preserve history, then we must bail out if history exists
if (anyDocHistory) {
return
}
if (anyDocHistoryIndex) {
return
}
return await doUpgradeForV1WithoutConversion(project)
} else {
// if preserveHistory is false, there is at most 7 days of SL history,
// and v1 projects already record to both histories, so it is safe to upgrade
return await doUpgradeForV1WithoutConversion(project)
}
}
async function doUpgradeForV1WithoutConversion(project) {
if (!DRY_RUN) {
await db.projects.updateOne(
{ _id: project._id },
{
$set: {
'overleaf.history.display': true,
'overleaf.history.upgradedAt': new Date(),
'overleaf.history.upgradeReason': `v1-without-sl-history/${SCRIPT_VERSION}`,
},
}
)
}
if (VERBOSE_LOGGING) {
console.log(`project ${project._id} converted to full project history`)
}
RESULT.projectsUpgraded += 1
}
async function shouldPreserveHistory(project) {
return await db.projectHistoryMetaData.findOne(
{
$and: [
{ project_id: { $eq: project._id } },
{ preserveHistory: { $eq: true } },
],
},
{ readPreference: READ_PREFERENCE_SECONDARY }
)
}
async function anyDocHistoryExists(project) {
return await db.docHistory.findOne(
{ project_id: { $eq: project._id } },
{
projection: { _id: 1 },
readPreference: READ_PREFERENCE_SECONDARY,
}
)
}
async function anyDocHistoryIndexExists(project) {
return await db.docHistoryIndex.findOne(
{ project_id: { $eq: project._id } },
{
projection: { _id: 1 },
readPreference: READ_PREFERENCE_SECONDARY,
}
)
}
async function main() {
const projection = {
_id: 1,
overleaf: 1,
}
const options = {}
if (USE_QUERY_HINT) {
options.hint = { _id: 1 }
}
await batchedUpdate(
'projects',
{
$and: [
{ 'overleaf.history.display': { $ne: true } },
{ 'overleaf.history.id': { $exists: true } },
],
},
processBatch,
projection,
options
)
console.log('Final')
console.log(RESULT)
}
main()
.then(() => {
console.error('Done.')
process.exit(0)
})
.catch(error => {
console.error({ error })
process.exit(1)
})

View file

@ -165,7 +165,6 @@ async function deleteDocFromRedis(projectId, docId) {
`UnflushedTime:{${docId}}`,
`Pathname:{${docId}}`,
`ProjectHistoryId:{${docId}}`,
`ProjectHistoryType:{${docId}}`,
`PendingUpdates:{${docId}}`,
`lastUpdatedAt:{${docId}}`,
`lastUpdatedBy:{${docId}}`

View file

@ -51,9 +51,6 @@ module.exports = {
url: 'http://localhost:23005',
host: 'localhost',
},
trackchanges: {
url: 'http://localhost:23015',
},
docstore: {
url: 'http://localhost:23016',
pubUrl: 'http://localhost:23016',
@ -78,8 +75,6 @@ module.exports = {
},
project_history: {
sendProjectStructureOps: true,
initializeHistoryForNewProjects: true,
displayHistoryForNewProjects: true,
url: `http://localhost:23054`,
},
v1_history: {

View file

@ -39,77 +39,6 @@ describe('DocumentController', function () {
}
})
describe('when the project exists without project history enabled', function () {
beforeEach(function () {
this.project = { _id: this.project_id }
this.ProjectGetter.getProject = sinon
.stub()
.callsArgWith(2, null, this.project)
})
describe('when the document exists', function () {
beforeEach(function () {
this.doc = { _id: this.doc_id }
this.ProjectLocator.findElement = sinon
.stub()
.callsArgWith(1, null, this.doc, { fileSystem: this.pathname })
this.ProjectEntityHandler.getDoc = sinon
.stub()
.yields(null, this.doc_lines, this.rev, this.version, this.ranges)
this.DocumentController.getDocument(this.req, this.res, this.next)
})
it('should get the project', function () {
this.ProjectGetter.getProject
.calledWith(this.project_id, { rootFolder: true, overleaf: true })
.should.equal(true)
})
it('should get the pathname of the document', function () {
this.ProjectLocator.findElement
.calledWith({
project: this.project,
element_id: this.doc_id,
type: 'doc',
})
.should.equal(true)
})
it('should get the document content', function () {
this.ProjectEntityHandler.getDoc
.calledWith(this.project_id, this.doc_id)
.should.equal(true)
})
it('should return the document data to the client as JSON', function () {
this.res.type.should.equal('application/json')
this.res.body.should.equal(
JSON.stringify({
lines: this.doc_lines,
version: this.version,
ranges: this.ranges,
pathname: this.pathname,
})
)
})
})
describe("when the document doesn't exist", function () {
beforeEach(function () {
this.ProjectLocator.findElement = sinon
.stub()
.callsArgWith(1, new Errors.NotFoundError('not found'))
this.DocumentController.getDocument(this.req, this.res, this.next)
})
it('should call next with the NotFoundError', function () {
this.next
.calledWith(sinon.match.instanceOf(Errors.NotFoundError))
.should.equal(true)
})
})
})
describe('when project exists with project history enabled', function () {
beforeEach(function () {
this.doc = { _id: this.doc_id }
@ -152,53 +81,6 @@ describe('DocumentController', function () {
})
})
describe('when project exists that was migrated with downgrades allowed', function () {
beforeEach(function () {
this.doc = { _id: this.doc_id }
this.projectHistoryId = 1234
this.projectHistoryDisplay = true
this.projectHistoryType = undefined
this.project = {
_id: this.project_id,
overleaf: {
history: {
id: this.projectHistoryId,
display: this.projectHistoryDisplay,
allowDowngrade: true,
},
},
}
this.ProjectGetter.getProject = sinon
.stub()
.callsArgWith(2, null, this.project)
this.ProjectLocator.findElement = sinon
.stub()
.callsArgWith(1, null, this.doc, { fileSystem: this.pathname })
this.ProjectEntityHandler.getDoc = sinon
.stub()
.yields(null, this.doc_lines, this.rev, this.version, this.ranges)
return this.DocumentController.getDocument(
this.req,
this.res,
this.next
)
})
it('should return the history id in the JSON but not history type, sending history to both services', function () {
this.res.type.should.equal('application/json')
return this.res.body.should.equal(
JSON.stringify({
lines: this.doc_lines,
version: this.version,
ranges: this.ranges,
pathname: this.pathname,
projectHistoryId: this.projectHistoryId,
projectHistoryType: this.projectHistoryType,
})
)
})
})
describe('when the project does not exist', function () {
beforeEach(function () {
this.ProjectGetter.getProject = sinon.stub().callsArgWith(2, null, null)

View file

@ -46,60 +46,12 @@ describe('HistoryController', function () {
},
})
return (this.settings.apis = {
trackchanges: {
enabled: false,
url: 'http://trackchanges.example.com',
},
project_history: {
url: 'http://project_history.example.com',
},
})
})
describe('selectHistoryApi', function () {
beforeEach(function () {
this.req = { url: '/mock/url', method: 'POST', params: {} }
this.res = 'mock-res'
return (this.next = sinon.stub())
})
describe('for a project with project history', function () {
beforeEach(function () {
this.ProjectDetailsHandler.getDetails = sinon
.stub()
.callsArgWith(1, null, {
overleaf: { history: { id: 42, display: true } },
})
return this.HistoryController.selectHistoryApi(
this.req,
this.res,
this.next
)
})
it('should set the flag for project history to true', function () {
return this.req.useProjectHistory.should.equal(true)
})
})
describe('for any other project', function () {
beforeEach(function () {
this.ProjectDetailsHandler.getDetails = sinon
.stub()
.callsArgWith(1, null, {})
return this.HistoryController.selectHistoryApi(
this.req,
this.res,
this.next
)
})
it('should set the flag for project history to false', function () {
return this.req.useProjectHistory.should.equal(false)
})
})
})
describe('proxyToHistoryApi', function () {
beforeEach(function () {
this.req = { url: '/mock/url', method: 'POST' }
@ -161,18 +113,6 @@ describe('HistoryController', function () {
.should.equal(true)
})
it('should call the track changes api', function () {
return this.request
.calledWith({
url: `${this.settings.apis.trackchanges.url}${this.req.url}`,
method: this.req.method,
headers: {
'X-User-Id': this.user_id,
},
})
.should.equal(true)
})
it('should pipe the response to the client', function () {
expect(this.Stream.pipeline).to.have.been.calledWith(
this.proxy,
@ -249,19 +189,6 @@ describe('HistoryController', function () {
.should.equal(true)
})
it('should call the track changes api', function () {
return this.request
.calledWith({
url: `${this.settings.apis.trackchanges.url}${this.req.url}`,
method: this.req.method,
json: true,
headers: {
'X-User-Id': this.user_id,
},
})
.should.equal(true)
})
it('should inject the user data', function () {
return this.HistoryManager.injectUserDetails
.calledWith(this.data)

View file

@ -23,10 +23,6 @@ describe('HistoryManager', function () {
this.v1HistoryPassword = 'verysecret'
this.settings = {
apis: {
trackchanges: {
enabled: false,
url: 'http://trackchanges.example.com',
},
project_history: {
url: this.projectHistoryUrl,
},
@ -55,56 +51,45 @@ describe('HistoryManager', function () {
})
describe('initializeProject', function () {
describe('with project history enabled', function () {
beforeEach(function () {
  this.settings.apis.project_history.initializeHistoryForNewProjects = true
})
describe('project history returns a successful response', function () {
  beforeEach(async function () {
    this.response.json.resolves({ project: { id: this.historyId } })
    this.result = await this.HistoryManager.promises.initializeProject(
      this.historyId
    )
  })
  it('should call the project history api', function () {
    this.fetch.should.have.been.calledWithMatch(
      `${this.settings.apis.project_history.url}/project`,
      { method: 'POST' }
    )
  })
  it('should return the overleaf id', function () {
    expect(this.result).to.equal(this.historyId)
  })
})
describe('project history returns a response without the project id', function () {
  it('should throw an error', async function () {
    this.response.json.resolves({ project: {} })
    await expect(
      this.HistoryManager.promises.initializeProject(this.historyId)
    ).to.be.rejected
  })
})
describe('project history errors', function () {
  it('should propagate the error', async function () {
    this.fetch.rejects(new Error('problem connecting'))
    await expect(
      this.HistoryManager.promises.initializeProject(this.historyId)
    ).to.be.rejected
  })
})
})
describe('with project history disabled', function () {
  it('should return without errors', async function () {
    this.settings.apis.project_history.initializeHistoryForNewProjects = false
    await expect(
      this.HistoryManager.promises.initializeProject(this.historyId)
    ).to.be.fulfilled
  })
})
})