Merge pull request #18906 from overleaf/em-migrate-existing-histories-2

History ranges migration script - second attempt

GitOrigin-RevId: 60a2c04e2a72e76a58e9e179fefc4186a96fde32
This commit is contained in:
Eric Mc Sween 2024-06-18 10:00:00 -04:00 committed by Copybot
parent 9f0f42a012
commit e73fdfba63
28 changed files with 379 additions and 187 deletions

View file

@ -1,3 +1,4 @@
const { callbackify, promisify } = require('util')
const metrics = require('@overleaf/metrics')
const logger = require('@overleaf/logger')
const os = require('os')
@ -31,6 +32,14 @@ module.exports = class RedisWebLocker {
this.SLOW_EXECUTION_THRESHOLD = options.slowExecutionThreshold || 5000
// read-only copy for unit tests
this.unlockScript = UNLOCK_SCRIPT
const promisifiedRunWithLock = promisify(this.runWithLock).bind(this)
this.promises = {
runWithLock(namespace, id, runner) {
const cbRunner = callbackify(runner)
return promisifiedRunWithLock(namespace, id, cbRunner)
},
}
}
// Use a signed lock value as described in

View file

@ -384,7 +384,7 @@ const DocumentManager = {
return { lines, version }
},
async resyncDocContents(projectId, docId, path) {
async resyncDocContents(projectId, docId, path, opts = {}) {
logger.debug({ projectId, docId, path }, 'start resyncing doc contents')
let {
lines,
@ -422,6 +422,10 @@ const DocumentManager = {
)
}
if (opts.historyRangesMigration) {
historyRangesSupport = opts.historyRangesMigration === 'forwards'
}
await ProjectHistoryRedisManager.promises.queueResyncDocContent(
projectId,
projectHistoryId,
@ -434,6 +438,13 @@ const DocumentManager = {
path,
historyRangesSupport
)
if (opts.historyRangesMigration) {
await RedisManager.promises.setHistoryRangesSupportFlag(
docId,
historyRangesSupport
)
}
},
async getDocWithLock(projectId, docId) {
@ -547,14 +558,14 @@ const DocumentManager = {
)
},
async resyncDocContentsWithLock(projectId, docId, path, callback) {
async resyncDocContentsWithLock(projectId, docId, path, opts) {
const UpdateManager = require('./UpdateManager')
await UpdateManager.promises.lockUpdatesAndDo(
DocumentManager.resyncDocContents,
projectId,
docId,
path,
callback
opts
)
},
}

View file

@ -93,7 +93,14 @@ const HistoryManager = {
MAX_PARALLEL_REQUESTS: 4,
resyncProjectHistory(projectId, projectHistoryId, docs, files, callback) {
resyncProjectHistory(
projectId,
projectHistoryId,
docs,
files,
opts,
callback
) {
ProjectHistoryRedisManager.queueResyncProjectStructure(
projectId,
projectHistoryId,
@ -109,6 +116,7 @@ const HistoryManager = {
projectId,
doc.doc,
doc.path,
opts,
cb
)
}

View file

@ -11,27 +11,6 @@ const DeleteQueueManager = require('./DeleteQueueManager')
const { getTotalSizeOfLines } = require('./Limits')
const async = require('async')
module.exports = {
getDoc,
peekDoc,
getProjectDocsAndFlushIfOld,
clearProjectState,
setDoc,
flushDocIfLoaded,
deleteDoc,
flushProject,
deleteProject,
deleteMultipleProjects,
acceptChanges,
resolveComment,
reopenComment,
deleteComment,
updateProject,
resyncProjectHistory,
flushAllProjects,
flushQueuedProjects,
}
function getDoc(req, res, next) {
let fromVersion
const docId = req.params.doc_id
@ -401,17 +380,24 @@ function updateProject(req, res, next) {
function resyncProjectHistory(req, res, next) {
const projectId = req.params.project_id
const { projectHistoryId, docs, files } = req.body
const { projectHistoryId, docs, files, historyRangesMigration } = req.body
logger.debug(
{ projectId, docs, files },
'queuing project history resync via http'
)
const opts = {}
if (historyRangesMigration) {
opts.historyRangesMigration = historyRangesMigration
}
HistoryManager.resyncProjectHistory(
projectId,
projectHistoryId,
docs,
files,
opts,
error => {
if (error) {
return next(error)
@ -456,3 +442,24 @@ function flushQueuedProjects(req, res, next) {
}
})
}
module.exports = {
getDoc,
peekDoc,
getProjectDocsAndFlushIfOld,
clearProjectState,
setDoc,
flushDocIfLoaded,
deleteDoc,
flushProject,
deleteProject,
deleteMultipleProjects,
acceptChanges,
resolveComment,
reopenComment,
deleteComment,
updateProject,
resyncProjectHistory,
flushAllProjects,
flushQueuedProjects,
}

View file

@ -8,18 +8,6 @@ const Metrics = require('./Metrics')
const Errors = require('./Errors')
const { promisifyAll } = require('@overleaf/promise-utils')
module.exports = {
flushProjectWithLocks,
flushAndDeleteProjectWithLocks,
queueFlushAndDeleteProject,
getProjectDocsTimestamps,
getProjectDocsAndFlushIfOld,
clearProjectState,
updateProjectWithLocks,
}
module.exports.promises = promisifyAll(module.exports)
function flushProjectWithLocks(projectId, _callback) {
const timer = new Metrics.Timer('projectManager.flushProjectWithLocks')
const callback = function (...args) {
@ -339,3 +327,15 @@ function updateProjectWithLocks(
callback()
})
}
module.exports = {
flushProjectWithLocks,
flushAndDeleteProjectWithLocks,
queueFlushAndDeleteProject,
getProjectDocsTimestamps,
getProjectDocsAndFlushIfOld,
clearProjectState,
updateProjectWithLocks,
}
module.exports.promises = promisifyAll(module.exports)

View file

@ -77,66 +77,70 @@ const RedisManager = {
logger.error({ err: error, docId, projectId }, error.message)
return callback(error)
}
setHistoryRangesSupportFlag(docId, historyRangesSupport, err => {
if (err) {
return callback(err)
}
// update docsInProject set before writing doc contents
rclient.sadd(
keys.docsInProject({ project_id: projectId }),
docId,
error => {
if (error) return callback(error)
RedisManager.setHistoryRangesSupportFlag(
docId,
historyRangesSupport,
err => {
if (err) {
return callback(err)
}
// update docsInProject set before writing doc contents
rclient.sadd(
keys.docsInProject({ project_id: projectId }),
docId,
error => {
if (error) return callback(error)
if (!pathname) {
metrics.inc('pathname', 1, {
path: 'RedisManager.setDoc',
status: pathname === '' ? 'zero-length' : 'undefined',
if (!pathname) {
metrics.inc('pathname', 1, {
path: 'RedisManager.setDoc',
status: pathname === '' ? 'zero-length' : 'undefined',
})
}
// Make sure that this MULTI operation only operates on doc
// specific keys, i.e. keys that have the doc id in curly braces.
// The curly braces identify a hash key for Redis and ensures that
// the MULTI's operations are all done on the same node in a
// cluster environment.
const multi = rclient.multi()
multi.mset({
[keys.docLines({ doc_id: docId })]: docLines,
[keys.projectKey({ doc_id: docId })]: projectId,
[keys.docVersion({ doc_id: docId })]: version,
[keys.docHash({ doc_id: docId })]: docHash,
[keys.ranges({ doc_id: docId })]: ranges,
[keys.pathname({ doc_id: docId })]: pathname,
[keys.projectHistoryId({ doc_id: docId })]: projectHistoryId,
})
if (historyRangesSupport) {
multi.del(keys.resolvedCommentIds({ doc_id: docId }))
if (resolvedCommentIds.length > 0) {
multi.sadd(
keys.resolvedCommentIds({ doc_id: docId }),
...resolvedCommentIds
)
}
}
multi.exec(err => {
if (err) {
callback(
OError.tag(err, 'failed to write doc to Redis in MULTI', {
previousErrors: err.previousErrors.map(e => ({
name: e.name,
message: e.message,
command: e.command,
})),
})
)
} else {
callback()
}
})
}
// Make sure that this MULTI operation only operates on doc
// specific keys, i.e. keys that have the doc id in curly braces.
// The curly braces identify a hash key for Redis and ensures that
// the MULTI's operations are all done on the same node in a
// cluster environment.
const multi = rclient.multi()
multi.mset({
[keys.docLines({ doc_id: docId })]: docLines,
[keys.projectKey({ doc_id: docId })]: projectId,
[keys.docVersion({ doc_id: docId })]: version,
[keys.docHash({ doc_id: docId })]: docHash,
[keys.ranges({ doc_id: docId })]: ranges,
[keys.pathname({ doc_id: docId })]: pathname,
[keys.projectHistoryId({ doc_id: docId })]: projectHistoryId,
})
if (historyRangesSupport) {
multi.del(keys.resolvedCommentIds({ doc_id: docId }))
if (resolvedCommentIds.length > 0) {
multi.sadd(
keys.resolvedCommentIds({ doc_id: docId }),
...resolvedCommentIds
)
}
}
multi.exec(err => {
if (err) {
callback(
OError.tag(err, 'failed to write doc to Redis in MULTI', {
previousErrors: err.previousErrors.map(e => ({
name: e.name,
message: e.message,
command: e.command,
})),
})
)
} else {
callback()
}
})
}
)
})
)
}
)
})
},
@ -672,6 +676,14 @@ const RedisManager = {
)
},
setHistoryRangesSupportFlag(docId, historyRangesSupport, callback) {
if (historyRangesSupport) {
rclient.sadd(keys.historyRangesSupport(), docId, callback)
} else {
rclient.srem(keys.historyRangesSupport(), docId, callback)
}
},
_serializeRanges(ranges, callback) {
let jsonRanges = JSON.stringify(ranges)
if (jsonRanges && jsonRanges.length > MAX_RANGES_SIZE) {
@ -701,14 +713,6 @@ const RedisManager = {
},
}
function setHistoryRangesSupportFlag(docId, historyRangesSupport, callback) {
if (historyRangesSupport) {
rclient.sadd(keys.historyRangesSupport(), docId, callback)
} else {
rclient.srem(keys.historyRangesSupport(), docId, callback)
}
}
module.exports = RedisManager
module.exports.promises = promisifyAll(RedisManager, {
without: ['_deserializeRanges', '_computeHash'],

View file

@ -1066,7 +1066,7 @@ describe('HttpController', function () {
describe('successfully', function () {
beforeEach(function () {
this.HistoryManager.resyncProjectHistory = sinon.stub().callsArgWith(4)
this.HistoryManager.resyncProjectHistory = sinon.stub().callsArgWith(5)
this.HttpController.resyncProjectHistory(this.req, this.res, this.next)
})
@ -1076,13 +1076,14 @@ describe('HttpController', function () {
this.project_id,
this.projectHistoryId,
this.docs,
this.files
this.files,
{}
)
.should.equal(true)
})
it('should return a successful No Content response', function () {
this.res.sendStatus.calledWith(204).should.equal(true)
this.res.sendStatus.should.have.been.calledWith(204)
})
})
@ -1090,7 +1091,7 @@ describe('HttpController', function () {
beforeEach(function () {
this.HistoryManager.resyncProjectHistory = sinon
.stub()
.callsArgWith(4, new Error('oops'))
.callsArgWith(5, new Error('oops'))
this.HttpController.resyncProjectHistory(this.req, this.res, this.next)
})

View file

@ -18,6 +18,10 @@ const SandboxedModule = require('sandboxed-module')
describe('ProjectManager - flushAndDeleteProject', function () {
beforeEach(function () {
let Timer
this.LockManager = {
getLock: sinon.stub().yields(),
releaseLock: sinon.stub().yields(),
}
this.ProjectManager = SandboxedModule.require(modulePath, {
requires: {
'./RedisManager': (this.RedisManager = {}),
@ -26,6 +30,7 @@ describe('ProjectManager - flushAndDeleteProject', function () {
'./HistoryManager': (this.HistoryManager = {
flushProjectChanges: sinon.stub().callsArg(2),
}),
'./LockManager': this.LockManager,
'./Metrics': (this.Metrics = {
Timer: (Timer = (function () {
Timer = class Timer {

View file

@ -19,12 +19,17 @@ const SandboxedModule = require('sandboxed-module')
describe('ProjectManager - flushProject', function () {
beforeEach(function () {
let Timer
this.LockManager = {
getLock: sinon.stub().yields(),
releaseLock: sinon.stub().yields(),
}
this.ProjectManager = SandboxedModule.require(modulePath, {
requires: {
'./RedisManager': (this.RedisManager = {}),
'./ProjectHistoryRedisManager': (this.ProjectHistoryRedisManager = {}),
'./DocumentManager': (this.DocumentManager = {}),
'./HistoryManager': (this.HistoryManager = {}),
'./LockManager': this.LockManager,
'./Metrics': (this.Metrics = {
Timer: (Timer = (function () {
Timer = class Timer {

View file

@ -18,12 +18,17 @@ const Errors = require('../../../../app/js/Errors.js')
describe('ProjectManager - getProjectDocsAndFlushIfOld', function () {
beforeEach(function () {
let Timer
this.LockManager = {
getLock: sinon.stub().yields(),
releaseLock: sinon.stub().yields(),
}
this.ProjectManager = SandboxedModule.require(modulePath, {
requires: {
'./RedisManager': (this.RedisManager = {}),
'./ProjectHistoryRedisManager': (this.ProjectHistoryRedisManager = {}),
'./DocumentManager': (this.DocumentManager = {}),
'./HistoryManager': (this.HistoryManager = {}),
'./LockManager': this.LockManager,
'./Metrics': (this.Metrics = {
Timer: (Timer = (function () {
Timer = class Timer {

View file

@ -17,6 +17,10 @@ describe('ProjectManager', function () {
flushProjectChangesAsync: sinon.stub(),
shouldFlushHistoryOps: sinon.stub().returns(false),
}
this.LockManager = {
getLock: sinon.stub().yields(),
releaseLock: sinon.stub().yields(),
}
this.Metrics = {
Timer: class Timer {},
}
@ -28,6 +32,7 @@ describe('ProjectManager', function () {
'./ProjectHistoryRedisManager': this.ProjectHistoryRedisManager,
'./DocumentManager': this.DocumentManager,
'./HistoryManager': this.HistoryManager,
'./LockManager': this.LockManager,
'./Metrics': this.Metrics,
},
})

View file

@ -258,6 +258,9 @@ export function resyncProject(req, res, next) {
if (req.body.origin) {
options.origin = req.body.origin
}
if (req.body.historyRangesMigration) {
options.historyRangesMigration = req.body.historyRangesMigration
}
if (req.query.force || req.body.force) {
// this will delete the queue and clear the sync state
// use if the project is completely broken

View file

@ -79,6 +79,9 @@ export function initialize(app) {
origin: Joi.object({
kind: Joi.string().required(),
}),
historyRangesMigration: Joi.string()
.optional()
.valid('forwards', 'backwards'),
},
}),
HttpController.resyncProject

View file

@ -103,7 +103,11 @@ async function _startResyncWithoutLock(projectId, options) {
syncState.setOrigin(options.origin || { kind: 'history-resync' })
syncState.startProjectStructureSync()
await WebApiManager.promises.requestResync(projectId)
const webOpts = {}
if (options.historyRangesMigration) {
webOpts.historyRangesMigration = options.historyRangesMigration
}
await WebApiManager.promises.requestResync(projectId, webOpts)
await setResyncState(projectId, syncState)
}

View file

@ -33,8 +33,12 @@ async function getHistoryId(projectId) {
}
}
async function requestResync(projectId) {
async function requestResync(projectId, opts = {}) {
try {
const body = {}
if (opts.historyRangesMigration) {
body.historyRangesMigration = opts.historyRangesMigration
}
await fetchNothing(
`${Settings.apis.web.url}/project/${projectId}/history/resync`,
{
@ -44,6 +48,7 @@ async function requestResync(projectId) {
user: Settings.apis.web.user,
password: Settings.apis.web.pass,
},
json: body,
}
)
} catch (err) {

View file

@ -83,6 +83,10 @@ function getAllDeletedDocs(projectId, callback) {
})
}
/**
* @param {string} projectId
* @param {Callback} callback
*/
function getAllRanges(projectId, callback) {
const url = `${settings.apis.docstore.url}/project/${projectId}/ranges`
request.get(

View file

@ -9,46 +9,6 @@ const { promisify } = require('util')
const { promisifyMultiResult } = require('@overleaf/promise-utils')
const ProjectGetter = require('../Project/ProjectGetter')
module.exports = {
flushProjectToMongo,
flushMultipleProjectsToMongo,
flushProjectToMongoAndDelete,
flushDocToMongo,
deleteDoc,
getDocument,
setDocument,
getProjectDocsIfMatch,
clearProjectState,
acceptChanges,
resolveThread,
reopenThread,
deleteThread,
resyncProjectHistory,
updateProjectStructure,
promises: {
flushProjectToMongo: promisify(flushProjectToMongo),
flushMultipleProjectsToMongo: promisify(flushMultipleProjectsToMongo),
flushProjectToMongoAndDelete: promisify(flushProjectToMongoAndDelete),
flushDocToMongo: promisify(flushDocToMongo),
deleteDoc: promisify(deleteDoc),
getDocument: promisifyMultiResult(getDocument, [
'lines',
'version',
'ranges',
'ops',
]),
setDocument: promisify(setDocument),
getProjectDocsIfMatch: promisify(getProjectDocsIfMatch),
clearProjectState: promisify(clearProjectState),
acceptChanges: promisify(acceptChanges),
resolveThread: promisify(resolveThread),
reopenThread: promisify(reopenThread),
deleteThread: promisify(deleteThread),
resyncProjectHistory: promisify(resyncProjectHistory),
updateProjectStructure: promisify(updateProjectStructure),
},
}
function flushProjectToMongo(projectId, callback) {
_makeRequest(
{
@ -267,12 +227,17 @@ function resyncProjectHistory(
projectHistoryId,
docs,
files,
opts,
callback
) {
const body = { docs, files, projectHistoryId }
if (opts.historyRangesMigration) {
body.historyRangesMigration = opts.historyRangesMigration
}
_makeRequest(
{
path: `/project/${projectId}/history/resync`,
json: { docs, files, projectHistoryId },
json: body,
method: 'POST',
timeout: 6 * 60 * 1000, // allow 6 minutes for resync
},
@ -479,3 +444,43 @@ function _getUpdates(
return { deletes, adds, renames }
}
module.exports = {
flushProjectToMongo,
flushMultipleProjectsToMongo,
flushProjectToMongoAndDelete,
flushDocToMongo,
deleteDoc,
getDocument,
setDocument,
getProjectDocsIfMatch,
clearProjectState,
acceptChanges,
resolveThread,
reopenThread,
deleteThread,
resyncProjectHistory,
updateProjectStructure,
promises: {
flushProjectToMongo: promisify(flushProjectToMongo),
flushMultipleProjectsToMongo: promisify(flushMultipleProjectsToMongo),
flushProjectToMongoAndDelete: promisify(flushProjectToMongoAndDelete),
flushDocToMongo: promisify(flushDocToMongo),
deleteDoc: promisify(deleteDoc),
getDocument: promisifyMultiResult(getDocument, [
'lines',
'version',
'ranges',
'ops',
]),
setDocument: promisify(setDocument),
getProjectDocsIfMatch: promisify(getProjectDocsIfMatch),
clearProjectState: promisify(clearProjectState),
acceptChanges: promisify(acceptChanges),
resolveThread: promisify(resolveThread),
reopenThread: promisify(reopenThread),
deleteThread: promisify(deleteThread),
resyncProjectHistory: promisify(resyncProjectHistory),
updateProjectStructure: promisify(updateProjectStructure),
},
}

View file

@ -68,15 +68,24 @@ module.exports = HistoryController = {
// increase timeout to 6 minutes
res.setTimeout(6 * 60 * 1000)
const projectId = req.params.Project_id
ProjectEntityUpdateHandler.resyncProjectHistory(projectId, function (err) {
if (err instanceof Errors.ProjectHistoryDisabledError) {
return res.sendStatus(404)
const opts = {}
const historyRangesMigration = req.body.historyRangesMigration
if (historyRangesMigration) {
opts.historyRangesMigration = historyRangesMigration
}
ProjectEntityUpdateHandler.resyncProjectHistory(
projectId,
opts,
function (err) {
if (err instanceof Errors.ProjectHistoryDisabledError) {
return res.sendStatus(404)
}
if (err) {
return next(err)
}
res.sendStatus(204)
}
if (err) {
return next(err)
}
res.sendStatus(204)
})
)
},
restoreFileFromV2(req, res, next) {

View file

@ -51,6 +51,9 @@ async function resyncProject(projectId, options = {}) {
if (options.origin) {
body.origin = options.origin
}
if (options.historyRangesMigration) {
body.historyRangesMigration = options.historyRangesMigration
}
try {
await fetchNothing(
`${settings.apis.project_history.url}/project/${projectId}/resync`,

View file

@ -0,0 +1,22 @@
// @ts-check
const { callbackify } = require('util')
const HistoryManager = require('../History/HistoryManager')

/**
 * Migrate a single project's history ranges support.
 *
 * Flushes any buffered history updates for the project first, then requests a
 * full history resync with the `historyRangesMigration` option set, which
 * tells the history system to rewrite the project in the requested direction.
 *
 * @param {string} projectId
 * @param {"forwards" | "backwards"} direction - defaults to 'forwards'
 *   (i.e. enable history ranges support); 'backwards' reverts it
 * @returns {Promise<void>}
 */
async function migrateProject(projectId, direction = 'forwards') {
  // Flush before resyncing so no pending updates are lost or reordered.
  await HistoryManager.promises.flushProject(projectId)
  await HistoryManager.promises.resyncProject(projectId, {
    historyRangesMigration: direction,
  })
}

// Export both a callback-style and a promise-based API, matching the
// module convention used elsewhere in this codebase.
module.exports = {
  migrateProject: callbackify(migrateProject),
  promises: { migrateProject },
}

View file

@ -15,6 +15,7 @@ const { Project } = require('../../models/Project')
const ProjectEntityHandler = require('./ProjectEntityHandler')
const ProjectGetter = require('./ProjectGetter')
const ProjectLocator = require('./ProjectLocator')
const ProjectOptionsHandler = require('./ProjectOptionsHandler')
const ProjectUpdateHandler = require('./ProjectUpdateHandler')
const ProjectEntityMongoUpdateHandler = require('./ProjectEntityMongoUpdateHandler')
const SafePath = require('./SafePath')
@ -154,6 +155,8 @@ function getDocContext(projectId, docId, callback) {
}
const ProjectEntityUpdateHandler = {
LOCK_NAMESPACE,
updateDocLines(
projectId,
docId,
@ -1394,7 +1397,7 @@ const ProjectEntityUpdateHandler = {
// This doesn't directly update project structure but we need to take the lock
// to prevent anything else being queued before the resync update
resyncProjectHistory: wrapWithLock(
(projectId, callback) =>
(projectId, opts, callback) =>
ProjectGetter.getProject(
projectId,
{ rootFolder: true, overleaf: true },
@ -1449,7 +1452,21 @@ const ProjectEntityUpdateHandler = {
projectHistoryId,
docs,
files,
callback
opts,
err => {
if (err) {
return callback(err)
}
if (opts.historyRangesMigration) {
ProjectOptionsHandler.setHistoryRangesSupport(
projectId,
opts.historyRangesMigration === 'forwards',
callback
)
} else {
callback()
}
}
)
}
)

View file

@ -46,7 +46,10 @@ const ProjectHistoryHandler = {
await ProjectHistoryHandler.setHistoryId(projectId, historyId)
await ProjectEntityUpdateHandler.promises.resyncProjectHistory(projectId)
await ProjectEntityUpdateHandler.promises.resyncProjectHistory(
projectId,
{}
)
await HistoryManager.promises.flushProject(projectId)
},

View file

@ -64,9 +64,11 @@ const ProjectOptionsHandler = {
return Project.updateOne(conditions, update, {})
},
async enableHistoryRangesSupport(projectId) {
async setHistoryRangesSupport(projectId, enabled) {
const conditions = { _id: new ObjectId(projectId) }
const update = { $set: { 'overleaf.history.rangesSupportEnabled': true } }
const update = {
$set: { 'overleaf.history.rangesSupportEnabled': enabled },
}
// NOTE: Updating the Mongoose model with the same query doesn't work. Maybe
// because rangesSupportEnabled is not part of the schema?
return db.projects.updateOne(conditions, update)
@ -83,8 +85,8 @@ module.exports = {
unsetBrandVariationId: callbackify(
ProjectOptionsHandler.unsetBrandVariationId
),
enableHistoryRangesSupport: callbackify(
ProjectOptionsHandler.enableHistoryRangesSupport
setHistoryRangesSupport: callbackify(
ProjectOptionsHandler.setHistoryRangesSupport
),
promises: ProjectOptionsHandler,
}

View file

@ -1,7 +1,6 @@
const settings = require('@overleaf/settings')
const RedisWrapper = require('./RedisWrapper')
const rclient = RedisWrapper.client('lock')
const { callbackify, promisify } = require('util')
const RedisWebLocker = require('@overleaf/redis-wrapper/RedisWebLocker')
@ -34,16 +33,4 @@ LockManager.withTimeout = function (timeout) {
return createLockManager(lockManagerSettingsWithTimeout)
}
// need to bind the promisified function when it is part of a class
// see https://nodejs.org/dist/latest-v16.x/docs/api/util.html#utilpromisifyoriginal
const promisifiedRunWithLock = promisify(LockManager.runWithLock).bind(
LockManager
)
LockManager.promises = {
runWithLock(namespace, id, runner) {
const cbRunner = callbackify(runner)
return promisifiedRunWithLock(namespace, id, cbRunner)
},
}
module.exports = LockManager

View file

@ -0,0 +1,41 @@
const HistoryRangesSupportMigration = require('../../app/src/Features/History/HistoryRangesSupportMigration')
const { waitForDb } = require('../../app/src/infrastructure/mongodb')
const minimist = require('minimist')

/**
 * Script entry point: wait for the Mongo connection, then migrate the single
 * project given on the command line in the requested direction.
 */
async function main() {
  await waitForDb()
  const { projectId, direction } = parseArgs()
  await HistoryRangesSupportMigration.promises.migrateProject(
    projectId,
    direction
  )
}

// Print CLI usage help (shown when the arguments are invalid).
function usage() {
  console.log('Usage: migrate_ranges_support.js PROJECT_ID [--backwards]')
}

/**
 * Parse command-line arguments.
 *
 * Expects exactly one positional argument (the project id); exits with
 * status 1 and prints usage otherwise. The optional --backwards flag
 * selects the reverse migration direction.
 *
 * @returns {{ projectId: string, direction: "forwards" | "backwards" }}
 */
function parseArgs() {
  const args = minimist(process.argv.slice(2), {
    boolean: ['backwards'],
  })
  if (args._.length !== 1) {
    usage()
    process.exit(1)
  }
  return {
    direction: args.backwards ? 'backwards' : 'forwards',
    projectId: args._[0],
  }
}

// Run and exit explicitly so open DB connections don't keep the process alive.
main()
  .then(() => {
    process.exit(0)
  })
  .catch(err => {
    console.error(err)
    process.exit(1)
  })

View file

@ -233,7 +233,7 @@ describe('HistoryController', function () {
describe('for a project without project-history enabled', function () {
beforeEach(function () {
this.project_id = 'mock-project-id'
this.req = { params: { Project_id: this.project_id } }
this.req = { params: { Project_id: this.project_id }, body: {} }
this.res = { setTimeout: sinon.stub(), sendStatus: sinon.stub() }
this.next = sinon.stub()
@ -257,7 +257,7 @@ describe('HistoryController', function () {
describe('for a project with project-history enabled', function () {
beforeEach(function () {
this.project_id = 'mock-project-id'
this.req = { params: { Project_id: this.project_id } }
this.req = { params: { Project_id: this.project_id }, body: {} }
this.res = { setTimeout: sinon.stub(), sendStatus: sinon.stub() }
this.next = sinon.stub()

View file

@ -149,6 +149,9 @@ describe('ProjectEntityUpdateHandler', function () {
this.EditorRealTimeController = {
emitToRoom: sinon.stub(),
}
this.ProjectOptionsHandler = {
setHistoryRangesSupport: sinon.stub().yields(),
}
this.ProjectEntityUpdateHandler = SandboxedModule.require(MODULE_PATH, {
requires: {
'@overleaf/settings': { validRootDocExtensions: ['tex'] },
@ -167,6 +170,7 @@ describe('ProjectEntityUpdateHandler', function () {
'./ProjectEntityHandler': this.ProjectEntityHandler,
'./ProjectEntityMongoUpdateHandler':
this.ProjectEntityMongoUpdateHandler,
'./ProjectOptionsHandler': this.ProjectOptionsHandler,
'../ThirdPartyDataStore/TpdsUpdateSender': this.TpdsUpdateSender,
'../Editor/EditorRealTimeController': this.EditorRealTimeController,
'../../infrastructure/FileWriter': this.FileWriter,
@ -1962,6 +1966,7 @@ describe('ProjectEntityUpdateHandler', function () {
this.ProjectEntityUpdateHandler.resyncProjectHistory(
projectId,
{},
this.callback
)
})
@ -1987,6 +1992,7 @@ describe('ProjectEntityUpdateHandler', function () {
this.ProjectEntityUpdateHandler.resyncProjectHistory(
projectId,
{},
this.callback
)
})
@ -2025,6 +2031,7 @@ describe('ProjectEntityUpdateHandler', function () {
})
this.ProjectEntityUpdateHandler.resyncProjectHistory(
projectId,
{},
this.callback
)
})
@ -2113,7 +2120,11 @@ describe('ProjectEntityUpdateHandler', function () {
files: this.files,
folders: [],
})
this.ProjectEntityUpdateHandler.resyncProjectHistory(projectId, done)
this.ProjectEntityUpdateHandler.resyncProjectHistory(
projectId,
{},
done
)
})
it('renames the duplicate files', function () {
@ -2214,7 +2225,11 @@ describe('ProjectEntityUpdateHandler', function () {
files: this.files,
folders: [],
})
this.ProjectEntityUpdateHandler.resyncProjectHistory(projectId, done)
this.ProjectEntityUpdateHandler.resyncProjectHistory(
projectId,
{},
done
)
})
it('renames the files', function () {
@ -2301,7 +2316,11 @@ describe('ProjectEntityUpdateHandler', function () {
files,
folders,
})
this.ProjectEntityUpdateHandler.resyncProjectHistory(projectId, done)
this.ProjectEntityUpdateHandler.resyncProjectHistory(
projectId,
{},
done
)
})
it('renames the folder', function () {
@ -2352,7 +2371,11 @@ describe('ProjectEntityUpdateHandler', function () {
files,
folders,
})
this.ProjectEntityUpdateHandler.resyncProjectHistory(projectId, done)
this.ProjectEntityUpdateHandler.resyncProjectHistory(
projectId,
{},
done
)
})
it('renames the doc', function () {
@ -2385,6 +2408,7 @@ describe('ProjectEntityUpdateHandler', function () {
this.ProjectEntityHandler.getAllEntitiesFromProject.throws()
this.ProjectEntityUpdateHandler.resyncProjectHistory(
projectId,
{},
this.callback
)
})

View file

@ -191,7 +191,7 @@ describe('ProjectOptionsHandler', function () {
describe('setting the rangesSupportEnabled', function () {
it('should perform and update on mongo', async function () {
await this.handler.promises.enableHistoryRangesSupport(projectId)
await this.handler.promises.setHistoryRangesSupport(projectId, true)
sinon.assert.calledWith(
this.db.projects.updateOne,
{ _id: new ObjectId(projectId) },
@ -205,8 +205,8 @@ describe('ProjectOptionsHandler', function () {
})
it('should be rejected', async function () {
expect(this.handler.promises.enableHistoryRangesSupport(projectId)).to
.be.rejected
expect(this.handler.promises.setHistoryRangesSupport(projectId, true))
.to.be.rejected
})
})
})