Merge pull request #17690 from overleaf/em-promisify-project-history-redis-manager

Promisify ProjectHistoryRedisManager

GitOrigin-RevId: 11908c6cc102653b34e2014af230e3912cd8b432
Eric Mc Sween 2024-04-03 10:08:16 -04:00 committed by Copybot
parent caf715f539
commit 9d78fd0945
2 changed files with 67 additions and 92 deletions
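The commit flips the wrapping direction in this module: instead of callback-style methods with a generated `promises` namespace (via `promisifyAll`), the methods are now written async-first and a callback facade is generated with `callbackifyAll`. A minimal sketch of that pattern, using Node's `util.callbackify` as a stand-in for the `@overleaf/promise-utils` helper (the `queueOps` body is illustrative only, not the real Redis logic):

const { callbackify } = require('util')

// Stand-in for callbackifyAll from @overleaf/promise-utils. Assumption: the
// real helper behaves roughly like util.callbackify applied to every method.
function callbackifyAll(obj) {
  const wrapped = {}
  for (const [name, fn] of Object.entries(obj)) {
    wrapped[name] = callbackify(fn)
  }
  return wrapped
}

// Async-first implementation, mirroring the rewritten module below.
const Manager = {
  async queueOps(projectId, ...ops) {
    // Illustrative body: the real method pushes ops onto a Redis queue and
    // returns the resulting queue length.
    return ops.length
  },
}

// Callback API for existing callers, native promises under `promises`.
module.exports = {
  ...callbackifyAll(Manager),
  promises: Manager,
}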

app/js/ProjectHistoryRedisManager.js

@@ -1,5 +1,5 @@
const Settings = require('@overleaf/settings')
const { promisifyAll } = require('@overleaf/promise-utils')
const { callbackifyAll } = require('@overleaf/promise-utils')
const projectHistoryKeys = Settings.redis?.project_history?.key_schema
const rclient = require('@overleaf/redis-wrapper').createClient(
Settings.redis.project_history
@@ -7,12 +7,11 @@ const rclient = require('@overleaf/redis-wrapper').createClient(
const logger = require('@overleaf/logger')
const metrics = require('./Metrics')
const { docIsTooLarge } = require('./Limits')
const OError = require('@overleaf/o-error')
const ProjectHistoryRedisManager = {
queueOps(projectId, ...rest) {
async queueOps(projectId, ...ops) {
// Record metric for ops pushed onto queue
const callback = rest.pop()
const ops = rest
for (const op of ops) {
metrics.summary('redis.projectHistoryOps', op.length, { status: 'push' })
}
@@ -30,24 +29,18 @@
}),
Date.now()
)
multi.exec(function (error, result) {
if (error) {
return callback(error)
}
// return the number of entries pushed onto the project history queue
callback(null, result[0])
})
const result = await multi.exec()
return result[0]
},
queueRenameEntity(
async queueRenameEntity(
projectId,
projectHistoryId,
entityType,
entityId,
userId,
projectUpdate,
source,
callback
source
) {
projectUpdate = {
pathname: projectUpdate.pathname,
@@ -73,18 +66,17 @@
)
const jsonUpdate = JSON.stringify(projectUpdate)
ProjectHistoryRedisManager.queueOps(projectId, jsonUpdate, callback)
return await ProjectHistoryRedisManager.queueOps(projectId, jsonUpdate)
},
queueAddEntity(
async queueAddEntity(
projectId,
projectHistoryId,
entityType,
entityId,
userId,
projectUpdate,
source,
callback
source
) {
projectUpdate = {
pathname: projectUpdate.pathname,
@@ -111,16 +103,10 @@
)
const jsonUpdate = JSON.stringify(projectUpdate)
ProjectHistoryRedisManager.queueOps(projectId, jsonUpdate, callback)
return await ProjectHistoryRedisManager.queueOps(projectId, jsonUpdate)
},
queueResyncProjectStructure(
projectId,
projectHistoryId,
docs,
files,
callback
) {
async queueResyncProjectStructure(projectId, projectHistoryId, docs, files) {
logger.debug({ projectId, docs, files }, 'queue project structure resync')
const projectUpdate = {
resyncProjectStructure: { docs, files },
@@ -130,17 +116,16 @@
},
}
const jsonUpdate = JSON.stringify(projectUpdate)
ProjectHistoryRedisManager.queueOps(projectId, jsonUpdate, callback)
return await ProjectHistoryRedisManager.queueOps(projectId, jsonUpdate)
},
queueResyncDocContent(
async queueResyncDocContent(
projectId,
projectHistoryId,
docId,
lines,
version,
pathname,
callback
pathname
) {
logger.debug(
{ projectId, docId, lines, version, pathname },
@@ -163,15 +148,16 @@
// project update length as an upper bound
const sizeBound = jsonUpdate.length
if (docIsTooLarge(sizeBound, lines, Settings.max_doc_length)) {
const err = new Error(
'blocking resync doc content insert into project history queue: doc is too large'
throw new OError(
'blocking resync doc content insert into project history queue: doc is too large',
{ projectId, docId, docSize: sizeBound }
)
logger.error({ projectId, docId, err, docSize: sizeBound }, err.message)
return callback(err)
}
ProjectHistoryRedisManager.queueOps(projectId, jsonUpdate, callback)
return await ProjectHistoryRedisManager.queueOps(projectId, jsonUpdate)
},
}
module.exports = ProjectHistoryRedisManager
module.exports.promises = promisifyAll(ProjectHistoryRedisManager)
module.exports = {
...callbackifyAll(ProjectHistoryRedisManager),
promises: ProjectHistoryRedisManager,
}
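
With this export shape, existing call sites keep the callback signature while new code can await the promise surface directly. A hypothetical caller (ids and payload are placeholders, and a configured Redis connection is assumed):

const ProjectHistoryRedisManager = require('./ProjectHistoryRedisManager')

const projectId = 'project-id-123' // placeholder
const jsonUpdate = JSON.stringify({ pathname: '/main.tex' }) // placeholder op

// Callback style, served by the callbackified wrapper.
ProjectHistoryRedisManager.queueOps(projectId, jsonUpdate, (error, queueLength) => {
  if (error) throw error
  console.log('project history queue length:', queueLength)
})

// Promise style, using the async implementation directly.
async function demo() {
  const queueLength = await ProjectHistoryRedisManager.promises.queueOps(
    projectId,
    jsonUpdate
  )
  console.log('project history queue length:', queueLength)
}
demo().catch(console.error)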

ProjectHistoryRedisManager unit tests

@@ -1,4 +1,5 @@
const sinon = require('sinon')
const { expect } = require('chai')
const modulePath = '../../../../app/js/ProjectHistoryRedisManager.js'
const SandboxedModule = require('sandboxed-module')
const tk = require('timekeeper')
@@ -8,7 +9,6 @@ describe('ProjectHistoryRedisManager', function () {
this.project_id = 'project-id-123'
this.projectHistoryId = 'history-id-123'
this.user_id = 'user-id-123'
this.callback = sinon.stub()
this.rclient = {}
this.source = 'editor'
tk.freeze(new Date())
@@ -48,17 +48,15 @@
})
describe('queueOps', function () {
beforeEach(function () {
beforeEach(async function () {
this.ops = ['mock-op-1', 'mock-op-2']
this.multi = { exec: sinon.stub() }
this.multi = { exec: sinon.stub().resolves([1]) }
this.multi.rpush = sinon.stub()
this.multi.setnx = sinon.stub()
this.rclient.multi = () => this.multi
// @rclient = multi: () => @multi
this.ProjectHistoryRedisManager.queueOps(
await this.ProjectHistoryRedisManager.promises.queueOps(
this.project_id,
...this.ops,
this.callback
...this.ops
)
})
@@ -83,7 +81,7 @@
})
describe('queueRenameEntity', function () {
beforeEach(function () {
beforeEach(async function () {
this.file_id = 1234
this.rawUpdate = {
@@ -92,16 +90,17 @@
version: (this.version = 2),
}
this.ProjectHistoryRedisManager.queueOps = sinon.stub()
this.ProjectHistoryRedisManager.queueRenameEntity(
this.ProjectHistoryRedisManager.promises.queueOps = sinon
.stub()
.resolves()
await this.ProjectHistoryRedisManager.promises.queueRenameEntity(
this.project_id,
this.projectHistoryId,
'file',
this.file_id,
this.user_id,
this.rawUpdate,
this.source,
this.callback
this.source
)
})
@@ -119,19 +118,14 @@
file: this.file_id,
}
this.ProjectHistoryRedisManager.queueOps
.calledWithExactly(
this.project_id,
JSON.stringify(update),
this.callback
)
this.ProjectHistoryRedisManager.promises.queueOps
.calledWithExactly(this.project_id, JSON.stringify(update))
.should.equal(true)
})
})
describe('queueAddEntity', function () {
beforeEach(function () {
this.rclient.rpush = sinon.stub().yields()
beforeEach(async function () {
this.doc_id = 1234
this.rawUpdate = {
@@ -141,16 +135,17 @@
url: (this.url = 'filestore.example.com'),
}
this.ProjectHistoryRedisManager.queueOps = sinon.stub()
this.ProjectHistoryRedisManager.queueAddEntity(
this.ProjectHistoryRedisManager.promises.queueOps = sinon
.stub()
.resolves()
await this.ProjectHistoryRedisManager.promises.queueAddEntity(
this.project_id,
this.projectHistoryId,
'doc',
this.doc_id,
this.user_id,
this.rawUpdate,
this.source,
this.callback
this.source
)
})
@@ -169,12 +164,8 @@
doc: this.doc_id,
}
this.ProjectHistoryRedisManager.queueOps
.calledWithExactly(
this.project_id,
JSON.stringify(update),
this.callback
)
this.ProjectHistoryRedisManager.promises.queueOps
.calledWithExactly(this.project_id, JSON.stringify(update))
.should.equal(true)
})
@@ -202,19 +193,20 @@
},
}
this.ProjectHistoryRedisManager.queueOps = sinon.stub()
this.ProjectHistoryRedisManager.promises.queueOps = sinon
.stub()
.resolves()
})
describe('with a good doc', function () {
beforeEach(function () {
this.ProjectHistoryRedisManager.queueResyncDocContent(
beforeEach(async function () {
await this.ProjectHistoryRedisManager.promises.queueResyncDocContent(
this.project_id,
this.projectHistoryId,
this.doc_id,
this.lines,
this.version,
this.pathname,
this.callback
this.pathname
)
})
it('should check if the doc is too large', function () {
@@ -228,34 +220,31 @@
})
it('should queue an update', function () {
this.ProjectHistoryRedisManager.queueOps
.calledWithExactly(
this.project_id,
JSON.stringify(this.update),
this.callback
)
this.ProjectHistoryRedisManager.promises.queueOps
.calledWithExactly(this.project_id, JSON.stringify(this.update))
.should.equal(true)
})
})
describe('with a doc that is too large', function () {
beforeEach(function () {
beforeEach(async function () {
this.Limits.docIsTooLarge.returns(true)
this.ProjectHistoryRedisManager.queueResyncDocContent(
this.project_id,
this.projectHistoryId,
this.doc_id,
this.lines,
this.version,
this.pathname,
this.callback
)
await expect(
this.ProjectHistoryRedisManager.promises.queueResyncDocContent(
this.project_id,
this.projectHistoryId,
this.doc_id,
this.lines,
this.version,
this.pathname
)
).to.be.rejected
})
it('should not queue an update if the doc is too large', function () {
this.ProjectHistoryRedisManager.queueOps.called.should.equal(false)
this.callback
.calledWith(sinon.match.instanceOf(Error))
.should.equal(true)
this.ProjectHistoryRedisManager.promises.queueOps.called.should.equal(
false
)
})
})
})
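
The rewritten "doc that is too large" test relies on chai's promise assertions: instead of checking that a callback received an Error, it awaits the rejection with `expect(...).to.be.rejected`. That assertion comes from the chai-as-promised plugin, which is assumed to be registered in the shared test setup; a self-contained sketch of the same pattern:

const chai = require('chai')
const chaiAsPromised = require('chai-as-promised')
chai.use(chaiAsPromised) // in the real suite this registration lives in shared setup
const { expect } = chai

describe('rejected promise assertions', function () {
  it('passes when the promise rejects', async function () {
    const failing = Promise.reject(new Error('doc is too large'))
    await expect(failing).to.be.rejected
  })

  it('can also assert on the error message', async function () {
    const failing = Promise.reject(new Error('doc is too large'))
    await expect(failing).to.be.rejectedWith('doc is too large')
  })
})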