Merge pull request #12205 from overleaf/em-camel-case-docstore

Camel case variables in docstore

GitOrigin-RevId: e6c2015cfb63ce125fd32ba8b4f904712b2bb9aa
This commit is contained in:
Eric Mc Sween 2023-03-16 07:58:12 -04:00 committed by Copybot
parent df3c7e48ab
commit 49f1312b27
8 changed files with 157 additions and 188 deletions

View file

@ -48,10 +48,7 @@ async function archiveDoc(projectId, docId) {
return
}
logger.debug(
{ project_id: projectId, doc_id: doc._id },
'sending doc to persistor'
)
logger.debug({ projectId, docId: doc._id }, 'sending doc to persistor')
const key = `${projectId}/${doc._id}`
if (doc.lines == null) {
@ -66,7 +63,7 @@ async function archiveDoc(projectId, docId) {
rangesSize > Settings.max_doc_length
) {
logger.warn(
{ project_id: projectId, doc_id: doc._id, linesSize, rangesSize },
{ projectId, docId: doc._id, linesSize, rangesSize },
'large doc found when archiving project'
)
}

View file

@ -1,5 +1,4 @@
/* eslint-disable
camelcase,
no-dupe-keys,
no-undef,
*/
@ -25,7 +24,7 @@ module.exports = DocManager = {
// collection (which is all that this collection contains). In future, we should
// migrate this version property to be part of the docs collection, to guarantee
// consistency between lines and version when writing/reading, and for a simpler schema.
_getDoc(project_id, doc_id, filter, callback) {
_getDoc(projectId, docId, filter, callback) {
if (filter == null) {
filter = {}
}
@ -36,45 +35,37 @@ module.exports = DocManager = {
return callback(new Error('must include inS3 when getting doc'))
}
return MongoManager.findDoc(
project_id,
doc_id,
filter,
function (err, doc) {
if (err != null) {
return callback(err)
} else if (doc == null) {
return callback(
new Errors.NotFoundError(
`No such doc: ${doc_id} in project ${project_id}`
)
return MongoManager.findDoc(projectId, docId, filter, function (err, doc) {
if (err != null) {
return callback(err)
} else if (doc == null) {
return callback(
new Errors.NotFoundError(
`No such doc: ${docId} in project ${projectId}`
)
} else if (doc != null ? doc.inS3 : undefined) {
return DocArchive.unarchiveDoc(project_id, doc_id, function (err) {
if (err != null) {
logger.err({ err, project_id, doc_id }, 'error unarchiving doc')
return callback(err)
)
} else if (doc != null ? doc.inS3 : undefined) {
return DocArchive.unarchiveDoc(projectId, docId, function (err) {
if (err != null) {
logger.err({ err, projectId, docId }, 'error unarchiving doc')
return callback(err)
}
return DocManager._getDoc(projectId, docId, filter, callback)
})
} else {
if (filter.version) {
return MongoManager.getDocVersion(docId, function (error, version) {
if (error != null) {
return callback(error)
}
return DocManager._getDoc(project_id, doc_id, filter, callback)
doc.version = version
return callback(err, doc)
})
} else {
if (filter.version) {
return MongoManager.getDocVersion(
doc_id,
function (error, version) {
if (error != null) {
return callback(error)
}
doc.version = version
return callback(err, doc)
}
)
} else {
return callback(err, doc)
}
return callback(err, doc)
}
}
)
})
},
isDocDeleted(projectId, docId, callback) {
@ -99,13 +90,13 @@ module.exports = DocManager = {
)
},
getFullDoc(project_id, doc_id, callback) {
getFullDoc(projectId, docId, callback) {
if (callback == null) {
callback = function () {}
}
return DocManager._getDoc(
project_id,
doc_id,
projectId,
docId,
{
lines: true,
rev: true,
@ -124,10 +115,10 @@ module.exports = DocManager = {
},
// returns the doc without any version information
_peekRawDoc(project_id, doc_id, callback) {
_peekRawDoc(projectId, docId, callback) {
MongoManager.findDoc(
project_id,
doc_id,
projectId,
docId,
{
lines: true,
rev: true,
@ -141,7 +132,7 @@ module.exports = DocManager = {
if (doc == null) {
return callback(
new Errors.NotFoundError(
`No such doc: ${doc_id} in project ${project_id}`
`No such doc: ${docId} in project ${projectId}`
)
)
}
@ -149,10 +140,10 @@ module.exports = DocManager = {
return callback(null, doc)
}
// skip the unarchiving to mongo when getting a doc
DocArchive.getDoc(project_id, doc_id, function (err, archivedDoc) {
DocArchive.getDoc(projectId, docId, function (err, archivedDoc) {
if (err != null) {
logger.err(
{ err, project_id, doc_id },
{ err, projectId, docId },
'error getting doc from archive'
)
return callback(err)
@ -166,8 +157,8 @@ module.exports = DocManager = {
// get the doc from mongo if possible, or from the persistent store otherwise,
// without unarchiving it (avoids unnecessary writes to mongo)
peekDoc(project_id, doc_id, callback) {
DocManager._peekRawDoc(project_id, doc_id, (err, doc) => {
peekDoc(projectId, docId, callback) {
DocManager._peekRawDoc(projectId, docId, (err, doc) => {
if (err) {
return callback(err)
}
@ -187,13 +178,13 @@ module.exports = DocManager = {
})
},
getDocLines(project_id, doc_id, callback) {
getDocLines(projectId, docId, callback) {
if (callback == null) {
callback = function () {}
}
return DocManager._getDoc(
project_id,
doc_id,
projectId,
docId,
{ lines: true, inS3: true },
function (err, doc) {
if (err != null) {
@ -204,20 +195,20 @@ module.exports = DocManager = {
)
},
getAllDeletedDocs(project_id, filter, callback) {
MongoManager.getProjectsDeletedDocs(project_id, filter, callback)
getAllDeletedDocs(projectId, filter, callback) {
MongoManager.getProjectsDeletedDocs(projectId, filter, callback)
},
getAllNonDeletedDocs(project_id, filter, callback) {
getAllNonDeletedDocs(projectId, filter, callback) {
if (callback == null) {
callback = function () {}
}
return DocArchive.unArchiveAllDocs(project_id, function (error) {
return DocArchive.unArchiveAllDocs(projectId, function (error) {
if (error != null) {
return callback(error)
}
return MongoManager.getProjectsDocs(
project_id,
projectId,
{ include_deleted: false },
filter,
function (error, docs) {
@ -225,7 +216,7 @@ module.exports = DocManager = {
return callback(error)
} else if (docs == null) {
return callback(
new Errors.NotFoundError(`No docs for project ${project_id}`)
new Errors.NotFoundError(`No docs for project ${projectId}`)
)
} else {
return callback(null, docs)
@ -235,7 +226,7 @@ module.exports = DocManager = {
})
},
updateDoc(project_id, doc_id, lines, version, ranges, callback) {
updateDoc(projectId, docId, lines, version, ranges, callback) {
if (callback == null) {
callback = function () {}
}
@ -244,8 +235,8 @@ module.exports = DocManager = {
}
return DocManager._getDoc(
project_id,
doc_id,
projectId,
docId,
{
version: true,
rev: true,
@ -258,7 +249,7 @@ module.exports = DocManager = {
let updateLines, updateRanges, updateVersion
if (err != null && !(err instanceof Errors.NotFoundError)) {
logger.err(
{ project_id, doc_id, err },
{ projectId, docId, err },
'error getting document for update'
)
return callback(err)
@ -289,22 +280,19 @@ module.exports = DocManager = {
if (updateRanges) {
update.ranges = ranges
}
logger.debug(
{ project_id, doc_id },
'updating doc lines and ranges'
)
logger.debug({ projectId, docId }, 'updating doc lines and ranges')
modified = true
rev += 1 // rev will be incremented in mongo by MongoManager.upsertIntoDocCollection
return MongoManager.upsertIntoDocCollection(
project_id,
doc_id,
projectId,
docId,
update,
cb
)
} else {
logger.debug(
{ project_id, doc_id },
{ projectId, docId },
'doc lines have not changed - not updating'
)
return cb()
@ -315,18 +303,18 @@ module.exports = DocManager = {
if (updateVersion) {
logger.debug(
{
project_id,
doc_id,
projectId,
docId,
oldVersion: doc != null ? doc.version : undefined,
newVersion: version,
},
'updating doc version'
)
modified = true
return MongoManager.setDocVersion(doc_id, version, cb)
return MongoManager.setDocVersion(docId, version, cb)
} else {
logger.debug(
{ project_id, doc_id, version },
{ projectId, docId, version },
'doc version has not changed - not updating'
)
return cb()
@ -348,33 +336,33 @@ module.exports = DocManager = {
)
},
patchDoc(project_id, doc_id, meta, callback) {
patchDoc(projectId, docId, meta, callback) {
const projection = { _id: 1, deleted: true }
MongoManager.findDoc(project_id, doc_id, projection, (error, doc) => {
MongoManager.findDoc(projectId, docId, projection, (error, doc) => {
if (error != null) {
return callback(error)
}
if (!doc) {
return callback(
new Errors.NotFoundError(
`No such project/doc to delete: ${project_id}/${doc_id}`
`No such project/doc to delete: ${projectId}/${docId}`
)
)
}
if (meta.deleted && Settings.docstore.archiveOnSoftDelete) {
// The user will not read this doc anytime soon. Flush it out of mongo.
DocArchive.archiveDoc(project_id, doc_id, err => {
DocArchive.archiveDoc(projectId, docId, err => {
if (err) {
logger.warn(
{ project_id, doc_id, err },
{ projectId, docId, err },
'archiving a single doc in the background failed'
)
}
})
}
MongoManager.patchDoc(project_id, doc_id, meta, callback)
MongoManager.patchDoc(projectId, docId, meta, callback)
})
},
}

View file

@ -1,6 +1,3 @@
/* eslint-disable
camelcase,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
@ -20,9 +17,9 @@ const logger = require('@overleaf/logger')
module.exports = {
check(callback) {
const doc_id = ObjectId()
const project_id = ObjectId(settings.docstore.healthCheck.project_id)
const url = `http://localhost:${port}/project/${project_id}/doc/${doc_id}`
const docId = ObjectId()
const projectId = ObjectId(settings.docstore.healthCheck.project_id)
const url = `http://localhost:${port}/project/${projectId}/doc/${docId}`
const lines = [
'smoke test - delete me',
`${crypto.randomBytes(32).toString('hex')}`,
@ -31,7 +28,7 @@ module.exports = {
url,
timeout: 3000,
})
logger.debug({ lines, url, doc_id, project_id }, 'running health check')
logger.debug({ lines, url, docId, projectId }, 'running health check')
const jobs = [
function (cb) {
const opts = getOpts()
@ -51,7 +48,7 @@ module.exports = {
return cb(new Error(`status code not 200, its ${res.statusCode}`))
} else if (
_.isEqual(body != null ? body.lines : undefined, lines) &&
(body != null ? body._id : undefined) === doc_id.toString()
(body != null ? body._id : undefined) === docId.toString()
) {
return cb()
} else {
@ -63,8 +60,8 @@ module.exports = {
}
})
},
cb => db.docs.deleteOne({ _id: doc_id, project_id }, cb),
cb => db.docOps.deleteOne({ doc_id }, cb),
cb => db.docs.deleteOne({ _id: docId, project_id: projectId }, cb),
cb => db.docOps.deleteOne({ doc_id: docId }, cb),
]
return async.series(jobs, callback)
},

View file

@ -1,5 +1,4 @@
/* eslint-disable
camelcase,
no-return-assign,
*/
// TODO: This file was created by bulk-decaffeinate.
@ -16,19 +15,19 @@ const _ = require('lodash')
const { ObjectId } = require('./mongodb')
module.exports = RangeManager = {
shouldUpdateRanges(doc_ranges, incoming_ranges) {
if (incoming_ranges == null) {
shouldUpdateRanges(docRanges, incomingRanges) {
if (incomingRanges == null) {
throw new Error('expected incoming_ranges')
}
// If the ranges are empty, we don't store them in the DB, so set
// doc_ranges to an empty object as default, since this is what the
// incoming_ranges will be for an empty range set.
if (doc_ranges == null) {
doc_ranges = {}
if (docRanges == null) {
docRanges = {}
}
return !_.isEqual(doc_ranges, incoming_ranges)
return !_.isEqual(docRanges, incomingRanges)
},
jsonRangesToMongo(ranges) {

View file

@ -1,5 +1,4 @@
/* eslint-disable
camelcase,
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
@ -113,10 +112,10 @@ describe('Archiving', function () {
return DocstoreClient.getS3Doc(
this.project_id,
doc._id,
(error, s3_doc) => {
(error, s3Doc) => {
if (error) return callback(error)
s3_doc.lines.should.deep.equal(doc.lines)
s3_doc.ranges.should.deep.equal(doc.ranges)
s3Doc.lines.should.deep.equal(doc.lines)
s3Doc.ranges.should.deep.equal(doc.ranges)
callback()
}
)
@ -130,8 +129,8 @@ describe('Archiving', function () {
before(function (done) {
return DocstoreClient.getAllDocs(
this.project_id,
(error, res, fetched_docs) => {
this.fetched_docs = fetched_docs
(error, res, fetchedDocs) => {
this.fetched_docs = fetchedDocs
if (error != null) {
throw error
}
@ -231,12 +230,12 @@ describe('Archiving', function () {
return DocstoreClient.getS3Doc(
this.project_id,
this.doc._id,
(error, s3_doc) => {
(error, s3Doc) => {
if (error != null) {
throw error
}
s3_doc.lines.should.deep.equal(this.doc.lines)
s3_doc.ranges.should.deep.equal(this.doc.ranges)
s3Doc.lines.should.deep.equal(this.doc.lines)
s3Doc.ranges.should.deep.equal(this.doc.ranges)
return done()
}
)
@ -246,8 +245,8 @@ describe('Archiving', function () {
beforeEach(function (done) {
return DocstoreClient.getAllDocs(
this.project_id,
(error, res, fetched_docs) => {
this.fetched_docs = fetched_docs
(error, res, fetchedDocs) => {
this.fetched_docs = fetchedDocs
if (error != null) {
throw error
}
@ -291,8 +290,8 @@ describe('Archiving', function () {
beforeEach(function (done) {
DocstoreClient.getAllDocs(
this.project_id,
(error, res, fetched_docs) => {
this.fetched_docs = fetched_docs
(error, res, fetchedDocs) => {
this.fetched_docs = fetchedDocs
if (error) {
return done(error)
}
@ -375,18 +374,14 @@ describe('Archiving', function () {
})
it('should set the doc in s3 correctly', function (done) {
DocstoreClient.getS3Doc(
this.project_id,
this.doc._id,
(error, s3_doc) => {
if (error) {
return done(error)
}
s3_doc.lines.should.deep.equal(this.doc.lines)
s3_doc.ranges.should.deep.equal(this.doc.ranges)
done()
DocstoreClient.getS3Doc(this.project_id, this.doc._id, (error, s3Doc) => {
if (error) {
return done(error)
}
)
s3Doc.lines.should.deep.equal(this.doc.lines)
s3Doc.ranges.should.deep.equal(this.doc.ranges)
done()
})
})
})
@ -395,12 +390,12 @@ describe('Archiving', function () {
this.project_id = ObjectId()
this.timeout(1000 * 30)
const quarterMegInBytes = 250000
const big_line = require('crypto')
const bigLine = require('crypto')
.randomBytes(quarterMegInBytes)
.toString('hex')
this.doc = {
_id: ObjectId(),
lines: [big_line, big_line, big_line, big_line],
lines: [bigLine, bigLine, bigLine, bigLine],
ranges: {},
version: 2,
}
@ -446,12 +441,12 @@ describe('Archiving', function () {
return DocstoreClient.getS3Doc(
this.project_id,
this.doc._id,
(error, s3_doc) => {
(error, s3Doc) => {
if (error != null) {
throw error
}
s3_doc.lines.should.deep.equal(this.doc.lines)
s3_doc.ranges.should.deep.equal(this.doc.ranges)
s3Doc.lines.should.deep.equal(this.doc.lines)
s3Doc.ranges.should.deep.equal(this.doc.ranges)
return done()
}
)
@ -461,8 +456,8 @@ describe('Archiving', function () {
before(function (done) {
return DocstoreClient.getAllDocs(
this.project_id,
(error, res, fetched_docs) => {
this.fetched_docs = fetched_docs
(error, res, fetchedDocs) => {
this.fetched_docs = fetchedDocs
if (error != null) {
throw error
}
@ -920,12 +915,12 @@ describe('Archiving', function () {
return DocstoreClient.getS3Doc(
this.project_id,
this.doc._id,
(error, s3_doc) => {
(error, s3Doc) => {
if (error != null) {
throw error
}
s3_doc.lines.should.deep.equal(this.doc.lines)
s3_doc.ranges.should.deep.equal(this.doc.ranges)
s3Doc.lines.should.deep.equal(this.doc.lines)
s3Doc.ranges.should.deep.equal(this.doc.ranges)
return done()
}
)
@ -935,8 +930,8 @@ describe('Archiving', function () {
before(function (done) {
return DocstoreClient.getAllDocs(
this.project_id,
(error, res, fetched_docs) => {
this.fetched_docs = fetched_docs
(error, res, fetchedDocs) => {
this.fetched_docs = fetchedDocs
if (error != null) {
throw error
}
@ -1039,13 +1034,13 @@ describe('Archiving', function () {
return DocstoreClient.getS3Doc(
this.project_id,
this.doc._id,
(error, s3_doc) => {
(error, s3Doc) => {
if (error != null) {
throw error
}
s3_doc.lines.should.deep.equal(this.doc.lines)
s3Doc.lines.should.deep.equal(this.doc.lines)
const ranges = JSON.parse(JSON.stringify(this.doc.ranges)) // ObjectId -> String
s3_doc.ranges.should.deep.equal(ranges)
s3Doc.ranges.should.deep.equal(ranges)
return done()
}
)
@ -1055,8 +1050,8 @@ describe('Archiving', function () {
before(function (done) {
return DocstoreClient.getAllDocs(
this.project_id,
(error, res, fetched_docs) => {
this.fetched_docs = fetched_docs
(error, res, fetchedDocs) => {
this.fetched_docs = fetchedDocs
if (error != null) {
throw error
}
@ -1136,12 +1131,12 @@ describe('Archiving', function () {
return DocstoreClient.getS3Doc(
this.project_id,
this.doc._id,
(error, s3_doc) => {
(error, s3Doc) => {
if (error != null) {
throw error
}
s3_doc.lines.should.deep.equal(this.doc.lines)
s3_doc.ranges.should.deep.equal(this.doc.ranges)
s3Doc.lines.should.deep.equal(this.doc.lines)
s3Doc.ranges.should.deep.equal(this.doc.ranges)
return done()
}
)
@ -1151,8 +1146,8 @@ describe('Archiving', function () {
before(function (done) {
return DocstoreClient.getAllDocs(
this.project_id,
(error, res, fetched_docs) => {
this.fetched_docs = fetched_docs
(error, res, fetchedDocs) => {
this.fetched_docs = fetchedDocs
if (error != null) {
throw error
}
@ -1202,8 +1197,8 @@ describe('Archiving', function () {
}
DocstoreClient.getAllDocs(
this.project_id,
(error, res, fetched_docs) => {
this.fetched_docs = fetched_docs
(error, res, fetchedDocs) => {
this.fetched_docs = fetchedDocs
if (error != null) {
throw error
}

View file

@ -1,5 +1,4 @@
/* eslint-disable
camelcase,
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
@ -70,10 +69,10 @@ describe('Getting a doc', function () {
describe('when the doc does not exist', function () {
return it('should return a 404', function (done) {
const missing_doc_id = ObjectId()
const missingDocId = ObjectId()
return DocstoreClient.getDoc(
this.project_id,
missing_doc_id,
missingDocId,
{},
(error, res, doc) => {
if (error) return done(error)

View file

@ -1,5 +1,4 @@
/* eslint-disable
camelcase,
no-dupe-keys,
no-return-assign,
no-unused-vars,
@ -254,11 +253,7 @@ describe('DocManager', function () {
inS3: true,
}
this.MongoManager.findDoc.yields(null, this.doc)
this.DocArchiveManager.unarchiveDoc = (
project_id,
doc_id,
callback
) => {
this.DocArchiveManager.unarchiveDoc = (projectId, docId, callback) => {
this.doc.inS3 = false
return callback()
}
@ -497,8 +492,8 @@ describe('DocManager', function () {
it('should log a warning', function () {
expect(this.logger.warn).to.have.been.calledWith(
sinon.match({
project_id: this.project_id,
doc_id: this.doc_id,
projectId: this.project_id,
docId: this.doc_id,
err: this.err,
}),
'archiving a single doc in the background failed'

View file

@ -1,5 +1,4 @@
/* eslint-disable
camelcase,
no-return-assign,
no-unused-vars,
*/
@ -32,110 +31,110 @@ describe('RangeManager', function () {
describe('jsonRangesToMongo', function () {
it('should convert ObjectIds and dates to proper objects', function () {
const change_id = ObjectId().toString()
const comment_id = ObjectId().toString()
const user_id = ObjectId().toString()
const thread_id = ObjectId().toString()
const changeId = ObjectId().toString()
const commentId = ObjectId().toString()
const userId = ObjectId().toString()
const threadId = ObjectId().toString()
const ts = new Date().toJSON()
return this.RangeManager.jsonRangesToMongo({
changes: [
{
id: change_id,
id: changeId,
op: { i: 'foo', p: 3 },
metadata: {
user_id,
user_id: userId,
ts,
},
},
],
comments: [
{
id: comment_id,
op: { c: 'foo', p: 3, t: thread_id },
id: commentId,
op: { c: 'foo', p: 3, t: threadId },
},
],
}).should.deep.equal({
changes: [
{
id: ObjectId(change_id),
id: ObjectId(changeId),
op: { i: 'foo', p: 3 },
metadata: {
user_id: ObjectId(user_id),
user_id: ObjectId(userId),
ts: new Date(ts),
},
},
],
comments: [
{
id: ObjectId(comment_id),
op: { c: 'foo', p: 3, t: ObjectId(thread_id) },
id: ObjectId(commentId),
op: { c: 'foo', p: 3, t: ObjectId(threadId) },
},
],
})
})
it('should leave malformed ObjectIds as they are', function () {
const change_id = 'foo'
const comment_id = 'bar'
const user_id = 'baz'
const changeId = 'foo'
const commentId = 'bar'
const userId = 'baz'
return this.RangeManager.jsonRangesToMongo({
changes: [
{
id: change_id,
id: changeId,
metadata: {
user_id,
user_id: userId,
},
},
],
comments: [
{
id: comment_id,
id: commentId,
},
],
}).should.deep.equal({
changes: [
{
id: change_id,
id: changeId,
metadata: {
user_id,
user_id: userId,
},
},
],
comments: [
{
id: comment_id,
id: commentId,
},
],
})
})
return it('should be consistent when transformed through json -> mongo -> json', function () {
const change_id = ObjectId().toString()
const comment_id = ObjectId().toString()
const user_id = ObjectId().toString()
const thread_id = ObjectId().toString()
const changeId = ObjectId().toString()
const commentId = ObjectId().toString()
const userId = ObjectId().toString()
const threadId = ObjectId().toString()
const ts = new Date().toJSON()
const ranges1 = {
changes: [
{
id: change_id,
id: changeId,
op: { i: 'foo', p: 3 },
metadata: {
user_id,
user_id: userId,
ts,
},
},
],
comments: [
{
id: comment_id,
op: { c: 'foo', p: 3, t: thread_id },
id: commentId,
op: { c: 'foo', p: 3, t: threadId },
},
],
}
const ranges1_copy = JSON.parse(JSON.stringify(ranges1)) // jsonRangesToMongo modifies in place
const ranges1Copy = JSON.parse(JSON.stringify(ranges1)) // jsonRangesToMongo modifies in place
const ranges2 = JSON.parse(
JSON.stringify(this.RangeManager.jsonRangesToMongo(ranges1_copy))
JSON.stringify(this.RangeManager.jsonRangesToMongo(ranges1Copy))
)
return ranges1.should.deep.equal(ranges2)
})