Merge pull request #94 from overleaf/jpa-mongodb-native

[misc] migrate to the native mongo driver
Jakob Ackermann 2020-09-29 11:55:11 +02:00 committed by GitHub
commit 712a4c11e2
24 changed files with 323 additions and 928 deletions
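In short, the service drops the mongojs wrapper in favour of the official mongodb driver: insert/save become insertOne, update with { multi: true } becomes updateMany, findAndModify becomes updateOne or findOneAndUpdate, field selections move under a projection option, and query callbacks give way to cursors finished with toArray. A rough sketch of the new call style (placeholder docId/projectId values, not taken from the diff; collection names are the service's own):

// Sketch only: docId and projectId are illustrative placeholders.
const { db, ObjectId, waitForDb } = require('./app/js/mongodb')

async function example(docId, projectId) {
  await waitForDb()

  // mongojs: db.docHistory.find(query, { pack: { $slice: -1 } }).sort(...).limit(1, cb)
  const packs = await db.docHistory
    .find(
      { doc_id: ObjectId(docId) },
      { projection: { pack: { $slice: -1 } } }
    )
    .sort({ v: -1 })
    .limit(1)
    .toArray()

  // mongojs: db.docHistory.update(query, update, { multi: true }, cb)
  await db.docHistory.updateMany(
    { doc_id: ObjectId(docId), project_id: { $exists: false } },
    { $set: { project_id: ObjectId(projectId) } }
  )

  return packs
}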

View file

@@ -44,6 +44,7 @@ Metrics.memory.monitor(logger)
 const childProcess = require('child_process')
+const mongodb = require('./app/js/mongodb')
 const HttpController = require('./app/js/HttpController')
 const express = require('express')
 const bodyParser = require('body-parser')
@@ -128,18 +129,26 @@ const host =
 if (!module.parent) {
   // Called directly
-  app.listen(port, host, function (error) {
-    if (error != null) {
-      return logger.error(
-        { err: error },
-        'could not start track-changes server'
-      )
-    } else {
-      return logger.info(
-        `trackchanges starting up, listening on ${host}:${port}`
-      )
-    }
-  })
+  mongodb
+    .waitForDb()
+    .then(() => {
+      app.listen(port, host, function (error) {
+        if (error != null) {
+          return logger.error(
+            { err: error },
+            'could not start track-changes server'
+          )
+        } else {
+          return logger.info(
+            `trackchanges starting up, listening on ${host}:${port}`
+          )
+        }
+      })
+    })
+    .catch((err) => {
+      logger.fatal({ err }, 'Cannot connect to mongo. Exiting.')
+      process.exit(1)
+    })
 }

 module.exports = app

View file

@@ -10,7 +10,7 @@
  * DS207: Consider shorter variations of null checks
  * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
  */
-const { ObjectId } = require('mongojs')
+const { ObjectId } = require('./mongodb')
 const request = require('request')
 const async = require('async')
 const settings = require('settings-sharelatex')

View file

@@ -18,7 +18,7 @@ const settings = require('settings-sharelatex')
 const logger = require('logger-sharelatex')
 const AWS = require('aws-sdk')
 const S3S = require('s3-streams')
-const { db, ObjectId } = require('./mongojs')
+const { db, ObjectId } = require('./mongodb')
 const JSONStream = require('JSONStream')
 const ReadlineStream = require('byline')
 const zlib = require('zlib')
@@ -187,7 +187,11 @@ module.exports = MongoAWS = {
       // allow the object to expire, we can always retrieve it again
       object.expiresAt = new Date(Date.now() + 7 * DAYS)
       logger.log({ project_id, doc_id, pack_id }, 'inserting object from s3')
-      return db.docHistory.insert(object, callback)
+      return db.docHistory.insertOne(object, (err, confirmation) => {
+        if (err) return callback(err)
+        object._id = confirmation.insertedId
+        callback(null, object)
+      })
     })
   }
 }

View file

@@ -12,7 +12,7 @@
  * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
  */
 let MongoManager
-const { db, ObjectId } = require('./mongojs')
+const { db, ObjectId } = require('./mongodb')
 const PackManager = require('./PackManager')
 const async = require('async')
 const _ = require('underscore')
@@ -25,7 +25,11 @@ module.exports = MongoManager = {
       callback = function (error, update) {}
     }
     return db.docHistory
-      .find({ doc_id: ObjectId(doc_id.toString()) }, { pack: { $slice: -1 } }) // only return the last entry in a pack
+      .find(
+        { doc_id: ObjectId(doc_id.toString()) },
+        // only return the last entry in a pack
+        { projection: { pack: { $slice: -1 } } }
+      )
       .sort({ v: -1 })
       .limit(1)
       .toArray(function (error, compressedUpdates) {
@@ -96,7 +100,7 @@ module.exports = MongoManager = {
     if (callback == null) {
       callback = function (error) {}
     }
-    return db.docHistory.update(
+    return db.docHistory.updateMany(
       {
         doc_id: ObjectId(doc_id.toString()),
         project_id: { $exists: false }
@@ -104,9 +108,6 @@ module.exports = MongoManager = {
       {
        $set: { project_id: ObjectId(project_id.toString()) }
       },
-      {
-        multi: true
-      },
       callback
     )
   },
@@ -115,16 +116,11 @@ module.exports = MongoManager = {
     if (callback == null) {
      callback = function (error, metadata) {}
     }
-    return db.projectHistoryMetaData.find(
+    return db.projectHistoryMetaData.findOne(
       {
         project_id: ObjectId(project_id.toString())
       },
-      function (error, results) {
-        if (error != null) {
-          return callback(error)
-        }
-        return callback(null, results[0])
-      }
+      callback
     )
   },
@@ -132,7 +128,7 @@ module.exports = MongoManager = {
     if (callback == null) {
       callback = function (error) {}
     }
-    return db.projectHistoryMetaData.update(
+    return db.projectHistoryMetaData.updateOne(
       {
         project_id: ObjectId(project_id)
       },
@@ -151,7 +147,7 @@ module.exports = MongoManager = {
     if (callback == null) {
       callback = function (error) {}
     }
-    return db.docHistory.update(
+    return db.docHistory.updateMany(
       {
         project_id: ObjectId(project_id),
         temporary: true,
@@ -161,9 +157,6 @@ module.exports = MongoManager = {
         $set: { temporary: false },
         $unset: { expiresAt: '' }
       },
-      {
-        multi: true
-      },
       callback
     )
   },

View file

@@ -16,7 +16,9 @@
 let PackManager
 const async = require('async')
 const _ = require('underscore')
-const { db, ObjectId, BSON } = require('./mongojs')
+const Bson = require('bson')
+const BSON = new Bson()
+const { db, ObjectId } = require('./mongodb')
 const logger = require('logger-sharelatex')
 const LockManager = require('./LockManager')
 const MongoAWS = require('./MongoAWS')
@@ -218,7 +220,7 @@ module.exports = PackManager = {
       { project_id, doc_id, newUpdates },
       'inserting updates into new pack'
     )
-    return db.docHistory.save(newPack, function (err, result) {
+    return db.docHistory.insertOne(newPack, function (err) {
       if (err != null) {
         return callback(err)
       }
@@ -273,10 +275,7 @@
       'appending updates to existing pack'
     )
     Metrics.inc(`append-pack-${temporary ? 'temporary' : 'permanent'}`)
-    return db.docHistory.findAndModify(
-      { query, update, new: true, fields: { meta: 1, v_end: 1 } },
-      callback
-    )
+    return db.docHistory.updateOne(query, update, callback)
   },

   // Retrieve all changes for a document
@@ -301,7 +300,8 @@ module.exports = PackManager = {
     // console.log "query:", query
     return db.docHistory
       .find(query)
-      .sort({ v: -1 }, function (err, result) {
+      .sort({ v: -1 })
+      .toArray(function (err, result) {
         if (err != null) {
           return callback(err)
         }
@@ -380,20 +380,12 @@
   fetchPacksIfNeeded(project_id, doc_id, pack_ids, callback) {
     let id
-    return db.docHistory.find(
-      {
-        _id: {
-          $in: (() => {
-            const result = []
-            for (id of Array.from(pack_ids)) {
-              result.push(ObjectId(id))
-            }
-            return result
-          })()
-        }
-      },
-      { _id: 1 },
-      function (err, loadedPacks) {
+    return db.docHistory
+      .find(
+        { _id: { $in: pack_ids.map(ObjectId) } },
+        { projection: { _id: 1 } }
+      )
+      .toArray(function (err, loadedPacks) {
         if (err != null) {
           return callback(err)
         }
@@ -428,8 +420,7 @@
           return callback()
         }
       )
-      }
-    )
+      })
   },

   // Retrieve all changes across a project
@@ -437,8 +428,12 @@
   makeProjectIterator(project_id, before, callback) {
     // get all the docHistory Entries
     return db.docHistory
-      .find({ project_id: ObjectId(project_id) }, { pack: false })
-      .sort({ 'meta.end_ts': -1 }, function (err, packs) {
+      .find(
+        { project_id: ObjectId(project_id) },
+        { projection: { pack: false } }
+      )
+      .sort({ 'meta.end_ts': -1 })
+      .toArray(function (err, packs) {
         let pack
         if (err != null) {
           return callback(err)
@@ -449,9 +444,9 @@
         allPacks.push(pack)
         seenIds[pack._id] = true
       }
-      return db.docHistoryIndex.find(
-        { project_id: ObjectId(project_id) },
-        function (err, indexes) {
+      return db.docHistoryIndex
+        .find({ project_id: ObjectId(project_id) })
+        .toArray(function (err, indexes) {
           if (err != null) {
             return callback(err)
           }
@@ -470,8 +465,7 @@
             null,
             new ProjectIterator(allPacks, before, PackManager.getPackById)
           )
-        }
-      )
+        })
     })
   },
@@ -497,11 +491,9 @@
   increaseTTL(pack, callback) {
     if (pack.expiresAt < new Date(Date.now() + 6 * DAYS)) {
       // update cache expiry since we are using this pack
-      return db.docHistory.findAndModify(
-        {
-          query: { _id: pack._id },
-          update: { $set: { expiresAt: new Date(Date.now() + 7 * DAYS) } }
-        },
+      return db.docHistory.updateOne(
+        { _id: pack._id },
+        { $set: { expiresAt: new Date(Date.now() + 7 * DAYS) } },
         (err) => callback(err, pack)
       )
     } else {
@@ -521,7 +513,7 @@
   getPackFromIndex(doc_id, pack_id, callback) {
     return db.docHistoryIndex.findOne(
       { _id: ObjectId(doc_id.toString()), 'packs._id': pack_id },
-      { 'packs.$': 1 },
+      { projection: { 'packs.$': 1 } },
       callback
     )
   },
@@ -529,7 +521,7 @@
   getLastPackFromIndex(doc_id, callback) {
     return db.docHistoryIndex.findOne(
       { _id: ObjectId(doc_id.toString()) },
-      { packs: { $slice: -1 } },
+      { projection: { packs: { $slice: -1 } } },
       function (err, indexPack) {
         if (err != null) {
           return callback(err)
@@ -616,8 +608,9 @@
       expiresAt: { $exists: false }
     }
     return db.docHistory
-      .find(query, { pack: false })
-      .sort({ v: 1 }, function (err, packs) {
+      .find(query, { projection: { pack: false } })
+      .sort({ v: 1 })
+      .toArray(function (err, packs) {
        if (err != null) {
          return callback(err)
        }
@@ -641,8 +634,9 @@
       expiresAt: { $exists: false }
     }
     return db.docHistory
-      .find(query, { pack: false })
-      .sort({ v: 1 }, function (err, packs) {
+      .find(query, { projection: { pack: false } })
+      .sort({ v: 1 })
+      .toArray(function (err, packs) {
        if (err != null) {
          return callback(err)
        }
@@ -727,15 +721,15 @@
   },

   _insertPacksIntoIndex(project_id, doc_id, newPacks, callback) {
-    return db.docHistoryIndex.findAndModify(
+    return db.docHistoryIndex.updateOne(
+      { _id: ObjectId(doc_id.toString()) },
+      {
+        $setOnInsert: { project_id: ObjectId(project_id.toString()) },
+        $push: {
+          packs: { $each: newPacks, $sort: { v: 1 } }
+        }
+      },
       {
-        query: { _id: ObjectId(doc_id.toString()) },
-        update: {
-          $setOnInsert: { project_id: ObjectId(project_id.toString()) },
-          $push: {
-            packs: { $each: newPacks, $sort: { v: 1 } }
-          }
-        },
         upsert: true
       },
       callback
@@ -993,11 +987,9 @@
   _markPackAsFinalised(project_id, doc_id, pack_id, callback) {
     logger.log({ project_id, doc_id, pack_id }, 'marking pack as finalised')
-    return db.docHistory.findAndModify(
-      {
-        query: { _id: pack_id },
-        update: { $set: { finalised: true } }
-      },
+    return db.docHistory.updateOne(
+      { _id: pack_id },
+      { $set: { finalised: true } },
       callback
     )
   },
@@ -1018,11 +1010,9 @@
   markPackAsChecked(project_id, doc_id, pack_id, callback) {
     logger.log({ project_id, doc_id, pack_id }, 'marking pack as checked')
-    return db.docHistory.findAndModify(
-      {
-        query: { _id: pack_id },
-        update: { $currentDate: { last_checked: true } }
-      },
+    return db.docHistory.updateOne(
+      { _id: pack_id },
+      { $currentDate: { last_checked: true } },
       callback
     )
   },
@@ -1085,20 +1075,18 @@
       { project_id, doc_id },
       'marking pack as archive in progress status'
     )
-    return db.docHistoryIndex.findAndModify(
+    return db.docHistoryIndex.findOneAndUpdate(
       {
-        query: {
-          _id: ObjectId(doc_id.toString()),
-          packs: { $elemMatch: { _id: pack_id, inS3: { $exists: false } } }
-        },
-        fields: { 'packs.$': 1 },
-        update: { $set: { 'packs.$.inS3': false } }
+        _id: ObjectId(doc_id.toString()),
+        packs: { $elemMatch: { _id: pack_id, inS3: { $exists: false } } }
       },
+      { $set: { 'packs.$.inS3': false } },
+      { projection: { 'packs.$': 1 } },
       function (err, result) {
         if (err != null) {
           return callback(err)
         }
-        if (result == null) {
+        if (!result.value) {
           return callback(new Error('archive is already in progress'))
         }
         logger.log(
@@ -1115,35 +1103,30 @@
       { project_id, doc_id, pack_id },
       'clearing as archive in progress'
     )
-    return db.docHistoryIndex.findAndModify(
+    return db.docHistoryIndex.updateOne(
       {
-        query: {
-          _id: ObjectId(doc_id.toString()),
-          packs: { $elemMatch: { _id: pack_id, inS3: false } }
-        },
-        fields: { 'packs.$': 1 },
-        update: { $unset: { 'packs.$.inS3': true } }
+        _id: ObjectId(doc_id.toString()),
+        packs: { $elemMatch: { _id: pack_id, inS3: false } }
       },
+      { $unset: { 'packs.$.inS3': true } },
       callback
     )
   },

   markPackAsArchived(project_id, doc_id, pack_id, callback) {
     logger.log({ project_id, doc_id, pack_id }, 'marking pack as archived')
-    return db.docHistoryIndex.findAndModify(
+    return db.docHistoryIndex.findOneAndUpdate(
       {
-        query: {
-          _id: ObjectId(doc_id.toString()),
-          packs: { $elemMatch: { _id: pack_id, inS3: false } }
-        },
-        fields: { 'packs.$': 1 },
-        update: { $set: { 'packs.$.inS3': true } }
+        _id: ObjectId(doc_id.toString()),
+        packs: { $elemMatch: { _id: pack_id, inS3: false } }
      },
+      { $set: { 'packs.$.inS3': true } },
+      { projection: { 'packs.$': 1 } },
       function (err, result) {
         if (err != null) {
           return callback(err)
         }
-        if (result == null) {
+        if (!result.value) {
           return callback(new Error('archive is not marked as progress'))
         }
         logger.log({ project_id, doc_id, pack_id }, 'marked as archived')
@@ -1153,12 +1136,13 @@
   },

   setTTLOnArchivedPack(project_id, doc_id, pack_id, callback) {
-    return db.docHistory.findAndModify(
-      {
-        query: { _id: pack_id },
-        update: { $set: { expiresAt: new Date(Date.now() + 1 * DAYS) } }
-      },
+    return db.docHistory.updateOne(
+      { _id: pack_id },
+      { $set: { expiresAt: new Date(Date.now() + 1 * DAYS) } },
       function (err) {
+        if (err) {
+          return callback(err)
+        }
         logger.log({ project_id, doc_id, pack_id }, 'set expiry on pack')
         return callback()
       }

View file

@@ -18,7 +18,7 @@ let project_id, doc_id
 const Settings = require('settings-sharelatex')
 const async = require('async')
 const _ = require('underscore')
-const { db, ObjectId, BSON } = require('./mongojs')
+const { db, ObjectId } = require('./mongodb')
 const fs = require('fs')
 const Metrics = require('metrics-sharelatex')
 Metrics.initialize('track-changes')
@@ -78,18 +78,6 @@
   `checking for updates, limit=${LIMIT}, delay=${DOCUMENT_PACK_DELAY}, timeout=${TIMEOUT}`
 )

-// work around for https://github.com/mafintosh/mongojs/issues/224
-db.close = function (callback) {
-  return this._getServer(function (err, server) {
-    if (err != null) {
-      return callback(err)
-    }
-    server = server.destroy != null ? server : server.topology
-    server.destroy(true, true)
-    return callback()
-  })
-}
-
 const finish = function () {
   if (shutDownTimer != null) {
     logger.log('cancelling timeout')
@@ -186,12 +174,13 @@ if (pending != null) {
         _id: { $lt: ObjectIdFromDate(oneWeekAgo) },
         last_checked: { $lt: oneWeekAgo }
       },
-      { _id: 1, doc_id: 1, project_id: 1 }
+      { projection: { _id: 1, doc_id: 1, project_id: 1 } }
     )
     .sort({
       last_checked: 1
     })
-    .limit(LIMIT, function (err, results) {
+    .limit(LIMIT)
+    .toArray(function (err, results) {
       if (err != null) {
         logger.log({ err }, 'error checking for updates')
         finish()

View file

@@ -0,0 +1,30 @@
+const Settings = require('settings-sharelatex')
+const { MongoClient, ObjectId } = require('mongodb')
+
+const clientPromise = MongoClient.connect(
+  Settings.mongo.url,
+  Settings.mongo.options
+)
+
+let setupDbPromise
+async function waitForDb() {
+  if (!setupDbPromise) {
+    setupDbPromise = setupDb()
+  }
+  await setupDbPromise
+}
+
+const db = {}
+async function setupDb() {
+  const internalDb = (await clientPromise).db()
+
+  db.docHistory = internalDb.collection('docHistory')
+  db.docHistoryIndex = internalDb.collection('docHistoryIndex')
+  db.projectHistoryMetaData = internalDb.collection('projectHistoryMetaData')
+}
+
+module.exports = {
+  db,
+  ObjectId,
+  waitForDb
+}
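The new module above connects once per process and fills the exported db object with collection handles only after the connection is ready, so callers must await waitForDb() before issuing queries. A minimal consumer sketch (the query and function name are illustrative, not taken from the service):

const { db, ObjectId, waitForDb } = require('./app/js/mongodb')

async function countHistoryForDoc(docId) {
  await waitForDb() // db.docHistory is undefined until setupDb() has run
  return db.docHistory.countDocuments({ doc_id: ObjectId(docId) })
}

// Mirrors the app.js startup gate: fail fast if Mongo is unreachable.
waitForDb().catch((err) => {
  console.error('cannot connect to mongo', err)
  process.exit(1)
})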

View file

@@ -1,15 +0,0 @@
-// TODO: This file was created by bulk-decaffeinate.
-// Sanity-check the conversion and remove this comment.
-const Settings = require('settings-sharelatex')
-const mongojs = require('mongojs')
-const bson = require('bson')
-const db = mongojs(Settings.mongo.url, [
-  'docHistory',
-  'projectHistoryMetaData',
-  'docHistoryIndex'
-])
-module.exports = {
-  db,
-  ObjectId: mongojs.ObjectId,
-  BSON: new bson.BSONPure()
-}

View file

@@ -4,6 +4,10 @@ const TMP_DIR =

 module.exports = {
   mongo: {
+    options: {
+      useUnifiedTopology:
+        (process.env.MONGO_USE_UNIFIED_TOPOLOGY || 'true') === 'true'
+    },
     url:
       process.env.MONGO_CONNECTION_STRING ||
       `mongodb://${process.env.MONGO_HOST || 'localhost'}/sharelatex`
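The options block added above is passed straight through to MongoClient.connect by app/js/mongodb.js; useUnifiedTopology defaults to on and is disabled only when MONGO_USE_UNIFIED_TOPOLOGY is set to anything other than the string 'true'. A standalone sketch of how those settings resolve, using the same defaults:

// Sketch only; mirrors the defaults in the settings file above.
const { MongoClient } = require('mongodb')

const url =
  process.env.MONGO_CONNECTION_STRING ||
  `mongodb://${process.env.MONGO_HOST || 'localhost'}/sharelatex`
const options = {
  useUnifiedTopology:
    (process.env.MONGO_USE_UNIFIED_TOPOLOGY || 'true') === 'true'
}

MongoClient.connect(url, options)
  .then((client) => client.close())
  .catch((err) => console.error('mongo connection failed', err))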

File diff suppressed because it is too large

View file

@@ -22,7 +22,7 @@
     "async": "^2.6.3",
     "aws-sdk": "^2.643.0",
     "body-parser": "^1.19.0",
-    "bson": "^0.4.20",
+    "bson": "^1.1.5",
     "byline": "^5.0.0",
     "express": "4.17.1",
     "heap": "^0.2.6",
@@ -30,7 +30,7 @@
     "logger-sharelatex": "^2.2.0",
     "metrics-sharelatex": "^2.6.2",
     "mongo-uri": "^0.1.2",
-    "mongojs": "3.1.0",
+    "mongodb": "^3.6.0",
     "redis": "~0.10.1",
     "redis-sharelatex": "^1.0.13",
     "request": "~2.88.2",

View file

@@ -14,8 +14,7 @@ const sinon = require('sinon')
 const chai = require('chai')
 chai.should()
 const { expect } = chai
-const mongojs = require('../../../app/js/mongojs')
-const { ObjectId } = mongojs
+const { ObjectId } = require('../../../app/js/mongodb')
 const Settings = require('settings-sharelatex')
 const request = require('request')
 const rclient = require('redis').createClient(Settings.redis.history) // Only works locally for now

View file

@@ -18,9 +18,7 @@ const sinon = require('sinon')
 const chai = require('chai')
 chai.should()
 const { expect } = chai
-const mongojs = require('../../../app/js/mongojs')
-const { db } = mongojs
-const { ObjectId } = mongojs
+const { db, ObjectId } = require('../../../app/js/mongodb')
 const Settings = require('settings-sharelatex')
 const request = require('request')
 const rclient = require('redis').createClient(Settings.redis.history) // Only works locally for now
@@ -126,7 +124,7 @@ describe('Archiving updates', function () {
   after(function (done) {
     MockWebApi.getUserInfo.restore()
-    return db.docHistory.remove(
+    return db.docHistory.deleteMany(
       { project_id: ObjectId(this.project_id) },
       () => {
         return db.docHistoryIndex.remove(
@@ -172,7 +170,7 @@ describe('Archiving updates', function () {
   })

   it('should have one remaining pack after cache is expired', function (done) {
-    return db.docHistory.remove(
+    return db.docHistory.deleteMany(
       {
         doc_id: ObjectId(this.doc_id),
         expiresAt: { $exists: true }

View file

@@ -14,8 +14,7 @@ const sinon = require('sinon')
 const chai = require('chai')
 chai.should()
 const { expect } = chai
-const mongojs = require('../../../app/js/mongojs')
-const { ObjectId } = mongojs
+const { ObjectId } = require('../../../app/js/mongodb')
 const Settings = require('settings-sharelatex')
 const request = require('request')
 const rclient = require('redis').createClient(Settings.redis.history) // Only works locally for now

View file

@@ -13,9 +13,7 @@ const sinon = require('sinon')
 const chai = require('chai')
 chai.should()
 const { expect } = chai
-const mongojs = require('../../../app/js/mongojs')
-const { db } = mongojs
-const { ObjectId } = mongojs
+const { ObjectId } = require('../../../app/js/mongodb')
 const Settings = require('settings-sharelatex')
 const TrackChangesApp = require('./helpers/TrackChangesApp')

View file

@@ -14,9 +14,7 @@ const sinon = require('sinon')
 const chai = require('chai')
 chai.should()
 const { expect } = chai
-const mongojs = require('../../../app/js/mongojs')
-const { db } = mongojs
-const { ObjectId } = mongojs
+const { ObjectId } = require('../../../app/js/mongodb')
 const Settings = require('settings-sharelatex')
 const TrackChangesApp = require('./helpers/TrackChangesApp')

View file

@@ -13,8 +13,6 @@ const sinon = require('sinon')
 const chai = require('chai')
 chai.should()
 const { expect } = chai
-const mongojs = require('../../../app/js/mongojs')
-const { ObjectId } = mongojs
 const Settings = require('settings-sharelatex')
 const LockManager = require('../../../app/js/LockManager')
 const rclient = require('redis').createClient(Settings.redis.history) // Only works locally for now

View file

@@ -13,9 +13,7 @@ const sinon = require('sinon')
 const chai = require('chai')
 chai.should()
 const { expect } = chai
-const mongojs = require('../../../app/js/mongojs')
-const { db } = mongojs
-const { ObjectId } = mongojs
+const { ObjectId } = require('../../../app/js/mongodb')
 const Settings = require('settings-sharelatex')
 const TrackChangesApp = require('./helpers/TrackChangesApp')

View file

@@ -13,6 +13,7 @@
  * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
  */
 const app = require('../../../../app')
+const { waitForDb } = require('../../../../app/js/mongodb')
 require('logger-sharelatex')
 const logger = require('logger-sharelatex')
 const Settings = require('settings-sharelatex')
@@ -29,9 +30,10 @@ module.exports = {
       return callback()
     } else if (this.initing) {
       return this.callbacks.push(callback)
-    } else {
+    }
     this.initing = true
     this.callbacks.push(callback)
+    waitForDb().then(() => {
       return app.listen(
         __guard__(
           Settings.internal != null
@@ -56,7 +58,7 @@ module.exports = {
         })()
       }
     )
-  }
+    })
   }
 }

 function __guard__(value, transform) {

View file

@@ -19,7 +19,7 @@ const request = require('request')
 const Settings = require('settings-sharelatex')
 const rclient = require('redis-sharelatex').createClient(Settings.redis.history) // Only works locally for now
 const Keys = Settings.redis.history.key_schema
-const { db, ObjectId } = require('../../../../app/js/mongojs')
+const { db, ObjectId } = require('../../../../app/js/mongodb')
 const aws = require('aws-sdk')
 const s3 = new aws.S3({
@@ -87,11 +87,11 @@ module.exports = TrackChangesClient = {
     if (callback == null) {
       callback = function (error, updates) {}
     }
-    return db.projectHistoryMetaData.find(
+    return db.projectHistoryMetaData.findOne(
       {
         project_id: ObjectId(project_id)
       },
-      (error, projects) => callback(error, projects[0])
+      callback
     )
   },
@@ -99,7 +99,7 @@
     if (callback == null) {
       callback = function (error) {}
     }
-    return db.projectHistoryMetaData.update(
+    return db.projectHistoryMetaData.updateOne(
       {
         project_id: ObjectId(project_id)
       },

View file

@@ -14,7 +14,7 @@ chai.should()
 const sinon = require('sinon')
 const modulePath = '../../../../app/js/MongoAWS.js'
 const SandboxedModule = require('sandboxed-module')
-const { ObjectId } = require('mongojs')
+const { ObjectId } = require('mongodb')
 const MemoryStream = require('memorystream')
 const zlib = require('zlib')
@@ -44,7 +44,7 @@ describe('MongoAWS', function () {
         'aws-sdk': (this.awssdk = {}),
         fs: (this.fs = {}),
         's3-streams': (this.S3S = {}),
-        './mongojs': { db: (this.db = {}), ObjectId },
+        './mongodb': { db: (this.db = {}), ObjectId },
         JSONStream: (this.JSONStream = {}),
         'readline-stream': (this.readline = sinon.stub()),
         'metrics-sharelatex': { inc() {} }
@@ -92,7 +92,9 @@
       this.S3S.ReadStream = () =>
         MemoryStream.createReadStream(zbuf, { readable: true })
       this.db.docHistory = {}
-      this.db.docHistory.insert = sinon.stub().callsArgWith(1, null, 'pack')
+      this.db.docHistory.insertOne = sinon
+        .stub()
+        .yields(null, { insertedId: ObjectId() })

       return this.MongoAWS.unArchivePack(
         this.project_id,
@@ -107,7 +109,7 @@
       })

       return it('should call db.docHistory.insert', function () {
-        return this.db.docHistory.insert.called.should.equal(true)
+        return this.db.docHistory.insertOne.called.should.equal(true)
       })
     })
   })

View file

@@ -16,7 +16,7 @@ const { expect } = chai
 const modulePath = '../../../../app/js/MongoManager.js'
 const packModulePath = '../../../../app/js/PackManager.js'
 const SandboxedModule = require('sandboxed-module')
-const { ObjectId } = require('mongojs')
+const { ObjectId } = require('mongodb')
 const tk = require('timekeeper')

 describe('MongoManager', function () {
@@ -24,7 +24,7 @@ describe('MongoManager', function () {
     tk.freeze(new Date())
     this.MongoManager = SandboxedModule.require(modulePath, {
       requires: {
-        './mongojs': { db: (this.db = {}), ObjectId },
+        './mongodb': { db: (this.db = {}), ObjectId },
         './PackManager': (this.PackManager = {}),
         'metrics-sharelatex': { timeAsyncMethod() {} },
         'logger-sharelatex': { log() {} }
@@ -156,7 +156,7 @@
   describe('backportProjectId', function () {
     beforeEach(function () {
-      this.db.docHistory = { update: sinon.stub().callsArg(3) }
+      this.db.docHistory = { updateMany: sinon.stub().yields() }
       return this.MongoManager.backportProjectId(
         this.project_id,
         this.doc_id,
@@ -165,7 +165,7 @@
     })

     it("should insert the project_id into all entries for the doc_id which don't have it set", function () {
-      return this.db.docHistory.update
+      return this.db.docHistory.updateMany
         .calledWith(
           {
             doc_id: ObjectId(this.doc_id),
@@ -173,9 +173,6 @@ describe('MongoManager', function () {
           },
           {
             $set: { project_id: ObjectId(this.project_id) }
-          },
-          {
-            multi: true
-          }
+          }
         )
         .should.equal(true)
@@ -190,7 +187,7 @@
     beforeEach(function () {
       this.metadata = { mock: 'metadata' }
       this.db.projectHistoryMetaData = {
-        find: sinon.stub().callsArgWith(1, null, [this.metadata])
+        findOne: sinon.stub().callsArgWith(1, null, this.metadata)
       }
       return this.MongoManager.getProjectMetaData(
         this.project_id,
@@ -199,7 +196,7 @@
     })

     it('should look up the meta data in the db', function () {
-      return this.db.projectHistoryMetaData.find
+      return this.db.projectHistoryMetaData.findOne
         .calledWith({ project_id: ObjectId(this.project_id) })
         .should.equal(true)
     })
@@ -213,7 +210,7 @@
     beforeEach(function () {
       this.metadata = { mock: 'metadata' }
       this.db.projectHistoryMetaData = {
-        update: sinon.stub().callsArgWith(3, null, [this.metadata])
+        updateOne: sinon.stub().yields()
       }
       return this.MongoManager.setProjectMetaData(
         this.project_id,
@@ -223,7 +220,7 @@
     })

     it('should upsert the metadata into the DB', function () {
-      return this.db.projectHistoryMetaData.update
+      return this.db.projectHistoryMetaData.updateOne
         .calledWith(
           {
             project_id: ObjectId(this.project_id)

View file

@@ -17,9 +17,7 @@ const should = chai.should()
 const { expect } = chai
 const modulePath = '../../../../app/js/PackManager.js'
 const SandboxedModule = require('sandboxed-module')
-const { ObjectId } = require('mongojs')
-const bson = require('bson')
-const BSON = new bson.BSONPure()
+const { ObjectId } = require('mongodb')
 const _ = require('underscore')
 const tk = require('timekeeper')
@@ -29,7 +27,8 @@ describe('PackManager', function () {
     tk.freeze(new Date())
     this.PackManager = SandboxedModule.require(modulePath, {
       requires: {
-        './mongojs': { db: (this.db = {}), ObjectId, BSON },
+        bson: require('bson'),
+        './mongodb': { db: (this.db = {}), ObjectId },
         './LockManager': {},
         './MongoAWS': {},
         'logger-sharelatex': { log: sinon.stub(), error: sinon.stub() },
@@ -66,8 +65,9 @@ describe('PackManager', function () {
       { op: 'op-4', meta: 'meta-4', v: 4 }
     ]
     return (this.db.docHistory = {
-      save: sinon.stub().callsArg(1),
+      insertOne: sinon.stub().yields(),
       insert: sinon.stub().callsArg(1),
+      updateOne: sinon.stub().yields(),
       findAndModify: sinon.stub().callsArg(1)
     })
   })
@@ -390,7 +390,7 @@
     return describe('for a small update that will expire', function () {
       it('should insert the update into mongo', function () {
-        return this.db.docHistory.save
+        return this.db.docHistory.insertOne
          .calledWithMatch({
            pack: this.newUpdates,
            project_id: ObjectId(this.project_id),
@@ -403,7 +403,7 @@
       })

       it('should set an expiry time in the future', function () {
-        return this.db.docHistory.save
+        return this.db.docHistory.insertOne
          .calledWithMatch({
            expiresAt: new Date(Date.now() + 7 * 24 * 3600 * 1000)
          })
@@ -443,23 +443,21 @@
     return describe('for a small update that will expire', function () {
       it('should append the update in mongo', function () {
-        return this.db.docHistory.findAndModify
-          .calledWithMatch({
-            query: { _id: this.lastUpdate._id },
-            update: {
+        return this.db.docHistory.updateOne
+          .calledWithMatch(
+            { _id: this.lastUpdate._id },
+            {
              $push: { pack: { $each: this.newUpdates } },
              $set: { v_end: this.newUpdates[this.newUpdates.length - 1].v }
            }
-          })
+          )
          .should.equal(true)
       })

       it('should set an expiry time in the future', function () {
-        return this.db.docHistory.findAndModify
-          .calledWithMatch({
-            update: {
-              $set: { expiresAt: new Date(Date.now() + 7 * 24 * 3600 * 1000) }
-            }
-          })
+        return this.db.docHistory.updateOne
+          .calledWithMatch(sinon.match.any, {
+            $set: { expiresAt: new Date(Date.now() + 7 * 24 * 3600 * 1000) }
+          })
          .should.equal(true)
       })
@@ -498,7 +496,7 @@
     return describe('for a small update that will not expire', function () {
       it('should insert the update into mongo', function () {
-        return this.db.docHistory.save
+        return this.db.docHistory.insertOne
          .calledWithMatch({
            pack: this.newUpdates,
            project_id: ObjectId(this.project_id),
@@ -511,7 +509,7 @@
       })

       it('should not set any expiry time', function () {
-        return this.db.docHistory.save
+        return this.db.docHistory.insertOne
          .neverCalledWithMatch(sinon.match.has('expiresAt'))
          .should.equal(true)
       })
@@ -548,7 +546,7 @@
     return describe('for a small update that will expire', function () {
       it('should insert the update into mongo', function () {
-        return this.db.docHistory.save
+        return this.db.docHistory.insertOne
          .calledWithMatch({
            pack: this.newUpdates,
            project_id: ObjectId(this.project_id),
@@ -561,7 +559,7 @@
       })

       it('should set an expiry time in the future', function () {
-        return this.db.docHistory.save
+        return this.db.docHistory.insertOne
          .calledWithMatch({
            expiresAt: new Date(Date.now() + 7 * 24 * 3600 * 1000)
          })

View file

@@ -17,6 +17,7 @@ const sinon = require('sinon')
 const chai = require('chai')
 const should = chai.should()
 const { expect } = chai
+const { ObjectId } = require('mongodb')
 const modulePath = '../../../../app/js/UpdatesManager.js'
 const SandboxedModule = require('sandboxed-module')
@@ -864,7 +865,6 @@ describe('UpdatesManager', function () {
   describe('fillUserInfo', function () {
     describe('with valid users', function () {
       beforeEach(function (done) {
-        const { ObjectId } = require('mongojs')
         this.user_id_1 = ObjectId().toString()
         this.user_id_2 = ObjectId().toString()
         this.updates = [