Merge pull request #94 from overleaf/jpa-mongodb-native
[misc] migrate to the native mongo driver
Commit: 712a4c11e2
24 changed files with 323 additions and 928 deletions
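The change set below replaces the mongojs wrapper with the official mongodb driver throughout the track-changes service, and the same few call-level substitutions recur in every file. A hedged sketch of those recurring patterns, using the service's collection names but placeholder ids (not taken verbatim from any single file):

// Illustrative only: the recurring mongojs -> native-driver substitutions in this diff.
const { db, ObjectId } = require('./app/js/mongodb')
const docId = ObjectId('000000000000000000000000') // placeholder id

// Reads: the projection moves into an options object and results come from a cursor.
//   mongojs:  db.docHistory.find(filter, { pack: false }, callback)
db.docHistory
  .find({ doc_id: docId }, { projection: { pack: false } })
  .sort({ v: -1 })
  .toArray(function (err, docs) { /* docs is a plain array */ })

// Writes: update + { multi: true } becomes updateMany, save becomes insertOne,
// and findAndModify becomes updateOne or findOneAndUpdate.
db.docHistory.updateMany(
  { doc_id: docId, project_id: { $exists: false } },
  { $set: { project_id: ObjectId('000000000000000000000000') } },
  function (err) { /* handle err */ }
)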
@@ -44,6 +44,7 @@ Metrics.memory.monitor(logger)
 
 const childProcess = require('child_process')
+const mongodb = require('./app/js/mongodb')
 const HttpController = require('./app/js/HttpController')
 const express = require('express')
 const bodyParser = require('body-parser')
@@ -128,18 +129,26 @@ const host =
 
 if (!module.parent) {
   // Called directly
-  app.listen(port, host, function (error) {
-    if (error != null) {
-      return logger.error(
-        { err: error },
-        'could not start track-changes server'
-      )
-    } else {
-      return logger.info(
-        `trackchanges starting up, listening on ${host}:${port}`
-      )
-    }
-  })
+  mongodb
+    .waitForDb()
+    .then(() => {
+      app.listen(port, host, function (error) {
+        if (error != null) {
+          return logger.error(
+            { err: error },
+            'could not start track-changes server'
+          )
+        } else {
+          return logger.info(
+            `trackchanges starting up, listening on ${host}:${port}`
+          )
+        }
+      })
+    })
+    .catch((err) => {
+      logger.fatal({ err }, 'Cannot connect to mongo. Exiting.')
+      process.exit(1)
+    })
 }
 
 module.exports = app
@@ -10,7 +10,7 @@
  * DS207: Consider shorter variations of null checks
  * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
  */
-const { ObjectId } = require('mongojs')
+const { ObjectId } = require('./mongodb')
 const request = require('request')
 const async = require('async')
 const settings = require('settings-sharelatex')
@@ -18,7 +18,7 @@ const settings = require('settings-sharelatex')
 const logger = require('logger-sharelatex')
 const AWS = require('aws-sdk')
 const S3S = require('s3-streams')
-const { db, ObjectId } = require('./mongojs')
+const { db, ObjectId } = require('./mongodb')
 const JSONStream = require('JSONStream')
 const ReadlineStream = require('byline')
 const zlib = require('zlib')
@@ -187,7 +187,11 @@ module.exports = MongoAWS = {
       // allow the object to expire, we can always retrieve it again
       object.expiresAt = new Date(Date.now() + 7 * DAYS)
       logger.log({ project_id, doc_id, pack_id }, 'inserting object from s3')
-      return db.docHistory.insert(object, callback)
+      return db.docHistory.insertOne(object, (err, confirmation) => {
+        if (err) return callback(err)
+        object._id = confirmation.insertedId
+        callback(null, object)
+      })
     })
   }
 }
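Worth noting for the hunk above: the native driver's insertOne does not hand the stored document back to the callback the way mongojs's insert did, so the generated _id has to be read from the result's insertedId field. A minimal sketch of that pattern in isolation (the helper name is hypothetical):

// Hypothetical helper illustrating the insertOne result shape used above.
function insertPackSketch(db, object, callback) {
  db.docHistory.insertOne(object, function (err, result) {
    if (err) return callback(err)
    object._id = result.insertedId // set even when MongoDB generated the _id
    callback(null, object)
  })
}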
@@ -12,7 +12,7 @@
  * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
  */
 let MongoManager
-const { db, ObjectId } = require('./mongojs')
+const { db, ObjectId } = require('./mongodb')
 const PackManager = require('./PackManager')
 const async = require('async')
 const _ = require('underscore')
@@ -25,7 +25,11 @@ module.exports = MongoManager = {
       callback = function (error, update) {}
     }
     return db.docHistory
-      .find({ doc_id: ObjectId(doc_id.toString()) }, { pack: { $slice: -1 } }) // only return the last entry in a pack
+      .find(
+        { doc_id: ObjectId(doc_id.toString()) },
+        // only return the last entry in a pack
+        { projection: { pack: { $slice: -1 } } }
+      )
       .sort({ v: -1 })
       .limit(1)
       .toArray(function (error, compressedUpdates) {
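With the native driver, find() returns a cursor and takes the projection inside an options object, which is why reads throughout this diff gain an explicit toArray() step. A minimal sketch of the new read path under those assumptions (the helper name and id are placeholders):

// Hypothetical sketch: fetch the newest pack for a doc, keeping only the last entry of its pack array.
function getLastPackEntrySketch(db, ObjectId, doc_id, callback) {
  db.docHistory
    .find(
      { doc_id: ObjectId(doc_id) },                // filter
      { projection: { pack: { $slice: -1 } } }     // projection is now an option, not a second query argument
    )
    .sort({ v: -1 })
    .limit(1)
    .toArray(callback) // callback receives (error, arrayOfDocs)
}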
@@ -96,7 +100,7 @@ module.exports = MongoManager = {
     if (callback == null) {
       callback = function (error) {}
     }
-    return db.docHistory.update(
+    return db.docHistory.updateMany(
       {
         doc_id: ObjectId(doc_id.toString()),
         project_id: { $exists: false }
@@ -104,9 +108,6 @@ module.exports = MongoManager = {
       {
         $set: { project_id: ObjectId(project_id.toString()) }
       },
-      {
-        multi: true
-      },
       callback
     )
   },
@@ -115,16 +116,11 @@ module.exports = MongoManager = {
     if (callback == null) {
       callback = function (error, metadata) {}
     }
-    return db.projectHistoryMetaData.find(
+    return db.projectHistoryMetaData.findOne(
       {
         project_id: ObjectId(project_id.toString())
       },
-      function (error, results) {
-        if (error != null) {
-          return callback(error)
-        }
-        return callback(null, results[0])
-      }
+      callback
     )
   },
 
@@ -132,7 +128,7 @@ module.exports = MongoManager = {
     if (callback == null) {
       callback = function (error) {}
     }
-    return db.projectHistoryMetaData.update(
+    return db.projectHistoryMetaData.updateOne(
       {
         project_id: ObjectId(project_id)
       },
@@ -151,7 +147,7 @@ module.exports = MongoManager = {
     if (callback == null) {
       callback = function (error) {}
     }
-    return db.docHistory.update(
+    return db.docHistory.updateMany(
       {
         project_id: ObjectId(project_id),
         temporary: true,
@@ -161,9 +157,6 @@ module.exports = MongoManager = {
         $set: { temporary: false },
         $unset: { expiresAt: '' }
       },
-      {
-        multi: true
-      },
       callback
     )
   },
@@ -16,7 +16,9 @@
 let PackManager
 const async = require('async')
 const _ = require('underscore')
-const { db, ObjectId, BSON } = require('./mongojs')
+const Bson = require('bson')
+const BSON = new Bson()
+const { db, ObjectId } = require('./mongodb')
 const logger = require('logger-sharelatex')
 const LockManager = require('./LockManager')
 const MongoAWS = require('./MongoAWS')
@@ -218,7 +220,7 @@ module.exports = PackManager = {
       { project_id, doc_id, newUpdates },
       'inserting updates into new pack'
     )
-    return db.docHistory.save(newPack, function (err, result) {
+    return db.docHistory.insertOne(newPack, function (err) {
       if (err != null) {
         return callback(err)
       }
@@ -273,10 +275,7 @@ module.exports = PackManager = {
       'appending updates to existing pack'
     )
     Metrics.inc(`append-pack-${temporary ? 'temporary' : 'permanent'}`)
-    return db.docHistory.findAndModify(
-      { query, update, new: true, fields: { meta: 1, v_end: 1 } },
-      callback
-    )
+    return db.docHistory.updateOne(query, update, callback)
   },
 
   // Retrieve all changes for a document
@@ -301,7 +300,8 @@ module.exports = PackManager = {
     // console.log "query:", query
     return db.docHistory
       .find(query)
-      .sort({ v: -1 }, function (err, result) {
+      .sort({ v: -1 })
+      .toArray(function (err, result) {
         if (err != null) {
           return callback(err)
         }
@@ -380,20 +380,12 @@ module.exports = PackManager = {
 
   fetchPacksIfNeeded(project_id, doc_id, pack_ids, callback) {
-    let id
-    return db.docHistory.find(
-      {
-        _id: {
-          $in: (() => {
-            const result = []
-            for (id of Array.from(pack_ids)) {
-              result.push(ObjectId(id))
-            }
-            return result
-          })()
-        }
-      },
-      { _id: 1 },
-      function (err, loadedPacks) {
+    return db.docHistory
+      .find(
+        { _id: { $in: pack_ids.map(ObjectId) } },
+        { projection: { _id: 1 } }
+      )
+      .toArray(function (err, loadedPacks) {
         if (err != null) {
           return callback(err)
         }
@@ -428,8 +420,7 @@ module.exports = PackManager = {
             return callback()
           }
         )
-      }
-    )
+      })
   },
 
   // Retrieve all changes across a project
@@ -437,8 +428,12 @@ module.exports = PackManager = {
   makeProjectIterator(project_id, before, callback) {
     // get all the docHistory Entries
     return db.docHistory
-      .find({ project_id: ObjectId(project_id) }, { pack: false })
-      .sort({ 'meta.end_ts': -1 }, function (err, packs) {
+      .find(
+        { project_id: ObjectId(project_id) },
+        { projection: { pack: false } }
+      )
+      .sort({ 'meta.end_ts': -1 })
+      .toArray(function (err, packs) {
         let pack
         if (err != null) {
           return callback(err)
@@ -449,9 +444,9 @@ module.exports = PackManager = {
           allPacks.push(pack)
           seenIds[pack._id] = true
         }
-        return db.docHistoryIndex.find(
-          { project_id: ObjectId(project_id) },
-          function (err, indexes) {
+        return db.docHistoryIndex
+          .find({ project_id: ObjectId(project_id) })
+          .toArray(function (err, indexes) {
            if (err != null) {
              return callback(err)
            }
@@ -470,8 +465,7 @@ module.exports = PackManager = {
              null,
              new ProjectIterator(allPacks, before, PackManager.getPackById)
            )
-        }
-      )
          })
+      })
   },
 
@@ -497,11 +491,9 @@ module.exports = PackManager = {
   increaseTTL(pack, callback) {
     if (pack.expiresAt < new Date(Date.now() + 6 * DAYS)) {
       // update cache expiry since we are using this pack
-      return db.docHistory.findAndModify(
-        {
-          query: { _id: pack._id },
-          update: { $set: { expiresAt: new Date(Date.now() + 7 * DAYS) } }
-        },
+      return db.docHistory.updateOne(
+        { _id: pack._id },
+        { $set: { expiresAt: new Date(Date.now() + 7 * DAYS) } },
        (err) => callback(err, pack)
      )
    } else {
@@ -521,7 +513,7 @@ module.exports = PackManager = {
   getPackFromIndex(doc_id, pack_id, callback) {
     return db.docHistoryIndex.findOne(
       { _id: ObjectId(doc_id.toString()), 'packs._id': pack_id },
-      { 'packs.$': 1 },
+      { projection: { 'packs.$': 1 } },
       callback
     )
   },
@@ -529,7 +521,7 @@ module.exports = PackManager = {
   getLastPackFromIndex(doc_id, callback) {
     return db.docHistoryIndex.findOne(
       { _id: ObjectId(doc_id.toString()) },
-      { packs: { $slice: -1 } },
+      { projection: { packs: { $slice: -1 } } },
       function (err, indexPack) {
         if (err != null) {
           return callback(err)
@@ -616,8 +608,9 @@ module.exports = PackManager = {
       expiresAt: { $exists: false }
     }
     return db.docHistory
-      .find(query, { pack: false })
-      .sort({ v: 1 }, function (err, packs) {
+      .find(query, { projection: { pack: false } })
+      .sort({ v: 1 })
+      .toArray(function (err, packs) {
         if (err != null) {
           return callback(err)
         }
@@ -641,8 +634,9 @@ module.exports = PackManager = {
       expiresAt: { $exists: false }
     }
     return db.docHistory
-      .find(query, { pack: false })
-      .sort({ v: 1 }, function (err, packs) {
+      .find(query, { projection: { pack: false } })
+      .sort({ v: 1 })
+      .toArray(function (err, packs) {
         if (err != null) {
           return callback(err)
         }
@@ -727,15 +721,15 @@ module.exports = PackManager = {
   },
 
   _insertPacksIntoIndex(project_id, doc_id, newPacks, callback) {
-    return db.docHistoryIndex.findAndModify(
+    return db.docHistoryIndex.updateOne(
+      { _id: ObjectId(doc_id.toString()) },
+      {
+        $setOnInsert: { project_id: ObjectId(project_id.toString()) },
+        $push: {
+          packs: { $each: newPacks, $sort: { v: 1 } }
+        }
+      },
       {
-        query: { _id: ObjectId(doc_id.toString()) },
-        update: {
-          $setOnInsert: { project_id: ObjectId(project_id.toString()) },
-          $push: {
-            packs: { $each: newPacks, $sort: { v: 1 } }
-          }
-        },
         upsert: true
       },
       callback
@@ -993,11 +987,9 @@ module.exports = PackManager = {
 
   _markPackAsFinalised(project_id, doc_id, pack_id, callback) {
     logger.log({ project_id, doc_id, pack_id }, 'marking pack as finalised')
-    return db.docHistory.findAndModify(
-      {
-        query: { _id: pack_id },
-        update: { $set: { finalised: true } }
-      },
+    return db.docHistory.updateOne(
+      { _id: pack_id },
+      { $set: { finalised: true } },
       callback
     )
   },
@@ -1018,11 +1010,9 @@ module.exports = PackManager = {
 
   markPackAsChecked(project_id, doc_id, pack_id, callback) {
     logger.log({ project_id, doc_id, pack_id }, 'marking pack as checked')
-    return db.docHistory.findAndModify(
-      {
-        query: { _id: pack_id },
-        update: { $currentDate: { last_checked: true } }
-      },
+    return db.docHistory.updateOne(
+      { _id: pack_id },
+      { $currentDate: { last_checked: true } },
      callback
    )
  },
@@ -1085,20 +1075,18 @@ module.exports = PackManager = {
       { project_id, doc_id },
       'marking pack as archive in progress status'
     )
-    return db.docHistoryIndex.findAndModify(
+    return db.docHistoryIndex.findOneAndUpdate(
       {
-        query: {
-          _id: ObjectId(doc_id.toString()),
-          packs: { $elemMatch: { _id: pack_id, inS3: { $exists: false } } }
-        },
-        fields: { 'packs.$': 1 },
-        update: { $set: { 'packs.$.inS3': false } }
+        _id: ObjectId(doc_id.toString()),
+        packs: { $elemMatch: { _id: pack_id, inS3: { $exists: false } } }
       },
+      { $set: { 'packs.$.inS3': false } },
+      { projection: { 'packs.$': 1 } },
       function (err, result) {
         if (err != null) {
           return callback(err)
         }
-        if (result == null) {
+        if (!result.value) {
           return callback(new Error('archive is already in progress'))
         }
         logger.log(
@@ -1115,35 +1103,30 @@ module.exports = PackManager = {
       { project_id, doc_id, pack_id },
       'clearing as archive in progress'
     )
-    return db.docHistoryIndex.findAndModify(
+    return db.docHistoryIndex.updateOne(
       {
-        query: {
-          _id: ObjectId(doc_id.toString()),
-          packs: { $elemMatch: { _id: pack_id, inS3: false } }
-        },
-        fields: { 'packs.$': 1 },
-        update: { $unset: { 'packs.$.inS3': true } }
+        _id: ObjectId(doc_id.toString()),
+        packs: { $elemMatch: { _id: pack_id, inS3: false } }
       },
+      { $unset: { 'packs.$.inS3': true } },
       callback
     )
   },
 
   markPackAsArchived(project_id, doc_id, pack_id, callback) {
     logger.log({ project_id, doc_id, pack_id }, 'marking pack as archived')
-    return db.docHistoryIndex.findAndModify(
+    return db.docHistoryIndex.findOneAndUpdate(
       {
-        query: {
-          _id: ObjectId(doc_id.toString()),
-          packs: { $elemMatch: { _id: pack_id, inS3: false } }
-        },
-        fields: { 'packs.$': 1 },
-        update: { $set: { 'packs.$.inS3': true } }
+        _id: ObjectId(doc_id.toString()),
+        packs: { $elemMatch: { _id: pack_id, inS3: false } }
       },
+      { $set: { 'packs.$.inS3': true } },
+      { projection: { 'packs.$': 1 } },
       function (err, result) {
         if (err != null) {
           return callback(err)
         }
-        if (result == null) {
+        if (!result.value) {
           return callback(new Error('archive is not marked as progress'))
         }
         logger.log({ project_id, doc_id, pack_id }, 'marked as archived')
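The two hunks above also show why the result checks change: findOneAndUpdate in the native driver resolves with a wrapper object, and the matched document (if any) sits on its value property, whereas findAndModify's result was inspected directly. A hedged sketch of the shape being relied on (the helper name and ids are placeholders):

// Hypothetical sketch of the findOneAndUpdate result handling used above.
function markPackSketch(db, ObjectId, doc_id, pack_id, callback) {
  db.docHistoryIndex.findOneAndUpdate(
    { _id: ObjectId(doc_id), packs: { $elemMatch: { _id: pack_id } } },
    { $set: { 'packs.$.inS3': true } },
    { projection: { 'packs.$': 1 } },
    function (err, result) {
      if (err) return callback(err)
      if (!result.value) return callback(new Error('no matching pack')) // value is null when nothing matched
      callback(null, result.value) // by default, the document as it looked before the update
    }
  )
}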
@@ -1153,12 +1136,13 @@ module.exports = PackManager = {
   },
 
   setTTLOnArchivedPack(project_id, doc_id, pack_id, callback) {
-    return db.docHistory.findAndModify(
-      {
-        query: { _id: pack_id },
-        update: { $set: { expiresAt: new Date(Date.now() + 1 * DAYS) } }
-      },
+    return db.docHistory.updateOne(
+      { _id: pack_id },
+      { $set: { expiresAt: new Date(Date.now() + 1 * DAYS) } },
       function (err) {
+        if (err) {
+          return callback(err)
+        }
         logger.log({ project_id, doc_id, pack_id }, 'set expiry on pack')
         return callback()
       }
@@ -18,7 +18,7 @@ let project_id, doc_id
 const Settings = require('settings-sharelatex')
 const async = require('async')
 const _ = require('underscore')
-const { db, ObjectId, BSON } = require('./mongojs')
+const { db, ObjectId } = require('./mongodb')
 const fs = require('fs')
 const Metrics = require('metrics-sharelatex')
 Metrics.initialize('track-changes')
@@ -78,18 +78,6 @@ logger.log(
   `checking for updates, limit=${LIMIT}, delay=${DOCUMENT_PACK_DELAY}, timeout=${TIMEOUT}`
 )
 
-// work around for https://github.com/mafintosh/mongojs/issues/224
-db.close = function (callback) {
-  return this._getServer(function (err, server) {
-    if (err != null) {
-      return callback(err)
-    }
-    server = server.destroy != null ? server : server.topology
-    server.destroy(true, true)
-    return callback()
-  })
-}
-
 const finish = function () {
   if (shutDownTimer != null) {
     logger.log('cancelling timeout')
@@ -186,12 +174,13 @@ if (pending != null) {
         _id: { $lt: ObjectIdFromDate(oneWeekAgo) },
         last_checked: { $lt: oneWeekAgo }
       },
-      { _id: 1, doc_id: 1, project_id: 1 }
+      { projection: { _id: 1, doc_id: 1, project_id: 1 } }
     )
     .sort({
       last_checked: 1
     })
-    .limit(LIMIT, function (err, results) {
+    .limit(LIMIT)
+    .toArray(function (err, results) {
       if (err != null) {
         logger.log({ err }, 'error checking for updates')
         finish()
services/track-changes/app/js/mongodb.js (new file, 30 lines)
@@ -0,0 +1,30 @@
+const Settings = require('settings-sharelatex')
+const { MongoClient, ObjectId } = require('mongodb')
+
+const clientPromise = MongoClient.connect(
+  Settings.mongo.url,
+  Settings.mongo.options
+)
+
+let setupDbPromise
+async function waitForDb() {
+  if (!setupDbPromise) {
+    setupDbPromise = setupDb()
+  }
+  await setupDbPromise
+}
+
+const db = {}
+async function setupDb() {
+  const internalDb = (await clientPromise).db()
+
+  db.docHistory = internalDb.collection('docHistory')
+  db.docHistoryIndex = internalDb.collection('docHistoryIndex')
+  db.projectHistoryMetaData = internalDb.collection('projectHistoryMetaData')
+}
+
+module.exports = {
+  db,
+  ObjectId,
+  waitForDb
+}
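The new module connects lazily and only fills in the shared db object once waitForDb() resolves, so anything that touches db has to wait for that promise first; app.js at the top of this diff and the TrackChangesApp test helper further down do exactly that. A small usage sketch under the same assumption (the count query is illustrative only):

// Sketch of how a consumer of the new module is expected to start up.
const { db, ObjectId, waitForDb } = require('./app/js/mongodb')

waitForDb()
  .then(function () {
    // Only now are db.docHistory, db.docHistoryIndex and db.projectHistoryMetaData populated.
    return db.docHistory.countDocuments({ doc_id: ObjectId('000000000000000000000000') })
  })
  .then(function (n) {
    console.log('docHistory entries for placeholder id:', n)
  })
  .catch(function (err) {
    console.error('Cannot connect to mongo. Exiting.', err)
    process.exit(1)
  })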
services/track-changes/app/js/mongojs.js (deleted file)
@@ -1,15 +0,0 @@
-// TODO: This file was created by bulk-decaffeinate.
-// Sanity-check the conversion and remove this comment.
-const Settings = require('settings-sharelatex')
-const mongojs = require('mongojs')
-const bson = require('bson')
-const db = mongojs(Settings.mongo.url, [
-  'docHistory',
-  'projectHistoryMetaData',
-  'docHistoryIndex'
-])
-module.exports = {
-  db,
-  ObjectId: mongojs.ObjectId,
-  BSON: new bson.BSONPure()
-}
@@ -4,6 +4,10 @@ const TMP_DIR =
 
 module.exports = {
   mongo: {
+    options: {
+      useUnifiedTopology:
+        (process.env.MONGO_USE_UNIFIED_TOPOLOGY || 'true') === 'true'
+    },
     url:
       process.env.MONGO_CONNECTION_STRING ||
       `mongodb://${process.env.MONGO_HOST || 'localhost'}/sharelatex`
services/track-changes/package-lock.json (generated, 832 lines changed): file diff suppressed because it is too large.
@@ -22,7 +22,7 @@
     "async": "^2.6.3",
     "aws-sdk": "^2.643.0",
     "body-parser": "^1.19.0",
-    "bson": "^0.4.20",
+    "bson": "^1.1.5",
     "byline": "^5.0.0",
     "express": "4.17.1",
     "heap": "^0.2.6",
@@ -30,7 +30,7 @@
     "logger-sharelatex": "^2.2.0",
     "metrics-sharelatex": "^2.6.2",
     "mongo-uri": "^0.1.2",
-    "mongojs": "3.1.0",
+    "mongodb": "^3.6.0",
     "redis": "~0.10.1",
     "redis-sharelatex": "^1.0.13",
     "request": "~2.88.2",
@@ -14,8 +14,7 @@ const sinon = require('sinon')
 const chai = require('chai')
 chai.should()
 const { expect } = chai
-const mongojs = require('../../../app/js/mongojs')
-const { ObjectId } = mongojs
+const { ObjectId } = require('../../../app/js/mongodb')
 const Settings = require('settings-sharelatex')
 const request = require('request')
 const rclient = require('redis').createClient(Settings.redis.history) // Only works locally for now
@@ -18,9 +18,7 @@ const sinon = require('sinon')
 const chai = require('chai')
 chai.should()
 const { expect } = chai
-const mongojs = require('../../../app/js/mongojs')
-const { db } = mongojs
-const { ObjectId } = mongojs
+const { db, ObjectId } = require('../../../app/js/mongodb')
 const Settings = require('settings-sharelatex')
 const request = require('request')
 const rclient = require('redis').createClient(Settings.redis.history) // Only works locally for now
@@ -126,7 +124,7 @@ describe('Archiving updates', function () {
 
   after(function (done) {
     MockWebApi.getUserInfo.restore()
-    return db.docHistory.remove(
+    return db.docHistory.deleteMany(
       { project_id: ObjectId(this.project_id) },
       () => {
         return db.docHistoryIndex.remove(
@@ -172,7 +170,7 @@ describe('Archiving updates', function () {
     })
 
     it('should have one remaining pack after cache is expired', function (done) {
-      return db.docHistory.remove(
+      return db.docHistory.deleteMany(
        {
          doc_id: ObjectId(this.doc_id),
          expiresAt: { $exists: true }
@@ -14,8 +14,7 @@ const sinon = require('sinon')
 const chai = require('chai')
 chai.should()
 const { expect } = chai
-const mongojs = require('../../../app/js/mongojs')
-const { ObjectId } = mongojs
+const { ObjectId } = require('../../../app/js/mongodb')
 const Settings = require('settings-sharelatex')
 const request = require('request')
 const rclient = require('redis').createClient(Settings.redis.history) // Only works locally for now
@@ -13,9 +13,7 @@ const sinon = require('sinon')
 const chai = require('chai')
 chai.should()
 const { expect } = chai
-const mongojs = require('../../../app/js/mongojs')
-const { db } = mongojs
-const { ObjectId } = mongojs
+const { ObjectId } = require('../../../app/js/mongodb')
 const Settings = require('settings-sharelatex')
 
 const TrackChangesApp = require('./helpers/TrackChangesApp')
@@ -14,9 +14,7 @@ const sinon = require('sinon')
 const chai = require('chai')
 chai.should()
 const { expect } = chai
-const mongojs = require('../../../app/js/mongojs')
-const { db } = mongojs
-const { ObjectId } = mongojs
+const { ObjectId } = require('../../../app/js/mongodb')
 const Settings = require('settings-sharelatex')
 
 const TrackChangesApp = require('./helpers/TrackChangesApp')
@@ -13,8 +13,6 @@ const sinon = require('sinon')
 const chai = require('chai')
 chai.should()
 const { expect } = chai
-const mongojs = require('../../../app/js/mongojs')
-const { ObjectId } = mongojs
 const Settings = require('settings-sharelatex')
 const LockManager = require('../../../app/js/LockManager')
 const rclient = require('redis').createClient(Settings.redis.history) // Only works locally for now
@@ -13,9 +13,7 @@ const sinon = require('sinon')
 const chai = require('chai')
 chai.should()
 const { expect } = chai
-const mongojs = require('../../../app/js/mongojs')
-const { db } = mongojs
-const { ObjectId } = mongojs
+const { ObjectId } = require('../../../app/js/mongodb')
 const Settings = require('settings-sharelatex')
 
 const TrackChangesApp = require('./helpers/TrackChangesApp')
@@ -13,6 +13,7 @@
  * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
  */
 const app = require('../../../../app')
+const { waitForDb } = require('../../../../app/js/mongodb')
 require('logger-sharelatex')
 const logger = require('logger-sharelatex')
 const Settings = require('settings-sharelatex')
@@ -29,9 +30,10 @@ module.exports = {
       return callback()
     } else if (this.initing) {
       return this.callbacks.push(callback)
-    } else {
-      this.initing = true
-      this.callbacks.push(callback)
+    }
+    this.initing = true
+    this.callbacks.push(callback)
+    waitForDb().then(() => {
       return app.listen(
         __guard__(
           Settings.internal != null
@@ -56,7 +58,7 @@ module.exports = {
         })()
       }
     )
-  }
+    })
   }
 }
 function __guard__(value, transform) {
@@ -19,7 +19,7 @@ const request = require('request')
 const Settings = require('settings-sharelatex')
 const rclient = require('redis-sharelatex').createClient(Settings.redis.history) // Only works locally for now
 const Keys = Settings.redis.history.key_schema
-const { db, ObjectId } = require('../../../../app/js/mongojs')
+const { db, ObjectId } = require('../../../../app/js/mongodb')
 
 const aws = require('aws-sdk')
 const s3 = new aws.S3({
@@ -87,11 +87,11 @@ module.exports = TrackChangesClient = {
     if (callback == null) {
       callback = function (error, updates) {}
     }
-    return db.projectHistoryMetaData.find(
+    return db.projectHistoryMetaData.findOne(
       {
         project_id: ObjectId(project_id)
       },
-      (error, projects) => callback(error, projects[0])
+      callback
     )
   },
 
@@ -99,7 +99,7 @@ module.exports = TrackChangesClient = {
     if (callback == null) {
       callback = function (error) {}
     }
-    return db.projectHistoryMetaData.update(
+    return db.projectHistoryMetaData.updateOne(
       {
         project_id: ObjectId(project_id)
       },
@@ -14,7 +14,7 @@ chai.should()
 const sinon = require('sinon')
 const modulePath = '../../../../app/js/MongoAWS.js'
 const SandboxedModule = require('sandboxed-module')
-const { ObjectId } = require('mongojs')
+const { ObjectId } = require('mongodb')
 const MemoryStream = require('memorystream')
 const zlib = require('zlib')
 
@@ -44,7 +44,7 @@ describe('MongoAWS', function () {
         'aws-sdk': (this.awssdk = {}),
         fs: (this.fs = {}),
         's3-streams': (this.S3S = {}),
-        './mongojs': { db: (this.db = {}), ObjectId },
+        './mongodb': { db: (this.db = {}), ObjectId },
         JSONStream: (this.JSONStream = {}),
         'readline-stream': (this.readline = sinon.stub()),
         'metrics-sharelatex': { inc() {} }
@@ -92,7 +92,9 @@ describe('MongoAWS', function () {
       this.S3S.ReadStream = () =>
         MemoryStream.createReadStream(zbuf, { readable: true })
       this.db.docHistory = {}
-      this.db.docHistory.insert = sinon.stub().callsArgWith(1, null, 'pack')
+      this.db.docHistory.insertOne = sinon
+        .stub()
+        .yields(null, { insertedId: ObjectId() })
 
       return this.MongoAWS.unArchivePack(
         this.project_id,
@@ -107,7 +109,7 @@ describe('MongoAWS', function () {
     })
 
     return it('should call db.docHistory.insert', function () {
-      return this.db.docHistory.insert.called.should.equal(true)
+      return this.db.docHistory.insertOne.called.should.equal(true)
     })
   })
 })
@@ -16,7 +16,7 @@ const { expect } = chai
 const modulePath = '../../../../app/js/MongoManager.js'
 const packModulePath = '../../../../app/js/PackManager.js'
 const SandboxedModule = require('sandboxed-module')
-const { ObjectId } = require('mongojs')
+const { ObjectId } = require('mongodb')
 const tk = require('timekeeper')
 
 describe('MongoManager', function () {
@@ -24,7 +24,7 @@ describe('MongoManager', function () {
     tk.freeze(new Date())
     this.MongoManager = SandboxedModule.require(modulePath, {
       requires: {
-        './mongojs': { db: (this.db = {}), ObjectId },
+        './mongodb': { db: (this.db = {}), ObjectId },
         './PackManager': (this.PackManager = {}),
         'metrics-sharelatex': { timeAsyncMethod() {} },
         'logger-sharelatex': { log() {} }
@@ -156,7 +156,7 @@ describe('MongoManager', function () {
 
   describe('backportProjectId', function () {
     beforeEach(function () {
-      this.db.docHistory = { update: sinon.stub().callsArg(3) }
+      this.db.docHistory = { updateMany: sinon.stub().yields() }
       return this.MongoManager.backportProjectId(
         this.project_id,
         this.doc_id,
@@ -165,7 +165,7 @@ describe('MongoManager', function () {
     })
 
    it("should insert the project_id into all entries for the doc_id which don't have it set", function () {
-      return this.db.docHistory.update
+      return this.db.docHistory.updateMany
        .calledWith(
          {
            doc_id: ObjectId(this.doc_id),
@@ -173,9 +173,6 @@ describe('MongoManager', function () {
          },
          {
            $set: { project_id: ObjectId(this.project_id) }
          },
-          {
-            multi: true
-          }
        )
        .should.equal(true)
@@ -190,7 +187,7 @@ describe('MongoManager', function () {
    beforeEach(function () {
      this.metadata = { mock: 'metadata' }
      this.db.projectHistoryMetaData = {
-        find: sinon.stub().callsArgWith(1, null, [this.metadata])
+        findOne: sinon.stub().callsArgWith(1, null, this.metadata)
      }
      return this.MongoManager.getProjectMetaData(
        this.project_id,
@@ -199,7 +196,7 @@ describe('MongoManager', function () {
    })

    it('should look up the meta data in the db', function () {
-      return this.db.projectHistoryMetaData.find
+      return this.db.projectHistoryMetaData.findOne
        .calledWith({ project_id: ObjectId(this.project_id) })
        .should.equal(true)
    })
@@ -213,7 +210,7 @@ describe('MongoManager', function () {
    beforeEach(function () {
      this.metadata = { mock: 'metadata' }
      this.db.projectHistoryMetaData = {
-        update: sinon.stub().callsArgWith(3, null, [this.metadata])
+        updateOne: sinon.stub().yields()
      }
      return this.MongoManager.setProjectMetaData(
        this.project_id,
@@ -223,7 +220,7 @@ describe('MongoManager', function () {
    })

    it('should upsert the metadata into the DB', function () {
-      return this.db.projectHistoryMetaData.update
+      return this.db.projectHistoryMetaData.updateOne
        .calledWith(
          {
            project_id: ObjectId(this.project_id)
@@ -17,9 +17,7 @@ const should = chai.should()
 const { expect } = chai
 const modulePath = '../../../../app/js/PackManager.js'
 const SandboxedModule = require('sandboxed-module')
-const { ObjectId } = require('mongojs')
-const bson = require('bson')
-const BSON = new bson.BSONPure()
+const { ObjectId } = require('mongodb')
 const _ = require('underscore')
 
 const tk = require('timekeeper')
@@ -29,7 +27,8 @@ describe('PackManager', function () {
     tk.freeze(new Date())
     this.PackManager = SandboxedModule.require(modulePath, {
       requires: {
-        './mongojs': { db: (this.db = {}), ObjectId, BSON },
+        bson: require('bson'),
+        './mongodb': { db: (this.db = {}), ObjectId },
         './LockManager': {},
         './MongoAWS': {},
         'logger-sharelatex': { log: sinon.stub(), error: sinon.stub() },
@@ -66,8 +65,9 @@ describe('PackManager', function () {
       { op: 'op-4', meta: 'meta-4', v: 4 }
     ]
     return (this.db.docHistory = {
-      save: sinon.stub().callsArg(1),
+      insertOne: sinon.stub().yields(),
       insert: sinon.stub().callsArg(1),
+      updateOne: sinon.stub().yields(),
       findAndModify: sinon.stub().callsArg(1)
     })
   })
@@ -390,7 +390,7 @@ describe('PackManager', function () {
 
     return describe('for a small update that will expire', function () {
       it('should insert the update into mongo', function () {
-        return this.db.docHistory.save
+        return this.db.docHistory.insertOne
          .calledWithMatch({
            pack: this.newUpdates,
            project_id: ObjectId(this.project_id),
@@ -403,7 +403,7 @@ describe('PackManager', function () {
      })

      it('should set an expiry time in the future', function () {
-        return this.db.docHistory.save
+        return this.db.docHistory.insertOne
          .calledWithMatch({
            expiresAt: new Date(Date.now() + 7 * 24 * 3600 * 1000)
          })
@@ -443,23 +443,21 @@ describe('PackManager', function () {
 
     return describe('for a small update that will expire', function () {
       it('should append the update in mongo', function () {
-        return this.db.docHistory.findAndModify
-          .calledWithMatch({
-            query: { _id: this.lastUpdate._id },
-            update: {
+        return this.db.docHistory.updateOne
+          .calledWithMatch(
+            { _id: this.lastUpdate._id },
+            {
              $push: { pack: { $each: this.newUpdates } },
              $set: { v_end: this.newUpdates[this.newUpdates.length - 1].v }
            }
-          })
+          )
          .should.equal(true)
      })

      it('should set an expiry time in the future', function () {
-        return this.db.docHistory.findAndModify
-          .calledWithMatch({
-            update: {
-              $set: { expiresAt: new Date(Date.now() + 7 * 24 * 3600 * 1000) }
-            }
+        return this.db.docHistory.updateOne
+          .calledWithMatch(sinon.match.any, {
+            $set: { expiresAt: new Date(Date.now() + 7 * 24 * 3600 * 1000) }
          })
          .should.equal(true)
      })
@@ -498,7 +496,7 @@ describe('PackManager', function () {
 
     return describe('for a small update that will not expire', function () {
       it('should insert the update into mongo', function () {
-        return this.db.docHistory.save
+        return this.db.docHistory.insertOne
          .calledWithMatch({
            pack: this.newUpdates,
            project_id: ObjectId(this.project_id),
@@ -511,7 +509,7 @@ describe('PackManager', function () {
      })

      it('should not set any expiry time', function () {
-        return this.db.docHistory.save
+        return this.db.docHistory.insertOne
          .neverCalledWithMatch(sinon.match.has('expiresAt'))
          .should.equal(true)
      })
@@ -548,7 +546,7 @@ describe('PackManager', function () {
 
     return describe('for a small update that will expire', function () {
       it('should insert the update into mongo', function () {
-        return this.db.docHistory.save
+        return this.db.docHistory.insertOne
          .calledWithMatch({
            pack: this.newUpdates,
            project_id: ObjectId(this.project_id),
@@ -561,7 +559,7 @@ describe('PackManager', function () {
      })

      it('should set an expiry time in the future', function () {
-        return this.db.docHistory.save
+        return this.db.docHistory.insertOne
          .calledWithMatch({
            expiresAt: new Date(Date.now() + 7 * 24 * 3600 * 1000)
          })
@@ -17,6 +17,7 @@ const sinon = require('sinon')
 const chai = require('chai')
 const should = chai.should()
 const { expect } = chai
+const { ObjectId } = require('mongodb')
 const modulePath = '../../../../app/js/UpdatesManager.js'
 const SandboxedModule = require('sandboxed-module')
 
@@ -864,7 +865,6 @@ describe('UpdatesManager', function () {
   describe('fillUserInfo', function () {
     describe('with valid users', function () {
       beforeEach(function (done) {
-        const { ObjectId } = require('mongojs')
         this.user_id_1 = ObjectId().toString()
         this.user_id_2 = ObjectId().toString()
         this.updates = [