diff --git a/services/docstore/app/coffee/DocArchiveManager.js b/services/docstore/app/coffee/DocArchiveManager.js index 1fb6fb2657..e3c048558e 100644 --- a/services/docstore/app/coffee/DocArchiveManager.js +++ b/services/docstore/app/coffee/DocArchiveManager.js @@ -1,156 +1,208 @@ -MongoManager = require "./MongoManager" -Errors = require "./Errors" -logger = require "logger-sharelatex" -_ = require "underscore" -async = require "async" -settings = require("settings-sharelatex") -request = require("request") -crypto = require("crypto") -RangeManager = require("./RangeManager") -thirtySeconds = 30 * 1000 +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let DocArchive; +const MongoManager = require("./MongoManager"); +const Errors = require("./Errors"); +const logger = require("logger-sharelatex"); +const _ = require("underscore"); +const async = require("async"); +const settings = require("settings-sharelatex"); +const request = require("request"); +const crypto = require("crypto"); +const RangeManager = require("./RangeManager"); +const thirtySeconds = 30 * 1000; -module.exports = DocArchive = +module.exports = (DocArchive = { - archiveAllDocs: (project_id, callback = (err, docs) ->) -> - MongoManager.getProjectsDocs project_id, {include_deleted: true}, {lines: true, ranges: true, rev: true, inS3: true}, (err, docs) -> - if err? - return callback(err) - else if !docs? 
- return callback new Errors.NotFoundError("No docs for project #{project_id}") - docs = _.filter docs, (doc)-> doc.inS3 != true - jobs = _.map docs, (doc) -> - (cb)-> - DocArchive.archiveDoc project_id, doc, cb - async.parallelLimit jobs, 5, callback + archiveAllDocs(project_id, callback) { + if (callback == null) { callback = function(err, docs) {}; } + return MongoManager.getProjectsDocs(project_id, {include_deleted: true}, {lines: true, ranges: true, rev: true, inS3: true}, function(err, docs) { + if (err != null) { + return callback(err); + } else if ((docs == null)) { + return callback(new Errors.NotFoundError(`No docs for project ${project_id}`)); + } + docs = _.filter(docs, doc => doc.inS3 !== true); + const jobs = _.map(docs, doc => cb => DocArchive.archiveDoc(project_id, doc, cb)); + return async.parallelLimit(jobs, 5, callback); + }); + }, - archiveDoc: (project_id, doc, callback)-> - logger.log project_id: project_id, doc_id: doc._id, "sending doc to s3" - try - options = DocArchive.buildS3Options(project_id+"/"+doc._id) - catch e - return callback e - DocArchive._mongoDocToS3Doc doc, (error, json_doc) -> - return callback(error) if error? - options.body = json_doc - options.headers = - 'Content-Type': "application/json" - request.put options, (err, res) -> - if err? || res.statusCode != 200 - logger.err err:err, res:res, project_id:project_id, doc_id: doc._id, statusCode: res?.statusCode, "something went wrong archiving doc in aws" - return callback new Error("Error in S3 request") - md5lines = crypto.createHash("md5").update(json_doc, "utf8").digest("hex") - md5response = res.headers.etag.toString().replace(/\"/g, '') - if md5lines != md5response - logger.err responseMD5:md5response, linesMD5:md5lines, project_id:project_id, doc_id: doc?._id, "err in response md5 from s3" - return callback new Error("Error in S3 md5 response") - MongoManager.markDocAsArchived doc._id, doc.rev, (err) -> - return callback(err) if err? 
- callback() - - unArchiveAllDocs: (project_id, callback = (err) ->) -> - MongoManager.getArchivedProjectDocs project_id, (err, docs) -> - if err? - logger.err err:err, project_id:project_id, "error unarchiving all docs" - return callback(err) - else if !docs? - return callback new Errors.NotFoundError("No docs for project #{project_id}") - jobs = _.map docs, (doc) -> - (cb)-> - if !doc.inS3? - return cb() - else - DocArchive.unarchiveDoc project_id, doc._id, cb - async.parallelLimit jobs, 5, callback - - unarchiveDoc: (project_id, doc_id, callback)-> - logger.log project_id: project_id, doc_id: doc_id, "getting doc from s3" - try - options = DocArchive.buildS3Options(project_id+"/"+doc_id) - catch e - return callback e - options.json = true - request.get options, (err, res, doc)-> - if err? || res.statusCode != 200 - logger.err err:err, res:res, project_id:project_id, doc_id:doc_id, "something went wrong unarchiving doc from aws" - return callback new Errors.NotFoundError("Error in S3 request") - DocArchive._s3DocToMongoDoc doc, (error, mongo_doc) -> - return callback(error) if error? - MongoManager.upsertIntoDocCollection project_id, doc_id.toString(), mongo_doc, (err) -> - return callback(err) if err? - logger.log project_id: project_id, doc_id: doc_id, "deleting doc from s3" - DocArchive._deleteDocFromS3 project_id, doc_id, callback - - destroyAllDocs: (project_id, callback = (err) ->) -> - MongoManager.getProjectsDocs project_id, {include_deleted: true}, {_id: 1}, (err, docs) -> - if err? - logger.err err:err, project_id:project_id, "error getting project's docs" - return callback(err) - else if !docs? 
- return callback() - jobs = _.map docs, (doc) -> - (cb)-> - DocArchive.destroyDoc(project_id, doc._id, cb) - async.parallelLimit jobs, 5, callback - - destroyDoc: (project_id, doc_id, callback)-> - logger.log project_id: project_id, doc_id: doc_id, "removing doc from mongo and s3" - MongoManager.findDoc project_id, doc_id, {inS3: 1}, (error, doc) -> - return callback error if error? - return callback new Errors.NotFoundError("Doc not found in Mongo") unless doc? - if doc.inS3 == true - DocArchive._deleteDocFromS3 project_id, doc_id, (err) -> - return err if err? - MongoManager.destroyDoc doc_id, callback - else - MongoManager.destroyDoc doc_id, callback - - _deleteDocFromS3: (project_id, doc_id, callback) -> - try - options = DocArchive.buildS3Options(project_id+"/"+doc_id) - catch e - return callback e - options.json = true - request.del options, (err, res, body)-> - if err? || res.statusCode != 204 - logger.err err:err, res:res, project_id:project_id, doc_id:doc_id, "something went wrong deleting doc from aws" - return callback new Error("Error in S3 request") - callback() - - _s3DocToMongoDoc: (doc, callback = (error, mongo_doc) ->) -> - mongo_doc = {} - if doc.schema_v == 1 and doc.lines? - mongo_doc.lines = doc.lines - if doc.ranges? - mongo_doc.ranges = RangeManager.jsonRangesToMongo(doc.ranges) - else if doc instanceof Array - mongo_doc.lines = doc - else - return callback(new Error("I don't understand the doc format in s3")) - return callback null, mongo_doc - - _mongoDocToS3Doc: (doc, callback = (error, s3_doc) ->) -> - if !doc.lines? - return callback(new Error("doc has no lines")) - json = JSON.stringify({ - lines: doc.lines - ranges: doc.ranges - schema_v: 1 - }) - if json.indexOf("\u0000") != -1 - error = new Error("null bytes detected") - logger.err {err: error, doc, json}, error.message - return callback(error) - return callback null, json - - buildS3Options: (key)-> - if !settings.docstore.s3? 
- throw new Error("S3 settings are not configured") - return { - aws: - key: settings.docstore.s3.key - secret: settings.docstore.s3.secret - bucket: settings.docstore.s3.bucket - timeout: thirtySeconds - uri:"https://#{settings.docstore.s3.bucket}.s3.amazonaws.com/#{key}" + archiveDoc(project_id, doc, callback){ + let options; + logger.log({project_id, doc_id: doc._id}, "sending doc to s3"); + try { + options = DocArchive.buildS3Options(project_id+"/"+doc._id); + } catch (e) { + return callback(e); } + return DocArchive._mongoDocToS3Doc(doc, function(error, json_doc) { + if (error != null) { return callback(error); } + options.body = json_doc; + options.headers = + {'Content-Type': "application/json"}; + return request.put(options, function(err, res) { + if ((err != null) || (res.statusCode !== 200)) { + logger.err({err, res, project_id, doc_id: doc._id, statusCode: (res != null ? res.statusCode : undefined)}, "something went wrong archiving doc in aws"); + return callback(new Error("Error in S3 request")); + } + const md5lines = crypto.createHash("md5").update(json_doc, "utf8").digest("hex"); + const md5response = res.headers.etag.toString().replace(/\"/g, ''); + if (md5lines !== md5response) { + logger.err({responseMD5:md5response, linesMD5:md5lines, project_id, doc_id: (doc != null ? 
doc._id : undefined)}, "err in response md5 from s3"); + return callback(new Error("Error in S3 md5 response")); + } + return MongoManager.markDocAsArchived(doc._id, doc.rev, function(err) { + if (err != null) { return callback(err); } + return callback(); + }); + }); + }); + }, + + unArchiveAllDocs(project_id, callback) { + if (callback == null) { callback = function(err) {}; } + return MongoManager.getArchivedProjectDocs(project_id, function(err, docs) { + if (err != null) { + logger.err({err, project_id}, "error unarchiving all docs"); + return callback(err); + } else if ((docs == null)) { + return callback(new Errors.NotFoundError(`No docs for project ${project_id}`)); + } + const jobs = _.map(docs, doc => (function(cb) { + if ((doc.inS3 == null)) { + return cb(); + } else { + return DocArchive.unarchiveDoc(project_id, doc._id, cb); + } + })); + return async.parallelLimit(jobs, 5, callback); + }); + }, + + unarchiveDoc(project_id, doc_id, callback){ + let options; + logger.log({project_id, doc_id}, "getting doc from s3"); + try { + options = DocArchive.buildS3Options(project_id+"/"+doc_id); + } catch (e) { + return callback(e); + } + options.json = true; + return request.get(options, function(err, res, doc){ + if ((err != null) || (res.statusCode !== 200)) { + logger.err({err, res, project_id, doc_id}, "something went wrong unarchiving doc from aws"); + return callback(new Errors.NotFoundError("Error in S3 request")); + } + return DocArchive._s3DocToMongoDoc(doc, function(error, mongo_doc) { + if (error != null) { return callback(error); } + return MongoManager.upsertIntoDocCollection(project_id, doc_id.toString(), mongo_doc, function(err) { + if (err != null) { return callback(err); } + logger.log({project_id, doc_id}, "deleting doc from s3"); + return DocArchive._deleteDocFromS3(project_id, doc_id, callback); + }); + }); + }); + }, + + destroyAllDocs(project_id, callback) { + if (callback == null) { callback = function(err) {}; } + return 
MongoManager.getProjectsDocs(project_id, {include_deleted: true}, {_id: 1}, function(err, docs) { + if (err != null) { + logger.err({err, project_id}, "error getting project's docs"); + return callback(err); + } else if ((docs == null)) { + return callback(); + } + const jobs = _.map(docs, doc => cb => DocArchive.destroyDoc(project_id, doc._id, cb)); + return async.parallelLimit(jobs, 5, callback); + }); + }, + + destroyDoc(project_id, doc_id, callback){ + logger.log({project_id, doc_id}, "removing doc from mongo and s3"); + return MongoManager.findDoc(project_id, doc_id, {inS3: 1}, function(error, doc) { + if (error != null) { return callback(error); } + if (doc == null) { return callback(new Errors.NotFoundError("Doc not found in Mongo")); } + if (doc.inS3 === true) { + return DocArchive._deleteDocFromS3(project_id, doc_id, function(err) { + if (err != null) { return err; } + return MongoManager.destroyDoc(doc_id, callback); + }); + } else { + return MongoManager.destroyDoc(doc_id, callback); + } + }); + }, + + _deleteDocFromS3(project_id, doc_id, callback) { + let options; + try { + options = DocArchive.buildS3Options(project_id+"/"+doc_id); + } catch (e) { + return callback(e); + } + options.json = true; + return request.del(options, function(err, res, body){ + if ((err != null) || (res.statusCode !== 204)) { + logger.err({err, res, project_id, doc_id}, "something went wrong deleting doc from aws"); + return callback(new Error("Error in S3 request")); + } + return callback(); + }); + }, + + _s3DocToMongoDoc(doc, callback) { + if (callback == null) { callback = function(error, mongo_doc) {}; } + const mongo_doc = {}; + if ((doc.schema_v === 1) && (doc.lines != null)) { + mongo_doc.lines = doc.lines; + if (doc.ranges != null) { + mongo_doc.ranges = RangeManager.jsonRangesToMongo(doc.ranges); + } + } else if (doc instanceof Array) { + mongo_doc.lines = doc; + } else { + return callback(new Error("I don't understand the doc format in s3")); + } + return 
callback(null, mongo_doc); + }, + + _mongoDocToS3Doc(doc, callback) { + if (callback == null) { callback = function(error, s3_doc) {}; } + if ((doc.lines == null)) { + return callback(new Error("doc has no lines")); + } + const json = JSON.stringify({ + lines: doc.lines, + ranges: doc.ranges, + schema_v: 1 + }); + if (json.indexOf("\u0000") !== -1) { + const error = new Error("null bytes detected"); + logger.err({err: error, doc, json}, error.message); + return callback(error); + } + return callback(null, json); + }, + + buildS3Options(key){ + if ((settings.docstore.s3 == null)) { + throw new Error("S3 settings are not configured"); + } + return { + aws: { + key: settings.docstore.s3.key, + secret: settings.docstore.s3.secret, + bucket: settings.docstore.s3.bucket + }, + timeout: thirtySeconds, + uri:`https://${settings.docstore.s3.bucket}.s3.amazonaws.com/${key}` + }; + } +}); diff --git a/services/docstore/app/coffee/DocManager.js b/services/docstore/app/coffee/DocManager.js index be55a23e44..8bb86d8eb3 100644 --- a/services/docstore/app/coffee/DocManager.js +++ b/services/docstore/app/coffee/DocManager.js @@ -1,130 +1,184 @@ -MongoManager = require "./MongoManager" -Errors = require "./Errors" -logger = require "logger-sharelatex" -_ = require "underscore" -DocArchive = require "./DocArchiveManager" -RangeManager = require "./RangeManager" +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let DocManager; +const MongoManager = require("./MongoManager"); +const Errors = require("./Errors"); +const logger = require("logger-sharelatex"); +const _ = require("underscore"); +const DocArchive = require("./DocArchiveManager"); +const RangeManager = require("./RangeManager"); -module.exports = DocManager = +module.exports = (DocManager = { - # TODO: For historical 
reasons, the doc version is currently stored in the docOps - # collection (which is all that this collection contains). In future, we should - # migrate this version property to be part of the docs collection, to guarantee - # consitency between lines and version when writing/reading, and for a simpler schema. - _getDoc: (project_id, doc_id, filter = {}, callback = (error, doc) ->) -> - if filter.inS3 != true - return callback("must include inS3 when getting doc") + // TODO: For historical reasons, the doc version is currently stored in the docOps + // collection (which is all that this collection contains). In future, we should + // migrate this version property to be part of the docs collection, to guarantee + // consitency between lines and version when writing/reading, and for a simpler schema. + _getDoc(project_id, doc_id, filter, callback) { + if (filter == null) { filter = {}; } + if (callback == null) { callback = function(error, doc) {}; } + if (filter.inS3 !== true) { + return callback("must include inS3 when getting doc"); + } - MongoManager.findDoc project_id, doc_id, filter, (err, doc)-> - if err? - return callback(err) - else if !doc? - return callback new Errors.NotFoundError("No such doc: #{doc_id} in project #{project_id}") - else if doc?.inS3 - DocArchive.unarchiveDoc project_id, doc_id, (err)-> - if err? - logger.err err:err, project_id:project_id, doc_id:doc_id, "error unarchiving doc" - return callback(err) - DocManager._getDoc project_id, doc_id, filter, callback - else - if filter.version - MongoManager.getDocVersion doc_id, (error, version) -> - return callback(error) if error? - doc.version = version - callback err, doc - else - callback err, doc + return MongoManager.findDoc(project_id, doc_id, filter, function(err, doc){ + if (err != null) { + return callback(err); + } else if ((doc == null)) { + return callback(new Errors.NotFoundError(`No such doc: ${doc_id} in project ${project_id}`)); + } else if ((doc != null ? 
doc.inS3 : undefined)) { + return DocArchive.unarchiveDoc(project_id, doc_id, function(err){ + if (err != null) { + logger.err({err, project_id, doc_id}, "error unarchiving doc"); + return callback(err); + } + return DocManager._getDoc(project_id, doc_id, filter, callback); + }); + } else { + if (filter.version) { + return MongoManager.getDocVersion(doc_id, function(error, version) { + if (error != null) { return callback(error); } + doc.version = version; + return callback(err, doc); + }); + } else { + return callback(err, doc); + } + } + }); + }, - checkDocExists: (project_id, doc_id, callback = (err, exists)->)-> - DocManager._getDoc project_id, doc_id, {_id:1, inS3:true}, (err, doc)-> - if err? - return callback(err) - callback(err, doc?) + checkDocExists(project_id, doc_id, callback){ + if (callback == null) { callback = function(err, exists){}; } + return DocManager._getDoc(project_id, doc_id, {_id:1, inS3:true}, function(err, doc){ + if (err != null) { + return callback(err); + } + return callback(err, (doc != null)); + }); + }, - getFullDoc: (project_id, doc_id, callback = (err, doc)->)-> - DocManager._getDoc project_id, doc_id, {lines: true, rev: true, deleted: true, version: true, ranges: true, inS3:true}, (err, doc)-> - if err? - return callback(err) - callback(err, doc) + getFullDoc(project_id, doc_id, callback){ + if (callback == null) { callback = function(err, doc){}; } + return DocManager._getDoc(project_id, doc_id, {lines: true, rev: true, deleted: true, version: true, ranges: true, inS3:true}, function(err, doc){ + if (err != null) { + return callback(err); + } + return callback(err, doc); + }); + }, - getDocLines: (project_id, doc_id, callback = (err, doc)->)-> - DocManager._getDoc project_id, doc_id, {lines:true, inS3:true}, (err, doc)-> - if err? 
- return callback(err) - callback(err, doc) + getDocLines(project_id, doc_id, callback){ + if (callback == null) { callback = function(err, doc){}; } + return DocManager._getDoc(project_id, doc_id, {lines:true, inS3:true}, function(err, doc){ + if (err != null) { + return callback(err); + } + return callback(err, doc); + }); + }, - getAllNonDeletedDocs: (project_id, filter, callback = (error, docs) ->) -> - DocArchive.unArchiveAllDocs project_id, (error) -> - if error? - return callback(error) - MongoManager.getProjectsDocs project_id, {include_deleted: false}, filter, (error, docs) -> - if err? - return callback(error) - else if !docs? - return callback new Errors.NotFoundError("No docs for project #{project_id}") - else - return callback(null, docs) + getAllNonDeletedDocs(project_id, filter, callback) { + if (callback == null) { callback = function(error, docs) {}; } + return DocArchive.unArchiveAllDocs(project_id, function(error) { + if (error != null) { + return callback(error); + } + return MongoManager.getProjectsDocs(project_id, {include_deleted: false}, filter, function(error, docs) { + if (typeof err !== 'undefined' && err !== null) { + return callback(error); + } else if ((docs == null)) { + return callback(new Errors.NotFoundError(`No docs for project ${project_id}`)); + } else { + return callback(null, docs); + } + }); + }); + }, - updateDoc: (project_id, doc_id, lines, version, ranges, callback = (error, modified, rev) ->) -> - if !lines? or !version? or !ranges? - return callback(new Error("no lines, version or ranges provided")) + updateDoc(project_id, doc_id, lines, version, ranges, callback) { + if (callback == null) { callback = function(error, modified, rev) {}; } + if ((lines == null) || (version == null) || (ranges == null)) { + return callback(new Error("no lines, version or ranges provided")); + } - DocManager._getDoc project_id, doc_id, {version: true, rev: true, lines: true, version: true, ranges: true, inS3:true}, (err, doc)-> - if err? 
and !(err instanceof Errors.NotFoundError) - logger.err project_id: project_id, doc_id: doc_id, err:err, "error getting document for update" - return callback(err) + return DocManager._getDoc(project_id, doc_id, {version: true, rev: true, lines: true, version: true, ranges: true, inS3:true}, function(err, doc){ + let updateLines, updateRanges, updateVersion; + if ((err != null) && !(err instanceof Errors.NotFoundError)) { + logger.err({project_id, doc_id, err}, "error getting document for update"); + return callback(err); + } - ranges = RangeManager.jsonRangesToMongo(ranges) + ranges = RangeManager.jsonRangesToMongo(ranges); - if !doc? - # If the document doesn't exist, we'll make sure to create/update all parts of it. - updateLines = true - updateVersion = true - updateRanges = true - else - updateLines = not _.isEqual(doc.lines, lines) - updateVersion = (doc.version != version) - updateRanges = RangeManager.shouldUpdateRanges(doc.ranges, ranges) + if ((doc == null)) { + // If the document doesn't exist, we'll make sure to create/update all parts of it. + updateLines = true; + updateVersion = true; + updateRanges = true; + } else { + updateLines = !_.isEqual(doc.lines, lines); + updateVersion = (doc.version !== version); + updateRanges = RangeManager.shouldUpdateRanges(doc.ranges, ranges); + } - modified = false - rev = doc?.rev || 0 + let modified = false; + let rev = (doc != null ? 
doc.rev : undefined) || 0; - updateLinesAndRangesIfNeeded = (cb) -> - if updateLines or updateRanges - update = {} - if updateLines - update.lines = lines - if updateRanges - update.ranges = ranges - logger.log { project_id, doc_id }, "updating doc lines and ranges" + const updateLinesAndRangesIfNeeded = function(cb) { + if (updateLines || updateRanges) { + const update = {}; + if (updateLines) { + update.lines = lines; + } + if (updateRanges) { + update.ranges = ranges; + } + logger.log({ project_id, doc_id }, "updating doc lines and ranges"); - modified = true - rev += 1 # rev will be incremented in mongo by MongoManager.upsertIntoDocCollection - MongoManager.upsertIntoDocCollection project_id, doc_id, update, cb - else - logger.log { project_id, doc_id, }, "doc lines have not changed - not updating" - cb() + modified = true; + rev += 1; // rev will be incremented in mongo by MongoManager.upsertIntoDocCollection + return MongoManager.upsertIntoDocCollection(project_id, doc_id, update, cb); + } else { + logger.log({ project_id, doc_id, }, "doc lines have not changed - not updating"); + return cb(); + } + }; - updateVersionIfNeeded = (cb) -> - if updateVersion - logger.log { project_id, doc_id, oldVersion: doc?.version, newVersion: version }, "updating doc version" - modified = true - MongoManager.setDocVersion doc_id, version, cb - else - logger.log { project_id, doc_id, version }, "doc version has not changed - not updating" - cb() + const updateVersionIfNeeded = function(cb) { + if (updateVersion) { + logger.log({ project_id, doc_id, oldVersion: (doc != null ? doc.version : undefined), newVersion: version }, "updating doc version"); + modified = true; + return MongoManager.setDocVersion(doc_id, version, cb); + } else { + logger.log({ project_id, doc_id, version }, "doc version has not changed - not updating"); + return cb(); + } + }; - updateLinesAndRangesIfNeeded (error) -> - return callback(error) if error? 
- updateVersionIfNeeded (error) -> - return callback(error) if error? - callback null, modified, rev + return updateLinesAndRangesIfNeeded(function(error) { + if (error != null) { return callback(error); } + return updateVersionIfNeeded(function(error) { + if (error != null) { return callback(error); } + return callback(null, modified, rev); + }); + }); + }); + }, - deleteDoc: (project_id, doc_id, callback = (error) ->) -> - DocManager.checkDocExists project_id, doc_id, (error, exists) -> - return callback(error) if error? - return callback new Errors.NotFoundError("No such project/doc to delete: #{project_id}/#{doc_id}") if !exists - MongoManager.markDocAsDeleted project_id, doc_id, callback + deleteDoc(project_id, doc_id, callback) { + if (callback == null) { callback = function(error) {}; } + return DocManager.checkDocExists(project_id, doc_id, function(error, exists) { + if (error != null) { return callback(error); } + if (!exists) { return callback(new Errors.NotFoundError(`No such project/doc to delete: ${project_id}/${doc_id}`)); } + return MongoManager.markDocAsDeleted(project_id, doc_id, callback); + }); + } +}); diff --git a/services/docstore/app/coffee/Errors.js b/services/docstore/app/coffee/Errors.js index 4a29822efc..1345cc0f86 100644 --- a/services/docstore/app/coffee/Errors.js +++ b/services/docstore/app/coffee/Errors.js @@ -1,10 +1,12 @@ -NotFoundError = (message) -> - error = new Error(message) - error.name = "NotFoundError" - error.__proto__ = NotFoundError.prototype - return error -NotFoundError.prototype.__proto__ = Error.prototype +let Errors; +var NotFoundError = function(message) { + const error = new Error(message); + error.name = "NotFoundError"; + error.__proto__ = NotFoundError.prototype; + return error; +}; +NotFoundError.prototype.__proto__ = Error.prototype; -module.exports = Errors = - NotFoundError: NotFoundError +module.exports = (Errors = + {NotFoundError}); diff --git a/services/docstore/app/coffee/HealthChecker.js 
b/services/docstore/app/coffee/HealthChecker.js index 4280f8aa58..1da8c770d0 100644 --- a/services/docstore/app/coffee/HealthChecker.js +++ b/services/docstore/app/coffee/HealthChecker.js @@ -1,44 +1,59 @@ -{db, ObjectId} = require "./mongojs" -request = require("request") -async = require("async") -_ = require("underscore") -crypto = require("crypto") -settings = require("settings-sharelatex") -port = settings.internal.docstore.port -logger = require "logger-sharelatex" +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const {db, ObjectId} = require("./mongojs"); +const request = require("request"); +const async = require("async"); +const _ = require("underscore"); +const crypto = require("crypto"); +const settings = require("settings-sharelatex"); +const { + port +} = settings.internal.docstore; +const logger = require("logger-sharelatex"); -module.exports = - check : (callback)-> - doc_id = ObjectId() - project_id = ObjectId(settings.docstore.healthCheck.project_id) - url = "http://localhost:#{port}/project/#{project_id}/doc/#{doc_id}" - lines = ["smoke test - delete me", "#{crypto.randomBytes(32).toString("hex")}"] - getOpts = -> {url:url, timeout:3000} - logger.log lines:lines, url:url, doc_id:doc_id, project_id:project_id, "running health check" - jobs = [ - (cb)-> - opts = getOpts() - opts.json = {lines: lines, version: 42, ranges: {}} - request.post(opts, cb) - (cb)-> - opts = getOpts() - opts.json = true - request.get opts, (err, res, body)-> - if err? - logger.err err:err, "docstore returned a error in health check get" - cb(err) - else if !res? 
- cb("no response from docstore with get check") - else if res?.statusCode != 200 - cb("status code not 200, its #{res.statusCode}") - else if _.isEqual(body?.lines, lines) and body?._id == doc_id.toString() - cb() - else - cb("health check lines not equal #{body.lines} != #{lines}") - (cb)-> - db.docs.remove {_id: doc_id, project_id: project_id}, cb - (cb)-> - db.docOps.remove {doc_id: doc_id}, cb - ] - async.series jobs, callback +module.exports = { + check(callback){ + const doc_id = ObjectId(); + const project_id = ObjectId(settings.docstore.healthCheck.project_id); + const url = `http://localhost:${port}/project/${project_id}/doc/${doc_id}`; + const lines = ["smoke test - delete me", `${crypto.randomBytes(32).toString("hex")}`]; + const getOpts = () => ({ + url, + timeout:3000 + }); + logger.log({lines, url, doc_id, project_id}, "running health check"); + const jobs = [ + function(cb){ + const opts = getOpts(); + opts.json = {lines, version: 42, ranges: {}}; + return request.post(opts, cb); + }, + function(cb){ + const opts = getOpts(); + opts.json = true; + return request.get(opts, function(err, res, body){ + if (err != null) { + logger.err({err}, "docstore returned a error in health check get"); + return cb(err); + } else if ((res == null)) { + return cb("no response from docstore with get check"); + } else if ((res != null ? res.statusCode : undefined) !== 200) { + return cb(`status code not 200, its ${res.statusCode}`); + } else if (_.isEqual(body != null ? body.lines : undefined, lines) && ((body != null ? 
body._id : undefined) === doc_id.toString())) { + return cb(); + } else { + return cb(`health check lines not equal ${body.lines} != ${lines}`); + } + }); + }, + cb => db.docs.remove({_id: doc_id, project_id}, cb), + cb => db.docOps.remove({doc_id}, cb) + ]; + return async.series(jobs, callback); + } +}; diff --git a/services/docstore/app/coffee/HttpController.js b/services/docstore/app/coffee/HttpController.js index cec535e1cd..3d87086a0d 100644 --- a/services/docstore/app/coffee/HttpController.js +++ b/services/docstore/app/coffee/HttpController.js @@ -1,144 +1,224 @@ -DocManager = require "./DocManager" -logger = require "logger-sharelatex" -DocArchive = require "./DocArchiveManager" -HealthChecker = require "./HealthChecker" -Settings = require "settings-sharelatex" +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let HttpController; +const DocManager = require("./DocManager"); +const logger = require("logger-sharelatex"); +const DocArchive = require("./DocArchiveManager"); +const HealthChecker = require("./HealthChecker"); +const Settings = require("settings-sharelatex"); -module.exports = HttpController = - getDoc: (req, res, next = (error) ->) -> - project_id = req.params.project_id - doc_id = req.params.doc_id - include_deleted = req.query?.include_deleted == "true" - logger.log project_id: project_id, doc_id: doc_id, "getting doc" - DocManager.getFullDoc project_id, doc_id, (error, doc) -> - return next(error) if error? - logger.log {doc_id, project_id}, "got doc" - if !doc? 
- res.send 404 - else if doc.deleted && !include_deleted - res.send 404 - else - res.json HttpController._buildDocView(doc) - - getRawDoc: (req, res, next = (error)->)-> - project_id = req.params.project_id - doc_id = req.params.doc_id - logger.log project_id: project_id, doc_id: doc_id, "getting raw doc" - DocManager.getDocLines project_id, doc_id, (error, doc) -> - return next(error) if error? - if !doc? - res.send 404 - else - res.setHeader('content-type', 'text/plain') - res.send HttpController._buildRawDocView(doc) - - getAllDocs: (req, res, next = (error) ->) -> - project_id = req.params.project_id - logger.log project_id: project_id, "getting all docs" - DocManager.getAllNonDeletedDocs project_id, {lines: true, rev: true}, (error, docs = []) -> - return next(error) if error? - res.json HttpController._buildDocsArrayView(project_id, docs) - - getAllRanges: (req, res, next = (error) ->) -> - project_id = req.params.project_id - logger.log {project_id}, "getting all ranges" - DocManager.getAllNonDeletedDocs project_id, {ranges: true}, (error, docs = []) -> - return next(error) if error? - res.json HttpController._buildDocsArrayView(project_id, docs) - - updateDoc: (req, res, next = (error) ->) -> - project_id = req.params.project_id - doc_id = req.params.doc_id - lines = req.body?.lines - version = req.body?.version - ranges = req.body?.ranges - - if !lines? or lines not instanceof Array - logger.error project_id: project_id, doc_id: doc_id, "no doc lines provided" - res.send 400 # Bad Request - return - - if !version? or typeof version is not "number" - logger.error project_id: project_id, doc_id: doc_id, "no doc version provided" - res.send 400 # Bad Request - return - - if !ranges? 
- logger.error project_id: project_id, doc_id: doc_id, "no doc ranges provided" - res.send 400 # Bad Request - return - - bodyLength = lines.reduce( - (len, line) => line.length + len - 0 - ) - if bodyLength > Settings.max_doc_length - logger.error project_id: project_id, doc_id: doc_id, bodyLength: bodyLength, "document body too large" - res.status(413).send("document body too large") - return - - logger.log project_id: project_id, doc_id: doc_id, "got http request to update doc" - DocManager.updateDoc project_id, doc_id, lines, version, ranges, (error, modified, rev) -> - return next(error) if error? - res.json { - modified: modified - rev: rev +module.exports = (HttpController = { + getDoc(req, res, next) { + if (next == null) { next = function(error) {}; } + const { + project_id + } = req.params; + const { + doc_id + } = req.params; + const include_deleted = (req.query != null ? req.query.include_deleted : undefined) === "true"; + logger.log({project_id, doc_id}, "getting doc"); + return DocManager.getFullDoc(project_id, doc_id, function(error, doc) { + if (error != null) { return next(error); } + logger.log({doc_id, project_id}, "got doc"); + if ((doc == null)) { + return res.send(404); + } else if (doc.deleted && !include_deleted) { + return res.send(404); + } else { + return res.json(HttpController._buildDocView(doc)); } + }); + }, - deleteDoc: (req, res, next = (error) ->) -> - project_id = req.params.project_id - doc_id = req.params.doc_id - logger.log project_id: project_id, doc_id: doc_id, "deleting doc" - DocManager.deleteDoc project_id, doc_id, (error) -> - return next(error) if error? 
- res.send 204 + getRawDoc(req, res, next){ + if (next == null) { next = function(error){}; } + const { + project_id + } = req.params; + const { + doc_id + } = req.params; + logger.log({project_id, doc_id}, "getting raw doc"); + return DocManager.getDocLines(project_id, doc_id, function(error, doc) { + if (error != null) { return next(error); } + if ((doc == null)) { + return res.send(404); + } else { + res.setHeader('content-type', 'text/plain'); + return res.send(HttpController._buildRawDocView(doc)); + } + }); + }, - _buildDocView: (doc) -> - doc_view = { _id: doc._id?.toString() } - for attribute in ["lines", "rev", "version", "ranges", "deleted"] - if doc[attribute]? - doc_view[attribute] = doc[attribute] - return doc_view - - _buildRawDocView: (doc)-> - return (doc?.lines or []).join("\n") + getAllDocs(req, res, next) { + if (next == null) { next = function(error) {}; } + const { + project_id + } = req.params; + logger.log({project_id}, "getting all docs"); + return DocManager.getAllNonDeletedDocs(project_id, {lines: true, rev: true}, function(error, docs) { + if (docs == null) { docs = []; } + if (error != null) { return next(error); } + return res.json(HttpController._buildDocsArrayView(project_id, docs)); + }); + }, - _buildDocsArrayView: (project_id, docs) -> - docViews = [] - for doc in docs - if doc? 
# There can end up being null docs for some reason :( (probably a race condition) - docViews.push HttpController._buildDocView(doc) - else - logger.error err: new Error("null doc"), project_id: project_id, "encountered null doc" - return docViews + getAllRanges(req, res, next) { + if (next == null) { next = function(error) {}; } + const { + project_id + } = req.params; + logger.log({project_id}, "getting all ranges"); + return DocManager.getAllNonDeletedDocs(project_id, {ranges: true}, function(error, docs) { + if (docs == null) { docs = []; } + if (error != null) { return next(error); } + return res.json(HttpController._buildDocsArrayView(project_id, docs)); + }); + }, - archiveAllDocs: (req, res, next = (error) ->) -> - project_id = req.params.project_id - logger.log project_id: project_id, "archiving all docs" - DocArchive.archiveAllDocs project_id, (error) -> - return next(error) if error? - res.send 204 + updateDoc(req, res, next) { + if (next == null) { next = function(error) {}; } + const { + project_id + } = req.params; + const { + doc_id + } = req.params; + const lines = req.body != null ? req.body.lines : undefined; + const version = req.body != null ? req.body.version : undefined; + const ranges = req.body != null ? req.body.ranges : undefined; - unArchiveAllDocs: (req, res, next = (error) ->) -> - project_id = req.params.project_id - logger.log project_id: project_id, "unarchiving all docs" - DocArchive.unArchiveAllDocs project_id, (error) -> - return next(error) if error? 
- res.send 200
+ if ((lines == null) || !(lines instanceof Array)) {
+ logger.error({project_id, doc_id}, "no doc lines provided");
+ res.send(400); // Bad Request
+ return;
+ }
+
+ if ((version == null) || (typeof version !== "number")) {
+ logger.error({project_id, doc_id}, "no doc version provided");
+ res.send(400); // Bad Request
+ return;
+ }
+
+ if ((ranges == null)) {
+ logger.error({project_id, doc_id}, "no doc ranges provided");
+ res.send(400); // Bad Request
+ return;
+ }
-
- bodyLength = lines.reduce(
- (len, line) => line.length + len
- 0
- )
- if bodyLength > Settings.max_doc_length
- logger.error project_id: project_id, doc_id: doc_id, bodyLength: bodyLength, "document body too large"
- res.status(413).send("document body too large")
- return
-
- logger.log project_id: project_id, doc_id: doc_id, "got http request to update doc"
- DocManager.updateDoc project_id, doc_id, lines, version, ranges, (error, modified, rev) ->
- return next(error) if error?
- res.json {
- modified: modified
- rev: rev
+ const bodyLength = lines.reduce(
+ (len, line) => line.length + len,
+ 0
+ );
+ if (bodyLength > Settings.max_doc_length) {
+ logger.error({project_id, doc_id, bodyLength}, "document body too large");
+ res.status(413).send("document body too large");
+ return;
+ }
- destroyAllDocs: (req, res, next = (error) ->) ->
- project_id = req.params.project_id
- logger.log project_id: project_id, "destroying all docs"
- DocArchive.destroyAllDocs project_id, (error) ->
- return next(error) if error?
- res.send 204
+ logger.log({project_id, doc_id}, "got http request to update doc");
+ return DocManager.updateDoc(project_id, doc_id, lines, version, ranges, function(error, modified, rev) {
+ if (error != null) { return next(error); }
+ return res.json({
+ modified,
+ rev
+ });
+ });
+ },
+
+ deleteDoc(req, res, next) {
+ if (next == null) { next = function(error) {}; }
+ const {
+ project_id
+ } = req.params;
+ const {
+ doc_id
+ } = req.params;
+ logger.log({project_id, doc_id}, "deleting doc");
+ return DocManager.deleteDoc(project_id, doc_id, function(error) {
+ if (error != null) { return next(error); }
+ return res.send(204);
+ });
+ },
+
+ _buildDocView(doc) {
+ const doc_view = { _id: (doc._id != null ? 
doc._id.toString() : undefined) }; + for (let attribute of ["lines", "rev", "version", "ranges", "deleted"]) { + if (doc[attribute] != null) { + doc_view[attribute] = doc[attribute]; + } + } + return doc_view; + }, + + _buildRawDocView(doc){ + return ((doc != null ? doc.lines : undefined) || []).join("\n"); + }, + + _buildDocsArrayView(project_id, docs) { + const docViews = []; + for (let doc of Array.from(docs)) { + if (doc != null) { // There can end up being null docs for some reason :( (probably a race condition) + docViews.push(HttpController._buildDocView(doc)); + } else { + logger.error({err: new Error("null doc"), project_id}, "encountered null doc"); + } + } + return docViews; + }, + + archiveAllDocs(req, res, next) { + if (next == null) { next = function(error) {}; } + const { + project_id + } = req.params; + logger.log({project_id}, "archiving all docs"); + return DocArchive.archiveAllDocs(project_id, function(error) { + if (error != null) { return next(error); } + return res.send(204); + }); + }, + + unArchiveAllDocs(req, res, next) { + if (next == null) { next = function(error) {}; } + const { + project_id + } = req.params; + logger.log({project_id}, "unarchiving all docs"); + return DocArchive.unArchiveAllDocs(project_id, function(error) { + if (error != null) { return next(error); } + return res.send(200); + }); + }, + + destroyAllDocs(req, res, next) { + if (next == null) { next = function(error) {}; } + const { + project_id + } = req.params; + logger.log({project_id}, "destroying all docs"); + return DocArchive.destroyAllDocs(project_id, function(error) { + if (error != null) { return next(error); } + return res.send(204); + }); + }, + + healthCheck(req, res){ + return HealthChecker.check(function(err){ + if (err != null) { + logger.err({err}, "error performing health check"); + return res.send(500); + } else { + return res.send(200); + } + }); + } +}); diff --git a/services/docstore/app/coffee/MongoManager.js 
b/services/docstore/app/coffee/MongoManager.js index 3bd9eb34c5..0fc304b79f 100644 --- a/services/docstore/app/coffee/MongoManager.js +++ b/services/docstore/app/coffee/MongoManager.js @@ -1,85 +1,118 @@ -{db, ObjectId} = require "./mongojs" -logger = require 'logger-sharelatex' -metrics = require 'metrics-sharelatex' +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let MongoManager; +const {db, ObjectId} = require("./mongojs"); +const logger = require('logger-sharelatex'); +const metrics = require('metrics-sharelatex'); -module.exports = MongoManager = +module.exports = (MongoManager = { - findDoc: (project_id, doc_id, filter, callback = (error, doc) ->) -> - db.docs.find {_id: ObjectId(doc_id.toString()), project_id: ObjectId(project_id.toString())}, filter, (error, docs = []) -> - callback error, docs[0] + findDoc(project_id, doc_id, filter, callback) { + if (callback == null) { callback = function(error, doc) {}; } + return db.docs.find({_id: ObjectId(doc_id.toString()), project_id: ObjectId(project_id.toString())}, filter, function(error, docs) { + if (docs == null) { docs = []; } + return callback(error, docs[0]); + }); + }, - getProjectsDocs: (project_id, options = {include_deleted: true}, filter, callback)-> - query = {project_id: ObjectId(project_id.toString())} - if !options.include_deleted - query.deleted = { $ne: true } - db.docs.find query, filter, callback + getProjectsDocs(project_id, options, filter, callback){ + if (options == null) { options = {include_deleted: true}; } + const query = {project_id: ObjectId(project_id.toString())}; + if (!options.include_deleted) { + query.deleted = { $ne: true }; + } + return db.docs.find(query, filter, callback); + }, - getArchivedProjectDocs: (project_id, callback)-> - query = - project_id: 
ObjectId(project_id.toString()) + getArchivedProjectDocs(project_id, callback){ + const query = { + project_id: ObjectId(project_id.toString()), inS3: true - db.docs.find query, {}, callback + }; + return db.docs.find(query, {}, callback); + }, - upsertIntoDocCollection: (project_id, doc_id, updates, callback)-> - update = - $set: updates - $inc: + upsertIntoDocCollection(project_id, doc_id, updates, callback){ + const update = { + $set: updates, + $inc: { rev: 1 - $unset: + }, + $unset: { inS3: true - update.$set["project_id"] = ObjectId(project_id) - db.docs.update _id: ObjectId(doc_id), update, {upsert: true}, callback + } + }; + update.$set["project_id"] = ObjectId(project_id); + return db.docs.update({_id: ObjectId(doc_id)}, update, {upsert: true}, callback); + }, - markDocAsDeleted: (project_id, doc_id, callback)-> - db.docs.update { + markDocAsDeleted(project_id, doc_id, callback){ + return db.docs.update({ _id: ObjectId(doc_id), project_id: ObjectId(project_id) }, { $set: { deleted: true } - }, callback + }, callback); + }, - markDocAsArchived: (doc_id, rev, callback)-> - update = - $set: {} + markDocAsArchived(doc_id, rev, callback){ + const update = { + $set: {}, $unset: {} - update.$set["inS3"] = true - update.$unset["lines"] = true - update.$unset["ranges"] = true - query = - _id: doc_id - rev: rev - db.docs.update query, update, (err)-> - callback(err) + }; + update.$set["inS3"] = true; + update.$unset["lines"] = true; + update.$unset["ranges"] = true; + const query = { + _id: doc_id, + rev + }; + return db.docs.update(query, update, err => callback(err)); + }, - getDocVersion: (doc_id, callback = (error, version) ->) -> - db.docOps.find { + getDocVersion(doc_id, callback) { + if (callback == null) { callback = function(error, version) {}; } + return db.docOps.find({ doc_id: ObjectId(doc_id) }, { version: 1 - }, (error, docs) -> - return callback(error) if error? - if docs.length < 1 or !docs[0].version? 
- return callback null, 0 - else - return callback null, docs[0].version + }, function(error, docs) { + if (error != null) { return callback(error); } + if ((docs.length < 1) || (docs[0].version == null)) { + return callback(null, 0); + } else { + return callback(null, docs[0].version); + } + }); + }, - setDocVersion: (doc_id, version, callback = (error) ->) -> - db.docOps.update { + setDocVersion(doc_id, version, callback) { + if (callback == null) { callback = function(error) {}; } + return db.docOps.update({ doc_id: ObjectId(doc_id) }, { - $set: version: version + $set: { version + } }, { upsert: true - }, callback + }, callback); + }, - destroyDoc: (doc_id, callback) -> - db.docs.remove { + destroyDoc(doc_id, callback) { + return db.docs.remove({ _id: ObjectId(doc_id) - }, (err) -> - return callback(err) if err? - db.docOps.remove { + }, function(err) { + if (err != null) { return callback(err); } + return db.docOps.remove({ doc_id: ObjectId(doc_id) - }, callback + }, callback); + }); + } +}); [ 'findDoc', @@ -89,5 +122,4 @@ module.exports = MongoManager = 'markDocAsArchived', 'getDocVersion', 'setDocVersion' -].map (method) -> - metrics.timeAsyncMethod(MongoManager, method, 'mongo.MongoManager', logger) +].map(method => metrics.timeAsyncMethod(MongoManager, method, 'mongo.MongoManager', logger)); diff --git a/services/docstore/app/coffee/RangeManager.js b/services/docstore/app/coffee/RangeManager.js index 4867ffbe19..6973ebd259 100644 --- a/services/docstore/app/coffee/RangeManager.js +++ b/services/docstore/app/coffee/RangeManager.js @@ -1,40 +1,61 @@ -_ = require "underscore" -{ObjectId} = require("./mongojs") +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let RangeManager; +const _ = 
require("underscore"); +const {ObjectId} = require("./mongojs"); -module.exports = RangeManager = - shouldUpdateRanges: (doc_ranges, incoming_ranges) -> - if !incoming_ranges? - throw new Error("expected incoming_ranges") +module.exports = (RangeManager = { + shouldUpdateRanges(doc_ranges, incoming_ranges) { + if ((incoming_ranges == null)) { + throw new Error("expected incoming_ranges"); + } - # If the ranges are empty, we don't store them in the DB, so set - # doc_ranges to an empty object as default, since this is was the - # incoming_ranges will be for an empty range set. - if !doc_ranges? - doc_ranges = {} + // If the ranges are empty, we don't store them in the DB, so set + // doc_ranges to an empty object as default, since this is was the + // incoming_ranges will be for an empty range set. + if ((doc_ranges == null)) { + doc_ranges = {}; + } - return not _.isEqual(doc_ranges, incoming_ranges) + return !_.isEqual(doc_ranges, incoming_ranges); + }, - jsonRangesToMongo: (ranges) -> - return null if !ranges? + jsonRangesToMongo(ranges) { + if ((ranges == null)) { return null; } - updateMetadata = (metadata) -> - if metadata?.ts? - metadata.ts = new Date(metadata.ts) - if metadata?.user_id? - metadata.user_id = RangeManager._safeObjectId(metadata.user_id) + const updateMetadata = function(metadata) { + if ((metadata != null ? metadata.ts : undefined) != null) { + metadata.ts = new Date(metadata.ts); + } + if ((metadata != null ? metadata.user_id : undefined) != null) { + return metadata.user_id = RangeManager._safeObjectId(metadata.user_id); + } + }; - for change in ranges.changes or [] - change.id = RangeManager._safeObjectId(change.id) - updateMetadata(change.metadata) - for comment in ranges.comments or [] - comment.id = RangeManager._safeObjectId(comment.id) - if comment.op?.t? 
- comment.op.t = RangeManager._safeObjectId(comment.op.t) - updateMetadata(comment.metadata) - return ranges + for (let change of Array.from(ranges.changes || [])) { + change.id = RangeManager._safeObjectId(change.id); + updateMetadata(change.metadata); + } + for (let comment of Array.from(ranges.comments || [])) { + comment.id = RangeManager._safeObjectId(comment.id); + if ((comment.op != null ? comment.op.t : undefined) != null) { + comment.op.t = RangeManager._safeObjectId(comment.op.t); + } + updateMetadata(comment.metadata); + } + return ranges; + }, - _safeObjectId: (data) -> - try - return ObjectId(data) - catch error - return data \ No newline at end of file + _safeObjectId(data) { + try { + return ObjectId(data); + } catch (error) { + return data; + } + } +}); \ No newline at end of file diff --git a/services/docstore/app/coffee/mongojs.js b/services/docstore/app/coffee/mongojs.js index 0153c3cfcd..fd0d6cde8b 100644 --- a/services/docstore/app/coffee/mongojs.js +++ b/services/docstore/app/coffee/mongojs.js @@ -1,7 +1,8 @@ -Settings = require "settings-sharelatex" -mongojs = require "mongojs" -db = mongojs(Settings.mongo.url, ["docs", "docOps"]) -module.exports = - db: db +const Settings = require("settings-sharelatex"); +const mongojs = require("mongojs"); +const db = mongojs(Settings.mongo.url, ["docs", "docOps"]); +module.exports = { + db, ObjectId: mongojs.ObjectId +};