prettier: convert app/js decaffeinated files to Prettier format

This commit is contained in:
Simon Detheridge 2020-02-16 14:02:21 +00:00
parent c1805978c4
commit fb931e206c
8 changed files with 1039 additions and 787 deletions

View file

@ -11,205 +11,282 @@
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let DocArchive;
const MongoManager = require("./MongoManager");
const Errors = require("./Errors");
const logger = require("logger-sharelatex");
const _ = require("underscore");
const async = require("async");
const settings = require("settings-sharelatex");
const request = require("request");
const crypto = require("crypto");
const RangeManager = require("./RangeManager");
const thirtySeconds = 30 * 1000;
let DocArchive
const MongoManager = require('./MongoManager')
const Errors = require('./Errors')
const logger = require('logger-sharelatex')
const _ = require('underscore')
const async = require('async')
const settings = require('settings-sharelatex')
const request = require('request')
const crypto = require('crypto')
const RangeManager = require('./RangeManager')
const thirtySeconds = 30 * 1000
module.exports = (DocArchive = {
module.exports = DocArchive = {
archiveAllDocs(project_id, callback) {
if (callback == null) {
callback = function(err, docs) {}
}
return MongoManager.getProjectsDocs(
project_id,
{ include_deleted: true },
{ lines: true, ranges: true, rev: true, inS3: true },
function(err, docs) {
if (err != null) {
return callback(err)
} else if (docs == null) {
return callback(
new Errors.NotFoundError(`No docs for project ${project_id}`)
)
}
docs = _.filter(docs, doc => doc.inS3 !== true)
const jobs = _.map(docs, doc => cb =>
DocArchive.archiveDoc(project_id, doc, cb)
)
return async.parallelLimit(jobs, 5, callback)
}
)
},
// Archive every doc of a project (including deleted ones) that is not
// already flagged inS3, uploading at most 5 docs to S3 concurrently.
archiveAllDocs(project_id, callback) {
if (callback == null) { callback = function(err, docs) {}; }
return MongoManager.getProjectsDocs(project_id, {include_deleted: true}, {lines: true, ranges: true, rev: true, inS3: true}, function(err, docs) {
if (err != null) {
return callback(err);
} else if ((docs == null)) {
return callback(new Errors.NotFoundError(`No docs for project ${project_id}`));
}
// Skip docs already archived; each remaining doc becomes an async job.
docs = _.filter(docs, doc => doc.inS3 !== true);
const jobs = _.map(docs, doc => cb => DocArchive.archiveDoc(project_id, doc, cb));
return async.parallelLimit(jobs, 5, callback);
});
},
// Upload a single doc to S3 as JSON; on success, compare the response etag
// with a locally computed MD5 of the uploaded body before marking the doc
// as archived in Mongo. Any failure is reported via callback.
archiveDoc(project_id, doc, callback) {
let options
logger.log({ project_id, doc_id: doc._id }, 'sending doc to s3')
try {
// Throws when S3 settings are not configured.
options = DocArchive.buildS3Options(project_id + '/' + doc._id)
} catch (e) {
return callback(e)
}
return DocArchive._mongoDocToS3Doc(doc, function(error, json_doc) {
if (error != null) {
return callback(error)
}
options.body = json_doc
options.headers = { 'Content-Type': 'application/json' }
return request.put(options, function(err, res) {
if (err != null || res.statusCode !== 200) {
logger.err(
{
err,
res,
project_id,
doc_id: doc._id,
statusCode: res != null ? res.statusCode : undefined
},
'something went wrong archiving doc in aws'
)
return callback(new Error('Error in S3 request'))
}
// Compare our own MD5 of the uploaded JSON against the (quoted) etag
// header — presumably the object's MD5 for a simple PUT — to detect
// corruption in transit.
const md5lines = crypto
.createHash('md5')
.update(json_doc, 'utf8')
.digest('hex')
const md5response = res.headers.etag.toString().replace(/\"/g, '')
if (md5lines !== md5response) {
logger.err(
{
responseMD5: md5response,
linesMD5: md5lines,
project_id,
doc_id: doc != null ? doc._id : undefined
},
'err in response md5 from s3'
)
return callback(new Error('Error in S3 md5 response'))
}
// Only flag the doc as archived once the upload has been verified.
return MongoManager.markDocAsArchived(doc._id, doc.rev, function(err) {
if (err != null) {
return callback(err)
}
return callback()
})
})
})
},
unArchiveAllDocs(project_id, callback) {
if (callback == null) {
callback = function(err) {}
}
return MongoManager.getArchivedProjectDocs(project_id, function(err, docs) {
if (err != null) {
logger.err({ err, project_id }, 'error unarchiving all docs')
return callback(err)
} else if (docs == null) {
return callback(
new Errors.NotFoundError(`No docs for project ${project_id}`)
)
}
const jobs = _.map(
docs,
doc =>
function(cb) {
if (doc.inS3 == null) {
return cb()
} else {
return DocArchive.unarchiveDoc(project_id, doc._id, cb)
}
}
)
return async.parallelLimit(jobs, 5, callback)
})
},
// Upload a single doc to S3, verify the response etag against a local MD5
// of the JSON body, then mark the doc as archived in Mongo.
archiveDoc(project_id, doc, callback){
let options;
logger.log({project_id, doc_id: doc._id}, "sending doc to s3");
try {
// Throws when S3 settings are not configured.
options = DocArchive.buildS3Options(project_id+"/"+doc._id);
} catch (e) {
return callback(e);
}
return DocArchive._mongoDocToS3Doc(doc, function(error, json_doc) {
if (error != null) { return callback(error); }
options.body = json_doc;
options.headers =
{'Content-Type': "application/json"};
return request.put(options, function(err, res) {
if ((err != null) || (res.statusCode !== 200)) {
logger.err({err, res, project_id, doc_id: doc._id, statusCode: (res != null ? res.statusCode : undefined)}, "something went wrong archiving doc in aws");
return callback(new Error("Error in S3 request"));
}
// Compare our MD5 of the uploaded JSON with the (quoted) etag header
// to detect corruption in transit.
const md5lines = crypto.createHash("md5").update(json_doc, "utf8").digest("hex");
const md5response = res.headers.etag.toString().replace(/\"/g, '');
if (md5lines !== md5response) {
logger.err({responseMD5:md5response, linesMD5:md5lines, project_id, doc_id: (doc != null ? doc._id : undefined)}, "err in response md5 from s3");
return callback(new Error("Error in S3 md5 response"));
}
// Only flag the doc as archived once the upload has been verified.
return MongoManager.markDocAsArchived(doc._id, doc.rev, function(err) {
if (err != null) { return callback(err); }
return callback();
});
});
});
},
// Download a doc's JSON from S3, upsert it back into the Mongo docs
// collection, then delete the S3 copy.
unarchiveDoc(project_id, doc_id, callback) {
let options
logger.log({ project_id, doc_id }, 'getting doc from s3')
try {
options = DocArchive.buildS3Options(project_id + '/' + doc_id)
} catch (e) {
return callback(e)
}
options.json = true
return request.get(options, function(err, res, doc) {
if (err != null || res.statusCode !== 200) {
logger.err(
{ err, res, project_id, doc_id },
'something went wrong unarchiving doc from aws'
)
return callback(new Errors.NotFoundError('Error in S3 request'))
}
return DocArchive._s3DocToMongoDoc(doc, function(error, mongo_doc) {
if (error != null) {
return callback(error)
}
return MongoManager.upsertIntoDocCollection(
project_id,
doc_id.toString(),
mongo_doc,
function(err) {
if (err != null) {
return callback(err)
}
// Only remove the S3 copy once the doc is safely back in Mongo.
logger.log({ project_id, doc_id }, 'deleting doc from s3')
return DocArchive._deleteDocFromS3(project_id, doc_id, callback)
}
)
})
})
},
// Restore every archived doc of a project from S3 back into Mongo,
// downloading at most 5 docs concurrently.
unArchiveAllDocs(project_id, callback) {
if (callback == null) { callback = function(err) {}; }
return MongoManager.getArchivedProjectDocs(project_id, function(err, docs) {
if (err != null) {
logger.err({err, project_id}, "error unarchiving all docs");
return callback(err);
} else if ((docs == null)) {
return callback(new Errors.NotFoundError(`No docs for project ${project_id}`));
}
const jobs = _.map(docs, doc => (function(cb) {
// Docs not flagged inS3 are already present in Mongo - skip them.
if ((doc.inS3 == null)) {
return cb();
} else {
return DocArchive.unarchiveDoc(project_id, doc._id, cb);
}
}));
return async.parallelLimit(jobs, 5, callback);
});
},
destroyAllDocs(project_id, callback) {
if (callback == null) {
callback = function(err) {}
}
return MongoManager.getProjectsDocs(
project_id,
{ include_deleted: true },
{ _id: 1 },
function(err, docs) {
if (err != null) {
logger.err({ err, project_id }, "error getting project's docs")
return callback(err)
} else if (docs == null) {
return callback()
}
const jobs = _.map(docs, doc => cb =>
DocArchive.destroyDoc(project_id, doc._id, cb)
)
return async.parallelLimit(jobs, 5, callback)
}
)
},
// Download a doc's JSON from S3, upsert it back into the Mongo docs
// collection, then delete the S3 copy.
unarchiveDoc(project_id, doc_id, callback){
let options;
logger.log({project_id, doc_id}, "getting doc from s3");
try {
options = DocArchive.buildS3Options(project_id+"/"+doc_id);
} catch (e) {
return callback(e);
}
options.json = true;
return request.get(options, function(err, res, doc){
if ((err != null) || (res.statusCode !== 200)) {
logger.err({err, res, project_id, doc_id}, "something went wrong unarchiving doc from aws");
return callback(new Errors.NotFoundError("Error in S3 request"));
}
return DocArchive._s3DocToMongoDoc(doc, function(error, mongo_doc) {
if (error != null) { return callback(error); }
return MongoManager.upsertIntoDocCollection(project_id, doc_id.toString(), mongo_doc, function(err) {
if (err != null) { return callback(err); }
// Only remove the S3 copy once the doc is safely back in Mongo.
logger.log({project_id, doc_id}, "deleting doc from s3");
return DocArchive._deleteDocFromS3(project_id, doc_id, callback);
});
});
});
},
destroyDoc(project_id, doc_id, callback) {
logger.log({ project_id, doc_id }, 'removing doc from mongo and s3')
return MongoManager.findDoc(project_id, doc_id, { inS3: 1 }, function(
error,
doc
) {
if (error != null) {
return callback(error)
}
if (doc == null) {
return callback(new Errors.NotFoundError('Doc not found in Mongo'))
}
if (doc.inS3 === true) {
return DocArchive._deleteDocFromS3(project_id, doc_id, function(err) {
if (err != null) {
return err
}
return MongoManager.destroyDoc(doc_id, callback)
})
} else {
return MongoManager.destroyDoc(doc_id, callback)
}
})
},
// Permanently remove every doc of a project (including deleted ones)
// from Mongo and S3, at most 5 deletions at a time.
destroyAllDocs(project_id, callback) {
if (callback == null) { callback = function(err) {}; }
return MongoManager.getProjectsDocs(project_id, {include_deleted: true}, {_id: 1}, function(err, docs) {
if (err != null) {
logger.err({err, project_id}, "error getting project's docs");
return callback(err);
} else if ((docs == null)) {
return callback();
}
const jobs = _.map(docs, doc => cb => DocArchive.destroyDoc(project_id, doc._id, cb));
return async.parallelLimit(jobs, 5, callback);
});
},
// Issue an HTTP DELETE for the doc's S3 object; a 204 response is the
// only outcome treated as success.
_deleteDocFromS3(project_id, doc_id, callback) {
let options
try {
options = DocArchive.buildS3Options(project_id + '/' + doc_id)
} catch (e) {
return callback(e)
}
options.json = true
return request.del(options, function(err, res, body) {
if (err != null || res.statusCode !== 204) {
logger.err(
{ err, res, project_id, doc_id },
'something went wrong deleting doc from aws'
)
return callback(new Error('Error in S3 request'))
}
return callback()
})
},
destroyDoc(project_id, doc_id, callback){
logger.log({project_id, doc_id}, "removing doc from mongo and s3");
return MongoManager.findDoc(project_id, doc_id, {inS3: 1}, function(error, doc) {
if (error != null) { return callback(error); }
if (doc == null) { return callback(new Errors.NotFoundError("Doc not found in Mongo")); }
if (doc.inS3 === true) {
return DocArchive._deleteDocFromS3(project_id, doc_id, function(err) {
if (err != null) { return err; }
return MongoManager.destroyDoc(doc_id, callback);
});
} else {
return MongoManager.destroyDoc(doc_id, callback);
}
});
},
_s3DocToMongoDoc(doc, callback) {
if (callback == null) {
callback = function(error, mongo_doc) {}
}
const mongo_doc = {}
if (doc.schema_v === 1 && doc.lines != null) {
mongo_doc.lines = doc.lines
if (doc.ranges != null) {
mongo_doc.ranges = RangeManager.jsonRangesToMongo(doc.ranges)
}
} else if (doc instanceof Array) {
mongo_doc.lines = doc
} else {
return callback(new Error("I don't understand the doc format in s3"))
}
return callback(null, mongo_doc)
},
// Delete the doc's S3 object; only a 204 status counts as success.
_deleteDocFromS3(project_id, doc_id, callback) {
let options;
try {
options = DocArchive.buildS3Options(project_id+"/"+doc_id);
} catch (e) {
return callback(e);
}
options.json = true;
return request.del(options, function(err, res, body){
if ((err != null) || (res.statusCode !== 204)) {
logger.err({err, res, project_id, doc_id}, "something went wrong deleting doc from aws");
return callback(new Error("Error in S3 request"));
}
return callback();
});
},
_mongoDocToS3Doc(doc, callback) {
if (callback == null) {
callback = function(error, s3_doc) {}
}
if (doc.lines == null) {
return callback(new Error('doc has no lines'))
}
const json = JSON.stringify({
lines: doc.lines,
ranges: doc.ranges,
schema_v: 1
})
if (json.indexOf('\u0000') !== -1) {
const error = new Error('null bytes detected')
logger.err({ err: error, doc, json }, error.message)
return callback(error)
}
return callback(null, json)
},
// Convert an S3 payload (schema_v 1 object, or legacy bare lines array)
// into the document shape stored in Mongo.
_s3DocToMongoDoc(doc, callback) {
if (callback == null) { callback = function(error, mongo_doc) {}; }
const mongo_doc = {};
if ((doc.schema_v === 1) && (doc.lines != null)) {
mongo_doc.lines = doc.lines;
if (doc.ranges != null) {
mongo_doc.ranges = RangeManager.jsonRangesToMongo(doc.ranges);
}
} else if (doc instanceof Array) {
// Legacy format: the stored object was just the array of lines.
mongo_doc.lines = doc;
} else {
return callback(new Error("I don't understand the doc format in s3"));
}
return callback(null, mongo_doc);
},
// Serialise a Mongo doc to the schema_v 1 JSON stored in S3. Rejects
// docs without lines and JSON containing null bytes.
_mongoDocToS3Doc(doc, callback) {
if (callback == null) { callback = function(error, s3_doc) {}; }
if ((doc.lines == null)) {
return callback(new Error("doc has no lines"));
}
const json = JSON.stringify({
lines: doc.lines,
ranges: doc.ranges,
schema_v: 1
});
if (json.indexOf("\u0000") !== -1) {
const error = new Error("null bytes detected");
logger.err({err: error, doc, json}, error.message);
return callback(error);
}
return callback(null, json);
},
// Build request options (AWS credentials, timeout, object URI) for an S3
// call against the configured docstore bucket; throws when docstore S3
// settings are absent.
buildS3Options(key){
if ((settings.docstore.s3 == null)) {
throw new Error("S3 settings are not configured");
}
return {
aws: {
key: settings.docstore.s3.key,
secret: settings.docstore.s3.secret,
bucket: settings.docstore.s3.bucket
},
timeout: thirtySeconds,
uri:`https://${settings.docstore.s3.bucket}.s3.amazonaws.com/${key}`
};
}
});
buildS3Options(key) {
if (settings.docstore.s3 == null) {
throw new Error('S3 settings are not configured')
}
return {
aws: {
key: settings.docstore.s3.key,
secret: settings.docstore.s3.secret,
bucket: settings.docstore.s3.bucket
},
timeout: thirtySeconds,
uri: `https://${settings.docstore.s3.bucket}.s3.amazonaws.com/${key}`
}
}
}

View file

@ -13,181 +13,277 @@
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let DocManager;
const MongoManager = require("./MongoManager");
const Errors = require("./Errors");
const logger = require("logger-sharelatex");
const _ = require("underscore");
const DocArchive = require("./DocArchiveManager");
const RangeManager = require("./RangeManager");
let DocManager
const MongoManager = require('./MongoManager')
const Errors = require('./Errors')
const logger = require('logger-sharelatex')
const _ = require('underscore')
const DocArchive = require('./DocArchiveManager')
const RangeManager = require('./RangeManager')
module.exports = (DocManager = {
module.exports = DocManager = {
// TODO: For historical reasons, the doc version is currently stored in the docOps
// collection (which is all that this collection contains). In future, we should
// migrate this version property to be part of the docs collection, to guarantee
// consitency between lines and version when writing/reading, and for a simpler schema.
_getDoc(project_id, doc_id, filter, callback) {
if (filter == null) {
filter = {}
}
if (callback == null) {
callback = function(error, doc) {}
}
if (filter.inS3 !== true) {
return callback('must include inS3 when getting doc')
}
// TODO: For historical reasons, the doc version is currently stored in the docOps
// collection (which is all that this collection contains). In future, we should
// migrate this version property to be part of the docs collection, to guarantee
// consitency between lines and version when writing/reading, and for a simpler schema.
_getDoc(project_id, doc_id, filter, callback) {
if (filter == null) { filter = {}; }
if (callback == null) { callback = function(error, doc) {}; }
if (filter.inS3 !== true) {
return callback("must include inS3 when getting doc");
}
return MongoManager.findDoc(project_id, doc_id, filter, function(err, doc) {
if (err != null) {
return callback(err)
} else if (doc == null) {
return callback(
new Errors.NotFoundError(
`No such doc: ${doc_id} in project ${project_id}`
)
)
} else if (doc != null ? doc.inS3 : undefined) {
return DocArchive.unarchiveDoc(project_id, doc_id, function(err) {
if (err != null) {
logger.err({ err, project_id, doc_id }, 'error unarchiving doc')
return callback(err)
}
return DocManager._getDoc(project_id, doc_id, filter, callback)
})
} else {
if (filter.version) {
return MongoManager.getDocVersion(doc_id, function(error, version) {
if (error != null) {
return callback(error)
}
doc.version = version
return callback(err, doc)
})
} else {
return callback(err, doc)
}
}
})
},
return MongoManager.findDoc(project_id, doc_id, filter, function(err, doc){
if (err != null) {
return callback(err);
} else if ((doc == null)) {
return callback(new Errors.NotFoundError(`No such doc: ${doc_id} in project ${project_id}`));
} else if ((doc != null ? doc.inS3 : undefined)) {
return DocArchive.unarchiveDoc(project_id, doc_id, function(err){
if (err != null) {
logger.err({err, project_id, doc_id}, "error unarchiving doc");
return callback(err);
}
return DocManager._getDoc(project_id, doc_id, filter, callback);
});
} else {
if (filter.version) {
return MongoManager.getDocVersion(doc_id, function(error, version) {
if (error != null) { return callback(error); }
doc.version = version;
return callback(err, doc);
});
} else {
return callback(err, doc);
}
}
});
},
checkDocExists(project_id, doc_id, callback) {
if (callback == null) {
callback = function(err, exists) {}
}
return DocManager._getDoc(
project_id,
doc_id,
{ _id: 1, inS3: true },
function(err, doc) {
if (err != null) {
return callback(err)
}
return callback(err, doc != null)
}
)
},
// Report whether a doc exists via callback(err, exists), fetching only
// a minimal projection.
checkDocExists(project_id, doc_id, callback){
if (callback == null) { callback = function(err, exists){}; }
return DocManager._getDoc(project_id, doc_id, {_id:1, inS3:true}, function(err, doc){
if (err != null) {
return callback(err);
}
return callback(err, (doc != null));
});
},
// Fetch a doc with all user-visible fields (lines, rev, deleted flag,
// version, ranges), unarchiving it from S3 first when necessary.
getFullDoc(project_id, doc_id, callback) {
if (callback == null) {
callback = function(err, doc) {}
}
return DocManager._getDoc(
project_id,
doc_id,
{
lines: true,
rev: true,
deleted: true,
version: true,
ranges: true,
inS3: true
},
function(err, doc) {
if (err != null) {
return callback(err)
}
return callback(err, doc)
}
)
},
// Fetch a doc with all user-visible fields (lines, rev, deleted flag,
// version, ranges), unarchiving it from S3 first when necessary.
getFullDoc(project_id, doc_id, callback){
if (callback == null) { callback = function(err, doc){}; }
return DocManager._getDoc(project_id, doc_id, {lines: true, rev: true, deleted: true, version: true, ranges: true, inS3:true}, function(err, doc){
if (err != null) {
return callback(err);
}
return callback(err, doc);
});
},
getDocLines(project_id, doc_id, callback) {
if (callback == null) {
callback = function(err, doc) {}
}
return DocManager._getDoc(
project_id,
doc_id,
{ lines: true, inS3: true },
function(err, doc) {
if (err != null) {
return callback(err)
}
return callback(err, doc)
}
)
},
getAllNonDeletedDocs(project_id, filter, callback) {
if (callback == null) {
callback = function(error, docs) {}
}
return DocArchive.unArchiveAllDocs(project_id, function(error) {
if (error != null) {
return callback(error)
}
return MongoManager.getProjectsDocs(
project_id,
{ include_deleted: false },
filter,
function(error, docs) {
if (typeof err !== 'undefined' && err !== null) {
return callback(error)
} else if (docs == null) {
return callback(
new Errors.NotFoundError(`No docs for project ${project_id}`)
)
} else {
return callback(null, docs)
}
}
)
})
},
// Fetch only a doc's lines, unarchiving it from S3 first when necessary.
getDocLines(project_id, doc_id, callback){
if (callback == null) { callback = function(err, doc){}; }
return DocManager._getDoc(project_id, doc_id, {lines:true, inS3:true}, function(err, doc){
if (err != null) {
return callback(err);
}
return callback(err, doc);
});
},
updateDoc(project_id, doc_id, lines, version, ranges, callback) {
if (callback == null) {
callback = function(error, modified, rev) {}
}
if (lines == null || version == null || ranges == null) {
return callback(new Error('no lines, version or ranges provided'))
}
getAllNonDeletedDocs(project_id, filter, callback) {
if (callback == null) { callback = function(error, docs) {}; }
return DocArchive.unArchiveAllDocs(project_id, function(error) {
if (error != null) {
return callback(error);
}
return MongoManager.getProjectsDocs(project_id, {include_deleted: false}, filter, function(error, docs) {
if (typeof err !== 'undefined' && err !== null) {
return callback(error);
} else if ((docs == null)) {
return callback(new Errors.NotFoundError(`No docs for project ${project_id}`));
} else {
return callback(null, docs);
}
});
});
},
return DocManager._getDoc(
project_id,
doc_id,
{
version: true,
rev: true,
lines: true,
version: true,
ranges: true,
inS3: true
},
function(err, doc) {
let updateLines, updateRanges, updateVersion
if (err != null && !(err instanceof Errors.NotFoundError)) {
logger.err(
{ project_id, doc_id, err },
'error getting document for update'
)
return callback(err)
}
updateDoc(project_id, doc_id, lines, version, ranges, callback) {
if (callback == null) { callback = function(error, modified, rev) {}; }
if ((lines == null) || (version == null) || (ranges == null)) {
return callback(new Error("no lines, version or ranges provided"));
}
return DocManager._getDoc(project_id, doc_id, {version: true, rev: true, lines: true, version: true, ranges: true, inS3:true}, function(err, doc){
let updateLines, updateRanges, updateVersion;
if ((err != null) && !(err instanceof Errors.NotFoundError)) {
logger.err({project_id, doc_id, err}, "error getting document for update");
return callback(err);
}
ranges = RangeManager.jsonRangesToMongo(ranges);
ranges = RangeManager.jsonRangesToMongo(ranges)
if ((doc == null)) {
// If the document doesn't exist, we'll make sure to create/update all parts of it.
updateLines = true;
updateVersion = true;
updateRanges = true;
} else {
updateLines = !_.isEqual(doc.lines, lines);
updateVersion = (doc.version !== version);
updateRanges = RangeManager.shouldUpdateRanges(doc.ranges, ranges);
}
let modified = false;
let rev = (doc != null ? doc.rev : undefined) || 0;
if (doc == null) {
// If the document doesn't exist, we'll make sure to create/update all parts of it.
updateLines = true
updateVersion = true
updateRanges = true
} else {
updateLines = !_.isEqual(doc.lines, lines)
updateVersion = doc.version !== version
updateRanges = RangeManager.shouldUpdateRanges(doc.ranges, ranges)
}
const updateLinesAndRangesIfNeeded = function(cb) {
if (updateLines || updateRanges) {
const update = {};
if (updateLines) {
update.lines = lines;
}
if (updateRanges) {
update.ranges = ranges;
}
logger.log({ project_id, doc_id }, "updating doc lines and ranges");
modified = true;
rev += 1; // rev will be incremented in mongo by MongoManager.upsertIntoDocCollection
return MongoManager.upsertIntoDocCollection(project_id, doc_id, update, cb);
} else {
logger.log({ project_id, doc_id, }, "doc lines have not changed - not updating");
return cb();
}
};
const updateVersionIfNeeded = function(cb) {
if (updateVersion) {
logger.log({ project_id, doc_id, oldVersion: (doc != null ? doc.version : undefined), newVersion: version }, "updating doc version");
modified = true;
return MongoManager.setDocVersion(doc_id, version, cb);
} else {
logger.log({ project_id, doc_id, version }, "doc version has not changed - not updating");
return cb();
}
};
return updateLinesAndRangesIfNeeded(function(error) {
if (error != null) { return callback(error); }
return updateVersionIfNeeded(function(error) {
if (error != null) { return callback(error); }
return callback(null, modified, rev);
});
});
});
},
let modified = false
let rev = (doc != null ? doc.rev : undefined) || 0
// Soft-delete a doc: verify it exists, then mark it as deleted in Mongo.
// Fails with NotFoundError when the project/doc pair is unknown.
deleteDoc(project_id, doc_id, callback) {
if (callback == null) { callback = function(error) {}; }
return DocManager.checkDocExists(project_id, doc_id, function(error, exists) {
if (error != null) { return callback(error); }
if (!exists) { return callback(new Errors.NotFoundError(`No such project/doc to delete: ${project_id}/${doc_id}`)); }
return MongoManager.markDocAsDeleted(project_id, doc_id, callback);
});
}
});
const updateLinesAndRangesIfNeeded = function(cb) {
if (updateLines || updateRanges) {
const update = {}
if (updateLines) {
update.lines = lines
}
if (updateRanges) {
update.ranges = ranges
}
logger.log({ project_id, doc_id }, 'updating doc lines and ranges')
modified = true
rev += 1 // rev will be incremented in mongo by MongoManager.upsertIntoDocCollection
return MongoManager.upsertIntoDocCollection(
project_id,
doc_id,
update,
cb
)
} else {
logger.log(
{ project_id, doc_id },
'doc lines have not changed - not updating'
)
return cb()
}
}
const updateVersionIfNeeded = function(cb) {
if (updateVersion) {
logger.log(
{
project_id,
doc_id,
oldVersion: doc != null ? doc.version : undefined,
newVersion: version
},
'updating doc version'
)
modified = true
return MongoManager.setDocVersion(doc_id, version, cb)
} else {
logger.log(
{ project_id, doc_id, version },
'doc version has not changed - not updating'
)
return cb()
}
}
return updateLinesAndRangesIfNeeded(function(error) {
if (error != null) {
return callback(error)
}
return updateVersionIfNeeded(function(error) {
if (error != null) {
return callback(error)
}
return callback(null, modified, rev)
})
})
}
)
},
deleteDoc(project_id, doc_id, callback) {
if (callback == null) {
callback = function(error) {}
}
return DocManager.checkDocExists(project_id, doc_id, function(
error,
exists
) {
if (error != null) {
return callback(error)
}
if (!exists) {
return callback(
new Errors.NotFoundError(
`No such project/doc to delete: ${project_id}/${doc_id}`
)
)
}
return MongoManager.markDocAsDeleted(project_id, doc_id, callback)
})
}
}

View file

@ -4,15 +4,13 @@
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
let Errors;
let Errors
// Factory-style error constructor: builds a plain Error and re-brands it
// as a NotFoundError, so it works with or without `new`.
var NotFoundError = function(message) {
const error = new Error(message);
error.name = "NotFoundError";
// Re-parent the instance so `error instanceof NotFoundError` holds.
error.__proto__ = NotFoundError.prototype;
return error;
};
// Chain the prototype so NotFoundError instances are also Errors.
NotFoundError.prototype.__proto__ = Error.prototype;
module.exports = (Errors =
{NotFoundError});
const error = new Error(message)
error.name = 'NotFoundError'
error.__proto__ = NotFoundError.prototype
return error
}
NotFoundError.prototype.__proto__ = Error.prototype
module.exports = Errors = { NotFoundError }

View file

@ -10,56 +10,59 @@
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const {db, ObjectId} = require("./mongojs");
const request = require("request");
const async = require("async");
const _ = require("underscore");
const crypto = require("crypto");
const settings = require("settings-sharelatex");
const {
port
} = settings.internal.docstore;
const logger = require("logger-sharelatex");
const { db, ObjectId } = require('./mongojs')
const request = require('request')
const async = require('async')
const _ = require('underscore')
const crypto = require('crypto')
const settings = require('settings-sharelatex')
const { port } = settings.internal.docstore
const logger = require('logger-sharelatex')
// Health check for the docstore HTTP API: POST a throw-away doc to the
// configured health-check project, GET it back and compare, then clean up
// the Mongo records. Steps run strictly in order via async.series.
module.exports = {
check(callback){
const doc_id = ObjectId();
const project_id = ObjectId(settings.docstore.healthCheck.project_id);
const url = `http://localhost:${port}/project/${project_id}/doc/${doc_id}`;
// Random content so each run verifies a fresh round-trip.
const lines = ["smoke test - delete me", `${crypto.randomBytes(32).toString("hex")}`];
const getOpts = () => ({
url,
timeout:3000
});
logger.log({lines, url, doc_id, project_id}, "running health check");
const jobs = [
// 1. Write the doc through the HTTP API.
function(cb){
const opts = getOpts();
opts.json = {lines, version: 42, ranges: {}};
return request.post(opts, cb);
},
// 2. Read it back and verify status, lines and id.
function(cb){
const opts = getOpts();
opts.json = true;
return request.get(opts, function(err, res, body){
if (err != null) {
logger.err({err}, "docstore returned a error in health check get");
return cb(err);
} else if ((res == null)) {
return cb("no response from docstore with get check");
} else if ((res != null ? res.statusCode : undefined) !== 200) {
return cb(`status code not 200, its ${res.statusCode}`);
} else if (_.isEqual(body != null ? body.lines : undefined, lines) && ((body != null ? body._id : undefined) === doc_id.toString())) {
return cb();
} else {
return cb(`health check lines not equal ${body.lines} != ${lines}`);
}
});
},
// 3. Remove the doc and its docOps record directly from Mongo.
cb => db.docs.remove({_id: doc_id, project_id}, cb),
cb => db.docOps.remove({doc_id}, cb)
];
return async.series(jobs, callback);
}
};
// Health check for the docstore HTTP API: POST a throw-away doc to the
// configured health-check project, GET it back and compare, then clean up
// the Mongo records. Steps run strictly in order via async.series.
module.exports = {
check(callback) {
const doc_id = ObjectId()
const project_id = ObjectId(settings.docstore.healthCheck.project_id)
const url = `http://localhost:${port}/project/${project_id}/doc/${doc_id}`
// Random content so each run verifies a fresh round-trip.
const lines = [
'smoke test - delete me',
`${crypto.randomBytes(32).toString('hex')}`
]
const getOpts = () => ({
url,
timeout: 3000
})
logger.log({ lines, url, doc_id, project_id }, 'running health check')
const jobs = [
// 1. Write the doc through the HTTP API.
function(cb) {
const opts = getOpts()
opts.json = { lines, version: 42, ranges: {} }
return request.post(opts, cb)
},
// 2. Read it back and verify status, lines and id.
function(cb) {
const opts = getOpts()
opts.json = true
return request.get(opts, function(err, res, body) {
if (err != null) {
logger.err({ err }, 'docstore returned a error in health check get')
return cb(err)
} else if (res == null) {
return cb('no response from docstore with get check')
} else if ((res != null ? res.statusCode : undefined) !== 200) {
return cb(`status code not 200, its ${res.statusCode}`)
} else if (
_.isEqual(body != null ? body.lines : undefined, lines) &&
(body != null ? body._id : undefined) === doc_id.toString()
) {
return cb()
} else {
return cb(`health check lines not equal ${body.lines} != ${lines}`)
}
})
},
// 3. Remove the doc and its docOps record directly from Mongo.
cb => db.docs.remove({ _id: doc_id, project_id }, cb),
cb => db.docOps.remove({ doc_id }, cb)
]
return async.series(jobs, callback)
}
}

View file

@ -12,220 +12,252 @@
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let HttpController;
const DocManager = require("./DocManager");
const logger = require("logger-sharelatex");
const DocArchive = require("./DocArchiveManager");
const HealthChecker = require("./HealthChecker");
const Settings = require("settings-sharelatex");
let HttpController
const DocManager = require('./DocManager')
const logger = require('logger-sharelatex')
const DocArchive = require('./DocArchiveManager')
const HealthChecker = require('./HealthChecker')
const Settings = require('settings-sharelatex')
module.exports = HttpController = {
getDoc(req, res, next) {
if (next == null) {
next = function(error) {}
}
const { project_id } = req.params
const { doc_id } = req.params
const include_deleted =
(req.query != null ? req.query.include_deleted : undefined) === 'true'
logger.log({ project_id, doc_id }, 'getting doc')
return DocManager.getFullDoc(project_id, doc_id, function(error, doc) {
if (error != null) {
return next(error)
}
logger.log({ doc_id, project_id }, 'got doc')
if (doc == null) {
return res.send(404)
} else if (doc.deleted && !include_deleted) {
return res.send(404)
} else {
return res.json(HttpController._buildDocView(doc))
}
})
},
module.exports = (HttpController = {
// GET a doc as JSON. Deleted docs 404 unless ?include_deleted=true.
getDoc(req, res, next) {
if (next == null) { next = function(error) {}; }
const {
project_id
} = req.params;
const {
doc_id
} = req.params;
const include_deleted = (req.query != null ? req.query.include_deleted : undefined) === "true";
logger.log({project_id, doc_id}, "getting doc");
return DocManager.getFullDoc(project_id, doc_id, function(error, doc) {
if (error != null) { return next(error); }
logger.log({doc_id, project_id}, "got doc");
if ((doc == null)) {
return res.send(404);
} else if (doc.deleted && !include_deleted) {
// Soft-deleted docs are hidden unless explicitly requested.
return res.send(404);
} else {
return res.json(HttpController._buildDocView(doc));
}
});
},
getRawDoc(req, res, next) {
if (next == null) {
next = function(error) {}
}
const { project_id } = req.params
const { doc_id } = req.params
logger.log({ project_id, doc_id }, 'getting raw doc')
return DocManager.getDocLines(project_id, doc_id, function(error, doc) {
if (error != null) {
return next(error)
}
if (doc == null) {
return res.send(404)
} else {
res.setHeader('content-type', 'text/plain')
return res.send(HttpController._buildRawDocView(doc))
}
})
},
// GET a doc's content as plain text rather than JSON.
getRawDoc(req, res, next){
if (next == null) { next = function(error){}; }
const {
project_id
} = req.params;
const {
doc_id
} = req.params;
logger.log({project_id, doc_id}, "getting raw doc");
return DocManager.getDocLines(project_id, doc_id, function(error, doc) {
if (error != null) { return next(error); }
if ((doc == null)) {
return res.send(404);
} else {
res.setHeader('content-type', 'text/plain');
return res.send(HttpController._buildRawDocView(doc));
}
});
},
getAllDocs(req, res, next) {
  // List every non-deleted doc in the project, with lines and rev only.
  if (next == null) {
    next = function(error) {}
  }
  const { project_id } = req.params
  logger.log({ project_id }, 'getting all docs')
  return DocManager.getAllNonDeletedDocs(
    project_id,
    { lines: true, rev: true },
    function(error, docs) {
      // Default to an empty list so a missing result still serializes.
      if (docs == null) {
        docs = []
      }
      if (error != null) {
        return next(error)
      }
      return res.json(HttpController._buildDocsArrayView(project_id, docs))
    }
  )
},
getAllDocs(req, res, next) {
if (next == null) { next = function(error) {}; }
const {
project_id
} = req.params;
logger.log({project_id}, "getting all docs");
return DocManager.getAllNonDeletedDocs(project_id, {lines: true, rev: true}, function(error, docs) {
if (docs == null) { docs = []; }
if (error != null) { return next(error); }
return res.json(HttpController._buildDocsArrayView(project_id, docs));
});
},
getAllRanges(req, res, next) {
if (next == null) { next = function(error) {}; }
const {
project_id
} = req.params;
logger.log({project_id}, "getting all ranges");
return DocManager.getAllNonDeletedDocs(project_id, {ranges: true}, function(error, docs) {
if (docs == null) { docs = []; }
if (error != null) { return next(error); }
return res.json(HttpController._buildDocsArrayView(project_id, docs));
});
},
getAllRanges(req, res, next) {
if (next == null) {
next = function(error) {}
}
const { project_id } = req.params
logger.log({ project_id }, 'getting all ranges')
return DocManager.getAllNonDeletedDocs(
project_id,
{ ranges: true },
function(error, docs) {
if (docs == null) {
docs = []
}
if (error != null) {
return next(error)
}
return res.json(HttpController._buildDocsArrayView(project_id, docs))
}
)
},
updateDoc(req, res, next) {
if (next == null) { next = function(error) {}; }
const {
project_id
} = req.params;
const {
doc_id
} = req.params;
const lines = req.body != null ? req.body.lines : undefined;
const version = req.body != null ? req.body.version : undefined;
const ranges = req.body != null ? req.body.ranges : undefined;
updateDoc(req, res, next) {
if (next == null) {
next = function(error) {}
}
const { project_id } = req.params
const { doc_id } = req.params
const lines = req.body != null ? req.body.lines : undefined
const version = req.body != null ? req.body.version : undefined
const ranges = req.body != null ? req.body.ranges : undefined
if ((lines == null) || !(lines instanceof Array)) {
logger.error({project_id, doc_id}, "no doc lines provided");
res.send(400); // Bad Request
return;
}
if ((version == null) || (typeof version === !"number")) {
logger.error({project_id, doc_id}, "no doc version provided");
res.send(400); // Bad Request
return;
}
if ((ranges == null)) {
logger.error({project_id, doc_id}, "no doc ranges provided");
res.send(400); // Bad Request
return;
}
if (lines == null || !(lines instanceof Array)) {
logger.error({ project_id, doc_id }, 'no doc lines provided')
res.send(400) // Bad Request
return
}
const bodyLength = lines.reduce(
(len, line) => line.length + len,
0
);
if (bodyLength > Settings.max_doc_length) {
logger.error({project_id, doc_id, bodyLength}, "document body too large");
res.status(413).send("document body too large");
return;
}
if (version == null || typeof version === !'number') {
logger.error({ project_id, doc_id }, 'no doc version provided')
res.send(400) // Bad Request
return
}
logger.log({project_id, doc_id}, "got http request to update doc");
return DocManager.updateDoc(project_id, doc_id, lines, version, ranges, function(error, modified, rev) {
if (error != null) { return next(error); }
return res.json({
modified,
rev
});
});
},
if (ranges == null) {
logger.error({ project_id, doc_id }, 'no doc ranges provided')
res.send(400) // Bad Request
return
}
deleteDoc(req, res, next) {
if (next == null) { next = function(error) {}; }
const {
project_id
} = req.params;
const {
doc_id
} = req.params;
logger.log({project_id, doc_id}, "deleting doc");
return DocManager.deleteDoc(project_id, doc_id, function(error) {
if (error != null) { return next(error); }
return res.send(204);
});
},
const bodyLength = lines.reduce((len, line) => line.length + len, 0)
if (bodyLength > Settings.max_doc_length) {
logger.error(
{ project_id, doc_id, bodyLength },
'document body too large'
)
res.status(413).send('document body too large')
return
}
_buildDocView(doc) {
const doc_view = { _id: (doc._id != null ? doc._id.toString() : undefined) };
for (const attribute of ["lines", "rev", "version", "ranges", "deleted"]) {
if (doc[attribute] != null) {
doc_view[attribute] = doc[attribute];
}
}
return doc_view;
},
logger.log({ project_id, doc_id }, 'got http request to update doc')
return DocManager.updateDoc(
project_id,
doc_id,
lines,
version,
ranges,
function(error, modified, rev) {
if (error != null) {
return next(error)
}
return res.json({
modified,
rev
})
}
)
},
_buildRawDocView(doc){
return ((doc != null ? doc.lines : undefined) || []).join("\n");
},
_buildDocsArrayView(project_id, docs) {
const docViews = [];
for (const doc of Array.from(docs)) {
if (doc != null) { // There can end up being null docs for some reason :( (probably a race condition)
docViews.push(HttpController._buildDocView(doc));
} else {
logger.error({err: new Error("null doc"), project_id}, "encountered null doc");
}
}
return docViews;
},
deleteDoc(req, res, next) {
if (next == null) {
next = function(error) {}
}
const { project_id } = req.params
const { doc_id } = req.params
logger.log({ project_id, doc_id }, 'deleting doc')
return DocManager.deleteDoc(project_id, doc_id, function(error) {
if (error != null) {
return next(error)
}
return res.send(204)
})
},
archiveAllDocs(req, res, next) {
if (next == null) { next = function(error) {}; }
const {
project_id
} = req.params;
logger.log({project_id}, "archiving all docs");
return DocArchive.archiveAllDocs(project_id, function(error) {
if (error != null) { return next(error); }
return res.send(204);
});
},
_buildDocView(doc) {
const doc_view = { _id: doc._id != null ? doc._id.toString() : undefined }
for (const attribute of ['lines', 'rev', 'version', 'ranges', 'deleted']) {
if (doc[attribute] != null) {
doc_view[attribute] = doc[attribute]
}
}
return doc_view
},
unArchiveAllDocs(req, res, next) {
if (next == null) { next = function(error) {}; }
const {
project_id
} = req.params;
logger.log({project_id}, "unarchiving all docs");
return DocArchive.unArchiveAllDocs(project_id, function(error) {
if (error != null) { return next(error); }
return res.send(200);
});
},
_buildRawDocView(doc) {
return ((doc != null ? doc.lines : undefined) || []).join('\n')
},
destroyAllDocs(req, res, next) {
if (next == null) { next = function(error) {}; }
const {
project_id
} = req.params;
logger.log({project_id}, "destroying all docs");
return DocArchive.destroyAllDocs(project_id, function(error) {
if (error != null) { return next(error); }
return res.send(204);
});
},
_buildDocsArrayView(project_id, docs) {
const docViews = []
for (const doc of Array.from(docs)) {
if (doc != null) {
// There can end up being null docs for some reason :( (probably a race condition)
docViews.push(HttpController._buildDocView(doc))
} else {
logger.error(
{ err: new Error('null doc'), project_id },
'encountered null doc'
)
}
}
return docViews
},
healthCheck(req, res){
return HealthChecker.check(function(err){
if (err != null) {
logger.err({err}, "error performing health check");
return res.send(500);
} else {
return res.send(200);
}
});
}
});
archiveAllDocs(req, res, next) {
if (next == null) {
next = function(error) {}
}
const { project_id } = req.params
logger.log({ project_id }, 'archiving all docs')
return DocArchive.archiveAllDocs(project_id, function(error) {
if (error != null) {
return next(error)
}
return res.send(204)
})
},
unArchiveAllDocs(req, res, next) {
if (next == null) {
next = function(error) {}
}
const { project_id } = req.params
logger.log({ project_id }, 'unarchiving all docs')
return DocArchive.unArchiveAllDocs(project_id, function(error) {
if (error != null) {
return next(error)
}
return res.send(200)
})
},
destroyAllDocs(req, res, next) {
if (next == null) {
next = function(error) {}
}
const { project_id } = req.params
logger.log({ project_id }, 'destroying all docs')
return DocArchive.destroyAllDocs(project_id, function(error) {
if (error != null) {
return next(error)
}
return res.send(204)
})
},
healthCheck(req, res) {
  // Run the service health check: 200 when healthy, 500 (with the error
  // logged) otherwise.
  return HealthChecker.check(function(err) {
    if (err != null) {
      logger.err({ err }, 'error performing health check')
      return res.send(500)
    } else {
      return res.send(200)
    }
  })
}
}

View file

@ -10,122 +10,167 @@
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let MongoManager;
const {db, ObjectId} = require("./mongojs");
const logger = require('logger-sharelatex');
const metrics = require('metrics-sharelatex');
let MongoManager
const { db, ObjectId } = require('./mongojs')
const logger = require('logger-sharelatex')
const metrics = require('metrics-sharelatex')
module.exports = (MongoManager = {
module.exports = MongoManager = {
findDoc(project_id, doc_id, filter, callback) {
  // Look up a single doc by id, scoped to its project. `filter` is a
  // Mongo projection choosing which fields to return.
  if (callback == null) {
    callback = function(error, doc) {}
  }
  return db.docs.find(
    {
      _id: ObjectId(doc_id.toString()),
      project_id: ObjectId(project_id.toString())
    },
    filter,
    function(error, docs) {
      if (docs == null) {
        docs = []
      }
      // _id is unique, so the first match is the doc (or undefined).
      return callback(error, docs[0])
    }
  )
},
findDoc(project_id, doc_id, filter, callback) {
if (callback == null) { callback = function(error, doc) {}; }
return db.docs.find({_id: ObjectId(doc_id.toString()), project_id: ObjectId(project_id.toString())}, filter, function(error, docs) {
if (docs == null) { docs = []; }
return callback(error, docs[0]);
});
},
getProjectsDocs(project_id, options, filter, callback) {
if (options == null) {
options = { include_deleted: true }
}
const query = { project_id: ObjectId(project_id.toString()) }
if (!options.include_deleted) {
query.deleted = { $ne: true }
}
return db.docs.find(query, filter, callback)
},
getProjectsDocs(project_id, options, filter, callback){
if (options == null) { options = {include_deleted: true}; }
const query = {project_id: ObjectId(project_id.toString())};
if (!options.include_deleted) {
query.deleted = { $ne: true };
}
return db.docs.find(query, filter, callback);
},
getArchivedProjectDocs(project_id, callback) {
const query = {
project_id: ObjectId(project_id.toString()),
inS3: true
}
return db.docs.find(query, {}, callback)
},
getArchivedProjectDocs(project_id, callback){
const query = {
project_id: ObjectId(project_id.toString()),
inS3: true
};
return db.docs.find(query, {}, callback);
},
upsertIntoDocCollection(project_id, doc_id, updates, callback) {
const update = {
$set: updates,
$inc: {
rev: 1
},
$unset: {
inS3: true
}
}
update.$set.project_id = ObjectId(project_id)
return db.docs.update(
{ _id: ObjectId(doc_id) },
update,
{ upsert: true },
callback
)
},
upsertIntoDocCollection(project_id, doc_id, updates, callback){
const update = {
$set: updates,
$inc: {
rev: 1
},
$unset: {
inS3: true
}
};
update.$set.project_id = ObjectId(project_id);
return db.docs.update({_id: ObjectId(doc_id)}, update, {upsert: true}, callback);
},
markDocAsDeleted(project_id, doc_id, callback) {
  // Soft delete: flag the doc rather than removing it, so it can still
  // be fetched with include_deleted and is excluded from normal listings.
  return db.docs.update(
    {
      _id: ObjectId(doc_id),
      project_id: ObjectId(project_id)
    },
    {
      $set: { deleted: true }
    },
    callback
  )
},
markDocAsDeleted(project_id, doc_id, callback){
return db.docs.update({
_id: ObjectId(doc_id),
project_id: ObjectId(project_id)
}, {
$set: { deleted: true }
}, callback);
},
markDocAsArchived(doc_id, rev, callback) {
const update = {
$set: {},
$unset: {}
}
update.$set.inS3 = true
update.$unset.lines = true
update.$unset.ranges = true
const query = {
_id: doc_id,
rev
}
return db.docs.update(query, update, err => callback(err))
},
markDocAsArchived(doc_id, rev, callback){
const update = {
$set: {},
$unset: {}
};
update.$set.inS3 = true;
update.$unset.lines = true;
update.$unset.ranges = true;
const query = {
_id: doc_id,
rev
};
return db.docs.update(query, update, err => callback(err));
},
getDocVersion(doc_id, callback) {
if (callback == null) { callback = function(error, version) {}; }
return db.docOps.find({
doc_id: ObjectId(doc_id)
}, {
version: 1
}, function(error, docs) {
if (error != null) { return callback(error); }
if ((docs.length < 1) || (docs[0].version == null)) {
return callback(null, 0);
} else {
return callback(null, docs[0].version);
}
});
},
getDocVersion(doc_id, callback) {
  // Read the doc's version from the docOps collection. Docs with no
  // docOps record (or no version field) are reported as version 0.
  if (callback == null) {
    callback = function(error, version) {}
  }
  return db.docOps.find(
    {
      doc_id: ObjectId(doc_id)
    },
    {
      version: 1
    },
    function(error, docs) {
      if (error != null) {
        return callback(error)
      }
      if (docs.length < 1 || docs[0].version == null) {
        return callback(null, 0)
      } else {
        return callback(null, docs[0].version)
      }
    }
  )
},
setDocVersion(doc_id, version, callback) {
if (callback == null) { callback = function(error) {}; }
return db.docOps.update({
doc_id: ObjectId(doc_id)
}, {
$set: { version
}
}, {
upsert: true
}, callback);
},
setDocVersion(doc_id, version, callback) {
  // Upsert the doc's version into the docOps collection, creating the
  // record if this doc has never had a version written before.
  if (callback == null) {
    callback = function(error) {}
  }
  return db.docOps.update(
    {
      doc_id: ObjectId(doc_id)
    },
    {
      $set: { version }
    },
    {
      upsert: true
    },
    callback
  )
},
destroyDoc(doc_id, callback) {
return db.docs.remove({
_id: ObjectId(doc_id)
}, function(err) {
if (err != null) { return callback(err); }
return db.docOps.remove({
doc_id: ObjectId(doc_id)
}, callback);
});
}
});
destroyDoc(doc_id, callback) {
return db.docs.remove(
{
_id: ObjectId(doc_id)
},
function(err) {
if (err != null) {
return callback(err)
}
return db.docOps.remove(
{
doc_id: ObjectId(doc_id)
},
callback
)
}
)
}
}
[
'findDoc',
'getProjectsDocs',
'getArchivedProjectDocs',
'upsertIntoDocCollection',
'markDocAsArchived',
'getDocVersion',
'setDocVersion'
].map(method => metrics.timeAsyncMethod(MongoManager, method, 'mongo.MongoManager', logger));
;[
'findDoc',
'getProjectsDocs',
'getArchivedProjectDocs',
'upsertIntoDocCollection',
'markDocAsArchived',
'getDocVersion',
'setDocVersion'
].map(method =>
metrics.timeAsyncMethod(MongoManager, method, 'mongo.MongoManager', logger)
)

View file

@ -11,57 +11,59 @@
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let RangeManager;
const _ = require("underscore");
const {ObjectId} = require("./mongojs");
let RangeManager
const _ = require('underscore')
const { ObjectId } = require('./mongojs')
module.exports = (RangeManager = {
shouldUpdateRanges(doc_ranges, incoming_ranges) {
if ((incoming_ranges == null)) {
throw new Error("expected incoming_ranges");
}
module.exports = RangeManager = {
shouldUpdateRanges(doc_ranges, incoming_ranges) {
if (incoming_ranges == null) {
throw new Error('expected incoming_ranges')
}
// If the ranges are empty, we don't store them in the DB, so set
// doc_ranges to an empty object as default, since this is what the
// incoming_ranges will be for an empty range set.
if ((doc_ranges == null)) {
doc_ranges = {};
}
// If the ranges are empty, we don't store them in the DB, so set
// doc_ranges to an empty object as default, since this is what the
// incoming_ranges will be for an empty range set.
if (doc_ranges == null) {
doc_ranges = {}
}
return !_.isEqual(doc_ranges, incoming_ranges);
},
jsonRangesToMongo(ranges) {
if ((ranges == null)) { return null; }
const updateMetadata = function(metadata) {
if ((metadata != null ? metadata.ts : undefined) != null) {
metadata.ts = new Date(metadata.ts);
}
if ((metadata != null ? metadata.user_id : undefined) != null) {
return metadata.user_id = RangeManager._safeObjectId(metadata.user_id);
}
};
for (const change of Array.from(ranges.changes || [])) {
change.id = RangeManager._safeObjectId(change.id);
updateMetadata(change.metadata);
}
for (const comment of Array.from(ranges.comments || [])) {
comment.id = RangeManager._safeObjectId(comment.id);
if ((comment.op != null ? comment.op.t : undefined) != null) {
comment.op.t = RangeManager._safeObjectId(comment.op.t);
}
updateMetadata(comment.metadata);
}
return ranges;
},
_safeObjectId(data) {
try {
return ObjectId(data);
} catch (error) {
return data;
}
}
});
return !_.isEqual(doc_ranges, incoming_ranges)
},
jsonRangesToMongo(ranges) {
  // Convert a JSON-decoded ranges payload into its Mongo representation
  // *in place*: id strings become ObjectIds (best-effort, via
  // _safeObjectId) and timestamp strings become Dates. Returns the
  // mutated ranges object, or null for null input.
  if (ranges == null) {
    return null
  }
  const updateMetadata = function(metadata) {
    if ((metadata != null ? metadata.ts : undefined) != null) {
      metadata.ts = new Date(metadata.ts)
    }
    if ((metadata != null ? metadata.user_id : undefined) != null) {
      return (metadata.user_id = RangeManager._safeObjectId(metadata.user_id))
    }
  }
  for (const change of Array.from(ranges.changes || [])) {
    change.id = RangeManager._safeObjectId(change.id)
    updateMetadata(change.metadata)
  }
  for (const comment of Array.from(ranges.comments || [])) {
    comment.id = RangeManager._safeObjectId(comment.id)
    // comment.op.t appears to be a thread/comment id — converted like ids.
    if ((comment.op != null ? comment.op.t : undefined) != null) {
      comment.op.t = RangeManager._safeObjectId(comment.op.t)
    }
    updateMetadata(comment.metadata)
  }
  return ranges
},
_safeObjectId(data) {
  // Best-effort ObjectId conversion: values that ObjectId rejects are
  // returned unchanged rather than throwing.
  try {
    return ObjectId(data)
  } catch (error) {
    return data
  }
}
}

View file

@ -1,10 +1,9 @@
// TODO: This file was created by bulk-decaffeinate.
// Sanity-check the conversion and remove this comment.
const Settings = require("settings-sharelatex");
const mongojs = require("mongojs");
const db = mongojs(Settings.mongo.url, ["docs", "docOps"]);
const Settings = require('settings-sharelatex')
const mongojs = require('mongojs')
const db = mongojs(Settings.mongo.url, ['docs', 'docOps'])
module.exports = {
db,
ObjectId: mongojs.ObjectId
};
db,
ObjectId: mongojs.ObjectId
}