decaffeinate: Convert DocArchiveManager.coffee and 7 other files to JS

This commit is contained in:
decaffeinate 2020-02-16 14:01:46 +00:00 committed by Simon Detheridge
parent 88badb15c4
commit afa2577381
8 changed files with 796 additions and 539 deletions

View file

@ -1,156 +1,208 @@
MongoManager = require "./MongoManager" /*
Errors = require "./Errors" * decaffeinate suggestions:
logger = require "logger-sharelatex" * DS102: Remove unnecessary code created because of implicit returns
_ = require "underscore" * DS207: Consider shorter variations of null checks
async = require "async" * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
settings = require("settings-sharelatex") */
request = require("request") let DocArchive;
crypto = require("crypto") const MongoManager = require("./MongoManager");
RangeManager = require("./RangeManager") const Errors = require("./Errors");
thirtySeconds = 30 * 1000 const logger = require("logger-sharelatex");
const _ = require("underscore");
const async = require("async");
const settings = require("settings-sharelatex");
const request = require("request");
const crypto = require("crypto");
const RangeManager = require("./RangeManager");
const thirtySeconds = 30 * 1000;
module.exports = DocArchive = module.exports = (DocArchive = {
archiveAllDocs: (project_id, callback = (err, docs) ->) -> archiveAllDocs(project_id, callback) {
MongoManager.getProjectsDocs project_id, {include_deleted: true}, {lines: true, ranges: true, rev: true, inS3: true}, (err, docs) -> if (callback == null) { callback = function(err, docs) {}; }
if err? return MongoManager.getProjectsDocs(project_id, {include_deleted: true}, {lines: true, ranges: true, rev: true, inS3: true}, function(err, docs) {
return callback(err) if (err != null) {
else if !docs? return callback(err);
return callback new Errors.NotFoundError("No docs for project #{project_id}") } else if ((docs == null)) {
docs = _.filter docs, (doc)-> doc.inS3 != true return callback(new Errors.NotFoundError(`No docs for project ${project_id}`));
jobs = _.map docs, (doc) -> }
(cb)-> docs = _.filter(docs, doc => doc.inS3 !== true);
DocArchive.archiveDoc project_id, doc, cb const jobs = _.map(docs, doc => cb => DocArchive.archiveDoc(project_id, doc, cb));
async.parallelLimit jobs, 5, callback return async.parallelLimit(jobs, 5, callback);
});
},
archiveDoc: (project_id, doc, callback)-> archiveDoc(project_id, doc, callback){
logger.log project_id: project_id, doc_id: doc._id, "sending doc to s3" let options;
try logger.log({project_id, doc_id: doc._id}, "sending doc to s3");
options = DocArchive.buildS3Options(project_id+"/"+doc._id) try {
catch e options = DocArchive.buildS3Options(project_id+"/"+doc._id);
return callback e } catch (e) {
DocArchive._mongoDocToS3Doc doc, (error, json_doc) -> return callback(e);
return callback(error) if error?
options.body = json_doc
options.headers =
'Content-Type': "application/json"
request.put options, (err, res) ->
if err? || res.statusCode != 200
logger.err err:err, res:res, project_id:project_id, doc_id: doc._id, statusCode: res?.statusCode, "something went wrong archiving doc in aws"
return callback new Error("Error in S3 request")
md5lines = crypto.createHash("md5").update(json_doc, "utf8").digest("hex")
md5response = res.headers.etag.toString().replace(/\"/g, '')
if md5lines != md5response
logger.err responseMD5:md5response, linesMD5:md5lines, project_id:project_id, doc_id: doc?._id, "err in response md5 from s3"
return callback new Error("Error in S3 md5 response")
MongoManager.markDocAsArchived doc._id, doc.rev, (err) ->
return callback(err) if err?
callback()
unArchiveAllDocs: (project_id, callback = (err) ->) ->
MongoManager.getArchivedProjectDocs project_id, (err, docs) ->
if err?
logger.err err:err, project_id:project_id, "error unarchiving all docs"
return callback(err)
else if !docs?
return callback new Errors.NotFoundError("No docs for project #{project_id}")
jobs = _.map docs, (doc) ->
(cb)->
if !doc.inS3?
return cb()
else
DocArchive.unarchiveDoc project_id, doc._id, cb
async.parallelLimit jobs, 5, callback
unarchiveDoc: (project_id, doc_id, callback)->
logger.log project_id: project_id, doc_id: doc_id, "getting doc from s3"
try
options = DocArchive.buildS3Options(project_id+"/"+doc_id)
catch e
return callback e
options.json = true
request.get options, (err, res, doc)->
if err? || res.statusCode != 200
logger.err err:err, res:res, project_id:project_id, doc_id:doc_id, "something went wrong unarchiving doc from aws"
return callback new Errors.NotFoundError("Error in S3 request")
DocArchive._s3DocToMongoDoc doc, (error, mongo_doc) ->
return callback(error) if error?
MongoManager.upsertIntoDocCollection project_id, doc_id.toString(), mongo_doc, (err) ->
return callback(err) if err?
logger.log project_id: project_id, doc_id: doc_id, "deleting doc from s3"
DocArchive._deleteDocFromS3 project_id, doc_id, callback
destroyAllDocs: (project_id, callback = (err) ->) ->
MongoManager.getProjectsDocs project_id, {include_deleted: true}, {_id: 1}, (err, docs) ->
if err?
logger.err err:err, project_id:project_id, "error getting project's docs"
return callback(err)
else if !docs?
return callback()
jobs = _.map docs, (doc) ->
(cb)->
DocArchive.destroyDoc(project_id, doc._id, cb)
async.parallelLimit jobs, 5, callback
destroyDoc: (project_id, doc_id, callback)->
logger.log project_id: project_id, doc_id: doc_id, "removing doc from mongo and s3"
MongoManager.findDoc project_id, doc_id, {inS3: 1}, (error, doc) ->
return callback error if error?
return callback new Errors.NotFoundError("Doc not found in Mongo") unless doc?
if doc.inS3 == true
DocArchive._deleteDocFromS3 project_id, doc_id, (err) ->
return err if err?
MongoManager.destroyDoc doc_id, callback
else
MongoManager.destroyDoc doc_id, callback
_deleteDocFromS3: (project_id, doc_id, callback) ->
try
options = DocArchive.buildS3Options(project_id+"/"+doc_id)
catch e
return callback e
options.json = true
request.del options, (err, res, body)->
if err? || res.statusCode != 204
logger.err err:err, res:res, project_id:project_id, doc_id:doc_id, "something went wrong deleting doc from aws"
return callback new Error("Error in S3 request")
callback()
_s3DocToMongoDoc: (doc, callback = (error, mongo_doc) ->) ->
mongo_doc = {}
if doc.schema_v == 1 and doc.lines?
mongo_doc.lines = doc.lines
if doc.ranges?
mongo_doc.ranges = RangeManager.jsonRangesToMongo(doc.ranges)
else if doc instanceof Array
mongo_doc.lines = doc
else
return callback(new Error("I don't understand the doc format in s3"))
return callback null, mongo_doc
_mongoDocToS3Doc: (doc, callback = (error, s3_doc) ->) ->
if !doc.lines?
return callback(new Error("doc has no lines"))
json = JSON.stringify({
lines: doc.lines
ranges: doc.ranges
schema_v: 1
})
if json.indexOf("\u0000") != -1
error = new Error("null bytes detected")
logger.err {err: error, doc, json}, error.message
return callback(error)
return callback null, json
buildS3Options: (key)->
if !settings.docstore.s3?
throw new Error("S3 settings are not configured")
return {
aws:
key: settings.docstore.s3.key
secret: settings.docstore.s3.secret
bucket: settings.docstore.s3.bucket
timeout: thirtySeconds
uri:"https://#{settings.docstore.s3.bucket}.s3.amazonaws.com/#{key}"
} }
return DocArchive._mongoDocToS3Doc(doc, function(error, json_doc) {
if (error != null) { return callback(error); }
options.body = json_doc;
options.headers =
{'Content-Type': "application/json"};
return request.put(options, function(err, res) {
if ((err != null) || (res.statusCode !== 200)) {
logger.err({err, res, project_id, doc_id: doc._id, statusCode: (res != null ? res.statusCode : undefined)}, "something went wrong archiving doc in aws");
return callback(new Error("Error in S3 request"));
}
const md5lines = crypto.createHash("md5").update(json_doc, "utf8").digest("hex");
const md5response = res.headers.etag.toString().replace(/\"/g, '');
if (md5lines !== md5response) {
logger.err({responseMD5:md5response, linesMD5:md5lines, project_id, doc_id: (doc != null ? doc._id : undefined)}, "err in response md5 from s3");
return callback(new Error("Error in S3 md5 response"));
}
return MongoManager.markDocAsArchived(doc._id, doc.rev, function(err) {
if (err != null) { return callback(err); }
return callback();
});
});
});
},
unArchiveAllDocs(project_id, callback) {
if (callback == null) { callback = function(err) {}; }
return MongoManager.getArchivedProjectDocs(project_id, function(err, docs) {
if (err != null) {
logger.err({err, project_id}, "error unarchiving all docs");
return callback(err);
} else if ((docs == null)) {
return callback(new Errors.NotFoundError(`No docs for project ${project_id}`));
}
const jobs = _.map(docs, doc => (function(cb) {
if ((doc.inS3 == null)) {
return cb();
} else {
return DocArchive.unarchiveDoc(project_id, doc._id, cb);
}
}));
return async.parallelLimit(jobs, 5, callback);
});
},
unarchiveDoc(project_id, doc_id, callback){
let options;
logger.log({project_id, doc_id}, "getting doc from s3");
try {
options = DocArchive.buildS3Options(project_id+"/"+doc_id);
} catch (e) {
return callback(e);
}
options.json = true;
return request.get(options, function(err, res, doc){
if ((err != null) || (res.statusCode !== 200)) {
logger.err({err, res, project_id, doc_id}, "something went wrong unarchiving doc from aws");
return callback(new Errors.NotFoundError("Error in S3 request"));
}
return DocArchive._s3DocToMongoDoc(doc, function(error, mongo_doc) {
if (error != null) { return callback(error); }
return MongoManager.upsertIntoDocCollection(project_id, doc_id.toString(), mongo_doc, function(err) {
if (err != null) { return callback(err); }
logger.log({project_id, doc_id}, "deleting doc from s3");
return DocArchive._deleteDocFromS3(project_id, doc_id, callback);
});
});
});
},
destroyAllDocs(project_id, callback) {
if (callback == null) { callback = function(err) {}; }
return MongoManager.getProjectsDocs(project_id, {include_deleted: true}, {_id: 1}, function(err, docs) {
if (err != null) {
logger.err({err, project_id}, "error getting project's docs");
return callback(err);
} else if ((docs == null)) {
return callback();
}
const jobs = _.map(docs, doc => cb => DocArchive.destroyDoc(project_id, doc._id, cb));
return async.parallelLimit(jobs, 5, callback);
});
},
destroyDoc(project_id, doc_id, callback){
logger.log({project_id, doc_id}, "removing doc from mongo and s3");
return MongoManager.findDoc(project_id, doc_id, {inS3: 1}, function(error, doc) {
if (error != null) { return callback(error); }
if (doc == null) { return callback(new Errors.NotFoundError("Doc not found in Mongo")); }
if (doc.inS3 === true) {
return DocArchive._deleteDocFromS3(project_id, doc_id, function(err) {
if (err != null) { return err; }
return MongoManager.destroyDoc(doc_id, callback);
});
} else {
return MongoManager.destroyDoc(doc_id, callback);
}
});
},
_deleteDocFromS3(project_id, doc_id, callback) {
let options;
try {
options = DocArchive.buildS3Options(project_id+"/"+doc_id);
} catch (e) {
return callback(e);
}
options.json = true;
return request.del(options, function(err, res, body){
if ((err != null) || (res.statusCode !== 204)) {
logger.err({err, res, project_id, doc_id}, "something went wrong deleting doc from aws");
return callback(new Error("Error in S3 request"));
}
return callback();
});
},
_s3DocToMongoDoc(doc, callback) {
if (callback == null) { callback = function(error, mongo_doc) {}; }
const mongo_doc = {};
if ((doc.schema_v === 1) && (doc.lines != null)) {
mongo_doc.lines = doc.lines;
if (doc.ranges != null) {
mongo_doc.ranges = RangeManager.jsonRangesToMongo(doc.ranges);
}
} else if (doc instanceof Array) {
mongo_doc.lines = doc;
} else {
return callback(new Error("I don't understand the doc format in s3"));
}
return callback(null, mongo_doc);
},
_mongoDocToS3Doc(doc, callback) {
if (callback == null) { callback = function(error, s3_doc) {}; }
if ((doc.lines == null)) {
return callback(new Error("doc has no lines"));
}
const json = JSON.stringify({
lines: doc.lines,
ranges: doc.ranges,
schema_v: 1
});
if (json.indexOf("\u0000") !== -1) {
const error = new Error("null bytes detected");
logger.err({err: error, doc, json}, error.message);
return callback(error);
}
return callback(null, json);
},
buildS3Options(key){
if ((settings.docstore.s3 == null)) {
throw new Error("S3 settings are not configured");
}
return {
aws: {
key: settings.docstore.s3.key,
secret: settings.docstore.s3.secret,
bucket: settings.docstore.s3.bucket
},
timeout: thirtySeconds,
uri:`https://${settings.docstore.s3.bucket}.s3.amazonaws.com/${key}`
};
}
});

View file

@ -1,130 +1,184 @@
MongoManager = require "./MongoManager" /*
Errors = require "./Errors" * decaffeinate suggestions:
logger = require "logger-sharelatex" * DS102: Remove unnecessary code created because of implicit returns
_ = require "underscore" * DS207: Consider shorter variations of null checks
DocArchive = require "./DocArchiveManager" * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
RangeManager = require "./RangeManager" */
let DocManager;
const MongoManager = require("./MongoManager");
const Errors = require("./Errors");
const logger = require("logger-sharelatex");
const _ = require("underscore");
const DocArchive = require("./DocArchiveManager");
const RangeManager = require("./RangeManager");
module.exports = DocManager = module.exports = (DocManager = {
# TODO: For historical reasons, the doc version is currently stored in the docOps // TODO: For historical reasons, the doc version is currently stored in the docOps
# collection (which is all that this collection contains). In future, we should // collection (which is all that this collection contains). In future, we should
# migrate this version property to be part of the docs collection, to guarantee // migrate this version property to be part of the docs collection, to guarantee
# consistency between lines and version when writing/reading, and for a simpler schema. // consistency between lines and version when writing/reading, and for a simpler schema.
_getDoc: (project_id, doc_id, filter = {}, callback = (error, doc) ->) -> _getDoc(project_id, doc_id, filter, callback) {
if filter.inS3 != true if (filter == null) { filter = {}; }
return callback("must include inS3 when getting doc") if (callback == null) { callback = function(error, doc) {}; }
if (filter.inS3 !== true) {
return callback("must include inS3 when getting doc");
}
MongoManager.findDoc project_id, doc_id, filter, (err, doc)-> return MongoManager.findDoc(project_id, doc_id, filter, function(err, doc){
if err? if (err != null) {
return callback(err) return callback(err);
else if !doc? } else if ((doc == null)) {
return callback new Errors.NotFoundError("No such doc: #{doc_id} in project #{project_id}") return callback(new Errors.NotFoundError(`No such doc: ${doc_id} in project ${project_id}`));
else if doc?.inS3 } else if ((doc != null ? doc.inS3 : undefined)) {
DocArchive.unarchiveDoc project_id, doc_id, (err)-> return DocArchive.unarchiveDoc(project_id, doc_id, function(err){
if err? if (err != null) {
logger.err err:err, project_id:project_id, doc_id:doc_id, "error unarchiving doc" logger.err({err, project_id, doc_id}, "error unarchiving doc");
return callback(err) return callback(err);
DocManager._getDoc project_id, doc_id, filter, callback }
else return DocManager._getDoc(project_id, doc_id, filter, callback);
if filter.version });
MongoManager.getDocVersion doc_id, (error, version) -> } else {
return callback(error) if error? if (filter.version) {
doc.version = version return MongoManager.getDocVersion(doc_id, function(error, version) {
callback err, doc if (error != null) { return callback(error); }
else doc.version = version;
callback err, doc return callback(err, doc);
});
} else {
return callback(err, doc);
}
}
});
},
checkDocExists: (project_id, doc_id, callback = (err, exists)->)-> checkDocExists(project_id, doc_id, callback){
DocManager._getDoc project_id, doc_id, {_id:1, inS3:true}, (err, doc)-> if (callback == null) { callback = function(err, exists){}; }
if err? return DocManager._getDoc(project_id, doc_id, {_id:1, inS3:true}, function(err, doc){
return callback(err) if (err != null) {
callback(err, doc?) return callback(err);
}
return callback(err, (doc != null));
});
},
getFullDoc: (project_id, doc_id, callback = (err, doc)->)-> getFullDoc(project_id, doc_id, callback){
DocManager._getDoc project_id, doc_id, {lines: true, rev: true, deleted: true, version: true, ranges: true, inS3:true}, (err, doc)-> if (callback == null) { callback = function(err, doc){}; }
if err? return DocManager._getDoc(project_id, doc_id, {lines: true, rev: true, deleted: true, version: true, ranges: true, inS3:true}, function(err, doc){
return callback(err) if (err != null) {
callback(err, doc) return callback(err);
}
return callback(err, doc);
});
},
getDocLines: (project_id, doc_id, callback = (err, doc)->)-> getDocLines(project_id, doc_id, callback){
DocManager._getDoc project_id, doc_id, {lines:true, inS3:true}, (err, doc)-> if (callback == null) { callback = function(err, doc){}; }
if err? return DocManager._getDoc(project_id, doc_id, {lines:true, inS3:true}, function(err, doc){
return callback(err) if (err != null) {
callback(err, doc) return callback(err);
}
return callback(err, doc);
});
},
getAllNonDeletedDocs: (project_id, filter, callback = (error, docs) ->) -> getAllNonDeletedDocs(project_id, filter, callback) {
DocArchive.unArchiveAllDocs project_id, (error) -> if (callback == null) { callback = function(error, docs) {}; }
if error? return DocArchive.unArchiveAllDocs(project_id, function(error) {
return callback(error) if (error != null) {
MongoManager.getProjectsDocs project_id, {include_deleted: false}, filter, (error, docs) -> return callback(error);
if err? }
return callback(error) return MongoManager.getProjectsDocs(project_id, {include_deleted: false}, filter, function(error, docs) {
else if !docs? if (typeof err !== 'undefined' && err !== null) {
return callback new Errors.NotFoundError("No docs for project #{project_id}") return callback(error);
else } else if ((docs == null)) {
return callback(null, docs) return callback(new Errors.NotFoundError(`No docs for project ${project_id}`));
} else {
return callback(null, docs);
}
});
});
},
updateDoc: (project_id, doc_id, lines, version, ranges, callback = (error, modified, rev) ->) -> updateDoc(project_id, doc_id, lines, version, ranges, callback) {
if !lines? or !version? or !ranges? if (callback == null) { callback = function(error, modified, rev) {}; }
return callback(new Error("no lines, version or ranges provided")) if ((lines == null) || (version == null) || (ranges == null)) {
return callback(new Error("no lines, version or ranges provided"));
}
DocManager._getDoc project_id, doc_id, {version: true, rev: true, lines: true, version: true, ranges: true, inS3:true}, (err, doc)-> return DocManager._getDoc(project_id, doc_id, {version: true, rev: true, lines: true, version: true, ranges: true, inS3:true}, function(err, doc){
if err? and !(err instanceof Errors.NotFoundError) let updateLines, updateRanges, updateVersion;
logger.err project_id: project_id, doc_id: doc_id, err:err, "error getting document for update" if ((err != null) && !(err instanceof Errors.NotFoundError)) {
return callback(err) logger.err({project_id, doc_id, err}, "error getting document for update");
return callback(err);
}
ranges = RangeManager.jsonRangesToMongo(ranges) ranges = RangeManager.jsonRangesToMongo(ranges);
if !doc? if ((doc == null)) {
# If the document doesn't exist, we'll make sure to create/update all parts of it. // If the document doesn't exist, we'll make sure to create/update all parts of it.
updateLines = true updateLines = true;
updateVersion = true updateVersion = true;
updateRanges = true updateRanges = true;
else } else {
updateLines = not _.isEqual(doc.lines, lines) updateLines = !_.isEqual(doc.lines, lines);
updateVersion = (doc.version != version) updateVersion = (doc.version !== version);
updateRanges = RangeManager.shouldUpdateRanges(doc.ranges, ranges) updateRanges = RangeManager.shouldUpdateRanges(doc.ranges, ranges);
}
modified = false let modified = false;
rev = doc?.rev || 0 let rev = (doc != null ? doc.rev : undefined) || 0;
updateLinesAndRangesIfNeeded = (cb) -> const updateLinesAndRangesIfNeeded = function(cb) {
if updateLines or updateRanges if (updateLines || updateRanges) {
update = {} const update = {};
if updateLines if (updateLines) {
update.lines = lines update.lines = lines;
if updateRanges }
update.ranges = ranges if (updateRanges) {
logger.log { project_id, doc_id }, "updating doc lines and ranges" update.ranges = ranges;
}
logger.log({ project_id, doc_id }, "updating doc lines and ranges");
modified = true modified = true;
rev += 1 # rev will be incremented in mongo by MongoManager.upsertIntoDocCollection rev += 1; // rev will be incremented in mongo by MongoManager.upsertIntoDocCollection
MongoManager.upsertIntoDocCollection project_id, doc_id, update, cb return MongoManager.upsertIntoDocCollection(project_id, doc_id, update, cb);
else } else {
logger.log { project_id, doc_id, }, "doc lines have not changed - not updating" logger.log({ project_id, doc_id, }, "doc lines have not changed - not updating");
cb() return cb();
}
};
updateVersionIfNeeded = (cb) -> const updateVersionIfNeeded = function(cb) {
if updateVersion if (updateVersion) {
logger.log { project_id, doc_id, oldVersion: doc?.version, newVersion: version }, "updating doc version" logger.log({ project_id, doc_id, oldVersion: (doc != null ? doc.version : undefined), newVersion: version }, "updating doc version");
modified = true modified = true;
MongoManager.setDocVersion doc_id, version, cb return MongoManager.setDocVersion(doc_id, version, cb);
else } else {
logger.log { project_id, doc_id, version }, "doc version has not changed - not updating" logger.log({ project_id, doc_id, version }, "doc version has not changed - not updating");
cb() return cb();
}
};
updateLinesAndRangesIfNeeded (error) -> return updateLinesAndRangesIfNeeded(function(error) {
return callback(error) if error? if (error != null) { return callback(error); }
updateVersionIfNeeded (error) -> return updateVersionIfNeeded(function(error) {
return callback(error) if error? if (error != null) { return callback(error); }
callback null, modified, rev return callback(null, modified, rev);
});
});
});
},
deleteDoc: (project_id, doc_id, callback = (error) ->) -> deleteDoc(project_id, doc_id, callback) {
DocManager.checkDocExists project_id, doc_id, (error, exists) -> if (callback == null) { callback = function(error) {}; }
return callback(error) if error? return DocManager.checkDocExists(project_id, doc_id, function(error, exists) {
return callback new Errors.NotFoundError("No such project/doc to delete: #{project_id}/#{doc_id}") if !exists if (error != null) { return callback(error); }
MongoManager.markDocAsDeleted project_id, doc_id, callback if (!exists) { return callback(new Errors.NotFoundError(`No such project/doc to delete: ${project_id}/${doc_id}`)); }
return MongoManager.markDocAsDeleted(project_id, doc_id, callback);
});
}
});

View file

@ -1,10 +1,12 @@
NotFoundError = (message) -> let Errors;
error = new Error(message) var NotFoundError = function(message) {
error.name = "NotFoundError" const error = new Error(message);
error.__proto__ = NotFoundError.prototype error.name = "NotFoundError";
return error error.__proto__ = NotFoundError.prototype;
NotFoundError.prototype.__proto__ = Error.prototype return error;
};
NotFoundError.prototype.__proto__ = Error.prototype;
module.exports = Errors = module.exports = (Errors =
NotFoundError: NotFoundError {NotFoundError});

View file

@ -1,44 +1,59 @@
{db, ObjectId} = require "./mongojs" /*
request = require("request") * decaffeinate suggestions:
async = require("async") * DS102: Remove unnecessary code created because of implicit returns
_ = require("underscore") * DS207: Consider shorter variations of null checks
crypto = require("crypto") * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
settings = require("settings-sharelatex") */
port = settings.internal.docstore.port const {db, ObjectId} = require("./mongojs");
logger = require "logger-sharelatex" const request = require("request");
const async = require("async");
const _ = require("underscore");
const crypto = require("crypto");
const settings = require("settings-sharelatex");
const {
port
} = settings.internal.docstore;
const logger = require("logger-sharelatex");
module.exports = module.exports = {
check : (callback)-> check(callback){
doc_id = ObjectId() const doc_id = ObjectId();
project_id = ObjectId(settings.docstore.healthCheck.project_id) const project_id = ObjectId(settings.docstore.healthCheck.project_id);
url = "http://localhost:#{port}/project/#{project_id}/doc/#{doc_id}" const url = `http://localhost:${port}/project/${project_id}/doc/${doc_id}`;
lines = ["smoke test - delete me", "#{crypto.randomBytes(32).toString("hex")}"] const lines = ["smoke test - delete me", `${crypto.randomBytes(32).toString("hex")}`];
getOpts = -> {url:url, timeout:3000} const getOpts = () => ({
logger.log lines:lines, url:url, doc_id:doc_id, project_id:project_id, "running health check" url,
jobs = [ timeout:3000
(cb)-> });
opts = getOpts() logger.log({lines, url, doc_id, project_id}, "running health check");
opts.json = {lines: lines, version: 42, ranges: {}} const jobs = [
request.post(opts, cb) function(cb){
(cb)-> const opts = getOpts();
opts = getOpts() opts.json = {lines, version: 42, ranges: {}};
opts.json = true return request.post(opts, cb);
request.get opts, (err, res, body)-> },
if err? function(cb){
logger.err err:err, "docstore returned a error in health check get" const opts = getOpts();
cb(err) opts.json = true;
else if !res? return request.get(opts, function(err, res, body){
cb("no response from docstore with get check") if (err != null) {
else if res?.statusCode != 200 logger.err({err}, "docstore returned a error in health check get");
cb("status code not 200, its #{res.statusCode}") return cb(err);
else if _.isEqual(body?.lines, lines) and body?._id == doc_id.toString() } else if ((res == null)) {
cb() return cb("no response from docstore with get check");
else } else if ((res != null ? res.statusCode : undefined) !== 200) {
cb("health check lines not equal #{body.lines} != #{lines}") return cb(`status code not 200, its ${res.statusCode}`);
(cb)-> } else if (_.isEqual(body != null ? body.lines : undefined, lines) && ((body != null ? body._id : undefined) === doc_id.toString())) {
db.docs.remove {_id: doc_id, project_id: project_id}, cb return cb();
(cb)-> } else {
db.docOps.remove {doc_id: doc_id}, cb return cb(`health check lines not equal ${body.lines} != ${lines}`);
] }
async.series jobs, callback });
},
cb => db.docs.remove({_id: doc_id, project_id}, cb),
cb => db.docOps.remove({doc_id}, cb)
];
return async.series(jobs, callback);
}
};

View file

@ -1,144 +1,224 @@
DocManager = require "./DocManager" /*
logger = require "logger-sharelatex" * decaffeinate suggestions:
DocArchive = require "./DocArchiveManager" * DS101: Remove unnecessary use of Array.from
HealthChecker = require "./HealthChecker" * DS102: Remove unnecessary code created because of implicit returns
Settings = require "settings-sharelatex" * DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let HttpController;
const DocManager = require("./DocManager");
const logger = require("logger-sharelatex");
const DocArchive = require("./DocArchiveManager");
const HealthChecker = require("./HealthChecker");
const Settings = require("settings-sharelatex");
module.exports = HttpController = module.exports = (HttpController = {
getDoc: (req, res, next = (error) ->) -> getDoc(req, res, next) {
project_id = req.params.project_id if (next == null) { next = function(error) {}; }
doc_id = req.params.doc_id const {
include_deleted = req.query?.include_deleted == "true" project_id
logger.log project_id: project_id, doc_id: doc_id, "getting doc" } = req.params;
DocManager.getFullDoc project_id, doc_id, (error, doc) -> const {
return next(error) if error? doc_id
logger.log {doc_id, project_id}, "got doc" } = req.params;
if !doc? const include_deleted = (req.query != null ? req.query.include_deleted : undefined) === "true";
res.send 404 logger.log({project_id, doc_id}, "getting doc");
else if doc.deleted && !include_deleted return DocManager.getFullDoc(project_id, doc_id, function(error, doc) {
res.send 404 if (error != null) { return next(error); }
else logger.log({doc_id, project_id}, "got doc");
res.json HttpController._buildDocView(doc) if ((doc == null)) {
return res.send(404);
getRawDoc: (req, res, next = (error)->)-> } else if (doc.deleted && !include_deleted) {
project_id = req.params.project_id return res.send(404);
doc_id = req.params.doc_id } else {
logger.log project_id: project_id, doc_id: doc_id, "getting raw doc" return res.json(HttpController._buildDocView(doc));
DocManager.getDocLines project_id, doc_id, (error, doc) ->
return next(error) if error?
if !doc?
res.send 404
else
res.setHeader('content-type', 'text/plain')
res.send HttpController._buildRawDocView(doc)
getAllDocs: (req, res, next = (error) ->) ->
project_id = req.params.project_id
logger.log project_id: project_id, "getting all docs"
DocManager.getAllNonDeletedDocs project_id, {lines: true, rev: true}, (error, docs = []) ->
return next(error) if error?
res.json HttpController._buildDocsArrayView(project_id, docs)
getAllRanges: (req, res, next = (error) ->) ->
project_id = req.params.project_id
logger.log {project_id}, "getting all ranges"
DocManager.getAllNonDeletedDocs project_id, {ranges: true}, (error, docs = []) ->
return next(error) if error?
res.json HttpController._buildDocsArrayView(project_id, docs)
updateDoc: (req, res, next = (error) ->) ->
project_id = req.params.project_id
doc_id = req.params.doc_id
lines = req.body?.lines
version = req.body?.version
ranges = req.body?.ranges
if !lines? or lines not instanceof Array
logger.error project_id: project_id, doc_id: doc_id, "no doc lines provided"
res.send 400 # Bad Request
return
if !version? or typeof version is not "number"
logger.error project_id: project_id, doc_id: doc_id, "no doc version provided"
res.send 400 # Bad Request
return
if !ranges?
logger.error project_id: project_id, doc_id: doc_id, "no doc ranges provided"
res.send 400 # Bad Request
return
bodyLength = lines.reduce(
(len, line) => line.length + len
0
)
if bodyLength > Settings.max_doc_length
logger.error project_id: project_id, doc_id: doc_id, bodyLength: bodyLength, "document body too large"
res.status(413).send("document body too large")
return
logger.log project_id: project_id, doc_id: doc_id, "got http request to update doc"
DocManager.updateDoc project_id, doc_id, lines, version, ranges, (error, modified, rev) ->
return next(error) if error?
res.json {
modified: modified
rev: rev
} }
});
},
deleteDoc: (req, res, next = (error) ->) -> getRawDoc(req, res, next){
project_id = req.params.project_id if (next == null) { next = function(error){}; }
doc_id = req.params.doc_id const {
logger.log project_id: project_id, doc_id: doc_id, "deleting doc" project_id
DocManager.deleteDoc project_id, doc_id, (error) -> } = req.params;
return next(error) if error? const {
res.send 204 doc_id
} = req.params;
logger.log({project_id, doc_id}, "getting raw doc");
return DocManager.getDocLines(project_id, doc_id, function(error, doc) {
if (error != null) { return next(error); }
if ((doc == null)) {
return res.send(404);
} else {
res.setHeader('content-type', 'text/plain');
return res.send(HttpController._buildRawDocView(doc));
}
});
},
_buildDocView: (doc) -> getAllDocs(req, res, next) {
doc_view = { _id: doc._id?.toString() } if (next == null) { next = function(error) {}; }
for attribute in ["lines", "rev", "version", "ranges", "deleted"] const {
if doc[attribute]? project_id
doc_view[attribute] = doc[attribute] } = req.params;
return doc_view logger.log({project_id}, "getting all docs");
return DocManager.getAllNonDeletedDocs(project_id, {lines: true, rev: true}, function(error, docs) {
_buildRawDocView: (doc)-> if (docs == null) { docs = []; }
return (doc?.lines or []).join("\n") if (error != null) { return next(error); }
return res.json(HttpController._buildDocsArrayView(project_id, docs));
});
},
_buildDocsArrayView: (project_id, docs) -> getAllRanges(req, res, next) {
docViews = [] if (next == null) { next = function(error) {}; }
for doc in docs const {
if doc? # There can end up being null docs for some reason :( (probably a race condition) project_id
docViews.push HttpController._buildDocView(doc) } = req.params;
else logger.log({project_id}, "getting all ranges");
logger.error err: new Error("null doc"), project_id: project_id, "encountered null doc" return DocManager.getAllNonDeletedDocs(project_id, {ranges: true}, function(error, docs) {
return docViews if (docs == null) { docs = []; }
if (error != null) { return next(error); }
return res.json(HttpController._buildDocsArrayView(project_id, docs));
});
},
archiveAllDocs: (req, res, next = (error) ->) -> updateDoc(req, res, next) {
project_id = req.params.project_id if (next == null) { next = function(error) {}; }
logger.log project_id: project_id, "archiving all docs" const {
DocArchive.archiveAllDocs project_id, (error) -> project_id
return next(error) if error? } = req.params;
res.send 204 const {
doc_id
} = req.params;
const lines = req.body != null ? req.body.lines : undefined;
const version = req.body != null ? req.body.version : undefined;
const ranges = req.body != null ? req.body.ranges : undefined;
unArchiveAllDocs: (req, res, next = (error) ->) -> if ((lines == null) || !(lines instanceof Array)) {
project_id = req.params.project_id logger.error({project_id, doc_id}, "no doc lines provided");
logger.log project_id: project_id, "unarchiving all docs" res.send(400); // Bad Request
DocArchive.unArchiveAllDocs project_id, (error) -> return;
return next(error) if error? }
res.send 200
if ((version == null) || (typeof version === !"number")) {
logger.error({project_id, doc_id}, "no doc version provided");
res.send(400); // Bad Request
return;
}
if ((ranges == null)) {
logger.error({project_id, doc_id}, "no doc ranges provided");
res.send(400); // Bad Request
return;
}
destroyAllDocs: (req, res, next = (error) ->) -> const bodyLength = lines.reduce(
project_id = req.params.project_id (len, line) => line.length + len,
logger.log project_id: project_id, "destroying all docs" 0
DocArchive.destroyAllDocs project_id, (error) -> );
return next(error) if error? if (bodyLength > Settings.max_doc_length) {
res.send 204 logger.error({project_id, doc_id, bodyLength}, "document body too large");
res.status(413).send("document body too large");
return;
}
healthCheck: (req, res)-> logger.log({project_id, doc_id}, "got http request to update doc");
HealthChecker.check (err)-> return DocManager.updateDoc(project_id, doc_id, lines, version, ranges, function(error, modified, rev) {
if err? if (error != null) { return next(error); }
logger.err err:err, "error performing health check" return res.json({
res.send 500 modified,
else rev
res.send 200 });
});
},
deleteDoc(req, res, next) {
if (next == null) { next = function(error) {}; }
const {
project_id
} = req.params;
const {
doc_id
} = req.params;
logger.log({project_id, doc_id}, "deleting doc");
return DocManager.deleteDoc(project_id, doc_id, function(error) {
if (error != null) { return next(error); }
return res.send(204);
});
},
_buildDocView(doc) {
const doc_view = { _id: (doc._id != null ? doc._id.toString() : undefined) };
for (let attribute of ["lines", "rev", "version", "ranges", "deleted"]) {
if (doc[attribute] != null) {
doc_view[attribute] = doc[attribute];
}
}
return doc_view;
},
_buildRawDocView(doc){
return ((doc != null ? doc.lines : undefined) || []).join("\n");
},
_buildDocsArrayView(project_id, docs) {
const docViews = [];
for (let doc of Array.from(docs)) {
if (doc != null) { // There can end up being null docs for some reason :( (probably a race condition)
docViews.push(HttpController._buildDocView(doc));
} else {
logger.error({err: new Error("null doc"), project_id}, "encountered null doc");
}
}
return docViews;
},
archiveAllDocs(req, res, next) {
if (next == null) { next = function(error) {}; }
const {
project_id
} = req.params;
logger.log({project_id}, "archiving all docs");
return DocArchive.archiveAllDocs(project_id, function(error) {
if (error != null) { return next(error); }
return res.send(204);
});
},
unArchiveAllDocs(req, res, next) {
if (next == null) { next = function(error) {}; }
const {
project_id
} = req.params;
logger.log({project_id}, "unarchiving all docs");
return DocArchive.unArchiveAllDocs(project_id, function(error) {
if (error != null) { return next(error); }
return res.send(200);
});
},
destroyAllDocs(req, res, next) {
if (next == null) { next = function(error) {}; }
const {
project_id
} = req.params;
logger.log({project_id}, "destroying all docs");
return DocArchive.destroyAllDocs(project_id, function(error) {
if (error != null) { return next(error); }
return res.send(204);
});
},
healthCheck(req, res){
return HealthChecker.check(function(err){
if (err != null) {
logger.err({err}, "error performing health check");
return res.send(500);
} else {
return res.send(200);
}
});
}
});

View file

@ -1,85 +1,118 @@
{db, ObjectId} = require "./mongojs" /*
logger = require 'logger-sharelatex' * decaffeinate suggestions:
metrics = require 'metrics-sharelatex' * DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let MongoManager;
const {db, ObjectId} = require("./mongojs");
const logger = require('logger-sharelatex');
const metrics = require('metrics-sharelatex');
module.exports = MongoManager = module.exports = (MongoManager = {
findDoc: (project_id, doc_id, filter, callback = (error, doc) ->) -> findDoc(project_id, doc_id, filter, callback) {
db.docs.find {_id: ObjectId(doc_id.toString()), project_id: ObjectId(project_id.toString())}, filter, (error, docs = []) -> if (callback == null) { callback = function(error, doc) {}; }
callback error, docs[0] return db.docs.find({_id: ObjectId(doc_id.toString()), project_id: ObjectId(project_id.toString())}, filter, function(error, docs) {
if (docs == null) { docs = []; }
return callback(error, docs[0]);
});
},
getProjectsDocs: (project_id, options = {include_deleted: true}, filter, callback)-> getProjectsDocs(project_id, options, filter, callback){
query = {project_id: ObjectId(project_id.toString())} if (options == null) { options = {include_deleted: true}; }
if !options.include_deleted const query = {project_id: ObjectId(project_id.toString())};
query.deleted = { $ne: true } if (!options.include_deleted) {
db.docs.find query, filter, callback query.deleted = { $ne: true };
}
return db.docs.find(query, filter, callback);
},
getArchivedProjectDocs: (project_id, callback)-> getArchivedProjectDocs(project_id, callback){
query = const query = {
project_id: ObjectId(project_id.toString()) project_id: ObjectId(project_id.toString()),
inS3: true inS3: true
db.docs.find query, {}, callback };
return db.docs.find(query, {}, callback);
},
upsertIntoDocCollection: (project_id, doc_id, updates, callback)-> upsertIntoDocCollection(project_id, doc_id, updates, callback){
update = const update = {
$set: updates $set: updates,
$inc: $inc: {
rev: 1 rev: 1
$unset: },
$unset: {
inS3: true inS3: true
update.$set["project_id"] = ObjectId(project_id) }
db.docs.update _id: ObjectId(doc_id), update, {upsert: true}, callback };
update.$set["project_id"] = ObjectId(project_id);
return db.docs.update({_id: ObjectId(doc_id)}, update, {upsert: true}, callback);
},
markDocAsDeleted: (project_id, doc_id, callback)-> markDocAsDeleted(project_id, doc_id, callback){
db.docs.update { return db.docs.update({
_id: ObjectId(doc_id), _id: ObjectId(doc_id),
project_id: ObjectId(project_id) project_id: ObjectId(project_id)
}, { }, {
$set: { deleted: true } $set: { deleted: true }
}, callback }, callback);
},
markDocAsArchived: (doc_id, rev, callback)-> markDocAsArchived(doc_id, rev, callback){
update = const update = {
$set: {} $set: {},
$unset: {} $unset: {}
update.$set["inS3"] = true };
update.$unset["lines"] = true update.$set["inS3"] = true;
update.$unset["ranges"] = true update.$unset["lines"] = true;
query = update.$unset["ranges"] = true;
_id: doc_id const query = {
rev: rev _id: doc_id,
db.docs.update query, update, (err)-> rev
callback(err) };
return db.docs.update(query, update, err => callback(err));
},
getDocVersion: (doc_id, callback = (error, version) ->) -> getDocVersion(doc_id, callback) {
db.docOps.find { if (callback == null) { callback = function(error, version) {}; }
return db.docOps.find({
doc_id: ObjectId(doc_id) doc_id: ObjectId(doc_id)
}, { }, {
version: 1 version: 1
}, (error, docs) -> }, function(error, docs) {
return callback(error) if error? if (error != null) { return callback(error); }
if docs.length < 1 or !docs[0].version? if ((docs.length < 1) || (docs[0].version == null)) {
return callback null, 0 return callback(null, 0);
else } else {
return callback null, docs[0].version return callback(null, docs[0].version);
}
});
},
setDocVersion: (doc_id, version, callback = (error) ->) -> setDocVersion(doc_id, version, callback) {
db.docOps.update { if (callback == null) { callback = function(error) {}; }
return db.docOps.update({
doc_id: ObjectId(doc_id) doc_id: ObjectId(doc_id)
}, { }, {
$set: version: version $set: { version
}
}, { }, {
upsert: true upsert: true
}, callback }, callback);
},
destroyDoc: (doc_id, callback) -> destroyDoc(doc_id, callback) {
db.docs.remove { return db.docs.remove({
_id: ObjectId(doc_id) _id: ObjectId(doc_id)
}, (err) -> }, function(err) {
return callback(err) if err? if (err != null) { return callback(err); }
db.docOps.remove { return db.docOps.remove({
doc_id: ObjectId(doc_id) doc_id: ObjectId(doc_id)
}, callback }, callback);
});
}
});
[ [
'findDoc', 'findDoc',
@ -89,5 +122,4 @@ module.exports = MongoManager =
'markDocAsArchived', 'markDocAsArchived',
'getDocVersion', 'getDocVersion',
'setDocVersion' 'setDocVersion'
].map (method) -> ].map(method => metrics.timeAsyncMethod(MongoManager, method, 'mongo.MongoManager', logger));
metrics.timeAsyncMethod(MongoManager, method, 'mongo.MongoManager', logger)

View file

@ -1,40 +1,61 @@
_ = require "underscore" /*
{ObjectId} = require("./mongojs") * decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let RangeManager;
const _ = require("underscore");
const {ObjectId} = require("./mongojs");
module.exports = RangeManager = module.exports = (RangeManager = {
shouldUpdateRanges: (doc_ranges, incoming_ranges) -> shouldUpdateRanges(doc_ranges, incoming_ranges) {
if !incoming_ranges? if ((incoming_ranges == null)) {
throw new Error("expected incoming_ranges") throw new Error("expected incoming_ranges");
}
# If the ranges are empty, we don't store them in the DB, so set // If the ranges are empty, we don't store them in the DB, so set
# doc_ranges to an empty object as default, since this is was the // doc_ranges to an empty object as default, since this is was the
# incoming_ranges will be for an empty range set. // incoming_ranges will be for an empty range set.
if !doc_ranges? if ((doc_ranges == null)) {
doc_ranges = {} doc_ranges = {};
}
return not _.isEqual(doc_ranges, incoming_ranges) return !_.isEqual(doc_ranges, incoming_ranges);
},
jsonRangesToMongo: (ranges) -> jsonRangesToMongo(ranges) {
return null if !ranges? if ((ranges == null)) { return null; }
updateMetadata = (metadata) -> const updateMetadata = function(metadata) {
if metadata?.ts? if ((metadata != null ? metadata.ts : undefined) != null) {
metadata.ts = new Date(metadata.ts) metadata.ts = new Date(metadata.ts);
if metadata?.user_id? }
metadata.user_id = RangeManager._safeObjectId(metadata.user_id) if ((metadata != null ? metadata.user_id : undefined) != null) {
return metadata.user_id = RangeManager._safeObjectId(metadata.user_id);
}
};
for change in ranges.changes or [] for (let change of Array.from(ranges.changes || [])) {
change.id = RangeManager._safeObjectId(change.id) change.id = RangeManager._safeObjectId(change.id);
updateMetadata(change.metadata) updateMetadata(change.metadata);
for comment in ranges.comments or [] }
comment.id = RangeManager._safeObjectId(comment.id) for (let comment of Array.from(ranges.comments || [])) {
if comment.op?.t? comment.id = RangeManager._safeObjectId(comment.id);
comment.op.t = RangeManager._safeObjectId(comment.op.t) if ((comment.op != null ? comment.op.t : undefined) != null) {
updateMetadata(comment.metadata) comment.op.t = RangeManager._safeObjectId(comment.op.t);
return ranges }
updateMetadata(comment.metadata);
}
return ranges;
},
_safeObjectId: (data) -> _safeObjectId(data) {
try try {
return ObjectId(data) return ObjectId(data);
catch error } catch (error) {
return data return data;
}
}
});

View file

@ -1,7 +1,8 @@
Settings = require "settings-sharelatex" const Settings = require("settings-sharelatex");
mongojs = require "mongojs" const mongojs = require("mongojs");
db = mongojs(Settings.mongo.url, ["docs", "docOps"]) const db = mongojs(Settings.mongo.url, ["docs", "docOps"]);
module.exports = module.exports = {
db: db db,
ObjectId: mongojs.ObjectId ObjectId: mongojs.ObjectId
};