/* eslint-disable
    camelcase,
    handle-callback-err,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let HttpController
const UpdatesManager = require('./UpdatesManager')
const DiffManager = require('./DiffManager')
const PackManager = require('./PackManager')
const RestoreManager = require('./RestoreManager')
const logger = require('logger-sharelatex')
const HealthChecker = require('./HealthChecker')
const _ = require('underscore')

module.exports = HttpController = {
  // Compress (flush) any uncompressed history updates for a single doc
  flushDoc(req, res, next) {
    if (next == null) {
      next = function (error) {}
    }
    const { doc_id } = req.params
    const { project_id } = req.params
    logger.log({ project_id, doc_id }, 'compressing doc history')
    return UpdatesManager.processUncompressedUpdatesWithLock(
      project_id,
      doc_id,
      function (error) {
        if (error != null) {
          return next(error)
        }
        return res.sendStatus(204)
      }
    )
  },

  // Compress (flush) any uncompressed history updates for all docs in a project
  flushProject(req, res, next) {
    if (next == null) {
      next = function (error) {}
    }
    const { project_id } = req.params
    logger.log({ project_id }, 'compressing project history')
    return UpdatesManager.processUncompressedUpdatesForProject(
      project_id,
      function (error) {
        if (error != null) {
          return next(error)
        }
        return res.sendStatus(204)
      }
    )
  },

  flushAll(req, res, next) {
    // limit on projects to flush or -1 for all (default)
    if (next == null) {
      next = function (error) {}
    }
    const limit = req.query.limit != null ? parseInt(req.query.limit, 10) : -1
    logger.log({ limit }, 'flushing all projects')
    return UpdatesManager.flushAll(limit, function (error, result) {
      if (error != null) {
        return next(error)
      }
      const { failed, succeeded, all } = result
      const status = `${succeeded.length} succeeded, ${failed.length} failed`
      // limit === 0 reports which projects would be flushed without flushing any
      if (limit === 0) {
        return res
          .status(200)
          .send(`${status}\nwould flush:\n${all.join('\n')}\n`)
      } else if (failed.length > 0) {
        logger.log({ failed, succeeded }, 'error flushing projects')
        return res
          .status(500)
          .send(`${status}\nfailed to flush:\n${failed.join('\n')}\n`)
      } else {
        return res
          .status(200)
          .send(
            `${status}\nflushed ${succeeded.length} projects of ${all.length}\n`
          )
      }
    })
  },

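  /*
   * Example (assumption): an operator could dry-run a full flush with
   * something like
   *   curl -X POST 'http://localhost:3015/flush/all?limit=0'
   * which reports the projects that would be flushed without flushing them.
   * The host, port and route are hypothetical; only the limit semantics come
   * from the handler above.
   */
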
  checkDanglingUpdates(req, res, next) {
    if (next == null) {
      next = function (error) {}
    }
    logger.log('checking dangling updates')
    return UpdatesManager.getDanglingUpdates(function (error, result) {
      if (error != null) {
        return next(error)
      }
      if (result.length > 0) {
        logger.log({ dangling: result }, 'found dangling updates')
        return res.status(500).send(`dangling updates:\n${result.join('\n')}\n`)
      } else {
        return res.status(200).send('no dangling updates found\n')
      }
    })
  },

  // Rewind the doc's history back to version 1 and report any ops flagged as broken
  checkDoc(req, res, next) {
    if (next == null) {
      next = function (error) {}
    }
    const { doc_id } = req.params
    const { project_id } = req.params
    logger.log({ project_id, doc_id }, 'checking doc history')
    return DiffManager.getDocumentBeforeVersion(
      project_id,
      doc_id,
      1,
      function (error, document, rewoundUpdates) {
        if (error != null) {
          return next(error)
        }
        const broken = []
        for (const update of Array.from(rewoundUpdates)) {
          for (const op of Array.from(update.op)) {
            if (op.broken === true) {
              broken.push(op)
            }
          }
        }
        if (broken.length > 0) {
          return res.send(broken)
        } else {
          return res.sendStatus(204)
        }
      }
    )
  },

  // Return the diff for a doc between the optional `from` and `to` versions
  // given as query parameters
  getDiff(req, res, next) {
    let from, to
    if (next == null) {
      next = function (error) {}
    }
    const { doc_id } = req.params
    const { project_id } = req.params

    if (req.query.from != null) {
      from = parseInt(req.query.from, 10)
    } else {
      from = null
    }
    if (req.query.to != null) {
      to = parseInt(req.query.to, 10)
    } else {
      to = null
    }

    logger.log({ project_id, doc_id, from, to }, 'getting diff')
    return DiffManager.getDiff(project_id, doc_id, from, to, function (
      error,
      diff
    ) {
      if (error != null) {
        return next(error)
      }
      return res.json({ diff })
    })
  },

  // Return summarized updates for a project, optionally limited by the
  // `before` and `min_count` query parameters
  getUpdates(req, res, next) {
    let before, min_count
    if (next == null) {
      next = function (error) {}
    }
    const { project_id } = req.params

    if (req.query.before != null) {
      before = parseInt(req.query.before, 10)
    }
    if (req.query.min_count != null) {
      min_count = parseInt(req.query.min_count, 10)
    }

    return UpdatesManager.getSummarizedProjectUpdates(
      project_id,
      { before, min_count },
      function (error, updates, nextBeforeTimestamp) {
        if (error != null) {
          return next(error)
        }
        return res.json({
          updates,
          nextBeforeTimestamp
        })
      }
    )
  },

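  /*
   * Example (assumption): a client of this service might page through a
   * project's history with a request like
   *   GET /project/:project_id/updates?before=1583000000000&min_count=25
   * and receive JSON shaped like
   *   { "updates": [...], "nextBeforeTimestamp": 1582999000000 }
   * Only the query parameters and response shape come from the handler above;
   * the route path and values are hypothetical.
   */
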
  // Restore a doc to its content from before the given version, attributed to
  // the user in the x-user-id header
  restore(req, res, next) {
    if (next == null) {
      next = function (error) {}
    }
    let { doc_id, project_id, version } = req.params
    const user_id = req.headers['x-user-id']
    version = parseInt(version, 10)
    return RestoreManager.restoreToBeforeVersion(
      project_id,
      doc_id,
      version,
      user_id,
      function (error) {
        if (error != null) {
          return next(error)
        }
        return res.sendStatus(204)
      }
    )
  },

  pushDocHistory(req, res, next) {
    if (next == null) {
      next = function (error) {}
    }
    const { project_id } = req.params
    const { doc_id } = req.params
    logger.log({ project_id, doc_id }, 'pushing all finalised changes to s3')
    return PackManager.pushOldPacks(project_id, doc_id, function (error) {
      if (error != null) {
        return next(error)
      }
      return res.sendStatus(204)
    })
  },

  pullDocHistory(req, res, next) {
    if (next == null) {
      next = function (error) {}
    }
    const { project_id } = req.params
    const { doc_id } = req.params
    logger.log({ project_id, doc_id }, 'pulling all packs from s3')
    return PackManager.pullOldPacks(project_id, doc_id, function (error) {
      if (error != null) {
        return next(error)
      }
      return res.sendStatus(204)
    })
  },

  healthCheck(req, res) {
    return HealthChecker.check(function (err) {
      if (err != null) {
        logger.err({ err }, 'error performing health check')
        return res.sendStatus(500)
      } else {
        return res.sendStatus(200)
      }
    })
  },

  checkLock(req, res) {
    return HealthChecker.checkLock(function (err) {
      if (err != null) {
        logger.err({ err }, 'error performing lock check')
        return res.sendStatus(500)
      } else {
        return res.sendStatus(200)
      }
    })
  }
}
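
/*
 * Usage sketch (assumption, not part of this module): each handler follows the
 * Express-style (req, res, next) signature, so the controller could be mounted
 * on an Express app roughly as below. The paths and port shown are
 * hypothetical; the real routes for this service are defined elsewhere.
 *
 *   const express = require('express')
 *   const HttpController = require('./HttpController')
 *
 *   const app = express()
 *   app.post('/project/:project_id/doc/:doc_id/flush', HttpController.flushDoc)
 *   app.post('/project/:project_id/flush', HttpController.flushProject)
 *   app.get('/project/:project_id/doc/:doc_id/diff', HttpController.getDiff)
 *   app.get('/project/:project_id/updates', HttpController.getUpdates)
 *   app.get('/health_check', HttpController.healthCheck)
 *   app.listen(3015) // port is an assumption
 */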