const { callbackify } = require('util')
const MongoManager = require('./MongoManager').promises
const Errors = require('./Errors')
const logger = require('logger-sharelatex')
const settings = require('settings-sharelatex')
const crypto = require('crypto')
const Streamifier = require('streamifier')
const RangeManager = require('./RangeManager')
const PersistorManager = require('./PersistorManager')
const pMap = require('p-map')

const PARALLEL_JOBS = settings.parallelArchiveJobs
const DESTROY_BATCH_SIZE = settings.destroyBatchSize
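
// Each function is exported in callback style for legacy callers, with the
// original promise-based versions available under `promises`.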
module.exports = {
  archiveAllDocs: callbackify(archiveAllDocs),
  archiveDocById: callbackify(archiveDocById),
  archiveDoc: callbackify(archiveDoc),
  unArchiveAllDocs: callbackify(unArchiveAllDocs),
  unarchiveDoc: callbackify(unarchiveDoc),
  destroyAllDocs: callbackify(destroyAllDocs),
  destroyDoc: callbackify(destroyDoc),
  promises: {
    archiveAllDocs,
    archiveDocById,
    archiveDoc,
    unArchiveAllDocs,
    unarchiveDoc,
    destroyAllDocs,
    destroyDoc
  }
}
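
/**
 * Archive all docs in a project that are not already in the object store,
 * running up to PARALLEL_JOBS archive jobs concurrently.
 */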
async function archiveAllDocs(projectId) {
  const docs = await MongoManager.getProjectsDocs(
    projectId,
    { include_deleted: true },
    { lines: true, ranges: true, rev: true, inS3: true }
  )

  if (!docs) {
    throw new Errors.NotFoundError(`No docs for project ${projectId}`)
  }

  const docsToArchive = docs.filter((doc) => !doc.inS3)
  await pMap(docsToArchive, (doc) => archiveDoc(projectId, doc), {
    concurrency: PARALLEL_JOBS
  })
}
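
/**
 * Archive a single doc by id, skipping it if it is already in the object
 * store (doc.inS3).
 */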
async function archiveDocById(projectId, docId) {
  const doc = await MongoManager.findDoc(projectId, docId, {
    lines: true,
    ranges: true,
    rev: true,
    inS3: true
  })

  if (!doc) {
    throw new Errors.NotFoundError(
      `Cannot find doc ${docId} in project ${projectId}`
    )
  }

  // TODO(das7pad): consider refactoring MongoManager.findDoc to take a query
  if (doc.inS3) return
  return archiveDoc(projectId, doc)
}
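
/**
 * Serialise a doc's lines and ranges to JSON, upload it to the object store
 * with an md5 checksum, then mark the doc as archived in mongo.
 */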
async function archiveDoc(projectId, doc) {
  logger.log(
    { project_id: projectId, doc_id: doc._id },
    'sending doc to persistor'
  )
  const key = `${projectId}/${doc._id}`

  if (doc.lines == null) {
    throw new Error('doc has no lines')
  }

  const json = JSON.stringify({
    lines: doc.lines,
    ranges: doc.ranges,
    schema_v: 1
  })

  // this should never happen, but protects against memory-corruption errors
  // that have happened in the past
  if (json.indexOf('\u0000') > -1) {
    const error = new Error('null bytes detected')
    logger.err({ err: error, doc }, error.message)
    throw error
  }

  const md5 = crypto.createHash('md5').update(json).digest('hex')
  const stream = Streamifier.createReadStream(json)
  await PersistorManager.sendStream(settings.docstore.bucket, key, stream, {
    sourceMd5: md5
  })
  await MongoManager.markDocAsArchived(doc._id, doc.rev)
}
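
/**
 * Restore all archived docs for a project from the object store back into
 * mongo. Soft-deleted docs are left archived when
 * settings.docstore.keepSoftDeletedDocsArchived is set.
 */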
async function unArchiveAllDocs(projectId) {
  let docs
  if (settings.docstore.keepSoftDeletedDocsArchived) {
    docs = await MongoManager.getNonDeletedArchivedProjectDocs(projectId)
  } else {
    docs = await MongoManager.getArchivedProjectDocs(projectId)
  }

  if (!docs) {
    throw new Errors.NotFoundError(`No docs for project ${projectId}`)
  }

  await pMap(docs, (doc) => unarchiveDoc(projectId, doc._id), {
    concurrency: PARALLEL_JOBS
  })
}
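
/**
 * Download a doc from the object store, verify its md5 checksum, write it
 * back into mongo and delete the archived object. A 404 from the persistor
 * is tolerated if another process has unarchived the doc in the meantime.
 */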
async function unarchiveDoc(projectId, docId) {
  logger.log(
    { project_id: projectId, doc_id: docId },
    'getting doc from persistor'
  )
  const originalDoc = await MongoManager.findDoc(projectId, docId, { inS3: 1 })
  if (!originalDoc.inS3) {
    // return if it's not actually in S3 as there's nothing to do
    return
  }
  const key = `${projectId}/${docId}`
  let stream, sourceMd5
  try {
    sourceMd5 = await PersistorManager.getObjectMd5Hash(
      settings.docstore.bucket,
      key
    )
    stream = await PersistorManager.getObjectStream(
      settings.docstore.bucket,
      key
    )
  } catch (err) {
    // if we get a 404, we could be in a race and something else has
    // unarchived the doc already
    if (err instanceof Errors.NotFoundError) {
      const doc = await MongoManager.findDoc(projectId, docId, { inS3: 1 })
      if (!doc.inS3) {
        // the doc has been unarchived while we were looking for it, so no error
        return
      }
    }
    throw err
  }
  stream.resume()
  const json = await _streamToString(stream)
  const md5 = crypto.createHash('md5').update(json).digest('hex')
  if (sourceMd5 !== md5) {
    throw new Errors.Md5MismatchError('md5 mismatch when downloading doc', {
      key,
      sourceMd5,
      md5
    })
  }

  const doc = JSON.parse(json)

  const mongoDoc = {}
  if (doc.schema_v === 1 && doc.lines != null) {
    mongoDoc.lines = doc.lines
    if (doc.ranges != null) {
      mongoDoc.ranges = RangeManager.jsonRangesToMongo(doc.ranges)
    }
  } else if (Array.isArray(doc)) {
    mongoDoc.lines = doc
  } else {
    throw new Error("I don't understand the doc format in s3")
  }
  await MongoManager.upsertIntoDocCollection(projectId, docId, mongoDoc)
  await PersistorManager.deleteObject(settings.docstore.bucket, key)
}
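
/**
 * Permanently delete all of a project's docs, in batches of
 * DESTROY_BATCH_SIZE, from both mongo and the object store.
 */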
async function destroyAllDocs(projectId) {
  while (true) {
    const docs = await MongoManager.getProjectsDocs(
      projectId,
      { include_deleted: true, limit: DESTROY_BATCH_SIZE },
      { _id: 1 }
    )
    if (!docs || docs.length === 0) {
      break
    }
    await pMap(docs, (doc) => destroyDoc(projectId, doc._id), {
      concurrency: PARALLEL_JOBS
    })
  }
}
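
/**
 * Permanently delete a single doc from mongo and, if archived, from the
 * object store.
 */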
async function destroyDoc(projectId, docId) {
  logger.log(
    { project_id: projectId, doc_id: docId },
    'removing doc from mongo and persistor'
  )
  const doc = await MongoManager.findDoc(projectId, docId, {
    inS3: 1
  })
  if (!doc) {
    throw new Errors.NotFoundError('Doc not found in Mongo')
  }

  if (doc.inS3) {
    await PersistorManager.deleteObject(
      settings.docstore.bucket,
      `${projectId}/${docId}`
    )
  }
  await MongoManager.destroyDoc(docId)
}
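
/**
 * Buffer a readable stream into a single utf-8 string.
 */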
async function _streamToString(stream) {
  const chunks = []
  return new Promise((resolve, reject) => {
    stream.on('data', (chunk) => chunks.push(chunk))
    stream.on('error', reject)
    stream.on('end', () => resolve(Buffer.concat(chunks).toString('utf8')))
  })
}