Merge pull request #4665 from overleaf/bg-zip-manager

add streaming zip export of history (migrated from track-changes#117)

GitOrigin-RevId: 45e6a66332541f463241f148892817725c0be39c
Eric Mc Sween 2021-08-16 08:09:52 -04:00 committed by Copybot
parent 1bf89f7dd1
commit b83c35fdbb
11 changed files with 439 additions and 30 deletions

View file

@@ -66,6 +66,8 @@ app.get('/project/:project_id/doc/:doc_id/check', HttpController.checkDoc)
app.get('/project/:project_id/updates', HttpController.getUpdates)
app.get('/project/:project_id/export', HttpController.exportProject)
app.get('/project/:project_id/zip', HttpController.zipProject)
app.post('/project/:project_id/flush', HttpController.flushProject)
app.post(
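
A minimal usage sketch of the new route (host, port, and project id here are assumptions, not part of the commit; track-changes is assumed to listen on localhost:3015):

const fs = require('fs')
const request = require('request')

const projectId = 'project-id-123' // illustrative
request
  .get(`http://localhost:3015/project/${projectId}/zip`)
  .pipe(fs.createWriteStream(`${projectId}-track-changes.zip`))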

View file

@@ -0,0 +1,42 @@
module.exports = class DocIterator {
constructor(packs, getPackByIdFn) {
this.getPackByIdFn = getPackByIdFn
// sort packs in descending order by version (i.e. most recent first)
const byVersion = (a, b) => b.v - a.v
this.packs = packs.slice().sort(byVersion)
this.queue = []
}
next(callback) {
const update = this.queue.shift()
if (update) {
return callback(null, update)
}
if (!this.packs.length) {
this._done = true
return callback(null)
}
const nextPack = this.packs[0]
this.getPackByIdFn(
nextPack.project_id,
nextPack.doc_id,
nextPack._id,
(err, pack) => {
if (err != null) {
return callback(err)
}
this.packs.shift() // have now retrieved this pack, remove it
for (const op of pack.pack.reverse()) {
op.doc_id = nextPack.doc_id
op.project_id = nextPack.project_id
this.queue.push(op)
}
return this.next(callback)
}
)
}
done() {
return this._done
}
}
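
A sketch of draining a DocIterator, with an in-memory fetcher standing in for PackManager.getPackById (all values invented for illustration):

const DocIterator = require('./DocIterator')

// Two packs for one doc; the iterator sorts them most-recent-first itself.
const storedPacks = [
  { _id: 1, project_id: 'p1', doc_id: 'd1', v: 2, pack: [{ v: 1 }, { v: 2 }] },
  { _id: 2, project_id: 'p1', doc_id: 'd1', v: 4, pack: [{ v: 3 }, { v: 4 }] },
]
// Stand-in for PackManager.getPackById: fetches a pack body by _id.
function getPackById(projectId, docId, packId, callback) {
  callback(null, storedPacks.find(p => p._id === packId))
}

const iterator = new DocIterator(storedPacks, getPackById)
function drain() {
  iterator.next((err, update) => {
    if (err) throw err
    if (update) {
      console.log('update version', update.v) // logs 4, 3, 2, 1
      return drain()
    }
    console.log('exhausted:', iterator.done()) // true
  })
}
drain()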

View file

@@ -0,0 +1,51 @@
const request = require('request')
const logger = require('logger-sharelatex')
const Settings = require('@overleaf/settings')
function peekDocument(projectId, docId, callback) {
const url = `${Settings.apis.docstore.url}/project/${projectId}/doc/${docId}/peek`
logger.log(
{ project_id: projectId, doc_id: docId },
'getting doc from docstore'
)
request.get(url, function (error, res, body) {
if (error != null) {
return callback(error)
}
if (res.statusCode >= 200 && res.statusCode < 300) {
try {
body = JSON.parse(body)
} catch (error1) {
error = error1
return callback(error)
}
logger.log(
{ project_id: projectId, doc_id: docId, version: body.version },
'got doc from docstore'
)
return callback(null, body.lines.join('\n'), body.version)
} else {
return callback(
new Error(
`docstore returned a non-success status code: ${res.statusCode}`
)
)
}
})
}
module.exports = {
promises: {
peekDocument: (projectId, docId) => {
return new Promise((resolve, reject) => {
peekDocument(projectId, docId, (err, content, version) => {
if (err) {
reject(err)
} else {
resolve([content, version])
}
})
})
},
},
}

View file

@@ -17,11 +17,11 @@ const logger = require('logger-sharelatex')
const Settings = require('@overleaf/settings')
module.exports = DocumentUpdaterManager = {
getDocument(project_id, doc_id, callback) {
_requestDocument(project_id, doc_id, url, callback) {
if (callback == null) {
callback = function (error, content, version) {}
}
const url = `${Settings.apis.documentupdater.url}/project/${project_id}/doc/${doc_id}`
logger.log({ project_id, doc_id }, 'getting doc from document updater')
return request.get(url, function (error, res, body) {
if (error != null) {
@@ -52,6 +52,16 @@ module.exports = DocumentUpdaterManager = {
})
},
getDocument(project_id, doc_id, callback) {
const url = `${Settings.apis.documentupdater.url}/project/${project_id}/doc/${doc_id}`
DocumentUpdaterManager._requestDocument(project_id, doc_id, url, callback)
},
peekDocument(project_id, doc_id, callback) {
const url = `${Settings.apis.documentupdater.url}/project/${project_id}/doc/${doc_id}/peek`
DocumentUpdaterManager._requestDocument(project_id, doc_id, url, callback)
},
setDocument(project_id, doc_id, content, user_id, callback) {
if (callback == null) {
callback = function (error) {}
@@ -88,3 +98,23 @@ module.exports = DocumentUpdaterManager = {
)
},
}
module.exports.promises = {
// peekDocument returns two arguments, so we can't use util.promisify, which only handles a single argument; we need
// to treat it as a special case.
peekDocument: (project_id, doc_id) => {
return new Promise((resolve, reject) => {
DocumentUpdaterManager.peekDocument(
project_id,
doc_id,
(err, content, version) => {
if (err) {
reject(err)
} else {
resolve([content, version])
}
}
)
})
},
}
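
Callers destructure the two-element array the promise resolves with, mirroring the usage in ZipManager below (ids illustrative):

const DocumentUpdaterManager = require('./DocumentUpdaterManager')

async function showDoc() {
  // resolves with [content, version] because the callback has two results
  const [content, version] = await DocumentUpdaterManager.promises.peekDocument(
    'project-id-123', // illustrative
    'doc-id-456' // illustrative
  )
  console.log({ version, length: content.length })
}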

View file

@@ -17,9 +17,12 @@ const UpdatesManager = require('./UpdatesManager')
const DiffManager = require('./DiffManager')
const PackManager = require('./PackManager')
const RestoreManager = require('./RestoreManager')
const ZipManager = require('./ZipManager')
const logger = require('logger-sharelatex')
const HealthChecker = require('./HealthChecker')
const _ = require('underscore')
const Path = require('path')
const { pipeline } = require('stream')
module.exports = HttpController = {
flushDoc(req, res, next) {
@@ -203,6 +206,29 @@ module.exports = HttpController = {
)
},
zipProject(req, res, next) {
const { project_id: projectId } = req.params
logger.log({ projectId }, 'exporting project history as zip file')
ZipManager.makeTempDirectory((err, tmpdir) => {
if (err) {
return next(err)
}
const zipFilePath = Path.join(tmpdir, 'export.zip')
ZipManager.exportProject(projectId, zipFilePath, err => {
if (err) {
ZipManager.cleanupTempDirectory(tmpdir)
return next(err)
}
res.download(zipFilePath, `${projectId}-track-changes.zip`, err => {
ZipManager.cleanupTempDirectory(tmpdir)
if (err && !res.headersSent) {
return next(err)
}
})
})
})
},
exportProject(req, res, next) {
// The project history can be huge:
// - updates can weigh MBs for insert/delete of full doc

View file

@ -24,7 +24,9 @@ const LockManager = require('./LockManager')
const MongoAWS = require('./MongoAWS')
const Metrics = require('@overleaf/metrics')
const ProjectIterator = require('./ProjectIterator')
const DocIterator = require('./DocIterator')
const Settings = require('@overleaf/settings')
const util = require('util')
const keys = Settings.redis.lock.key_schema
// Sharejs operations are stored in a 'pack' object
@@ -423,16 +425,59 @@ module.exports = PackManager = {
})
},
findAllDocsInProject(project_id, callback) {
const docIdSet = new Set()
async.series(
[
cb => {
db.docHistory
.find(
{ project_id: ObjectId(project_id) },
{ projection: { pack: false } }
)
.toArray((err, packs) => {
if (err) return cb(err)
packs.forEach(pack => {
docIdSet.add(pack.doc_id.toString())
})
return cb()
})
},
cb => {
db.docHistoryIndex
.find({ project_id: ObjectId(project_id) })
.toArray((err, indexes) => {
if (err) return cb(err)
indexes.forEach(index => {
docIdSet.add(index._id.toString())
})
return cb()
})
},
],
err => {
if (err) return callback(err)
callback(null, [...docIdSet])
}
)
},
// rewrite any query using doc_id to use _id instead
// (because docHistoryIndex uses the doc_id)
_rewriteQueryForIndex(query) {
const indexQuery = _.omit(query, 'doc_id')
if ('doc_id' in query) {
indexQuery._id = query.doc_id
}
return indexQuery
},
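
For instance, a per-document query is rewritten so the same query shape works against docHistoryIndex, which keys documents by _id (values illustrative):

const PackManager = require('./PackManager')

// { doc_id: 'd1' } -> { _id: 'd1' }; other keys pass through untouched
console.log(PackManager._rewriteQueryForIndex({ doc_id: 'd1' }))
// { project_id: 'p1' } -> { project_id: 'p1' } (no doc_id to rewrite)
console.log(PackManager._rewriteQueryForIndex({ project_id: 'p1' }))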
// Retrieve all changes across a project
makeProjectIterator(project_id, before, callback) {
_findPacks(query, sortKeys, callback) {
// get all the docHistory Entries
return db.docHistory
.find(
{ project_id: ObjectId(project_id) },
{ projection: { pack: false } }
)
.sort({ 'meta.end_ts': -1 })
.find(query, { projection: { pack: false } })
.sort(sortKeys)
.toArray(function (err, packs) {
let pack
if (err != null) {
@@ -444,8 +489,9 @@ module.exports = PackManager = {
allPacks.push(pack)
seenIds[pack._id] = true
}
const indexQuery = PackManager._rewriteQueryForIndex(query)
return db.docHistoryIndex
.find({ project_id: ObjectId(project_id) })
.find(indexQuery)
.toArray(function (err, indexes) {
if (err != null) {
return callback(err)
@@ -461,14 +507,36 @@ module.exports = PackManager = {
}
}
}
return callback(
null,
new ProjectIterator(allPacks, before, PackManager.getPackById)
)
return callback(null, allPacks)
})
})
},
makeProjectIterator(project_id, before, callback) {
PackManager._findPacks(
{ project_id: ObjectId(project_id) },
{ 'meta.end_ts': -1 },
function (err, allPacks) {
if (err) return callback(err)
callback(
null,
new ProjectIterator(allPacks, before, PackManager.getPackById)
)
}
)
},
makeDocIterator(doc_id, callback) {
PackManager._findPacks(
{ doc_id: ObjectId(doc_id) },
{ v: -1 },
function (err, allPacks) {
if (err) return callback(err)
callback(null, new DocIterator(allPacks, PackManager.getPackById))
}
)
},
getPackById(project_id, doc_id, pack_id, callback) {
return db.docHistory.findOne({ _id: pack_id }, function (err, pack) {
if (err != null) {
@@ -1165,6 +1233,12 @@ module.exports = PackManager = {
},
}
module.exports.promises = {
getOpsByVersionRange: util.promisify(PackManager.getOpsByVersionRange),
findAllDocsInProject: util.promisify(PackManager.findAllDocsInProject),
makeDocIterator: util.promisify(PackManager.makeDocIterator),
}
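
With these wrappers, a document's history can be walked newest-first in async/await style, as ZipManager does below; a sketch (doc id illustrative):

const util = require('util')
const PackManager = require('./PackManager')

async function walkDocHistory(docId) {
  const docIterator = await PackManager.promises.makeDocIterator(docId)
  // next() is promisified per-iterator so it keeps its `this` binding
  const getUpdate = util.promisify(docIterator.next).bind(docIterator)
  let update = await getUpdate()
  while (update) {
    console.log('update version', update.v) // most recent first
    update = await getUpdate()
  }
}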
// _getOneDayInFutureWithRandomDelay: ->
// thirtyMins = 1000 * 60 * 30
// randomThirtyMinMax = Math.ceil(Math.random() * thirtyMins)

View file

@@ -27,6 +27,7 @@ const async = require('async')
const _ = require('underscore')
const Settings = require('@overleaf/settings')
const keys = Settings.redis.lock.key_schema
const util = require('util')
module.exports = UpdatesManager = {
compressAndSaveRawUpdates(
@@ -881,6 +882,12 @@ module.exports = UpdatesManager = {
},
}
module.exports.promises = {
processUncompressedUpdatesForProject: util.promisify(
UpdatesManager.processUncompressedUpdatesForProject
),
}
function __guard__(value, transform) {
return typeof value !== 'undefined' && value !== null
? transform(value)

View file

@@ -0,0 +1,162 @@
const logger = require('logger-sharelatex')
const UpdatesManager = require('./UpdatesManager')
const DiffGenerator = require('./DiffGenerator')
const DocumentUpdaterManager = require('./DocumentUpdaterManager')
const DocstoreManager = require('./DocstoreManager')
const PackManager = require('./PackManager')
const yazl = require('yazl')
const util = require('util')
const stream = require('stream')
const fs = require('fs')
const os = require('os')
const Path = require('path')
const streamPipeline = util.promisify(stream.pipeline)
// look in docstore or docupdater for the latest version of the document
async function getLatestContent(projectId, docId, lastUpdateVersion) {
const [docstoreContent, docstoreVersion] =
await DocstoreManager.promises.peekDocument(projectId, docId)
// if docstore is out of date, check for a newer version in docupdater
// and return that instead
if (docstoreVersion <= lastUpdateVersion) {
const [docupdaterContent, docupdaterVersion] =
await DocumentUpdaterManager.promises.peekDocument(projectId, docId)
if (docupdaterVersion > docstoreVersion) {
return [docupdaterContent, docupdaterVersion]
}
}
return [docstoreContent, docstoreVersion]
}
async function rewindDoc(projectId, docId, zipfile) {
logger.log({ projectId, docId }, 'rewinding document')
// Prepare to rewind content
const docIterator = await PackManager.promises.makeDocIterator(docId)
const getUpdate = util.promisify(docIterator.next).bind(docIterator)
const lastUpdate = await getUpdate()
if (!lastUpdate) {
return null
}
const lastUpdateVersion = lastUpdate.v
const [latestContent, version] = await getLatestContent(
projectId,
docId,
lastUpdateVersion
)
const id = docId.toString()
const contentEndPath = `${id}/content/end/${version}`
zipfile.addBuffer(Buffer.from(latestContent), contentEndPath)
const metadata = {
id,
version,
content: {
end: {
path: contentEndPath,
version,
},
},
updates: [],
}
let content = latestContent
let v = version
let update = lastUpdate
while (update) {
const updatePath = `${id}/updates/${update.v}`
zipfile.addBuffer(Buffer.from(JSON.stringify(update)), updatePath, {
mtime: new Date(update.meta.start_ts),
})
try {
content = DiffGenerator.rewindUpdate(content, update)
v = update.v
} catch (e) {
e.attempted_update = update // keep a record of the attempted update
logger.error({ projectId, docId, err: e }, 'rewind error')
break // stop attempting to rewind on error
}
metadata.updates.push({
path: updatePath,
version: update.v,
ts: update.meta.start_ts,
doc_length: content.length,
})
update = await getUpdate()
}
const contentStartPath = `${id}/content/start/${v}`
zipfile.addBuffer(Buffer.from(content), contentStartPath)
metadata.content.start = {
path: contentStartPath,
version: v,
}
return metadata
}
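
Each rewound document therefore contributes a metadata entry of the following shape to manifest.json (a sketch; ids, versions, timestamps, and lengths are invented for illustration):

// Illustrative manifest entry as produced by rewindDoc above.
const exampleDocEntry = {
  id: 'doc-id-123', // invented
  version: 42, // version of the latest content
  content: {
    end: { path: 'doc-id-123/content/end/42', version: 42 },
    // start is filled in after the rewind loop completes
    start: { path: 'doc-id-123/content/start/40', version: 40 },
  },
  // one entry per update, pushed while rewinding, so versions descend
  updates: [
    { path: 'doc-id-123/updates/42', version: 42, ts: 1629072000000, doc_length: 1024 },
    { path: 'doc-id-123/updates/41', version: 41, ts: 1629071000000, doc_length: 1019 },
  ],
}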
async function generateZip(projectId, zipfile) {
await UpdatesManager.promises.processUncompressedUpdatesForProject(projectId)
const docIds = await PackManager.promises.findAllDocsInProject(projectId)
const manifest = { projectId, docs: [] }
for (const docId of docIds) {
const doc = await rewindDoc(projectId, docId, zipfile)
if (doc) {
manifest.docs.push(doc)
}
}
zipfile.addBuffer(
Buffer.from(JSON.stringify(manifest, null, 2)),
'manifest.json'
)
zipfile.end()
}
async function exportProject(projectId, zipPath) {
const zipfile = new yazl.ZipFile()
const pipeline = streamPipeline(
zipfile.outputStream,
fs.createWriteStream(zipPath)
)
await generateZip(projectId, zipfile)
await pipeline
}
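
Note that exportProject wires up the output pipeline before generateZip() starts adding entries, so the archive streams to disk as it is built rather than buffering in memory; the pipeline promise is awaited only once zipfile.end() has been called. A sketch of driving the callbackified exports directly, outside an HTTP handler (project id illustrative):

const Path = require('path')
const ZipManager = require('./ZipManager')

ZipManager.makeTempDirectory((err, tmpdir) => {
  if (err) throw err
  const zipFilePath = Path.join(tmpdir, 'export.zip')
  ZipManager.exportProject('project-id-123', zipFilePath, err => {
    if (err) console.error('export failed', err)
    else console.log('history exported to', zipFilePath)
    ZipManager.cleanupTempDirectory(tmpdir)
  })
})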
/**
* Create a temporary directory for use with exportProject()
*/
async function makeTempDirectory() {
const tmpdir = await fs.promises.mkdtemp(
(await fs.promises.realpath(os.tmpdir())) + Path.sep
)
return tmpdir
}
/**
* Clean up a temporary directory made with makeTempDirectory()
*/
function cleanupTempDirectory(tmpdir) {
fs.promises.rmdir(tmpdir, { recursive: true }).catch(err => {
logger.warn({ err, tmpdir }, 'Failed to clean up temp directory')
})
}
module.exports = {
exportProject: util.callbackify(exportProject),
makeTempDirectory: util.callbackify(makeTempDirectory),
cleanupTempDirectory,
}

View file

@@ -1344,7 +1344,7 @@
"bintrees": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/bintrees/-/bintrees-1.0.1.tgz",
"integrity": "sha1-DmVcm5wkNeqraL9AJyJtK1WjRSQ="
"integrity": "sha512-tbaUB1QpTIj4cKY8c1rvNAvEQXA+ekzHmbe4jzNfW3QWsF9GnnP/BRWyl6/qqS53heoYJ93naaFcm/jooONH8g=="
},
"bl": {
"version": "2.2.1",
@@ -1431,6 +1431,11 @@
"isarray": "^1.0.0"
}
},
"buffer-crc32": {
"version": "0.2.13",
"resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz",
"integrity": "sha1-DTM+PwDqxQqhRUq9MO+MKl2ackI="
},
"buffer-equal-constant-time": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz",
@@ -1738,7 +1743,7 @@
"d64": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/d64/-/d64-1.0.0.tgz",
"integrity": "sha1-QAKofoUMv8n52XBrYPymE6MzbpA="
"integrity": "sha512-5eNy3WZziVYnrogqgXhcdEmqcDB2IHurTqLcrgssJsfkMVCUoUaZpK6cJjxxvLV2dUm5SuJMNcYfVGoin9UIRw=="
},
"dashdash": {
"version": "1.14.1",
@@ -2640,7 +2645,7 @@
"findit2": {
"version": "2.2.3",
"resolved": "https://registry.npmjs.org/findit2/-/findit2-2.2.3.tgz",
"integrity": "sha1-WKRmaX34piBc39vzlVNri9d3pfY="
"integrity": "sha512-lg/Moejf4qXovVutL0Lz4IsaPoNYMuxt4PA0nGqFxnJ1CTTGGlEO2wKgoDpwknhvZ8k4Q2F+eesgkLbG2Mxfog=="
},
"flat": {
"version": "5.0.2",
@@ -3543,12 +3548,12 @@
"lodash.at": {
"version": "4.6.0",
"resolved": "https://registry.npmjs.org/lodash.at/-/lodash.at-4.6.0.tgz",
"integrity": "sha1-k83OZk8KGZTqM9181A4jr9EbD/g="
"integrity": "sha512-GOTh0SEp+Yosnlpjic+8cl2WM9MykorogkGA9xyIFkkObQ3H3kNZqZ+ohuq4K3FrSVo7hMcZBMataJemrxC3BA=="
},
"lodash.camelcase": {
"version": "4.3.0",
"resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz",
"integrity": "sha1-soqmKIorn8ZRA1x3EfZathkDMaY="
"integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA=="
},
"lodash.clonedeep": {
"version": "4.5.0",
@@ -3559,12 +3564,12 @@
"lodash.defaults": {
"version": "4.2.0",
"resolved": "https://registry.npmjs.org/lodash.defaults/-/lodash.defaults-4.2.0.tgz",
"integrity": "sha1-0JF4cW/+pN3p5ft7N/bwgCJ0WAw="
"integrity": "sha512-qjxPLHd3r5DnsdGacqOMU6pb/avJzdh9tFX2ymgoZE27BmjXrNy/y4LoaiTeAb+O3gL8AfpJGtqfX/ae2leYYQ=="
},
"lodash.flatten": {
"version": "4.4.0",
"resolved": "https://registry.npmjs.org/lodash.flatten/-/lodash.flatten-4.4.0.tgz",
"integrity": "sha1-8xwiIlqWMtK7+OSt2+8kCqdlph8="
"integrity": "sha512-C5N2Z3DgnnKr0LOpv/hKCgKdb7ZZwafIrsesve6lmzvZIRZRGaZ/l6Q8+2W7NaT+ZwO3fFlSCzCzrDCFdJfZ4g=="
},
"lodash.get": {
"version": "4.4.2",
@@ -3575,7 +3580,7 @@
"lodash.has": {
"version": "4.5.2",
"resolved": "https://registry.npmjs.org/lodash.has/-/lodash.has-4.5.2.tgz",
"integrity": "sha1-0Z9NwQlQWMzL4rDN9O4P5Ko3yGI="
"integrity": "sha512-rnYUdIo6xRCJnQmbVFEwcxF144erlD+M3YcJUVesflU9paQaE8p+fJDcIQrlMYbxoANFL+AB9hZrzSBBk5PL+g=="
},
"lodash.merge": {
"version": "4.6.2",
@@ -3937,7 +3942,7 @@
"module-details-from-path": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/module-details-from-path/-/module-details-from-path-1.0.3.tgz",
"integrity": "sha1-EUyUlnPiqKNenTV4hSeqN7Z52is="
"integrity": "sha512-ySViT69/76t8VhE1xXHK6Ch4NcDd26gx0MzKXLO+F7NOtnqH68d9zF94nT8ZWSxXh8ELOERsnJO/sWt1xZYw5A=="
},
"moment": {
"version": "2.24.0",
@@ -4740,12 +4745,12 @@
"redis-errors": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/redis-errors/-/redis-errors-1.2.0.tgz",
"integrity": "sha1-62LSrbFeTq9GEMBK/hUpOEJQq60="
"integrity": "sha512-1qny3OExCf0UvUV/5wpYKf2YwPcOqXzkwKKSmKHiE6ZMQs5heeE/c8eXK+PNllPvmjgAbfnsbpkGZWy8cBpn9w=="
},
"redis-parser": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/redis-parser/-/redis-parser-3.0.0.tgz",
"integrity": "sha1-tm2CjNyv5rS4pCin3vTGvKwxyLQ=",
"integrity": "sha512-DJnGAeenTdpMEH6uAJRK/uiyEIH9WVsUmoLwzudwGJUwZPp80PDBWPHXSAGNPwNvIXAbe7MSUB1zQFugFml66A==",
"requires": {
"redis-errors": "^1.0.0"
}
@@ -5291,7 +5296,7 @@
"stubs": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz",
"integrity": "sha1-6NK6H6nJBXAwPAMLaQD31fiavls="
"integrity": "sha512-PdHt7hHUJKxvTCgbKX9C1V/ftOcjJQgz8BZwNfV5c4B6dcGqlpelTbJ999jBGZ2jYiPAwcX5dP6oBwVlBlUbxw=="
},
"supports-color": {
"version": "5.5.0",
@@ -5352,7 +5357,7 @@
"tdigest": {
"version": "0.1.1",
"resolved": "https://registry.npmjs.org/tdigest/-/tdigest-0.1.1.tgz",
"integrity": "sha1-Ljyyw56kSeVdHmzZEReszKRYgCE=",
"integrity": "sha512-CXcDY/NIgIbKZPx5H4JJNpq6JwJhU5Z4+yWj4ZghDc7/9nVajiRlPPyMXRePPPlBfcayUqtoCXjo7/Hm82ecUA==",
"requires": {
"bintrees": "1.0.1"
}
@@ -5409,7 +5414,7 @@
"to-no-case": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/to-no-case/-/to-no-case-1.0.2.tgz",
"integrity": "sha1-xyKQcWTvaxeBMsjmmTAhLRtKoWo="
"integrity": "sha512-Z3g735FxuZY8rodxV4gH7LxClE4H0hTIyHNIHdk+vpQxjLm0cwnKXq/OFVZ76SOQmto7txVcwSCwkU5kqp+FKg=="
},
"to-regex-range": {
"version": "5.0.1",
@@ -5423,7 +5428,7 @@
"to-snake-case": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/to-snake-case/-/to-snake-case-1.0.0.tgz",
"integrity": "sha1-znRpE4l5RgGah+Yu366upMYIq4w=",
"integrity": "sha512-joRpzBAk1Bhi2eGEYBjukEWHOe/IvclOkiJl3DtA91jV6NwQ3MwXA4FHYeqk8BNp/D8bmi9tcNbRu/SozP0jbQ==",
"requires": {
"to-space-case": "^1.0.0"
}
@@ -5431,7 +5436,7 @@
"to-space-case": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/to-space-case/-/to-space-case-1.0.0.tgz",
"integrity": "sha1-sFLar7Gysp3HcM6gFj5ewOvJ/Bc=",
"integrity": "sha512-rLdvwXZ39VOn1IxGL3V6ZstoTbwLRckQmn/U8ZDLuWwIXNpuZDhQ3AiRUlhTbOXFVE9C+dR51wM0CBDhk31VcA==",
"requires": {
"to-no-case": "^1.0.0"
}
@@ -5780,6 +5785,14 @@
"is-plain-obj": "^2.1.0"
}
},
"yazl": {
"version": "2.5.1",
"resolved": "https://registry.npmjs.org/yazl/-/yazl-2.5.1.tgz",
"integrity": "sha512-phENi2PLiHnHb6QBVot+dJnaAZ0xosj7p3fWl+znIjBDlnMI2PsZCJZ306BPTFOaHf5qdDEI8x5qFrSOBN5vrw==",
"requires": {
"buffer-crc32": "~0.2.3"
}
},
"yn": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz",

View file

@@ -40,7 +40,8 @@
"request": "~2.88.2",
"requestretry": "^4.1.0",
"s3-streams": "^0.4.0",
"underscore": "~1.13.1"
"underscore": "~1.13.1",
"yazl": "^2.5.1"
},
"devDependencies": {
"chai": "^4.2.0",

View file

@@ -25,6 +25,7 @@ describe('HttpController', function () {
'./PackManager': (this.PackManager = {}),
'./DocArchiveManager': (this.DocArchiveManager = {}),
'./HealthChecker': (this.HealthChecker = {}),
'./ZipManager': (this.ZipManager = {}),
},
})
this.doc_id = 'doc-id-123'