overleaf/services/web/app/src/Features/Downloads/ProjectZipStreamManager.js
Alf Eaton a15e1bdc07 Merge pull request #5734 from overleaf/ab-handle-project-zip-not-existing
Handle the case of a non-existent project ID when zipping projects for download

GitOrigin-RevId: 601b14ef0217c27dc50ef0442f7e81db35642b6d
2021-11-17 09:03:29 +00:00

let ProjectZipStreamManager
const archiver = require('archiver')
const async = require('async')
const logger = require('@overleaf/logger')
const ProjectEntityHandler = require('../Project/ProjectEntityHandler')
const ProjectGetter = require('../Project/ProjectGetter')
const FileStoreHandler = require('../FileStore/FileStoreHandler')
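
// Export and bind to the local name in one step so the methods can refer to
// each other through ProjectZipStreamManager (see createZipStreamForMultipleProjects)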
module.exports = ProjectZipStreamManager = {
  createZipStreamForMultipleProjects(projectIds, callback) {
    // We'll build up a zip file that contains multiple zip files
    const archive = archiver('zip')
    archive.on('error', err =>
      logger.err(
        { err, projectIds },
        'something went wrong building archive of project'
      )
    )
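    // Hand the archive back to the caller right away; projects are appended asynchronously below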
    callback(null, archive)

    const jobs = projectIds.map(projectId => cb => {
      ProjectGetter.getProject(projectId, { name: true }, (error, project) => {
        if (error) {
          return cb(error)
        }
        if (!project) {
          logger.log(
            { projectId },
            'cannot append project to zip stream: project not found'
          )
          return cb()
        }
        logger.log(
          { projectId, name: project.name },
          'appending project to zip stream'
        )
        ProjectZipStreamManager.createZipStreamForProject(
          projectId,
          (error, stream) => {
            if (error) {
              return cb(error)
            }
            archive.append(stream, { name: `${project.name}.zip` })
            stream.on('end', () => {
              logger.log({ projectId, name: project.name }, 'zip stream ended')
              cb()
            })
          }
        )
      })
    })
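    // Append the projects one at a time; finalize the outer archive once every job has run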
    async.series(jobs, () => {
      logger.log(
        { projectIds },
        'finished creating zip stream of multiple projects'
      )
      archive.finalize()
    })
  },

  createZipStreamForProject(projectId, callback) {
    const archive = archiver('zip')
    // return stream immediately before we start adding things to it
    archive.on('error', err =>
      logger.err(
        { err, projectId },
        'something went wrong building archive of project'
      )
    )
    callback(null, archive)
    this.addAllDocsToArchive(projectId, archive, error => {
      if (error) {
        logger.error(
          { err: error, projectId },
          'error adding docs to zip stream'
        )
      }
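      // A docs error is logged but does not abort the archive; carry on with the project's files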
      this.addAllFilesToArchive(projectId, archive, error => {
        if (error) {
          logger.error(
            { err: error, projectId },
            'error adding files to zip stream'
          )
        }
        archive.finalize()
      })
    })
  },

  addAllDocsToArchive(projectId, archive, callback) {
    ProjectEntityHandler.getAllDocs(projectId, (error, docs) => {
      if (error) {
        return callback(error)
      }
      const jobs = Object.entries(docs).map(([path, doc]) => cb => {
        if (path[0] === '/') {
          path = path.slice(1)
        }
        logger.log({ projectId }, 'Adding doc')
        archive.append(doc.lines.join('\n'), { name: path })
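        // Yield to the event loop before queuing the next doc so large projects do not block it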
        setImmediate(cb)
      })
      async.series(jobs, callback)
    })
  },

  addAllFilesToArchive(projectId, archive, callback) {
    ProjectEntityHandler.getAllFiles(projectId, (error, files) => {
      if (error) {
        return callback(error)
      }
      const jobs = Object.entries(files).map(([path, file]) => cb => {
        FileStoreHandler.getFileStream(
          projectId,
          file._id,
          {},
          (error, stream) => {
            if (error) {
              logger.warn(
                { err: error, projectId, file_id: file._id },
                'something went wrong adding file to zip archive'
              )
              return cb(error)
            }
            if (path[0] === '/') {
              path = path.slice(1)
            }
            archive.append(stream, { name: path })
            stream.on('end', () => cb())
          }
        )
      })
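      // Stream at most five files from the filestore at a time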
      async.parallelLimit(jobs, 5, callback)
    })
  },
}
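
A minimal sketch of how a caller might consume the stream returned by createZipStreamForMultipleProjects, assuming an Express-style handler; the handler name, query parameter, and header values are illustrative and not part of this module:

// Hypothetical Express-style handler (names and route wiring are assumptions, not Overleaf's controller)
const ProjectZipStreamManager = require('./ProjectZipStreamManager')

function downloadMultipleProjects(req, res, next) {
  const projectIds = (req.query.project_ids || '').split(',')
  ProjectZipStreamManager.createZipStreamForMultipleProjects(
    projectIds,
    (error, zipStream) => {
      if (error) {
        return next(error)
      }
      // The archive is handed back before anything is appended to it,
      // so piping can start while the individual project zips are still being built
      res.setHeader('Content-Type', 'application/zip')
      res.setHeader(
        'Content-Disposition',
        'attachment; filename="projects.zip"'
      )
      zipStream.pipe(res)
    }
  )
}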