Merge pull request #10249 from overleaf/em-object-persistor-subdirs

Add useSubdirectories option to object-persistor FS backend

GitOrigin-RevId: dc4f0a57e892ffa5be0c88b8baf7efce683ddfc1
This commit is contained in:
Eric Mc Sween 2022-11-10 07:06:26 -05:00 committed by Copybot
parent 8fb3edbecd
commit a98f752b99
6 changed files with 374 additions and 388 deletions

View file

@ -253,7 +253,8 @@ An object with the relevant configuration should be passed to the main function
### FS-specific parameters ### FS-specific parameters
- `path.uploadFolder` (required): Location for temporary files that are being uploaded - `paths.uploadFolder` (required): Location for temporary files that are being uploaded
- `useSubdirectories`: If true, files will be stored in subdirectories on the filesystem. By default, the directory structure is flattened and slashes in the object keys are replaced with underscores.
#### Notes #### Notes

View file

@ -36,6 +36,7 @@
"chai": "^4.3.6", "chai": "^4.3.6",
"chai-as-promised": "^7.1.1", "chai-as-promised": "^7.1.1",
"mocha": "^8.4.0", "mocha": "^8.4.0",
"mock-fs": "^5.2.0",
"mongodb": "^3.5.9", "mongodb": "^3.5.9",
"sandboxed-module": "^2.0.4", "sandboxed-module": "^2.0.4",
"sinon": "^9.2.4", "sinon": "^9.2.4",

View file

@ -2,7 +2,7 @@ const fs = require('fs')
const fsPromises = require('fs/promises') const fsPromises = require('fs/promises')
const globCallbacks = require('glob') const globCallbacks = require('glob')
const uuid = require('node-uuid') const uuid = require('node-uuid')
const path = require('path') const Path = require('path')
const { pipeline } = require('stream/promises') const { pipeline } = require('stream/promises')
const { promisify } = require('util') const { promisify } = require('util')
@ -12,8 +12,6 @@ const PersistorHelper = require('./PersistorHelper')
const glob = promisify(globCallbacks) const glob = promisify(globCallbacks)
const filterName = key => key.replace(/\//g, '_')
module.exports = class FSPersistor extends AbstractPersistor { module.exports = class FSPersistor extends AbstractPersistor {
constructor(settings) { constructor(settings) {
super() super()
@ -22,13 +20,14 @@ module.exports = class FSPersistor extends AbstractPersistor {
} }
async sendFile(location, target, source) { async sendFile(location, target, source) {
const filteredTarget = filterName(target) const fsPath = this._getFsPath(location, target)
// actually copy the file (instead of moving it) to maintain consistent behaviour // actually copy the file (instead of moving it) to maintain consistent behaviour
// between the different implementations // between the different implementations
try { try {
await this._ensureDirectoryExists(fsPath)
const sourceStream = fs.createReadStream(source) const sourceStream = fs.createReadStream(source)
const targetStream = fs.createWriteStream(`${location}/${filteredTarget}`) const targetStream = fs.createWriteStream(fsPath)
await pipeline(sourceStream, targetStream) await pipeline(sourceStream, targetStream)
} catch (err) { } catch (err) {
throw PersistorHelper.wrapError( throw PersistorHelper.wrapError(
@ -41,17 +40,18 @@ module.exports = class FSPersistor extends AbstractPersistor {
} }
async sendStream(location, target, sourceStream, opts = {}) { async sendStream(location, target, sourceStream, opts = {}) {
const fsPath = await this._writeStream(sourceStream) const tempFilePath = await this._writeStream(sourceStream)
let sourceMd5 = opts.sourceMd5 let sourceMd5 = opts.sourceMd5
if (!sourceMd5) { if (!sourceMd5) {
sourceMd5 = await FSPersistor._getFileMd5HashForPath(fsPath) sourceMd5 = await _getFileMd5HashForPath(tempFilePath)
} }
try { try {
await this.sendFile(location, target, fsPath) await this.sendFile(location, target, tempFilePath)
const destMd5 = await this.getObjectMd5Hash(location, target) const destMd5 = await this.getObjectMd5Hash(location, target)
if (sourceMd5 !== destMd5) { if (sourceMd5 !== destMd5) {
await this._deleteFile(`${location}/${filterName(target)}`) const fsPath = this._getFsPath(location, target)
await this._deleteFile(fsPath)
throw new WriteError('md5 hash mismatch', { throw new WriteError('md5 hash mismatch', {
sourceMd5, sourceMd5,
destMd5, destMd5,
@ -60,21 +60,21 @@ module.exports = class FSPersistor extends AbstractPersistor {
}) })
} }
} finally { } finally {
await this._deleteFile(fsPath) await this._deleteFile(tempFilePath)
} }
} }
// opts may be {start: Number, end: Number} // opts may be {start: Number, end: Number}
async getObjectStream(location, name, opts) { async getObjectStream(location, name, opts = {}) {
const filteredName = filterName(name) const fsPath = this._getFsPath(location, name)
try { try {
opts.fd = await fsPromises.open(`${location}/${filteredName}`, 'r') opts.fd = await fsPromises.open(fsPath, 'r')
} catch (err) { } catch (err) {
throw PersistorHelper.wrapError( throw PersistorHelper.wrapError(
err, err,
'failed to open file for streaming', 'failed to open file for streaming',
{ location, filteredName, opts }, { location, name, fsPath, opts },
ReadError ReadError
) )
} }
@ -88,10 +88,10 @@ module.exports = class FSPersistor extends AbstractPersistor {
} }
async getObjectSize(location, filename) { async getObjectSize(location, filename) {
const fullPath = path.join(location, filterName(filename)) const fsPath = this._getFsPath(location, filename)
try { try {
const stat = await fsPromises.stat(fullPath) const stat = await fsPromises.stat(fsPath)
return stat.size return stat.size
} catch (err) { } catch (err) {
throw PersistorHelper.wrapError( throw PersistorHelper.wrapError(
@ -104,9 +104,9 @@ module.exports = class FSPersistor extends AbstractPersistor {
} }
async getObjectMd5Hash(location, filename) { async getObjectMd5Hash(location, filename) {
const fullPath = path.join(location, filterName(filename)) const fsPath = this._getFsPath(location, filename)
try { try {
return await FSPersistor._getFileMd5HashForPath(fullPath) return await _getFileMd5HashForPath(fsPath)
} catch (err) { } catch (err) {
throw new ReadError( throw new ReadError(
'unable to get md5 hash from file', 'unable to get md5 hash from file',
@ -116,35 +116,34 @@ module.exports = class FSPersistor extends AbstractPersistor {
} }
} }
async copyObject(location, fromName, toName) { async copyObject(location, source, target) {
const filteredFromName = filterName(fromName) const sourceFsPath = this._getFsPath(location, source)
const filteredToName = filterName(toName) const targetFsPath = this._getFsPath(location, target)
try { try {
const sourceStream = fs.createReadStream( await this._ensureDirectoryExists(targetFsPath)
`${location}/${filteredFromName}` const sourceStream = fs.createReadStream(sourceFsPath)
) const targetStream = fs.createWriteStream(targetFsPath)
const targetStream = fs.createWriteStream(`${location}/${filteredToName}`)
await pipeline(sourceStream, targetStream) await pipeline(sourceStream, targetStream)
} catch (err) { } catch (err) {
throw PersistorHelper.wrapError( throw PersistorHelper.wrapError(
err, err,
'failed to copy file', 'failed to copy file',
{ location, filteredFromName, filteredToName }, { location, source, target, sourceFsPath, targetFsPath },
WriteError WriteError
) )
} }
} }
async deleteObject(location, name) { async deleteObject(location, name) {
const filteredName = filterName(name) const fsPath = this._getFsPath(location, name)
try { try {
await fsPromises.unlink(`${location}/${filteredName}`) await fsPromises.unlink(fsPath)
} catch (err) { } catch (err) {
const wrappedError = PersistorHelper.wrapError( const wrappedError = PersistorHelper.wrapError(
err, err,
'failed to delete file', 'failed to delete file',
{ location, filteredName }, { location, name, fsPath },
WriteError WriteError
) )
if (!(wrappedError instanceof NotFoundError)) { if (!(wrappedError instanceof NotFoundError)) {
@ -156,28 +155,31 @@ module.exports = class FSPersistor extends AbstractPersistor {
} }
async deleteDirectory(location, name) { async deleteDirectory(location, name) {
const filteredName = filterName(name.replace(/\/$/, '')) const fsPath = this._getFsPath(location, name)
try { try {
await Promise.all( if (this.settings.useSubdirectories) {
( await fsPromises.rm(fsPath, { recursive: true, force: true })
await glob(`${location}/${filteredName}_*`) } else {
).map(file => fsPromises.unlink(file)) const files = await this._listDirectory(fsPath)
) for (const file of files) {
await fsPromises.unlink(file)
}
}
} catch (err) { } catch (err) {
throw PersistorHelper.wrapError( throw PersistorHelper.wrapError(
err, err,
'failed to delete directory', 'failed to delete directory',
{ location, filteredName }, { location, name, fsPath },
WriteError WriteError
) )
} }
} }
async checkIfObjectExists(location, name) { async checkIfObjectExists(location, name) {
const filteredName = filterName(name) const fsPath = this._getFsPath(location, name)
try { try {
const stat = await fsPromises.stat(`${location}/${filteredName}`) const stat = await fsPromises.stat(fsPath)
return !!stat return !!stat
} catch (err) { } catch (err) {
if (err.code === 'ENOENT') { if (err.code === 'ENOENT') {
@ -186,7 +188,7 @@ module.exports = class FSPersistor extends AbstractPersistor {
throw PersistorHelper.wrapError( throw PersistorHelper.wrapError(
err, err,
'failed to stat file', 'failed to stat file',
{ location, filteredName }, { location, name, fsPath },
ReadError ReadError
) )
} }
@ -194,11 +196,11 @@ module.exports = class FSPersistor extends AbstractPersistor {
// note, does not recurse into subdirectories, as we use a flattened directory structure // note, does not recurse into subdirectories, as we use a flattened directory structure
async directorySize(location, name) { async directorySize(location, name) {
const filteredName = filterName(name.replace(/\/$/, '')) const fsPath = this._getFsPath(location, name)
let size = 0 let size = 0
try { try {
const files = await glob(`${location}/${filteredName}_*`) const files = await this._listDirectory(fsPath)
for (const file of files) { for (const file of files) {
try { try {
const stat = await fsPromises.stat(file) const stat = await fsPromises.stat(file)
@ -229,7 +231,7 @@ module.exports = class FSPersistor extends AbstractPersistor {
key = uuid.v1() key = uuid.v1()
} }
key = key.replace(/\//g, '-') key = key.replace(/\//g, '-')
return path.join(this.settings.paths.uploadFolder, key) return Path.join(this.settings.paths.uploadFolder, key)
} }
async _writeStream(stream, key) { async _writeStream(stream, key) {
@ -266,8 +268,28 @@ module.exports = class FSPersistor extends AbstractPersistor {
} }
} }
static async _getFileMd5HashForPath(fullPath) { _getFsPath(location, key) {
key = key.replace(/\/$/, '')
if (!this.settings.useSubdirectories) {
key = key.replace(/\//g, '_')
}
return Path.join(location, key)
}
async _listDirectory(path) {
if (this.settings.useSubdirectories) {
return await glob(Path.join(path, '**'))
} else {
return await glob(`${path}_*`)
}
}
async _ensureDirectoryExists(path) {
await fsPromises.mkdir(Path.dirname(path), { recursive: true })
}
}
async function _getFileMd5HashForPath(fullPath) {
const stream = fs.createReadStream(fullPath) const stream = fs.createReadStream(fullPath)
return PersistorHelper.calculateStreamMd5(stream) return PersistorHelper.calculateStreamMd5(stream)
} }
}

View file

@ -1,3 +1,9 @@
const SandboxedModule = require('sandboxed-module')
const chai = require('chai') const chai = require('chai')
chai.use(require('sinon-chai')) chai.use(require('sinon-chai'))
chai.use(require('chai-as-promised')) chai.use(require('chai-as-promised'))
SandboxedModule.configure({
globals: { Buffer, console, process, URL },
})

View file

@ -1,356 +1,310 @@
const sinon = require('sinon') const crypto = require('crypto')
const chai = require('chai') const { expect } = require('chai')
const { expect } = chai const mockFs = require('mock-fs')
const fs = require('fs')
const fsPromises = require('fs/promises')
const Path = require('path')
const StreamPromises = require('stream/promises')
const SandboxedModule = require('sandboxed-module') const SandboxedModule = require('sandboxed-module')
const Errors = require('../../src/Errors') const Errors = require('../../src/Errors')
const StreamModule = require('stream')
const MODULE_PATH = '../../src/FSPersistor.js' const MODULE_PATH = '../../src/FSPersistor.js'
describe('FSPersistorTests', function () { describe('FSPersistorTests', function () {
const stat = { size: 4, isFile: sinon.stub().returns(true) } const localFilePath = '/uploads/info.txt'
const fd = 1234 const localFileContents = Buffer.from('This information is critical', {
const writeStream = 'writeStream' encoding: 'utf-8',
const remoteStream = 'remoteStream' })
const location = '/foo' const uploadFolder = '/tmp'
const error = new Error('guru meditation error') const location = '/bucket'
const md5 = 'ffffffff' const files = {
wombat: 'animals/wombat.tex',
giraffe: 'animals/giraffe.tex',
potato: 'vegetables/potato.tex',
}
const files = ['animals/wombat.tex', 'vegetables/potato.tex'] const scenarios = [
const globs = [`${location}/${files[0]}`, `${location}/${files[1]}`] {
const filteredFilenames = ['animals_wombat.tex', 'vegetables_potato.tex'] description: 'default settings',
let fs, settings: { paths: { uploadFolder } },
fsPromises, fsPath: key => Path.join(location, key.replaceAll('/', '_')),
Stream, },
StreamPromises, {
FSPersistor, description: 'with useSubdirectories = true',
glob, settings: { paths: { uploadFolder }, useSubdirectories: true },
readStream, fsPath: key => Path.join(location, key),
crypto, },
Hash, ]
uuid,
tempFile for (const scenario of scenarios) {
describe(scenario.description, function () {
let persistor
beforeEach(function () { beforeEach(function () {
const randomNumber = Math.random().toString() const FSPersistor = SandboxedModule.require(MODULE_PATH, {
readStream = {
name: 'readStream',
on: sinon.stub().yields(),
pipe: sinon.stub(),
}
uuid = {
v1: () => randomNumber,
}
tempFile = `/tmp/${randomNumber}`
fs = {
createReadStream: sinon.stub().returns(readStream),
createWriteStream: sinon.stub().returns(writeStream),
}
fsPromises = {
unlink: sinon.stub().resolves(),
open: sinon.stub().resolves(fd),
stat: sinon.stub().resolves(stat),
}
glob = sinon.stub().yields(null, globs)
Stream = {
Transform: StreamModule.Transform,
}
StreamPromises = {
pipeline: sinon.stub().resolves(),
}
Hash = {
end: sinon.stub(),
read: sinon.stub().returns(md5),
digest: sinon.stub().returns(md5),
setEncoding: sinon.stub(),
}
crypto = {
createHash: sinon.stub().returns(Hash),
}
FSPersistor = new (SandboxedModule.require(MODULE_PATH, {
requires: { requires: {
'./Errors': Errors,
fs,
'fs/promises': fsPromises, 'fs/promises': fsPromises,
glob,
stream: Stream,
'stream/promises': StreamPromises, 'stream/promises': StreamPromises,
crypto, './Errors': Errors,
'node-uuid': uuid,
// imported by PersistorHelper but otherwise unused here
'@overleaf/logger': {},
}, },
globals: { console }, })
}))({ paths: { uploadFolder: '/tmp' } }) persistor = new FSPersistor(scenario.settings)
})
beforeEach(function () {
mockFs({
[localFilePath]: localFileContents,
[location]: {},
'/not-a-dir':
'This regular file is meant to prevent using this path as a directory',
'/directory/subdirectory': {},
})
})
afterEach(function () {
mockFs.restore()
}) })
describe('sendFile', function () { describe('sendFile', function () {
const localFilesystemPath = '/path/to/local/file'
it('should copy the file', async function () { it('should copy the file', async function () {
await FSPersistor.sendFile(location, files[0], localFilesystemPath) await persistor.sendFile(location, files.wombat, localFilePath)
expect(fs.createReadStream).to.have.been.calledWith(localFilesystemPath) const contents = await fsPromises.readFile(
expect(fs.createWriteStream).to.have.been.calledWith( scenario.fsPath(files.wombat)
`${location}/${filteredFilenames[0]}`
)
expect(StreamPromises.pipeline).to.have.been.calledWith(
readStream,
writeStream
) )
expect(contents.equals(localFileContents)).to.be.true
}) })
it('should return an error if the file cannot be stored', async function () { it('should return an error if the file cannot be stored', async function () {
StreamPromises.pipeline.rejects(error)
await expect( await expect(
FSPersistor.sendFile(location, files[0], localFilesystemPath) persistor.sendFile('/not-a-dir', files.wombat, localFilePath)
).to.eventually.be.rejected.and.have.property('cause', error) ).to.be.rejectedWith(Errors.WriteError)
}) })
}) })
describe('sendStream', function () { describe('sendStream', function () {
let stream
beforeEach(function () {
stream = fs.createReadStream(localFilePath)
})
it('should write the stream to disk', async function () { it('should write the stream to disk', async function () {
await FSPersistor.sendStream(location, files[0], remoteStream) await persistor.sendStream(location, files.wombat, stream)
expect(StreamPromises.pipeline).to.have.been.calledWith( const contents = await fsPromises.readFile(
remoteStream, scenario.fsPath(files.wombat)
writeStream
) )
expect(contents.equals(localFileContents)).to.be.true
}) })
it('should delete the temporary file', async function () { it('should delete the temporary file', async function () {
await FSPersistor.sendStream(location, files[0], remoteStream) await persistor.sendStream(location, files.wombat, stream)
expect(fsPromises.unlink).to.have.been.calledWith(tempFile) const tempFiles = await fsPromises.readdir(uploadFolder)
expect(tempFiles).to.be.empty
}) })
it('should wrap the error from the filesystem', async function () { it('should wrap the error from the filesystem', async function () {
StreamPromises.pipeline.rejects(error) await expect(
await expect(FSPersistor.sendStream(location, files[0], remoteStream)) persistor.sendStream('/not-a-dir', files.wombat, stream)
.to.eventually.be.rejected.and.be.instanceOf(Errors.WriteError) ).to.be.rejectedWith(Errors.WriteError)
.and.have.property('cause', error)
}) })
it('should send the temporary file to the filestore', async function () { describe('when the md5 hash matches', function () {
await FSPersistor.sendStream(location, files[0], remoteStream) it('should write the stream to disk', async function () {
expect(fs.createReadStream).to.have.been.calledWith(tempFile) await persistor.sendStream(location, files.wombat, stream, {
sourceMd5: md5(localFileContents),
})
const contents = await fsPromises.readFile(
scenario.fsPath(files.wombat)
)
expect(contents.equals(localFileContents)).to.be.true
})
}) })
describe('when the md5 hash does not match', function () { describe('when the md5 hash does not match', function () {
it('should return a write error', async function () { let promise
await expect(
FSPersistor.sendStream(location, files[0], remoteStream, { beforeEach(function () {
sourceMd5: '00000000', promise = persistor.sendStream(location, files.wombat, stream, {
sourceMd5: md5('wrong content'),
}) })
})
it('should return a write error', async function () {
await expect(promise).to.be.rejectedWith(
Errors.WriteError,
'md5 hash mismatch'
) )
.to.eventually.be.rejected.and.be.an.instanceOf(Errors.WriteError)
.and.have.property('message', 'md5 hash mismatch')
}) })
it('deletes the copied file', async function () { it('deletes the copied file', async function () {
try { await expect(promise).to.be.rejected
await FSPersistor.sendStream(location, files[0], remoteStream, { await expect(
sourceMd5: '00000000', fsPromises.access(scenario.fsPath(files.wombat))
}) ).to.be.rejected
} catch (_) {}
expect(fsPromises.unlink).to.have.been.calledWith(
`${location}/${filteredFilenames[0]}`
)
}) })
}) })
}) })
describe('getObjectStream', function () { describe('getObjectStream', function () {
it('should use correct file location', async function () { beforeEach(async function () {
await FSPersistor.getObjectStream(location, files[0], {}) await persistor.sendFile(location, files.wombat, localFilePath)
expect(fsPromises.open).to.have.been.calledWith(
`${location}/${filteredFilenames[0]}`
)
}) })
it('should pass the options to createReadStream', async function () { it('should return a string with the object contents', async function () {
await FSPersistor.getObjectStream(location, files[0], { const stream = await persistor.getObjectStream(location, files.wombat)
start: 0, const contents = await streamToBuffer(stream)
end: 8, expect(contents.equals(localFileContents)).to.be.true
})
expect(fs.createReadStream).to.have.been.calledWith(null, {
start: 0,
end: 8,
fd,
}) })
it('should support ranges', async function () {
const stream = await persistor.getObjectStream(
location,
files.wombat,
{
start: 5,
end: 16,
}
)
const contents = await streamToBuffer(stream)
// end is inclusive in ranges, but exclusive in slice()
expect(contents.equals(localFileContents.slice(5, 17))).to.be.true
}) })
it('should give a NotFoundError if the file does not exist', async function () { it('should give a NotFoundError if the file does not exist', async function () {
const err = new Error() await expect(
err.code = 'ENOENT' persistor.getObjectStream(location, 'does-not-exist')
fsPromises.open.rejects(err) ).to.be.rejectedWith(Errors.NotFoundError)
await expect(FSPersistor.getObjectStream(location, files[0], {}))
.to.eventually.be.rejected.and.be.an.instanceOf(Errors.NotFoundError)
.and.have.property('cause', err)
})
it('should wrap any other error', async function () {
fsPromises.open.rejects(error)
await expect(FSPersistor.getObjectStream(location, files[0], {}))
.to.eventually.be.rejectedWith('failed to open file for streaming')
.and.be.an.instanceOf(Errors.ReadError)
.and.have.property('cause', error)
}) })
}) })
describe('getObjectSize', function () { describe('getObjectSize', function () {
const badFilename = 'neenaw.tex' beforeEach(async function () {
const size = 65536 await persistor.sendFile(location, files.wombat, localFilePath)
const noentError = new Error('not found')
noentError.code = 'ENOENT'
beforeEach(function () {
fsPromises.stat
.rejects(error)
.withArgs(`${location}/${filteredFilenames[0]}`)
.resolves({ size })
.withArgs(`${location}/${badFilename}`)
.rejects(noentError)
}) })
it('should return the file size', async function () { it('should return the file size', async function () {
expect(await FSPersistor.getObjectSize(location, files[0])).to.equal(size) expect(
await persistor.getObjectSize(location, files.wombat)
).to.equal(localFileContents.length)
}) })
it('should throw a NotFoundError if the file does not exist', async function () { it('should throw a NotFoundError if the file does not exist', async function () {
await expect( await expect(
FSPersistor.getObjectSize(location, badFilename) persistor.getObjectSize(location, 'does-not-exist')
).to.eventually.be.rejected.and.be.an.instanceOf(Errors.NotFoundError) ).to.be.rejectedWith(Errors.NotFoundError)
})
it('should wrap any other error', async function () {
await expect(FSPersistor.getObjectSize(location, 'raccoon'))
.to.eventually.be.rejected.and.be.an.instanceOf(Errors.ReadError)
.and.have.property('cause', error)
}) })
}) })
describe('copyObject', function () { describe('copyObject', function () {
it('Should open the source for reading', async function () { beforeEach(async function () {
await FSPersistor.copyObject(location, files[0], files[1]) await persistor.sendFile(location, files.wombat, localFilePath)
expect(fs.createReadStream).to.have.been.calledWith(
`${location}/${filteredFilenames[0]}`
)
}) })
it('Should open the target for writing', async function () { it('Should copy the file to the new location', async function () {
await FSPersistor.copyObject(location, files[0], files[1]) await persistor.copyObject(location, files.wombat, files.potato)
expect(fs.createWriteStream).to.have.been.calledWith( const contents = await fsPromises.readFile(
`${location}/${filteredFilenames[1]}` scenario.fsPath(files.potato)
)
})
it('Should pipe the source to the target', async function () {
await FSPersistor.copyObject(location, files[0], files[1])
expect(StreamPromises.pipeline).to.have.been.calledWith(
readStream,
writeStream
) )
expect(contents.equals(localFileContents)).to.be.true
}) })
}) })
describe('deleteObject', function () { describe('deleteObject', function () {
it('Should call unlink with correct options', async function () { beforeEach(async function () {
await FSPersistor.deleteObject(location, files[0]) await persistor.sendFile(location, files.wombat, localFilePath)
expect(fsPromises.unlink).to.have.been.calledWith( await fsPromises.access(scenario.fsPath(files.wombat))
`${location}/${filteredFilenames[0]}`
)
}) })
it('Should propagate the error', async function () { it('should delete the file', async function () {
fsPromises.unlink.rejects(error) await persistor.deleteObject(location, files.wombat)
await expect( await expect(
FSPersistor.deleteObject(location, files[0]) fsPromises.access(scenario.fsPath(files.wombat))
).to.eventually.be.rejected.and.have.property('cause', error) ).to.be.rejected
})
it("should ignore files that don't exist", async function () {
await persistor.deleteObject(location, 'does-not-exist')
}) })
}) })
describe('deleteDirectory', function () { describe('deleteDirectory', function () {
it('Should call glob with correct options', async function () { beforeEach(async function () {
await FSPersistor.deleteDirectory(location, files[0]) for (const file of Object.values(files)) {
expect(glob).to.have.been.calledWith( await persistor.sendFile(location, file, localFilePath)
`${location}/${filteredFilenames[0]}_*` await fsPromises.access(scenario.fsPath(file))
)
})
it('Should call unlink on the returned files', async function () {
await FSPersistor.deleteDirectory(location, files[0])
for (const filename of globs) {
expect(fsPromises.unlink).to.have.been.calledWith(filename)
} }
}) })
it('Should propagate the error', async function () { it('should delete all files under the directory', async function () {
glob.yields(error) await persistor.deleteDirectory(location, 'animals')
await expect( for (const file of [files.wombat, files.giraffe]) {
FSPersistor.deleteDirectory(location, files[0]) await expect(fsPromises.access(scenario.fsPath(file))).to.be
).to.eventually.be.rejected.and.have.property('cause', error) .rejected
}
})
      it('should not delete files under other directories', async function () {
await persistor.deleteDirectory(location, 'animals')
await fsPromises.access(scenario.fsPath(files.potato))
})
it("should ignore directories that don't exist", async function () {
await persistor.deleteDirectory(location, 'does-not-exist')
for (const file of Object.values(files)) {
await fsPromises.access(scenario.fsPath(file))
}
}) })
}) })
describe('checkIfObjectExists', function () { describe('checkIfObjectExists', function () {
const badFilename = 'pototo' beforeEach(async function () {
const noentError = new Error('not found') await persistor.sendFile(location, files.wombat, localFilePath)
noentError.code = 'ENOENT'
beforeEach(function () {
fsPromises.stat
.rejects(error)
.withArgs(`${location}/${filteredFilenames[0]}`)
.resolves({})
.withArgs(`${location}/${badFilename}`)
.rejects(noentError)
}) })
it('Should call stat with correct options', async function () { it('should return true for existing files', async function () {
await FSPersistor.checkIfObjectExists(location, files[0])
expect(fsPromises.stat).to.have.been.calledWith(
`${location}/${filteredFilenames[0]}`
)
})
it('Should return true for existing files', async function () {
expect( expect(
await FSPersistor.checkIfObjectExists(location, files[0]) await persistor.checkIfObjectExists(location, files.wombat)
).to.equal(true) ).to.equal(true)
}) })
it('Should return false for non-existing files', async function () { it('should return false for non-existing files', async function () {
expect( expect(
await FSPersistor.checkIfObjectExists(location, badFilename) await persistor.checkIfObjectExists(location, 'does-not-exist')
).to.equal(false) ).to.equal(false)
}) })
it('should wrap the error if there is a problem', async function () {
await expect(FSPersistor.checkIfObjectExists(location, 'llama'))
.to.eventually.be.rejected.and.be.an.instanceOf(Errors.ReadError)
.and.have.property('cause', error)
})
}) })
describe('directorySize', function () { describe('directorySize', function () {
it('should wrap the error', async function () { beforeEach(async function () {
glob.yields(error) for (const file of Object.values(files)) {
await expect(FSPersistor.directorySize(location, files[0])) await persistor.sendFile(location, file, localFilePath)
.to.eventually.be.rejected.and.be.an.instanceOf(Errors.ReadError) }
.and.include({ cause: error })
.and.have.property('info')
.which.includes({ location, name: files[0] })
})
it('should filter the directory name', async function () {
await FSPersistor.directorySize(location, files[0])
expect(glob).to.have.been.calledWith(
`${location}/${filteredFilenames[0]}_*`
)
}) })
it('should sum directory files size', async function () { it('should sum directory files size', async function () {
expect(await FSPersistor.directorySize(location, files[0])).to.equal( expect(await persistor.directorySize(location, 'animals')).to.equal(
stat.size * files.length 2 * localFileContents.length
) )
}) })
it('should return 0 on non-existing directories', async function () {
expect(
await persistor.directorySize(location, 'does-not-exist')
).to.equal(0)
}) })
}) })
})
}
})
function md5(str) {
return crypto.createHash('md5').update(str).digest('hex')
}
async function streamToBuffer(stream) {
const chunks = []
for await (const chunk of stream) {
chunks.push(chunk)
}
return Buffer.concat(chunks)
}

14
package-lock.json generated
View file

@ -330,6 +330,7 @@
"chai": "^4.3.6", "chai": "^4.3.6",
"chai-as-promised": "^7.1.1", "chai-as-promised": "^7.1.1",
"mocha": "^8.4.0", "mocha": "^8.4.0",
"mock-fs": "^5.2.0",
"mongodb": "^3.5.9", "mongodb": "^3.5.9",
"sandboxed-module": "^2.0.4", "sandboxed-module": "^2.0.4",
"sinon": "^9.2.4", "sinon": "^9.2.4",
@ -25199,9 +25200,9 @@
} }
}, },
"node_modules/mock-fs": { "node_modules/mock-fs": {
"version": "5.1.2", "version": "5.2.0",
"resolved": "https://registry.npmjs.org/mock-fs/-/mock-fs-5.1.2.tgz", "resolved": "https://registry.npmjs.org/mock-fs/-/mock-fs-5.2.0.tgz",
"integrity": "sha512-YkjQkdLulFrz0vD4BfNQdQRVmgycXTV7ykuHMlyv+C8WCHazpkiQRDthwa02kSyo8wKnY9wRptHfQLgmf0eR+A==", "integrity": "sha512-2dF2R6YMSZbpip1V1WHKGLNjr/k48uQClqMVb5H3MOvwc9qhYis3/IWbj02qIg/Y8MDXKFF4c5v0rxx2o6xTZw==",
"dev": true, "dev": true,
"engines": { "engines": {
"node": ">=12.0.0" "node": ">=12.0.0"
@ -47377,6 +47378,7 @@
"fast-crc32c": "https://github.com/overleaf/node-fast-crc32c/archive/aae6b2a4c7a7a159395df9cc6c38dfde702d6f51.tar.gz", "fast-crc32c": "https://github.com/overleaf/node-fast-crc32c/archive/aae6b2a4c7a7a159395df9cc6c38dfde702d6f51.tar.gz",
"glob": "^7.1.6", "glob": "^7.1.6",
"mocha": "^8.4.0", "mocha": "^8.4.0",
"mock-fs": "*",
"mongodb": "^3.5.9", "mongodb": "^3.5.9",
"node-uuid": "^1.4.8", "node-uuid": "^1.4.8",
"range-parser": "^1.2.1", "range-parser": "^1.2.1",
@ -64347,9 +64349,9 @@
} }
}, },
"mock-fs": { "mock-fs": {
"version": "5.1.2", "version": "5.2.0",
"resolved": "https://registry.npmjs.org/mock-fs/-/mock-fs-5.1.2.tgz", "resolved": "https://registry.npmjs.org/mock-fs/-/mock-fs-5.2.0.tgz",
"integrity": "sha512-YkjQkdLulFrz0vD4BfNQdQRVmgycXTV7ykuHMlyv+C8WCHazpkiQRDthwa02kSyo8wKnY9wRptHfQLgmf0eR+A==", "integrity": "sha512-2dF2R6YMSZbpip1V1WHKGLNjr/k48uQClqMVb5H3MOvwc9qhYis3/IWbj02qIg/Y8MDXKFF4c5v0rxx2o6xTZw==",
"dev": true "dev": true
}, },
"module-details-from-path": { "module-details-from-path": {