Merge pull request #86 from overleaf/spd-gcs-persistor

Add GCS Persistor

Commit 9a03a3406e: 26 changed files with 1864 additions and 351 deletions.
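For orientation before the hunks: this commit teaches the filestore a third storage backend. A hedged sketch of the environment that selects and configures it, using only variable names that appear in the settings and docker-compose hunks below (the values are illustrative, not documented defaults):

// Illustrative only; variable names are taken from this diff.
process.env.BACKEND = 'gcs' // routes PersistorManager to the new GcsPersistor
process.env.GCS_USER_FILES_BUCKET_NAME = 'user-files'
process.env.GCS_TEMPLATE_FILES_BUCKET_NAME = 'template-files'
process.env.GCS_PUBLIC_FILES_BUCKET_NAME = 'public-files'
// Optional: point the client at a fake GCS server, as the acceptance tests do.
process.env.GCS_API_ENDPOINT = 'gcs:9090'
process.env.GCS_API_SCHEME = 'http'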
@@ -61,6 +61,11 @@ app.delete(
   keyBuilder.userFileKeyMiddleware,
   fileController.deleteFile
 )
+app.delete(
+  '/project/:project_id',
+  keyBuilder.userProjectKeyMiddleware,
+  fileController.deleteProject
+)
 
 app.head(
   '/template/:template_id/v/:version/:format',
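The new route gives the filestore a project-level delete. A sketch of exercising it; the port (3009) is an assumption about where the filestore listens, not something this diff states, and the project id is an arbitrary 24-character hex value:

// Hypothetical client call; expect 204 on success, 400 for a malformed id.
const http = require('http')

const req = http.request(
  {
    method: 'DELETE',
    host: 'localhost',
    port: 3009, // assumed filestore port
    path: '/project/507f1f77bcf86cd799439011'
  },
  res => console.log(res.statusCode)
)
req.end()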
@@ -25,6 +25,7 @@ class ConversionsDisabledError extends BackwardCompatibleError {}
 class ConversionError extends BackwardCompatibleError {}
 class SettingsError extends BackwardCompatibleError {}
 class TimeoutError extends BackwardCompatibleError {}
+class InvalidParametersError extends BackwardCompatibleError {}
 
 class FailedCommandError extends OError {
   constructor(command, code, stdout, stderr) {
@@ -50,5 +51,6 @@ module.exports = {
   ConversionError,
   HealthCheckError,
   SettingsError,
-  TimeoutError
+  TimeoutError,
+  InvalidParametersError
 }
@@ -1,7 +1,6 @@
 const fs = require('fs')
 const glob = require('glob')
 const path = require('path')
-const rimraf = require('rimraf')
 const Stream = require('stream')
 const { promisify, callbackify } = require('util')
 
@@ -14,7 +13,6 @@ const fsUnlink = promisify(fs.unlink)
 const fsOpen = promisify(fs.open)
 const fsStat = promisify(fs.stat)
 const fsGlob = promisify(glob)
-const rmrf = promisify(rimraf)
 
 const filterName = key => key.replace(/\//g, '_')
 
@@ -146,7 +144,9 @@ async function deleteDirectory(location, name) {
   const filteredName = filterName(name.replace(/\/$/, ''))
 
   try {
-    await rmrf(`${location}/${filteredName}`)
+    await Promise.all(
+      (await fsGlob(`${location}/${filteredName}*`)).map(file => fsUnlink(file))
+    )
   } catch (err) {
     throw PersistorHelper.wrapError(
       err,
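Context for the rimraf-to-glob change: this persistor flattens keys by replacing '/' with '_' (filterName above), so a "directory" is really a filename prefix on disk and has to be removed by unlinking every matching file. A minimal demonstration of the flattening:

// Mirrors the filterName logic in the hunk above; the key is illustrative.
const filterName = key => key.replace(/\//g, '_')

console.log(filterName('507f1f77bcf86cd799439011/converted/'))
// => '507f1f77bcf86cd799439011_converted_'
// deleteDirectory now unlinks every file matching `${location}/${filteredName}*`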
@@ -13,6 +13,7 @@ module.exports = {
   insertFile,
   copyFile,
   deleteFile,
+  deleteProject,
   directorySize
 }
 
@@ -158,6 +159,25 @@ function deleteFile(req, res, next) {
   })
 }
 
+function deleteProject(req, res, next) {
+  metrics.inc('deleteProject')
+  const { key, bucket } = req
+
+  req.requestLogger.setMessage('deleting project')
+  req.requestLogger.addFields({ key, bucket })
+
+  FileHandler.deleteProject(bucket, key, function(err) {
+    if (err) {
+      if (err instanceof Errors.InvalidParametersError) {
+        return res.sendStatus(400)
+      }
+      next(err)
+    } else {
+      res.sendStatus(204)
+    }
+  })
+}
+
 function directorySize(req, res, next) {
   metrics.inc('projectSize')
   const { project_id: projectId, bucket } = req
@@ -5,11 +5,12 @@ const LocalFileWriter = require('./LocalFileWriter')
 const FileConverter = require('./FileConverter')
 const KeyBuilder = require('./KeyBuilder')
 const ImageOptimiser = require('./ImageOptimiser')
-const { ConversionError } = require('./Errors')
+const { ConversionError, InvalidParametersError } = require('./Errors')
 
 module.exports = {
   insertFile: callbackify(insertFile),
   deleteFile: callbackify(deleteFile),
+  deleteProject: callbackify(deleteProject),
   getFile: callbackify(getFile),
   getFileSize: callbackify(getFileSize),
   getDirectorySize: callbackify(getDirectorySize),
@@ -17,6 +18,7 @@ module.exports = {
     getFile,
     insertFile,
     deleteFile,
+    deleteProject,
     getFileSize,
     getDirectorySize
   }
@@ -24,18 +26,40 @@ module.exports = {
 
 async function insertFile(bucket, key, stream) {
   const convertedKey = KeyBuilder.getConvertedFolderKey(key)
+  if (!convertedKey.match(/^[0-9a-f]{24}\/[0-9a-f]{24}/i)) {
+    throw new InvalidParametersError({
+      message: 'key does not match validation regex',
+      info: { bucket, key, convertedKey }
+    })
+  }
   await PersistorManager.promises.deleteDirectory(bucket, convertedKey)
   await PersistorManager.promises.sendStream(bucket, key, stream)
 }
 
 async function deleteFile(bucket, key) {
   const convertedKey = KeyBuilder.getConvertedFolderKey(key)
+  if (!convertedKey.match(/^[0-9a-f]{24}\/[0-9a-f]{24}/i)) {
+    throw new InvalidParametersError({
+      message: 'key does not match validation regex',
+      info: { bucket, key, convertedKey }
+    })
+  }
   await Promise.all([
     PersistorManager.promises.deleteFile(bucket, key),
     PersistorManager.promises.deleteDirectory(bucket, convertedKey)
  ])
 }
 
+async function deleteProject(bucket, key) {
+  if (!key.match(/^[0-9a-f]{24}\//i)) {
+    throw new InvalidParametersError({
+      message: 'key does not match validation regex',
+      info: { bucket, key }
+    })
+  }
+  await PersistorManager.promises.deleteDirectory(bucket, key)
+}
+
 async function getFile(bucket, key, opts) {
   opts = opts || {}
   if (!opts.format && !opts.style) {
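The new validation regexes pin deletes to well-formed keys; project and file ids here are 24-character hex strings (Mongo ObjectId style, which is consistent with the patterns, though the diff itself does not say so). A self-contained check:

// The regexes are copied from the hunk above; ids are illustrative.
const fileKey = /^[0-9a-f]{24}\/[0-9a-f]{24}/i
const projectKey = /^[0-9a-f]{24}\//i

console.log(fileKey.test('507f1f77bcf86cd799439011/507f191e810c19729de860ea')) // true
console.log(projectKey.test('507f1f77bcf86cd799439011/')) // true
console.log(projectKey.test('not-a-project/')) // false -> InvalidParametersError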
services/filestore/app/js/GcsPersistor.js (new file, +288 lines)

const settings = require('settings-sharelatex')
const fs = require('fs')
const { promisify } = require('util')
const Stream = require('stream')
const { Storage } = require('@google-cloud/storage')
const { callbackify } = require('util')
const { WriteError, ReadError, NotFoundError } = require('./Errors')
const asyncPool = require('tiny-async-pool')
const PersistorHelper = require('./PersistorHelper')

const pipeline = promisify(Stream.pipeline)

// endpoint settings will be null by default except for tests
// that's OK - GCS uses the locally-configured service account by default
const storage = new Storage(settings.filestore.gcs.endpoint)
// workaround for broken uploads with custom endpoints:
// https://github.com/googleapis/nodejs-storage/issues/898
if (
  settings.filestore.gcs.endpoint &&
  settings.filestore.gcs.endpoint.apiEndpoint
) {
  storage.interceptors.push({
    request: function(reqOpts) {
      const url = new URL(reqOpts.uri)
      url.host = settings.filestore.gcs.endpoint.apiEndpoint
      if (settings.filestore.gcs.endpoint.apiScheme) {
        url.protocol = settings.filestore.gcs.endpoint.apiScheme
      }
      reqOpts.uri = url.toString()
      return reqOpts
    }
  })
}

const GcsPersistor = {
  sendFile: callbackify(sendFile),
  sendStream: callbackify(sendStream),
  getFileStream: callbackify(getFileStream),
  getFileMd5Hash: callbackify(getFileMd5Hash),
  deleteDirectory: callbackify(deleteDirectory),
  getFileSize: callbackify(getFileSize),
  deleteFile: callbackify(deleteFile),
  copyFile: callbackify(copyFile),
  checkIfFileExists: callbackify(checkIfFileExists),
  directorySize: callbackify(directorySize),
  promises: {
    sendFile,
    sendStream,
    getFileStream,
    getFileMd5Hash,
    deleteDirectory,
    getFileSize,
    deleteFile,
    copyFile,
    checkIfFileExists,
    directorySize
  }
}

module.exports = GcsPersistor

async function sendFile(bucketName, key, fsPath) {
  return sendStream(bucketName, key, fs.createReadStream(fsPath))
}

async function sendStream(bucketName, key, readStream, sourceMd5) {
  try {
    let hashPromise

    // if there is no supplied md5 hash, we calculate the hash as the data passes through
    if (!sourceMd5) {
      hashPromise = PersistorHelper.calculateStreamMd5(readStream)
    }

    const meteredStream = PersistorHelper.getMeteredStream(
      readStream,
      'gcs.egress' // egress from us to gcs
    )

    const writeOptions = {
      // disabling of resumable uploads is recommended by Google:
      resumable: false
    }

    if (sourceMd5) {
      writeOptions.validation = 'md5'
      writeOptions.metadata = {
        md5Hash: PersistorHelper.hexToBase64(sourceMd5)
      }
    }

    const uploadStream = storage
      .bucket(bucketName)
      .file(key)
      .createWriteStream(writeOptions)

    await pipeline(meteredStream, uploadStream)

    // if we didn't have an md5 hash, we should compare our computed one with Google's
    // as we couldn't tell GCS about it beforehand
    if (hashPromise) {
      sourceMd5 = await hashPromise
      // throws on mismatch
      await PersistorHelper.verifyMd5(GcsPersistor, bucketName, key, sourceMd5)
    }
  } catch (err) {
    throw PersistorHelper.wrapError(
      err,
      'upload to GCS failed',
      { bucketName, key },
      WriteError
    )
  }
}

async function getFileStream(bucketName, key, _opts = {}) {
  const opts = Object.assign({}, _opts)
  if (opts.end) {
    // S3 (and http range headers) treat 'end' as inclusive, so increase this by 1
    opts.end++
  }
  const stream = storage
    .bucket(bucketName)
    .file(key)
    .createReadStream(opts)

  const meteredStream = PersistorHelper.getMeteredStream(
    stream,
    'gcs.ingress' // ingress to us from gcs
  )

  try {
    await PersistorHelper.waitForStreamReady(stream)
    return meteredStream
  } catch (err) {
    throw PersistorHelper.wrapError(
      err,
      'error reading file from GCS',
      { bucketName, key, opts },
      ReadError
    )
  }
}

async function getFileSize(bucketName, key) {
  try {
    const [metadata] = await storage
      .bucket(bucketName)
      .file(key)
      .getMetadata()
    return metadata.size
  } catch (err) {
    throw PersistorHelper.wrapError(
      err,
      'error getting size of GCS object',
      { bucketName, key },
      ReadError
    )
  }
}

async function getFileMd5Hash(bucketName, key) {
  try {
    const [metadata] = await storage
      .bucket(bucketName)
      .file(key)
      .getMetadata()
    return PersistorHelper.base64ToHex(metadata.md5Hash)
  } catch (err) {
    throw PersistorHelper.wrapError(
      err,
      'error getting hash of GCS object',
      { bucketName, key },
      ReadError
    )
  }
}

async function deleteFile(bucketName, key) {
  try {
    const file = storage.bucket(bucketName).file(key)

    if (settings.filestore.gcs.deletedBucketSuffix) {
      await file.copy(
        storage
          .bucket(`${bucketName}${settings.filestore.gcs.deletedBucketSuffix}`)
          .file(`${key}-${new Date().toISOString()}`)
      )
    }
    if (settings.filestore.gcs.unlockBeforeDelete) {
      await file.setMetadata({ eventBasedHold: false })
    }
    await file.delete()
  } catch (err) {
    const error = PersistorHelper.wrapError(
      err,
      'error deleting GCS object',
      { bucketName, key },
      WriteError
    )
    if (!(error instanceof NotFoundError)) {
      throw error
    }
  }
}

async function deleteDirectory(bucketName, key) {
  try {
    const [files] = await storage
      .bucket(bucketName)
      .getFiles({ directory: key })

    await asyncPool(
      settings.filestore.gcs.deleteConcurrency,
      files,
      async file => {
        await deleteFile(bucketName, file.name)
      }
    )
  } catch (err) {
    const error = PersistorHelper.wrapError(
      err,
      'failed to delete directory in GCS',
      { bucketName, key },
      WriteError
    )
    if (error instanceof NotFoundError) {
      return
    }
    throw error
  }
}

async function directorySize(bucketName, key) {
  let files

  try {
    const [response] = await storage
      .bucket(bucketName)
      .getFiles({ directory: key })
    files = response
  } catch (err) {
    throw PersistorHelper.wrapError(
      err,
      'failed to list objects in GCS',
      { bucketName, key },
      ReadError
    )
  }

  return files.reduce((acc, file) => Number(file.metadata.size) + acc, 0)
}

async function checkIfFileExists(bucketName, key) {
  try {
    const [response] = await storage
      .bucket(bucketName)
      .file(key)
      .exists()
    return response
  } catch (err) {
    throw PersistorHelper.wrapError(
      err,
      'error checking if file exists in GCS',
      { bucketName, key },
      ReadError
    )
  }
}

async function copyFile(bucketName, sourceKey, destKey) {
  try {
    const src = storage.bucket(bucketName).file(sourceKey)
    const dest = storage.bucket(bucketName).file(destKey)
    await src.copy(dest)
  } catch (err) {
    // fake-gcs-server has a bug that returns an invalid response when the file does not exist
    if (err.message === 'Cannot parse response as JSON: not found\n') {
      err.code = 404
    }
    throw PersistorHelper.wrapError(
      err,
      'failed to copy file in GCS',
      { bucketName, sourceKey, destKey },
      WriteError
    )
  }
}
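A hedged usage sketch of the new persistor's promise API. It assumes ambient GCS credentials (the settings comment later in this diff points to GOOGLE_APPLICATION_CREDENTIALS); the bucket, key, and local path are illustrative, not values from the commit:

const GcsPersistor = require('./GcsPersistor')

async function roundTrip() {
  const key = '507f1f77bcf86cd799439011/507f191e810c19729de860ea' // illustrative ids
  await GcsPersistor.promises.sendFile('user-files', key, '/tmp/example.txt')
  const size = await GcsPersistor.promises.getFileSize('user-files', key)
  console.log(`stored ${size} bytes`)
}

roundTrip().catch(console.error)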
@@ -4,6 +4,7 @@ module.exports = {
   getConvertedFolderKey,
   addCachingToKey,
   userFileKeyMiddleware,
+  userProjectKeyMiddleware,
   publicFileKeyMiddleware,
   publicProjectKeyMiddleware,
   bucketFileKeyMiddleware,
@@ -37,6 +38,13 @@ function userFileKeyMiddleware(req, res, next) {
   next()
 }
 
+function userProjectKeyMiddleware(req, res, next) {
+  const { project_id: projectId } = req.params
+  req.key = `${projectId}/`
+  req.bucket = settings.filestore.stores.user_files
+  next()
+}
+
 function publicFileKeyMiddleware(req, res, next) {
   if (settings.filestore.stores.public_files == null) {
     return res.status(501).send('public files not available')
@@ -1,4 +1,5 @@
 const crypto = require('crypto')
+const metrics = require('metrics-sharelatex')
 const meter = require('stream-meter')
 const Stream = require('stream')
 const logger = require('logger-sharelatex')
@@ -12,7 +13,9 @@ module.exports = {
   verifyMd5,
   getMeteredStream,
   waitForStreamReady,
-  wrapError
+  wrapError,
+  hexToBase64,
+  base64ToHex
 }
 
 // returns a promise which resolves with the md5 hash of the stream
@@ -52,16 +55,16 @@ async function verifyMd5(persistor, bucket, key, sourceMd5, destMd5 = null) {
 
 // returns the next stream in the pipeline, and calls the callback with the byte count
 // when the stream finishes or receives an error
-function getMeteredStream(stream, callback) {
+function getMeteredStream(stream, metricName) {
   const meteredStream = meter()
 
   pipeline(stream, meteredStream)
     .then(() => {
-      callback(null, meteredStream.bytes)
+      metrics.count(metricName, meteredStream.bytes)
     })
-    .catch(err => {
+    .catch(() => {
       // on error, just send how many bytes we received before the stream stopped
-      callback(err, meteredStream.bytes)
+      metrics.count(metricName, meteredStream.bytes)
     })
 
   return meteredStream
@@ -90,7 +93,8 @@ function wrapError(error, message, params, ErrorType) {
     error instanceof NotFoundError ||
     ['NoSuchKey', 'NotFound', 404, 'AccessDenied', 'ENOENT'].includes(
       error.code
-    )
+    ) ||
+    (error.response && error.response.statusCode === 404)
   ) {
     return new NotFoundError({
       message: 'no such file',
@@ -103,3 +107,11 @@ function wrapError(error, message, params, ErrorType) {
     }).withCause(error)
   }
 }
+
+function base64ToHex(base64) {
+  return Buffer.from(base64, 'base64').toString('hex')
+}
+
+function hexToBase64(hex) {
+  return Buffer.from(hex, 'hex').toString('base64')
+}
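The hash helpers centralised into PersistorHelper are plain Buffer conversions: GCS and S3 report MD5 hashes base64-encoded, while the codebase compares them as hex. A self-contained round-trip check:

// Same bodies as the functions added above; the sample hash is MD5('').
function hexToBase64(hex) {
  return Buffer.from(hex, 'hex').toString('base64')
}
function base64ToHex(base64) {
  return Buffer.from(base64, 'base64').toString('hex')
}

const md5 = 'd41d8cd98f00b204e9800998ecf8427e'
console.log(base64ToHex(hexToBase64(md5)) === md5) // true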
@@ -19,6 +19,8 @@ function getPersistor(backend) {
       return require('./S3Persistor')
     case 'fs':
       return require('./FSPersistor')
+    case 'gcs':
+      return require('./GcsPersistor')
     default:
       throw new Error(`unknown filestore backend: ${backend}`)
   }
@@ -4,7 +4,6 @@ http.globalAgent.maxSockets = 300
 https.globalAgent.maxSockets = 300
 
 const settings = require('settings-sharelatex')
-const metrics = require('metrics-sharelatex')
 
 const PersistorHelper = require('./PersistorHelper')
 
@@ -46,23 +45,8 @@ const S3Persistor = {
 
 module.exports = S3Persistor
 
-function hexToBase64(hex) {
-  return Buffer.from(hex, 'hex').toString('base64')
-}
-
 async function sendFile(bucketName, key, fsPath) {
-  let readStream
-  try {
-    readStream = fs.createReadStream(fsPath)
-  } catch (err) {
-    throw PersistorHelper.wrapError(
-      err,
-      'error reading file from disk',
-      { bucketName, key, fsPath },
-      ReadError
-    )
-  }
-  return sendStream(bucketName, key, readStream)
+  return sendStream(bucketName, key, fs.createReadStream(fsPath))
 }
 
 async function sendStream(bucketName, key, readStream, sourceMd5) {
@@ -72,17 +56,14 @@ async function sendStream(bucketName, key, readStream, sourceMd5) {
   let b64Hash
 
   if (sourceMd5) {
-    b64Hash = hexToBase64(sourceMd5)
+    b64Hash = PersistorHelper.hexToBase64(sourceMd5)
   } else {
     hashPromise = PersistorHelper.calculateStreamMd5(readStream)
   }
 
   const meteredStream = PersistorHelper.getMeteredStream(
     readStream,
-    (_, byteCount) => {
-      // ignore the error parameter and just log the byte count
-      metrics.count('s3.egress', byteCount)
-    }
+    's3.egress' // egress from us to s3
   )
 
   // if we have an md5 hash, pass this to S3 to verify the upload
@@ -149,10 +130,7 @@ async function getFileStream(bucketName, key, opts) {
 
   const meteredStream = PersistorHelper.getMeteredStream(
     stream,
-    (_, byteCount) => {
-      // ignore the error parameter and just log the byte count
-      metrics.count('s3.ingress', byteCount)
-    }
+    's3.ingress' // ingress to us from s3
   )
 
   try {
@@ -31,8 +31,19 @@ settings =
     # Choices are
     # s3 - Amazon S3
     # fs - local filesystem
+    # gcs - Google Cloud Storage
     backend: process.env['BACKEND']
+
+    gcs:
+      endpoint:
+        if process.env['GCS_API_ENDPOINT']
+          apiEndpoint: process.env['GCS_API_ENDPOINT']
+          apiScheme: process.env['GCS_API_SCHEME']
+          projectId: process.env['GCS_PROJECT_ID']
+      unlockBeforeDelete: process.env['GCS_UNLOCK_BEFORE_DELETE'] == "true" # unlock an event-based hold before deleting. default false
+      deletedBucketSuffix: process.env['GCS_DELETED_BUCKET_SUFFIX'] # if present, copy file to another bucket on delete. default null
+      deleteConcurrency: parseInt(process.env['GCS_DELETE_CONCURRENCY']) || 50
 
     s3:
       if process.env['AWS_ACCESS_KEY_ID']? or process.env['S3_BUCKET_CREDENTIALS']?
         key: process.env['AWS_ACCESS_KEY_ID']
@@ -41,6 +52,9 @@ settings =
       pathStyle: process.env['AWS_S3_PATH_STYLE']
       partSize: process.env['AWS_S3_PARTSIZE'] or (100 * 1024 * 1024)
 
+    # GCS should be configured by the service account on the kubernetes pod. See GOOGLE_APPLICATION_CREDENTIALS,
+    # which will be picked up automatically.
+
     stores:
       user_files: process.env['USER_FILES_BUCKET_NAME']
       template_files: process.env['TEMPLATE_FILES_BUCKET_NAME']
@@ -67,7 +81,7 @@ settings =
 
   sentry:
     dsn: process.env.SENTRY_DSN
 
   # Filestore health check
   # ----------------------
   # Project and file details to check in persistor when calling /health_check
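For reference, the CoffeeScript above yields roughly this settings shape (a sketch with illustrative values; endpoint stays undefined unless GCS_API_ENDPOINT is set):

// Sketch of settings.filestore.gcs as consumed by GcsPersistor above.
const settings = {
  filestore: {
    backend: 'gcs',
    gcs: {
      endpoint: { apiEndpoint: 'gcs:9090', apiScheme: 'http', projectId: 'fake' },
      unlockBeforeDelete: false, // GCS_UNLOCK_BEFORE_DELETE
      deletedBucketSuffix: undefined, // GCS_DELETED_BUCKET_SUFFIX: optional soft-delete bucket
      deleteConcurrency: 50 // GCS_DELETE_CONCURRENCY
    }
  }
}
console.log(settings.filestore.gcs.deleteConcurrency)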
@@ -22,10 +22,6 @@ services:
       REDIS_HOST: redis
       MONGO_HOST: mongo
       POSTGRES_HOST: postgres
-      AWS_S3_ENDPOINT: http://s3:9090
-      AWS_S3_PATH_STYLE: 'true'
-      AWS_ACCESS_KEY_ID: fake
-      AWS_SECRET_ACCESS_KEY: fake
       MOCHA_GREP: ${MOCHA_GREP}
       NODE_ENV: test
       ENABLE_CONVERSIONS: "true"
@@ -33,9 +29,21 @@ services:
       AWS_S3_USER_FILES_BUCKET_NAME: fake_user_files
       AWS_S3_TEMPLATE_FILES_BUCKET_NAME: fake_template_files
       AWS_S3_PUBLIC_FILES_BUCKET_NAME: fake_public_files
+      AWS_S3_ENDPOINT: http://s3:9090
+      AWS_ACCESS_KEY_ID: fake
+      AWS_SECRET_ACCESS_KEY: fake
+      AWS_S3_PATH_STYLE: 'true'
+      GCS_API_ENDPOINT: gcs:9090
+      GCS_API_SCHEME: http
+      GCS_USER_FILES_BUCKET_NAME: fake_userfiles
+      GCS_TEMPLATE_FILES_BUCKET_NAME: fake_templatefiles
+      GCS_PUBLIC_FILES_BUCKET_NAME: fake_publicfiles
+      STORAGE_EMULATOR_HOST: http://gcs:9090/storage/v1
     depends_on:
       s3:
         condition: service_healthy
+      gcs:
+        condition: service_healthy
     user: node
     command: npm run test:acceptance:_run
 
@@ -48,8 +56,13 @@ services:
     command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs .
     user: root
   s3:
-    image: adobe/s3mock
+    build:
+      context: test/acceptance/deps
+      dockerfile: Dockerfile.s3mock
     environment:
-      - initialBuckets=fake_user_files,fake_template_files,fake_public_files,bucket
-    healthcheck:
-      test: ["CMD", "curl", "-f", "http://localhost:9090"]
+      - initialBuckets=fake_user_files,fake_template_files,fake_public_files
+  gcs:
+    build:
+      context: test/acceptance/deps
+      dockerfile: Dockerfile.fake-gcs
@@ -31,10 +31,6 @@ services:
       REDIS_HOST: redis
       MONGO_HOST: mongo
       POSTGRES_HOST: postgres
-      AWS_S3_ENDPOINT: http://s3:9090
-      AWS_S3_PATH_STYLE: 'true'
-      AWS_ACCESS_KEY_ID: fake
-      AWS_SECRET_ACCESS_KEY: fake
       MOCHA_GREP: ${MOCHA_GREP}
       LOG_LEVEL: ERROR
       NODE_ENV: test
@@ -43,15 +39,32 @@ services:
       AWS_S3_USER_FILES_BUCKET_NAME: fake_user_files
       AWS_S3_TEMPLATE_FILES_BUCKET_NAME: fake_template_files
       AWS_S3_PUBLIC_FILES_BUCKET_NAME: fake_public_files
+      AWS_S3_ENDPOINT: http://s3:9090
+      AWS_S3_PATH_STYLE: 'true'
+      AWS_ACCESS_KEY_ID: fake
+      AWS_SECRET_ACCESS_KEY: fake
+      GCS_API_ENDPOINT: gcs:9090
+      GCS_API_SCHEME: http
+      GCS_USER_FILES_BUCKET_NAME: fake_userfiles
+      GCS_TEMPLATE_FILES_BUCKET_NAME: fake_templatefiles
+      GCS_PUBLIC_FILES_BUCKET_NAME: fake_publicfiles
+      STORAGE_EMULATOR_HOST: http://gcs:9090/storage/v1
     user: node
     depends_on:
       s3:
         condition: service_healthy
+      gcs:
+        condition: service_healthy
     command: npm run test:acceptance
 
   s3:
-    image: adobe/s3mock
+    build:
+      context: test/acceptance/deps
+      dockerfile: Dockerfile.s3mock
     environment:
-      - initialBuckets=fake_user_files,fake_template_files,fake_public_files,bucket
-    healthcheck:
-      test: ["CMD", "curl", "-f", "http://localhost:9090"]
+      - initialBuckets=fake_user_files,fake_template_files,fake_public_files
+  gcs:
+    build:
+      context: test/acceptance/deps
+      dockerfile: Dockerfile.fake-gcs
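The compose files drive the acceptance tests against fake-gcs-server through STORAGE_EMULATOR_HOST, which the @google-cloud/storage client honours. A local sketch; 'localhost' replaces the in-network 'gcs' hostname and is an assumption about where the emulator is exposed:

// Assumes a fake GCS server listening on localhost:9090, as in the compose files.
process.env.STORAGE_EMULATOR_HOST = 'http://localhost:9090/storage/v1'
const { Storage } = require('@google-cloud/storage')
const storage = new Storage({ projectId: 'fake' })
storage
  .bucket('fake_userfiles')
  .getFiles()
  .then(([files]) => console.log(files.length))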
services/filestore/package-lock.json (generated, 324 lines changed)

@@ -603,6 +603,52 @@
       "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-1.0.4.tgz",
       "integrity": "sha512-VccZDcOql77obTnFh0TbNED/6ZbbmHDf8UMNnzO1d5g9V0Htfm4k5cllY8P1tJsRKC3zWYGRLaViiupcgVjBoQ=="
     },
+    "@google-cloud/storage": {
+      "version": "4.4.0",
+      "resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-4.4.0.tgz",
+      "integrity": "sha512-R64ey4dLIG3IgiKw0CL5MdZ4ZtZdGhN75171vjiL+ioZG+hlLFkjsrCTRuIdE35v42nNe5nXmVhBHQQTuPozHA==",
+      "requires": {
+        "@google-cloud/common": "^2.1.1",
+        "@google-cloud/paginator": "^2.0.0",
+        "@google-cloud/promisify": "^1.0.0",
+        "arrify": "^2.0.0",
+        "compressible": "^2.0.12",
+        "concat-stream": "^2.0.0",
+        "date-and-time": "^0.12.0",
+        "duplexify": "^3.5.0",
+        "extend": "^3.0.2",
+        "gaxios": "^2.0.1",
+        "gcs-resumable-upload": "^2.2.4",
+        "hash-stream-validation": "^0.2.2",
+        "mime": "^2.2.0",
+        "mime-types": "^2.0.8",
+        "onetime": "^5.1.0",
+        "p-limit": "^2.2.0",
+        "pumpify": "^2.0.0",
+        "readable-stream": "^3.4.0",
+        "snakeize": "^0.1.0",
+        "stream-events": "^1.0.1",
+        "through2": "^3.0.0",
+        "xdg-basedir": "^4.0.0"
+      },
+      "dependencies": {
+        "mime": {
+          "version": "2.4.4",
+          "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.4.tgz",
+          "integrity": "sha512-LRxmNwziLPT828z+4YkNzloCFC2YM4wrB99k+AV5ZbEyfGNWfG8SO1FUXLmLDBSo89NrJZ4DIWeLjy1CHGhMGA=="
+        },
+        "readable-stream": {
+          "version": "3.6.0",
+          "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz",
+          "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==",
+          "requires": {
+            "inherits": "^2.0.3",
+            "string_decoder": "^1.1.1",
+            "util-deprecate": "^1.0.1"
+          }
+        }
+      }
+    },
     "@google-cloud/trace-agent": {
       "version": "3.6.1",
       "resolved": "https://registry.npmjs.org/@google-cloud/trace-agent/-/trace-agent-3.6.1.tgz",
@@ -1369,6 +1415,16 @@
       "resolved": "https://registry.npmjs.org/bintrees/-/bintrees-1.0.1.tgz",
       "integrity": "sha512-tbaUB1QpTIj4cKY8c1rvNAvEQXA+ekzHmbe4jzNfW3QWsF9GnnP/BRWyl6/qqS53heoYJ93naaFcm/jooONH8g=="
     },
+    "bl": {
+      "version": "2.2.0",
+      "resolved": "https://registry.npmjs.org/bl/-/bl-2.2.0.tgz",
+      "integrity": "sha512-wbgvOpqopSr7uq6fJrLH8EsvYMJf9gzfo2jCsL2eTy75qXPukA4pCgHamOQkZtY5vmfVtjB+P3LNlMHW5CEZXA==",
+      "dev": true,
+      "requires": {
+        "readable-stream": "^2.3.5",
+        "safe-buffer": "^5.1.1"
+      }
+    },
     "body-parser": {
       "version": "1.19.0",
       "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.19.0.tgz",
@@ -1407,6 +1463,12 @@
       "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==",
       "dev": true
     },
+    "bson": {
+      "version": "1.1.3",
+      "resolved": "https://registry.npmjs.org/bson/-/bson-1.1.3.tgz",
+      "integrity": "sha512-TdiJxMVnodVS7r0BdL42y/pqC9cL2iKynVwA0Ho3qbsQYr428veL3l7BQyuqiw+Q5SqqoT0m4srSY/BlZ9AxXg==",
+      "dev": true
+    },
     "buffer": {
       "version": "4.9.1",
       "resolved": "https://registry.npmjs.org/buffer/-/buffer-4.9.1.tgz",
@@ -1422,6 +1484,11 @@
       "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz",
       "integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA=="
     },
+    "buffer-from": {
+      "version": "1.1.1",
+      "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.1.tgz",
+      "integrity": "sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A=="
+    },
     "builtin-modules": {
      "version": "3.1.0",
      "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.1.0.tgz",
@@ -1608,11 +1675,55 @@
       "integrity": "sha512-6P6g0uetGpW/sdyUy/iQQCbFF0kWVMSIVSyYz7Zgjcgh8mgw8PQzDNZeyZ5DQ2gM7LBoZPHmnjz8rUthkBG5tw==",
       "dev": true
     },
+    "compressible": {
+      "version": "2.0.18",
+      "resolved": "https://registry.npmjs.org/compressible/-/compressible-2.0.18.tgz",
+      "integrity": "sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg==",
+      "requires": {
+        "mime-db": ">= 1.43.0 < 2"
+      }
+    },
     "concat-map": {
       "version": "0.0.1",
       "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
       "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg=="
     },
+    "concat-stream": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-2.0.0.tgz",
+      "integrity": "sha512-MWufYdFw53ccGjCA+Ol7XJYpAlW6/prSMzuPOTRnJGcGzuhLn4Scrz7qf6o8bROZ514ltazcIFJZevcfbo0x7A==",
+      "requires": {
+        "buffer-from": "^1.0.0",
+        "inherits": "^2.0.3",
+        "readable-stream": "^3.0.2",
+        "typedarray": "^0.0.6"
+      },
+      "dependencies": {
+        "readable-stream": {
+          "version": "3.6.0",
+          "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz",
+          "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==",
+          "requires": {
+            "inherits": "^2.0.3",
+            "string_decoder": "^1.1.1",
+            "util-deprecate": "^1.0.1"
+          }
+        }
+      }
+    },
+    "configstore": {
+      "version": "5.0.1",
+      "resolved": "https://registry.npmjs.org/configstore/-/configstore-5.0.1.tgz",
+      "integrity": "sha512-aMKprgk5YhBNyH25hj8wGt2+D52Sw1DRRIzqBwLp2Ya9mFmY8KPvvtvmna8SxVR9JMZ4kzMD68N22vlaRpkeFA==",
+      "requires": {
+        "dot-prop": "^5.2.0",
+        "graceful-fs": "^4.1.2",
+        "make-dir": "^3.0.0",
+        "unique-string": "^2.0.0",
+        "write-file-atomic": "^3.0.0",
+        "xdg-basedir": "^4.0.0"
+      }
+    },
     "console-log-level": {
       "version": "1.4.1",
       "resolved": "https://registry.npmjs.org/console-log-level/-/console-log-level-1.4.1.tgz",
@@ -1688,6 +1799,11 @@
         }
       }
     },
+    "crypto-random-string": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/crypto-random-string/-/crypto-random-string-2.0.0.tgz",
+      "integrity": "sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA=="
+    },
     "d64": {
       "version": "1.0.0",
       "resolved": "https://registry.npmjs.org/d64/-/d64-1.0.0.tgz",
@@ -1701,6 +1817,11 @@
         "assert-plus": "^1.0.0"
       }
     },
+    "date-and-time": {
+      "version": "0.12.0",
+      "resolved": "https://registry.npmjs.org/date-and-time/-/date-and-time-0.12.0.tgz",
+      "integrity": "sha512-n2RJIAp93AucgF/U/Rz5WRS2Hjg5Z+QxscaaMCi6pVZT1JpJKRH+C08vyH/lRR1kxNXnPxgo3lWfd+jCb/UcuQ=="
+    },
     "debug": {
       "version": "2.6.9",
       "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
@@ -1749,6 +1870,12 @@
       "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
       "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ=="
     },
+    "denque": {
+      "version": "1.4.1",
+      "resolved": "https://registry.npmjs.org/denque/-/denque-1.4.1.tgz",
+      "integrity": "sha512-OfzPuSZKGcgr96rf1oODnfjqBFmr1DVoc/TrItj3Ohe0Ah1C5WX5Baquw/9U9KovnQ88EqmJbD66rKYUQYN1tQ==",
+      "dev": true
+    },
     "depd": {
       "version": "1.1.2",
       "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz",
@@ -2525,6 +2652,19 @@
         "json-bigint": "^0.3.0"
       }
     },
+    "gcs-resumable-upload": {
+      "version": "2.3.2",
+      "resolved": "https://registry.npmjs.org/gcs-resumable-upload/-/gcs-resumable-upload-2.3.2.tgz",
+      "integrity": "sha512-OPS0iAmPCV+r7PziOIhyxmQOzsazFCy76yYDOS/Z80O/7cuny1KMfqDQa2T0jLaL8EreTU7EMZG5pUuqBKgzHA==",
+      "requires": {
+        "abort-controller": "^3.0.0",
+        "configstore": "^5.0.0",
+        "gaxios": "^2.0.0",
+        "google-auth-library": "^5.0.0",
+        "pumpify": "^2.0.0",
+        "stream-events": "^1.0.4"
+      }
+    },
     "get-caller-file": {
       "version": "2.0.5",
       "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz",
@@ -2628,8 +2768,7 @@
     "graceful-fs": {
       "version": "4.2.3",
       "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.3.tgz",
-      "integrity": "sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ==",
-      "dev": true
+      "integrity": "sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ=="
     },
     "growl": {
       "version": "1.10.5",
@@ -2707,6 +2846,25 @@
       "integrity": "sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==",
       "dev": true
     },
+    "hash-stream-validation": {
+      "version": "0.2.2",
+      "resolved": "https://registry.npmjs.org/hash-stream-validation/-/hash-stream-validation-0.2.2.tgz",
+      "integrity": "sha512-cMlva5CxWZOrlS/cY0C+9qAzesn5srhFA8IT1VPiHc9bWWBLkJfEUIZr7MWoi89oOOGmpg8ymchaOjiArsGu5A==",
+      "requires": {
+        "through2": "^2.0.0"
+      },
+      "dependencies": {
+        "through2": {
+          "version": "2.0.5",
+          "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz",
+          "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==",
+          "requires": {
+            "readable-stream": "~2.3.6",
+            "xtend": "~4.0.1"
+          }
+        }
+      }
+    },
     "he": {
       "version": "1.1.1",
       "resolved": "https://registry.npmjs.org/he/-/he-1.1.1.tgz",
@@ -2827,8 +2985,7 @@
     "imurmurhash": {
       "version": "0.1.4",
       "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz",
-      "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==",
-      "dev": true
+      "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA=="
     },
     "indent-string": {
       "version": "4.0.0",
@@ -3330,6 +3487,14 @@
         "statsd-parser": "~0.0.4"
       }
     },
+    "make-dir": {
+      "version": "3.0.2",
+      "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.0.2.tgz",
+      "integrity": "sha512-rYKABKutXa6vXTXhoV18cBE7PaewPXHe/Bdq4v+ZLMhxbWApkFFplT0LcbMW+6BbjnQXzZ/sAvSE/JdguApG5w==",
+      "requires": {
+        "semver": "^6.0.0"
+      }
+    },
     "make-plural": {
       "version": "4.3.0",
       "resolved": "https://registry.npmjs.org/make-plural/-/make-plural-4.3.0.tgz",
@@ -3358,6 +3523,13 @@
       "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz",
       "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ=="
     },
+    "memory-pager": {
+      "version": "1.5.0",
+      "resolved": "https://registry.npmjs.org/memory-pager/-/memory-pager-1.5.0.tgz",
+      "integrity": "sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg==",
+      "dev": true,
+      "optional": true
+    },
     "merge-descriptors": {
       "version": "1.0.1",
       "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz",
@@ -3432,8 +3604,7 @@
     "mimic-fn": {
       "version": "2.1.0",
       "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz",
-      "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==",
-      "dev": true
+      "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg=="
     },
     "minimatch": {
       "version": "3.0.4",
@@ -3511,6 +3682,20 @@
       "integrity": "sha512-bV7f+6l2QigeBBZSM/6yTNq4P2fNpSWj/0e7jQcy87A8e7o2nAfP/34/2ky5Vw4B9S446EtIhodAzkFCcR4dQg==",
       "optional": true
     },
+    "mongodb": {
+      "version": "3.5.4",
+      "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-3.5.4.tgz",
+      "integrity": "sha512-xGH41Ig4dkSH5ROGezkgDbsgt/v5zbNUwE3TcFsSbDc6Qn3Qil17dhLsESSDDPTiyFDCPJRpfd4887dtsPgKtA==",
+      "dev": true,
+      "requires": {
+        "bl": "^2.2.0",
+        "bson": "^1.1.1",
+        "denque": "^1.4.1",
+        "require_optional": "^1.0.1",
+        "safe-buffer": "^5.1.2",
+        "saslprep": "^1.0.0"
+      }
+    },
     "ms": {
       "version": "2.0.0",
       "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
@@ -3720,7 +3905,6 @@
       "version": "5.1.0",
       "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.0.tgz",
       "integrity": "sha512-5NcSkPHhwTVFIQN+TUqXoS5+dlElHXdpAWu9I0HP20YOtIi+aZ0Ct82jdlILDxjLEAWwvm+qj1m6aEtsDVmm6Q==",
-      "dev": true,
       "requires": {
         "mimic-fn": "^2.1.0"
       }
@@ -4798,6 +4982,30 @@
       "integrity": "sha512-AKGr4qvHiryxRb19m3PsLRGuKVAbJLUD7E6eOaHkfKhwc+vSgVOCY5xNvm9EkolBKTOf0GrQAZKLimOCz81Khg==",
       "dev": true
     },
+    "require_optional": {
+      "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/require_optional/-/require_optional-1.0.1.tgz",
+      "integrity": "sha512-qhM/y57enGWHAe3v/NcwML6a3/vfESLe/sGM2dII+gEO0BpKRUkWZow/tyloNqJyN6kXSl3RyyM8Ll5D/sJP8g==",
+      "dev": true,
+      "requires": {
+        "resolve-from": "^2.0.0",
+        "semver": "^5.1.0"
+      },
+      "dependencies": {
+        "resolve-from": {
+          "version": "2.0.0",
+          "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-2.0.0.tgz",
+          "integrity": "sha1-lICrIOlP+h2egKgEx+oUdhGWa1c=",
+          "dev": true
+        },
+        "semver": {
+          "version": "5.7.1",
+          "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz",
+          "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==",
+          "dev": true
+        }
+      }
+    },
     "resolve": {
       "version": "1.15.1",
       "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.15.1.tgz",
@@ -4851,11 +5059,6 @@
         }
       }
     },
-    "rimraf": {
-      "version": "2.2.8",
-      "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.2.8.tgz",
-      "integrity": "sha512-R5KMKHnPAQaZMqLOsyuyUmcIjSeDm+73eoqQpaXA7AZ22BL+6C+1mcUscgOsNd8WVlJuvlgAPsegcx7pjlV0Dg=="
-    },
     "run-async": {
       "version": "2.4.0",
       "resolved": "https://registry.npmjs.org/run-async/-/run-async-2.4.0.tgz",
@@ -4900,6 +5103,16 @@
         "stack-trace": "0.0.9"
       }
     },
+    "saslprep": {
+      "version": "1.0.3",
+      "resolved": "https://registry.npmjs.org/saslprep/-/saslprep-1.0.3.tgz",
+      "integrity": "sha512-/MY/PEMbk2SuY5sScONwhUDsV2p77Znkb/q3nSVstq/yQzYJOH/Azh29p9oJLsl3LnQwSvZDKagDGBsBwSooag==",
+      "dev": true,
+      "optional": true,
+      "requires": {
+        "sparse-bitfield": "^3.0.3"
+      }
+    },
     "sax": {
       "version": "1.2.1",
       "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz",
@@ -4990,8 +5203,7 @@
     "signal-exit": {
       "version": "3.0.2",
       "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.2.tgz",
-      "integrity": "sha512-meQNNykwecVxdu1RlYMKpQx4+wefIYpmxi6gexo/KAbwquJrBUrBmKYJrE8KFkVQAAVWEnwNdu21PgrD77J3xA==",
-      "dev": true
+      "integrity": "sha512-meQNNykwecVxdu1RlYMKpQx4+wefIYpmxi6gexo/KAbwquJrBUrBmKYJrE8KFkVQAAVWEnwNdu21PgrD77J3xA=="
     },
     "sinon": {
       "version": "7.1.1",
@@ -5055,11 +5267,26 @@
         "to-snake-case": "^1.0.0"
       }
     },
+    "snakeize": {
+      "version": "0.1.0",
+      "resolved": "https://registry.npmjs.org/snakeize/-/snakeize-0.1.0.tgz",
+      "integrity": "sha1-EMCI2LWOsHazIpu1oE4jLOEmQi0="
+    },
     "source-map": {
       "version": "0.6.1",
       "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
       "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g=="
     },
+    "sparse-bitfield": {
+      "version": "3.0.3",
+      "resolved": "https://registry.npmjs.org/sparse-bitfield/-/sparse-bitfield-3.0.3.tgz",
+      "integrity": "sha1-/0rm5oZWBWuks+eSqzM004JzyhE=",
+      "dev": true,
+      "optional": true,
+      "requires": {
+        "memory-pager": "^1.0.2"
+      }
+    },
     "spdx-correct": {
       "version": "3.1.0",
       "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.0.tgz",
@@ -5350,6 +5577,28 @@
         "readable-stream": "2 || 3"
       }
     },
+    "timekeeper": {
+      "version": "2.2.0",
+      "resolved": "https://registry.npmjs.org/timekeeper/-/timekeeper-2.2.0.tgz",
+      "integrity": "sha512-W3AmPTJWZkRwu+iSNxPIsLZ2ByADsOLbbLxe46UJyWj3mlYLlwucKiq+/dPm0l9wTzqoF3/2PH0AGFCebjq23A==",
+      "dev": true
+    },
+    "tiny-async-pool": {
+      "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/tiny-async-pool/-/tiny-async-pool-1.1.0.tgz",
+      "integrity": "sha512-jIglyHF/9QdCC3662m/UMVADE6SlocBDpXdFLMZyiAfrw8MSG1pml7lwRtBMT6L/z4dddAxfzw2lpW2Vm42fyQ==",
+      "requires": {
+        "semver": "^5.5.0",
+        "yaassertion": "^1.0.0"
+      },
+      "dependencies": {
+        "semver": {
+          "version": "5.7.1",
+          "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz",
+          "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ=="
+        }
+      }
+    },
     "tmp": {
       "version": "0.0.33",
       "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz",
@@ -5455,6 +5704,19 @@
         "mime-types": "~2.1.24"
       }
     },
+    "typedarray": {
+      "version": "0.0.6",
+      "resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz",
+      "integrity": "sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c="
+    },
+    "typedarray-to-buffer": {
+      "version": "3.1.5",
+      "resolved": "https://registry.npmjs.org/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz",
+      "integrity": "sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==",
+      "requires": {
+        "is-typedarray": "^1.0.0"
+      }
+    },
     "typescript": {
       "version": "3.8.2",
       "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.8.2.tgz",
@@ -5466,6 +5728,14 @@
       "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.6.0.tgz",
       "integrity": "sha512-z4o1fvKUojIWh9XuaVLUDdf86RQiq13AC1dmHbTpoyuu+bquHms76v16CjycCbec87J7z0k//SiQVk0sMdFmpQ=="
     },
+    "unique-string": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/unique-string/-/unique-string-2.0.0.tgz",
+      "integrity": "sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg==",
+      "requires": {
+        "crypto-random-string": "^2.0.0"
+      }
+    },
     "unpipe": {
       "version": "1.0.0",
       "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz",
@@ -5686,6 +5956,22 @@
         "mkdirp": "^0.5.1"
       }
     },
+    "write-file-atomic": {
+      "version": "3.0.3",
+      "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-3.0.3.tgz",
+      "integrity": "sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==",
+      "requires": {
+        "imurmurhash": "^0.1.4",
+        "is-typedarray": "^1.0.0",
+        "signal-exit": "^3.0.2",
+        "typedarray-to-buffer": "^3.1.5"
+      }
+    },
+    "xdg-basedir": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/xdg-basedir/-/xdg-basedir-4.0.0.tgz"
|
||||||
|
"integrity": "sha512-PSNhEJDejZYV7h50BohL09Er9VaIefr2LMAf3OEmpCkjOi34eYyQYAXUTjEQtZJTKcF0E2UKTh+osDLsgNim9Q=="
|
||||||
|
},
|
||||||
"xml2js": {
|
"xml2js": {
|
||||||
"version": "0.4.19",
|
"version": "0.4.19",
|
||||||
"resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.19.tgz",
|
"resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.19.tgz",
|
||||||
|
@ -5700,12 +5986,22 @@
|
||||||
"resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-9.0.7.tgz",
|
"resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-9.0.7.tgz",
|
||||||
"integrity": "sha512-7YXTQc3P2l9+0rjaUbLwMKRhtmwg1M1eDf6nag7urC7pIPYLD9W/jmzQ4ptRSUbodw5S0jfoGTflLemQibSpeQ=="
|
"integrity": "sha512-7YXTQc3P2l9+0rjaUbLwMKRhtmwg1M1eDf6nag7urC7pIPYLD9W/jmzQ4ptRSUbodw5S0jfoGTflLemQibSpeQ=="
|
||||||
},
|
},
|
||||||
|
"xtend": {
|
||||||
|
"version": "4.0.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz",
|
||||||
|
"integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ=="
|
||||||
|
},
|
||||||
"y18n": {
|
"y18n": {
|
||||||
"version": "4.0.0",
|
"version": "4.0.0",
|
||||||
"resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.0.tgz",
|
||||||
"integrity": "sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w==",
|
"integrity": "sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w==",
|
||||||
"dev": true
|
"dev": true
|
||||||
},
|
},
|
||||||
|
"yaassertion": {
|
||||||
|
"version": "1.0.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/yaassertion/-/yaassertion-1.0.2.tgz",
|
||||||
|
"integrity": "sha512-sBoJBg5vTr3lOpRX0yFD+tz7wv/l2UPMFthag4HGTMPrypBRKerjjS8jiEnNMjcAEtPXjbHiKE0UwRR1W1GXBg=="
|
||||||
|
},
|
||||||
"yallist": {
|
"yallist": {
|
||||||
"version": "3.1.1",
|
"version": "3.1.1",
|
||||||
"resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz",
|
"resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz",
|
||||||
|
|
|
@@ -20,6 +20,7 @@
     "test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js"
   },
   "dependencies": {
+    "@google-cloud/storage": "^4.3.0",
    "@overleaf/o-error": "^2.1.0",
    "aws-sdk": "^2.628.0",
    "body-parser": "^1.2.0",
@@ -32,10 +33,10 @@
    "range-parser": "^1.0.2",
    "request": "^2.88.0",
    "request-promise-native": "^1.0.8",
-    "rimraf": "2.2.8",
    "settings-sharelatex": "^1.1.0",
    "stream-buffers": "~0.2.5",
-    "stream-meter": "^1.0.4"
+    "stream-meter": "^1.0.4",
+    "tiny-async-pool": "^1.1.0"
   },
   "devDependencies": {
     "babel-eslint": "^10.0.3",
@@ -55,11 +56,13 @@
     "eslint-plugin-promise": "^4.2.1",
     "eslint-plugin-standard": "^4.0.1",
     "mocha": "5.2.0",
+    "mongodb": "^3.5.4",
     "prettier-eslint": "^9.0.1",
     "prettier-eslint-cli": "^5.0.0",
     "sandboxed-module": "2.0.3",
     "sinon": "7.1.1",
     "sinon-chai": "^3.3.0",
-    "streamifier": "^0.1.1"
+    "streamifier": "^0.1.1",
+    "timekeeper": "^2.2.0"
   }
 }

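Note: the new tiny-async-pool runtime dependency caps how many asynchronous operations run concurrently. A minimal sketch of its v1 API; the pool size and worker function below are illustrative, not taken from the filestore code:

    const asyncPool = require('tiny-async-pool')

    // run at most 10 deletions at a time; resolves once every key is processed
    async function deleteAll(keys, deleteOne) {
      return asyncPool(10, keys, deleteOne)
    }
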
@@ -0,0 +1,5 @@
+FROM fsouza/fake-gcs-server
+RUN apk add --update --no-cache curl
+COPY healthcheck.sh /healthcheck.sh
+HEALTHCHECK --interval=1s --timeout=1s --retries=30 CMD /healthcheck.sh http://localhost:9090
+CMD ["--port=9090", "--scheme=http"]

@@ -0,0 +1,4 @@
+FROM adobe/s3mock
+RUN apk add --update --no-cache curl
+COPY healthcheck.sh /healthcheck.sh
+HEALTHCHECK --interval=1s --timeout=1s --retries=30 CMD /healthcheck.sh http://localhost:9090

services/filestore/test/acceptance/deps/healthcheck.sh (new executable file, 9 lines)
@@ -0,0 +1,9 @@
+#!/bin/sh
+
+# health check to allow 404 status code as valid
+STATUSCODE=$(curl --silent --output /dev/null --write-out "%{http_code}" $1)
+# will be 000 on non-http error (e.g. connection failure)
+if test $STATUSCODE -ge 500 || test $STATUSCODE -lt 200; then
+  exit 1
+fi
+exit 0

@@ -4,6 +4,7 @@ const fs = require('fs')
 const Settings = require('settings-sharelatex')
 const Path = require('path')
 const FilestoreApp = require('./FilestoreApp')
+const TestHelper = require('./TestHelper')
 const rp = require('request-promise-native').defaults({
   resolveWithFullResponse: true
 })
@@ -11,130 +12,32 @@ const S3 = require('aws-sdk/clients/s3')
 const Stream = require('stream')
 const request = require('request')
 const { promisify } = require('util')
+const { Storage } = require('@google-cloud/storage')
 const streamifier = require('streamifier')
 chai.use(require('chai-as-promised'))
+const { ObjectId } = require('mongodb')
+const tk = require('timekeeper')

 const fsWriteFile = promisify(fs.writeFile)
 const fsStat = promisify(fs.stat)
 const pipeline = promisify(Stream.pipeline)

-async function getMetric(filestoreUrl, metric) {
-  const res = await rp.get(`${filestoreUrl}/metrics`)
-  expect(res.statusCode).to.equal(200)
-  const metricRegex = new RegExp(`^${metric}{[^}]+} ([0-9]+)$`, 'm')
-  const found = metricRegex.exec(res.body)
-  return parseInt(found ? found[1] : 0) || 0
-}
-
 if (!process.env.AWS_ACCESS_KEY_ID) {
   throw new Error('please provide credentials for the AWS S3 test server')
 }

-function streamToString(stream) {
-  const chunks = []
-  return new Promise((resolve, reject) => {
-    stream.on('data', chunk => chunks.push(chunk))
-    stream.on('error', reject)
-    stream.on('end', () => resolve(Buffer.concat(chunks).toString('utf8')))
-    stream.resume()
-  })
-}
-
 // store settings for multiple backends, so that we can test each one.
 // fs will always be available - add others if they are configured
-const BackendSettings = {
-  FSPersistor: {
-    backend: 'fs',
-    stores: {
-      user_files: Path.resolve(__dirname, '../../../user_files'),
-      public_files: Path.resolve(__dirname, '../../../public_files'),
-      template_files: Path.resolve(__dirname, '../../../template_files')
-    }
-  },
-  S3Persistor: {
-    backend: 's3',
-    s3: {
-      key: process.env.AWS_ACCESS_KEY_ID,
-      secret: process.env.AWS_SECRET_ACCESS_KEY,
-      endpoint: process.env.AWS_S3_ENDPOINT,
-      pathStyle: true,
-      partSize: 100 * 1024 * 1024
-    },
-    stores: {
-      user_files: process.env.AWS_S3_USER_FILES_BUCKET_NAME,
-      template_files: process.env.AWS_S3_TEMPLATE_FILES_BUCKET_NAME,
-      public_files: process.env.AWS_S3_PUBLIC_FILES_BUCKET_NAME
-    }
-  },
-  FallbackS3ToFSPersistor: {
-    backend: 's3',
-    s3: {
-      key: process.env.AWS_ACCESS_KEY_ID,
-      secret: process.env.AWS_SECRET_ACCESS_KEY,
-      endpoint: process.env.AWS_S3_ENDPOINT,
-      pathStyle: true,
-      partSize: 100 * 1024 * 1024
-    },
-    stores: {
-      user_files: process.env.AWS_S3_USER_FILES_BUCKET_NAME,
-      template_files: process.env.AWS_S3_TEMPLATE_FILES_BUCKET_NAME,
-      public_files: process.env.AWS_S3_PUBLIC_FILES_BUCKET_NAME
-    },
-    fallback: {
-      backend: 'fs',
-      buckets: {
-        [process.env.AWS_S3_USER_FILES_BUCKET_NAME]: Path.resolve(
-          __dirname,
-          '../../../user_files'
-        ),
-        [process.env.AWS_S3_PUBLIC_FILES_BUCKET_NAME]: Path.resolve(
-          __dirname,
-          '../../../public_files'
-        ),
-        [process.env.AWS_S3_TEMPLATE_FILES_BUCKET_NAME]: Path.resolve(
-          __dirname,
-          '../../../template_files'
-        )
-      }
-    }
-  },
-  FallbackFSToS3Persistor: {
-    backend: 'fs',
-    s3: {
-      key: process.env.AWS_ACCESS_KEY_ID,
-      secret: process.env.AWS_SECRET_ACCESS_KEY,
-      endpoint: process.env.AWS_S3_ENDPOINT,
-      pathStyle: true,
-      partSize: 100 * 1024 * 1024
-    },
-    stores: {
-      user_files: Path.resolve(__dirname, '../../../user_files'),
-      public_files: Path.resolve(__dirname, '../../../public_files'),
-      template_files: Path.resolve(__dirname, '../../../template_files')
-    },
-    fallback: {
-      backend: 's3',
-      buckets: {
-        [Path.resolve(__dirname, '../../../user_files')]: process.env
-          .AWS_S3_USER_FILES_BUCKET_NAME,
-        [Path.resolve(__dirname, '../../../public_files')]: process.env
-          .AWS_S3_PUBLIC_FILES_BUCKET_NAME,
-        [Path.resolve(__dirname, '../../../template_files')]: process.env
-          .AWS_S3_TEMPLATE_FILES_BUCKET_NAME
-      }
-    }
-  }
-}
+const BackendSettings = require('./TestConfig')

 describe('Filestore', function() {
   this.timeout(1000 * 10)
   const filestoreUrl = `http://localhost:${Settings.internal.filestore.port}`
-  const directoryName = 'directory'

   // redefine the test suite for every available backend
   Object.keys(BackendSettings).forEach(backend => {
     describe(backend, function() {
-      let app, previousEgress, previousIngress, projectId
+      let app, previousEgress, previousIngress, metricPrefix, projectId

       before(async function() {
         // create the app with the relevant filestore settings
@@ -143,15 +46,38 @@ describe('Filestore', function() {
         await app.runServer()
       })

+      if (BackendSettings[backend].gcs) {
+        before(async function() {
+          const storage = new Storage(Settings.filestore.gcs.endpoint)
+          await storage.createBucket(process.env.GCS_USER_FILES_BUCKET_NAME)
+          await storage.createBucket(process.env.GCS_PUBLIC_FILES_BUCKET_NAME)
+          await storage.createBucket(process.env.GCS_TEMPLATE_FILES_BUCKET_NAME)
+          await storage.createBucket(
+            `${process.env.GCS_USER_FILES_BUCKET_NAME}-deleted`
+          )
+          await storage.createBucket(
+            `${process.env.GCS_PUBLIC_FILES_BUCKET_NAME}-deleted`
+          )
+          await storage.createBucket(
+            `${process.env.GCS_TEMPLATE_FILES_BUCKET_NAME}-deleted`
+          )
+        })
+      }
+
       after(async function() {
         return app.stop()
       })

       beforeEach(async function() {
-        if (Settings.filestore.backend === 's3') {
-          previousEgress = await getMetric(filestoreUrl, 's3_egress')
+        // retrieve previous metrics from the app
+        if (['s3', 'gcs'].includes(Settings.filestore.backend)) {
+          metricPrefix = Settings.filestore.backend
+          previousEgress = await TestHelper.getMetric(
+            filestoreUrl,
+            `${metricPrefix}_egress`
+          )
         }
-        projectId = `acceptance_tests_${Math.random()}`
+        projectId = ObjectId().toString()
       })

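Note: ids switch from Math.random() to Mongo-style ObjectIds throughout the suite. For illustration (the sample value is invented), ObjectId().toString() always yields a 24-character hex string, which is what the GCS directoryKeyRegex in TestConfig.js below expects:

    const { ObjectId } = require('mongodb')

    const projectId = ObjectId().toString()
    // e.g. '5e5bc85e0a3bfc4a3a9d1234' - always 24 hex characters
    /^[0-9a-fA-F]{24}$/.test(projectId) // => true
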
       it('should send a 200 for the status endpoint', async function() {
@@ -174,8 +100,8 @@ describe('Filestore', function() {
         '/tmp/filestore_acceptance_tests_file_read.txt'

       beforeEach(async function() {
-        fileId = Math.random()
-        fileUrl = `${filestoreUrl}/project/${projectId}/file/${directoryName}%2F${fileId}`
+        fileId = ObjectId().toString()
+        fileUrl = `${filestoreUrl}/project/${projectId}/file/${fileId}`
         constantFileContent = [
           'hello world',
           `line 2 goes here ${Math.random()}`,
@@ -195,8 +121,11 @@ describe('Filestore', function() {
         // The upload request can bump the ingress metric.
         // The content hash validation might require a full download
         // in case the ETag field of the upload response is not a md5 sum.
-        if (Settings.filestore.backend === 's3') {
-          previousIngress = await getMetric(filestoreUrl, 's3_ingress')
+        if (['s3', 'gcs'].includes(Settings.filestore.backend)) {
+          previousIngress = await TestHelper.getMetric(
+            filestoreUrl,
+            `${metricPrefix}_ingress`
+          )
         }
       })

@@ -252,16 +181,16 @@ describe('Filestore', function() {
       })

       it('should be able to copy files', async function() {
-        const newProjectID = `acceptance_tests_copied_project_${Math.random()}`
-        const newFileId = Math.random()
-        const newFileUrl = `${filestoreUrl}/project/${newProjectID}/file/${directoryName}%2F${newFileId}`
+        const newProjectID = ObjectId().toString()
+        const newFileId = ObjectId().toString()
+        const newFileUrl = `${filestoreUrl}/project/${newProjectID}/file/${newFileId}`
         const opts = {
           method: 'put',
           uri: newFileUrl,
           json: {
             source: {
               project_id: projectId,
-              file_id: `${directoryName}/${fileId}`
+              file_id: fileId
             }
           }
         }
@@ -285,15 +214,21 @@ describe('Filestore', function() {
         expect(response.body).to.equal(newContent)
       })

-      if (backend === 'S3Persistor') {
+      if (['S3Persistor', 'GcsPersistor'].includes(backend)) {
         it('should record an egress metric for the upload', async function() {
-          const metric = await getMetric(filestoreUrl, 's3_egress')
+          const metric = await TestHelper.getMetric(
+            filestoreUrl,
+            `${metricPrefix}_egress`
+          )
           expect(metric - previousEgress).to.equal(constantFileContent.length)
         })

         it('should record an ingress metric when downloading the file', async function() {
           await rp.get(fileUrl)
-          const metric = await getMetric(filestoreUrl, 's3_ingress')
+          const metric = await TestHelper.getMetric(
+            filestoreUrl,
+            `${metricPrefix}_ingress`
+          )
           expect(metric - previousIngress).to.equal(
             constantFileContent.length
           )
@@ -307,15 +242,17 @@ describe('Filestore', function() {
           }
         }
         await rp.get(options)
-        const metric = await getMetric(filestoreUrl, 's3_ingress')
+        const metric = await TestHelper.getMetric(
+          filestoreUrl,
+          `${metricPrefix}_ingress`
+        )
         expect(metric - previousIngress).to.equal(9)
       })
     }
   })

   describe('with multiple files', function() {
-    let fileIds, fileUrls
-    const directoryName = 'directory'
+    let fileIds, fileUrls, projectUrl
     const localFileReadPaths = [
       '/tmp/filestore_acceptance_tests_file_read_1.txt',
       '/tmp/filestore_acceptance_tests_file_read_2.txt'
@@ -341,10 +278,11 @@ describe('Filestore', function() {
     })

     beforeEach(async function() {
-      fileIds = [Math.random(), Math.random()]
+      projectUrl = `${filestoreUrl}/project/${projectId}`
+      fileIds = [ObjectId().toString(), ObjectId().toString()]
       fileUrls = [
-        `${filestoreUrl}/project/${projectId}/file/${directoryName}%2F${fileIds[0]}`,
-        `${filestoreUrl}/project/${projectId}/file/${directoryName}%2F${fileIds[1]}`
+        `${projectUrl}/file/${fileIds[0]}`,
+        `${projectUrl}/file/${fileIds[1]}`
       ]

       const writeStreams = [
@@ -374,14 +312,42 @@ describe('Filestore', function() {
         constantFileContents[0].length + constantFileContents[1].length
       )
     })

+    it('should store the files', async function() {
+      for (const index in fileUrls) {
+        await expect(rp.get(fileUrls[index])).to.eventually.have.property(
+          'body',
+          constantFileContents[index]
+        )
+      }
+    })
+
+    it('should be able to delete the project', async function() {
+      await expect(rp.delete(projectUrl)).to.eventually.have.property(
+        'statusCode',
+        204
+      )
+
+      for (const index in fileUrls) {
+        await expect(
+          rp.get(fileUrls[index])
+        ).to.eventually.be.rejected.and.have.property('statusCode', 404)
+      }
+    })
+
+    it('should not delete a partial project id', async function() {
+      await expect(
+        rp.delete(`${filestoreUrl}/project/5`)
+      ).to.eventually.be.rejected.and.have.property('statusCode', 400)
+    })
   })

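Note: the 400 for '/project/5' reflects that bulk deletion refuses anything that is not a complete project id. A hedged sketch of such a guard; this middleware is an illustration inferred from the test, not the repository's actual implementation:

    // hypothetical Express middleware rejecting partial project ids
    function requireFullProjectId(req, res, next) {
      if (!/^[0-9a-f]{24}$/i.test(req.params.project_id)) {
        return res.sendStatus(400) // '5' must not match every project starting with 5
      }
      next()
    }
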
   describe('with a large file', function() {
     let fileId, fileUrl, largeFileContent, error

     beforeEach(async function() {
-      fileId = Math.random()
-      fileUrl = `${filestoreUrl}/project/${projectId}/file/${directoryName}%2F${fileId}`
+      fileId = ObjectId().toString()
+      fileUrl = `${filestoreUrl}/project/${projectId}/file/${fileId}`

       largeFileContent = '_wombat_'.repeat(1024 * 1024) // 8 megabytes
       largeFileContent += Math.random()
@@ -414,8 +380,8 @@ describe('Filestore', function() {

     beforeEach(async function() {
       constantFileContent = `This is a file in a different S3 bucket ${Math.random()}`
-      fileId = Math.random().toString()
-      bucketName = Math.random().toString()
+      fileId = ObjectId().toString()
+      bucketName = ObjectId().toString()
       fileUrl = `${filestoreUrl}/bucket/${bucketName}/key/${fileId}`

       const s3ClientSettings = {
@@ -450,50 +416,60 @@ describe('Filestore', function() {
       })
     }

+    if (backend === 'GcsPersistor') {
+      describe('when deleting a file in GCS', function() {
+        let fileId, fileUrl, content, error, date
+
+        beforeEach(async function() {
+          date = new Date()
+          tk.freeze(date)
+          fileId = ObjectId()
+          fileUrl = `${filestoreUrl}/project/${projectId}/file/${fileId}`
+
+          content = '_wombat_' + Math.random()
+
+          const writeStream = request.post(fileUrl)
+          const readStream = streamifier.createReadStream(content)
+          // hack to consume the result to ensure the http request has been fully processed
+          const resultStream = fs.createWriteStream('/dev/null')
+
+          try {
+            await pipeline(readStream, writeStream, resultStream)
+            await rp.delete(fileUrl)
+          } catch (err) {
+            error = err
+          }
+        })
+
+        afterEach(function() {
+          tk.reset()
+        })
+
+        it('should not throw an error', function() {
+          expect(error).not.to.exist
+        })
+
+        it('should copy the file to the deleted-files bucket', async function() {
+          await TestHelper.expectPersistorToHaveFile(
+            app.persistor,
+            `${Settings.filestore.stores.user_files}-deleted`,
+            `${projectId}/${fileId}-${date.toISOString()}`,
+            content
+          )
+        })
+
+        it('should remove the file from the original bucket', async function() {
+          await TestHelper.expectPersistorNotToHaveFile(
+            app.persistor,
+            Settings.filestore.stores.user_files,
+            `${projectId}/${fileId}`
+          )
+        })
+      })
+    }
+
     if (BackendSettings[backend].fallback) {
       describe('with a fallback', function() {
-        async function uploadStringToPersistor(
-          persistor,
-          bucket,
-          key,
-          content
-        ) {
-          const fileStream = streamifier.createReadStream(content)
-          await persistor.promises.sendStream(bucket, key, fileStream)
-        }
-
-        async function getStringFromPersistor(persistor, bucket, key) {
-          const stream = await persistor.promises.getFileStream(
-            bucket,
-            key,
-            {}
-          )
-          return streamToString(stream)
-        }
-
-        async function expectPersistorToHaveFile(
-          persistor,
-          bucket,
-          key,
-          content
-        ) {
-          const foundContent = await getStringFromPersistor(
-            persistor,
-            bucket,
-            key
-          )
-          expect(foundContent).to.equal(content)
-        }
-
-        async function expectPersistorNotToHaveFile(persistor, bucket, key) {
-          await expect(
-            getStringFromPersistor(persistor, bucket, key)
-          ).to.eventually.have.been.rejected.with.property(
-            'name',
-            'NotFoundError'
-          )
-        }
-
         let constantFileContent,
           fileId,
           fileKey,
@@ -503,9 +479,9 @@ describe('Filestore', function() {

         beforeEach(function() {
           constantFileContent = `This is yet more file content ${Math.random()}`
-          fileId = Math.random().toString()
-          fileKey = `${projectId}/${directoryName}/${fileId}`
-          fileUrl = `${filestoreUrl}/project/${projectId}/file/${directoryName}%2F${fileId}`
+          fileId = ObjectId().toString()
+          fileKey = `${projectId}/${fileId}`
+          fileUrl = `${filestoreUrl}/project/${projectId}/file/${fileId}`

           bucket = Settings.filestore.stores.user_files
           fallbackBucket = Settings.filestore.fallback.buckets[bucket]
@@ -513,7 +489,7 @@ describe('Filestore', function() {

         describe('with a file in the fallback bucket', function() {
           beforeEach(async function() {
-            await uploadStringToPersistor(
+            await TestHelper.uploadStringToPersistor(
               app.persistor.fallbackPersistor,
               fallbackBucket,
               fileKey,
@@ -522,7 +498,7 @@ describe('Filestore', function() {
           })

           it('should not find file in the primary', async function() {
-            await expectPersistorNotToHaveFile(
+            await TestHelper.expectPersistorNotToHaveFile(
               app.persistor.primaryPersistor,
               bucket,
               fileKey
@@ -530,7 +506,7 @@ describe('Filestore', function() {
           })

           it('should find the file in the fallback', async function() {
-            await expectPersistorToHaveFile(
+            await TestHelper.expectPersistorToHaveFile(
               app.persistor.fallbackPersistor,
               fallbackBucket,
               fileKey,
@@ -551,7 +527,7 @@ describe('Filestore', function() {
             it('should not copy the file to the primary', async function() {
               await rp.get(fileUrl)

-              await expectPersistorNotToHaveFile(
+              await TestHelper.expectPersistorNotToHaveFile(
                 app.persistor.primaryPersistor,
                 bucket,
                 fileKey
@@ -574,7 +550,7 @@ describe('Filestore', function() {
               // wait for the file to copy in the background
               await promisify(setTimeout)(1000)

-              await expectPersistorToHaveFile(
+              await TestHelper.expectPersistorToHaveFile(
                 app.persistor.primaryPersistor,
                 bucket,
                 fileKey,
@@ -587,10 +563,10 @@ describe('Filestore', function() {
             let newFileId, newFileUrl, newFileKey, opts

             beforeEach(function() {
-              const newProjectID = `acceptance_tests_copied_project_${Math.random()}`
-              newFileId = Math.random()
-              newFileUrl = `${filestoreUrl}/project/${newProjectID}/file/${directoryName}%2F${newFileId}`
-              newFileKey = `${newProjectID}/${directoryName}/${newFileId}`
+              const newProjectID = ObjectId().toString()
+              newFileId = ObjectId().toString()
+              newFileUrl = `${filestoreUrl}/project/${newProjectID}/file/${newFileId}`
+              newFileKey = `${newProjectID}/${newFileId}`

               opts = {
                 method: 'put',
@@ -598,7 +574,7 @@ describe('Filestore', function() {
                 json: {
                   source: {
                     project_id: projectId,
-                    file_id: `${directoryName}/${fileId}`
+                    file_id: fileId
                   }
                 }
               }
@@ -613,7 +589,7 @@ describe('Filestore', function() {
               })

               it('should leave the old file in the old bucket', async function() {
-                await expectPersistorToHaveFile(
+                await TestHelper.expectPersistorToHaveFile(
                   app.persistor.fallbackPersistor,
                   fallbackBucket,
                   fileKey,
@@ -622,7 +598,7 @@ describe('Filestore', function() {
               })

               it('should not create a new file in the old bucket', async function() {
-                await expectPersistorNotToHaveFile(
+                await TestHelper.expectPersistorNotToHaveFile(
                   app.persistor.fallbackPersistor,
                   fallbackBucket,
                   newFileKey
@@ -630,7 +606,7 @@ describe('Filestore', function() {
               })

               it('should create a new file in the new bucket', async function() {
-                await expectPersistorToHaveFile(
+                await TestHelper.expectPersistorToHaveFile(
                   app.persistor.primaryPersistor,
                   bucket,
                   newFileKey,
@@ -642,7 +618,7 @@ describe('Filestore', function() {
                 // wait for the file to copy in the background
                 await promisify(setTimeout)(1000)

-                await expectPersistorNotToHaveFile(
+                await TestHelper.expectPersistorNotToHaveFile(
                   app.persistor.primaryPersistor,
                   bucket,
                   fileKey
@@ -659,7 +635,7 @@ describe('Filestore', function() {
               })

               it('should leave the old file in the old bucket', async function() {
-                await expectPersistorToHaveFile(
+                await TestHelper.expectPersistorToHaveFile(
                   app.persistor.fallbackPersistor,
                   fallbackBucket,
                   fileKey,
@@ -668,7 +644,7 @@ describe('Filestore', function() {
               })

               it('should not create a new file in the old bucket', async function() {
-                await expectPersistorNotToHaveFile(
+                await TestHelper.expectPersistorNotToHaveFile(
                   app.persistor.fallbackPersistor,
                   fallbackBucket,
                   newFileKey
@@ -676,7 +652,7 @@ describe('Filestore', function() {
               })

               it('should create a new file in the new bucket', async function() {
-                await expectPersistorToHaveFile(
+                await TestHelper.expectPersistorToHaveFile(
                   app.persistor.primaryPersistor,
                   bucket,
                   newFileKey,
@@ -688,7 +664,7 @@ describe('Filestore', function() {
                 // wait for the file to copy in the background
                 await promisify(setTimeout)(1000)

-                await expectPersistorToHaveFile(
+                await TestHelper.expectPersistorToHaveFile(
                   app.persistor.primaryPersistor,
                   bucket,
                   fileKey,
@@ -711,7 +687,7 @@ describe('Filestore', function() {
           })

           it('should store the file on the primary', async function() {
-            await expectPersistorToHaveFile(
+            await TestHelper.expectPersistorToHaveFile(
               app.persistor.primaryPersistor,
               bucket,
               fileKey,
@@ -720,10 +696,10 @@ describe('Filestore', function() {
           })

           it('should not store the file on the fallback', async function() {
-            await expectPersistorNotToHaveFile(
+            await TestHelper.expectPersistorNotToHaveFile(
               app.persistor.fallbackPersistor,
               fallbackBucket,
-              `${projectId}/${directoryName}/${fileId}`
+              `${projectId}/${fileId}`
             )
           })
         })
@@ -731,7 +707,7 @@ describe('Filestore', function() {
         describe('when deleting a file', function() {
           describe('when the file exists on the primary', function() {
             beforeEach(async function() {
-              await uploadStringToPersistor(
+              await TestHelper.uploadStringToPersistor(
                 app.persistor.primaryPersistor,
                 bucket,
                 fileKey,
@@ -750,7 +726,7 @@ describe('Filestore', function() {

           describe('when the file exists on the fallback', function() {
             beforeEach(async function() {
-              await uploadStringToPersistor(
+              await TestHelper.uploadStringToPersistor(
                 app.persistor.fallbackPersistor,
                 fallbackBucket,
                 fileKey,
@@ -769,13 +745,13 @@ describe('Filestore', function() {

           describe('when the file exists on both the primary and the fallback', function() {
             beforeEach(async function() {
-              await uploadStringToPersistor(
+              await TestHelper.uploadStringToPersistor(
                 app.persistor.primaryPersistor,
                 bucket,
                 fileKey,
                 constantFileContent
               )
-              await uploadStringToPersistor(
+              await TestHelper.uploadStringToPersistor(
                 app.persistor.fallbackPersistor,
                 fallbackBucket,
                 fileKey,
@@ -812,8 +788,8 @@ describe('Filestore', function() {
       )

       beforeEach(async function() {
-        fileId = Math.random()
-        fileUrl = `${filestoreUrl}/project/${projectId}/file/${directoryName}%2F${fileId}`
+        fileId = ObjectId().toString()
+        fileUrl = `${filestoreUrl}/project/${projectId}/file/${fileId}`
         const stat = await fsStat(localFileReadPath)
         localFileSize = stat.size
         const writeStream = request.post(fileUrl)
@@ -827,9 +803,12 @@ describe('Filestore', function() {
         expect(response.body.substring(0, 8)).to.equal('%PDF-1.5')
       })

-      if (backend === 'S3Persistor') {
+      if (['S3Persistor', 'GcsPersistor'].includes(backend)) {
         it('should record an egress metric for the upload', async function() {
-          const metric = await getMetric(filestoreUrl, 's3_egress')
+          const metric = await TestHelper.getMetric(
+            filestoreUrl,
+            `${metricPrefix}_egress`
+          )
           expect(metric - previousEgress).to.equal(localFileSize)
         })
       }

services/filestore/test/acceptance/js/TestConfig.js (new file, 113 lines)
@@ -0,0 +1,113 @@
+const Path = require('path')
+
+// use functions to get a fresh copy, not a reference, each time
+function s3Config() {
+  return {
+    key: process.env.AWS_ACCESS_KEY_ID,
+    secret: process.env.AWS_SECRET_ACCESS_KEY,
+    endpoint: process.env.AWS_S3_ENDPOINT,
+    pathStyle: true,
+    partSize: 100 * 1024 * 1024
+  }
+}
+
+function s3Stores() {
+  return {
+    user_files: process.env.AWS_S3_USER_FILES_BUCKET_NAME,
+    template_files: process.env.AWS_S3_TEMPLATE_FILES_BUCKET_NAME,
+    public_files: process.env.AWS_S3_PUBLIC_FILES_BUCKET_NAME
+  }
+}
+
+function gcsConfig() {
+  return {
+    endpoint: {
+      apiEndpoint: process.env.GCS_API_ENDPOINT,
+      apiScheme: process.env.GCS_API_SCHEME,
+      projectId: 'fake'
+    },
+    directoryKeyRegex: new RegExp('^[0-9a-fA-F]{24}/[0-9a-fA-F]{24}'),
+    unlockBeforeDelete: false, // fake-gcs does not support this
+    deletedBucketSuffix: '-deleted'
+  }
+}
+
+function gcsStores() {
+  return {
+    user_files: process.env.GCS_USER_FILES_BUCKET_NAME,
+    template_files: process.env.GCS_TEMPLATE_FILES_BUCKET_NAME,
+    public_files: process.env.GCS_PUBLIC_FILES_BUCKET_NAME
+  }
+}
+
+function fsStores() {
+  return {
+    user_files: Path.resolve(__dirname, '../../../user_files'),
+    public_files: Path.resolve(__dirname, '../../../public_files'),
+    template_files: Path.resolve(__dirname, '../../../template_files')
+  }
+}
+
+function fallbackStores(primaryConfig, fallbackConfig) {
+  return {
+    [primaryConfig.user_files]: fallbackConfig.user_files,
+    [primaryConfig.public_files]: fallbackConfig.public_files,
+    [primaryConfig.template_files]: fallbackConfig.template_files
+  }
+}
+
+module.exports = {
+  FSPersistor: {
+    backend: 'fs',
+    stores: fsStores()
+  },
+  S3Persistor: {
+    backend: 's3',
+    s3: s3Config(),
+    stores: s3Stores()
+  },
+  GcsPersistor: {
+    backend: 'gcs',
+    gcs: gcsConfig(),
+    stores: gcsStores()
+  },
+  FallbackS3ToFSPersistor: {
+    backend: 's3',
+    s3: s3Config(),
+    stores: s3Stores(),
+    fallback: {
+      backend: 'fs',
+      buckets: fallbackStores(s3Stores(), fsStores())
+    }
+  },
+  FallbackFSToS3Persistor: {
+    backend: 'fs',
+    s3: s3Config(),
+    stores: fsStores(),
+    fallback: {
+      backend: 's3',
+      buckets: fallbackStores(fsStores(), s3Stores())
+    }
+  },
+  FallbackGcsToS3Persistor: {
+    backend: 'gcs',
+    gcs: gcsConfig(),
+    stores: gcsStores(),
+    s3: s3Config(),
+    fallback: {
+      backend: 's3',
+      buckets: fallbackStores(gcsStores(), s3Stores())
+    }
+  },
+  FallbackS3ToGcsPersistor: {
+    backend: 's3',
+    // can use the same bucket names for gcs and s3 (in tests)
+    stores: s3Stores(),
+    s3: s3Config(),
+    gcs: gcsConfig(),
+    fallback: {
+      backend: 'gcs',
+      buckets: fallbackStores(s3Stores(), gcsStores())
+    }
+  }
+}

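Note: a small worked example of fallbackStores, with invented bucket names, to make the primary-to-fallback mapping concrete:

    const buckets = fallbackStores(
      { user_files: 'gcs-user', public_files: 'gcs-public', template_files: 'gcs-template' },
      { user_files: 's3-user', public_files: 's3-public', template_files: 's3-template' }
    )
    // => { 'gcs-user': 's3-user', 'gcs-public': 's3-public', 'gcs-template': 's3-template' }
    // each primary bucket is keyed to the fallback bucket that backs it
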
services/filestore/test/acceptance/js/TestHelper.js (new file, 54 lines)
@@ -0,0 +1,54 @@
+const streamifier = require('streamifier')
+const rp = require('request-promise-native').defaults({
+  resolveWithFullResponse: true
+})
+
+const { expect } = require('chai')
+
+module.exports = {
+  uploadStringToPersistor,
+  getStringFromPersistor,
+  expectPersistorToHaveFile,
+  expectPersistorNotToHaveFile,
+  streamToString,
+  getMetric
+}
+
+async function getMetric(filestoreUrl, metric) {
+  const res = await rp.get(`${filestoreUrl}/metrics`)
+  expect(res.statusCode).to.equal(200)
+  const metricRegex = new RegExp(`^${metric}{[^}]+} ([0-9]+)$`, 'm')
+  const found = metricRegex.exec(res.body)
+  return parseInt(found ? found[1] : 0) || 0
+}
+
+function streamToString(stream) {
+  const chunks = []
+  return new Promise((resolve, reject) => {
+    stream.on('data', chunk => chunks.push(chunk))
+    stream.on('error', reject)
+    stream.on('end', () => resolve(Buffer.concat(chunks).toString('utf8')))
+    stream.resume()
+  })
+}
+
+async function uploadStringToPersistor(persistor, bucket, key, content) {
+  const fileStream = streamifier.createReadStream(content)
+  await persistor.promises.sendStream(bucket, key, fileStream)
+}
+
+async function getStringFromPersistor(persistor, bucket, key) {
+  const stream = await persistor.promises.getFileStream(bucket, key, {})
+  return streamToString(stream)
+}
+
+async function expectPersistorToHaveFile(persistor, bucket, key, content) {
+  const foundContent = await getStringFromPersistor(persistor, bucket, key)
+  expect(foundContent).to.equal(content)
+}
+
+async function expectPersistorNotToHaveFile(persistor, bucket, key) {
+  await expect(
+    getStringFromPersistor(persistor, bucket, key)
+  ).to.eventually.have.been.rejected.with.property('name', 'NotFoundError')
+}

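Note: a short usage sketch of the shared metric helper in an acceptance test; the variable names are assumed from the surrounding suite:

    const TestHelper = require('./TestHelper')

    // read the counter, perform an upload, then check the delta
    const before = await TestHelper.getMetric(filestoreUrl, 'gcs_egress')
    await rp.post({ uri: fileUrl, body: content })
    const after = await TestHelper.getMetric(filestoreUrl, 'gcs_egress')
    expect(after - before).to.equal(content.length)
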
@@ -22,15 +22,7 @@ describe('FSPersistorTests', function() {
   const files = ['animals/wombat.tex', 'vegetables/potato.tex']
   const globs = [`${location}/${files[0]}`, `${location}/${files[1]}`]
   const filteredFilenames = ['animals_wombat.tex', 'vegetables_potato.tex']
-  let fs,
-    rimraf,
-    stream,
-    LocalFileWriter,
-    FSPersistor,
-    glob,
-    readStream,
-    crypto,
-    Hash
+  let fs, stream, LocalFileWriter, FSPersistor, glob, readStream, crypto, Hash

   beforeEach(function() {
     readStream = {
@@ -46,7 +38,6 @@ describe('FSPersistorTests', function() {
       stat: sinon.stub().yields(null, stat)
     }
     glob = sinon.stub().yields(null, globs)
-    rimraf = sinon.stub().yields()
     stream = { pipeline: sinon.stub().yields() }
     LocalFileWriter = {
       promises: {
@@ -68,12 +59,12 @@ describe('FSPersistorTests', function() {
         './Errors': Errors,
         fs,
         glob,
-        rimraf,
         stream,
         crypto,
         // imported by PersistorHelper but otherwise unused here
         'stream-meter': {},
-        'logger-sharelatex': {}
+        'logger-sharelatex': {},
+        'metrics-sharelatex': {}
       },
       globals: { console }
     })
@@ -270,15 +261,22 @@ describe('FSPersistorTests', function() {
   })

   describe('deleteDirectory', function() {
-    it('Should call rmdir(rimraf) with correct options', async function() {
+    it('Should call glob with correct options', async function() {
       await FSPersistor.promises.deleteDirectory(location, files[0])
-      expect(rimraf).to.have.been.calledWith(
-        `${location}/${filteredFilenames[0]}`
+      expect(glob).to.have.been.calledWith(
+        `${location}/${filteredFilenames[0]}*`
       )
     })

+    it('Should call unlink on the returned files', async function() {
+      await FSPersistor.promises.deleteDirectory(location, files[0])
+      for (const filename of globs) {
+        expect(fs.unlink).to.have.been.calledWith(filename)
+      }
+    })
+
     it('Should propagate the error', async function() {
-      rimraf.yields(error)
+      glob.yields(error)
       await expect(
         FSPersistor.promises.deleteDirectory(location, files[0])
       ).to.eventually.be.rejected.and.have.property('cause', error)

@@ -40,6 +40,7 @@ describe('FileController', function() {
     getFile: sinon.stub().yields(null, fileStream),
     getFileSize: sinon.stub().yields(null, fileSize),
     deleteFile: sinon.stub().yields(),
+    deleteProject: sinon.stub().yields(),
     insertFile: sinon.stub().yields(),
     getDirectorySize: sinon.stub().yields(null, fileSize)
   }
@@ -67,6 +68,7 @@ describe('FileController', function() {
   req = {
     key: key,
     bucket: bucket,
+    project_id: projectId,
     query: {},
     params: {
       project_id: projectId,
@@ -257,6 +259,23 @@ describe('FileController', function() {
     })
   })

+  describe('delete project', function() {
+    it('should tell the file handler', function(done) {
+      res.sendStatus = code => {
+        code.should.equal(204)
+        expect(FileHandler.deleteProject).to.have.been.calledWith(bucket, key)
+        done()
+      }
+      FileController.deleteProject(req, res, next)
+    })
+
+    it('should send a 500 if there was an error', function() {
+      FileHandler.deleteProject.yields(error)
+      FileController.deleteProject(req, res, next)
+      expect(next).to.have.been.calledWith(error)
+    })
+  })
+
   describe('directorySize', function() {
     it('should return total directory size bytes', function(done) {
       FileController.directorySize(req, {

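Note: from these tests one can infer the controller's shape. The sketch below is an assumption (204 on success, errors forwarded to next), not the verbatim implementation:

    // hedged sketch of the controller under test
    function deleteProject(req, res, next) {
      const { key, bucket } = req

      FileHandler.deleteProject(bucket, key, function(err) {
        if (err) {
          return next(err) // surfaced as a 500 by the error handler
        }
        res.sendStatus(204)
      })
    }
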
@@ -3,6 +3,7 @@ const chai = require('chai')
 const { expect } = chai
 const modulePath = '../../../app/js/FileHandler.js'
 const SandboxedModule = require('sandboxed-module')
+const { ObjectId } = require('mongodb')

 chai.use(require('sinon-chai'))
 chai.use(require('chai-as-promised'))
@@ -24,8 +25,9 @@ describe('FileHandler', function() {
   }

   const bucket = 'my_bucket'
-  const key = 'key/here'
-  const convertedFolderKey = 'convertedFolder'
+  const key = `${ObjectId()}/${ObjectId()}`
+  const convertedFolderKey = `${ObjectId()}/${ObjectId()}`
+  const projectKey = `${ObjectId()}/`
   const sourceStream = 'sourceStream'
   const convertedKey = 'convertedKey'
   const readStream = {
@@ -112,6 +114,14 @@ describe('FileHandler', function() {
         done()
       })
     })
+
+    it('should throw an error when the key is in the wrong format', function(done) {
+      KeyBuilder.getConvertedFolderKey.returns('wombat')
+      FileHandler.insertFile(bucket, key, stream, err => {
+        expect(err).to.exist
+        done()
+      })
+    })
   })

   describe('deleteFile', function() {
@@ -135,6 +145,33 @@ describe('FileHandler', function() {
         done()
       })
     })
+
+    it('should throw an error when the key is in the wrong format', function(done) {
+      KeyBuilder.getConvertedFolderKey.returns('wombat')
+      FileHandler.deleteFile(bucket, key, err => {
+        expect(err).to.exist
+        done()
+      })
+    })
+  })
+
+  describe('deleteProject', function() {
+    it('should tell the filestore manager to delete the folder', function(done) {
+      FileHandler.deleteProject(bucket, projectKey, err => {
+        expect(err).not.to.exist
+        expect(
+          PersistorManager.promises.deleteDirectory
+        ).to.have.been.calledWith(bucket, projectKey)
+        done()
+      })
+    })
+
+    it('should throw an error when the key is in the wrong format', function(done) {
+      FileHandler.deleteProject(bucket, 'wombat', err => {
+        expect(err).to.exist
+        done()
+      })
+    })
   })

   describe('getFile', function() {

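Note: the 'wrong format' cases imply FileHandler validates keys before calling the persistor. A hedged sketch of such a guard using the InvalidParametersError class added by this commit; the regex and helper name are illustrative assumptions, not the actual implementation:

    const { InvalidParametersError } = require('./Errors')

    // hypothetical check: a project key is 24 hex characters plus a trailing slash
    function checkProjectKey(projectKey, callback) {
      if (!/^[0-9a-f]{24}\/$/i.test(projectKey)) {
        return callback(
          new InvalidParametersError({ message: 'invalid project key' })
        )
      }
      callback()
    }
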
678
services/filestore/test/unit/js/GcsPersistorTests.js
Normal file
678
services/filestore/test/unit/js/GcsPersistorTests.js
Normal file
|
@ -0,0 +1,678 @@
+const sinon = require('sinon')
+const chai = require('chai')
+const { expect } = chai
+const modulePath = '../../../app/js/GcsPersistor.js'
+const SandboxedModule = require('sandboxed-module')
+const { ObjectId } = require('mongodb')
+const asyncPool = require('tiny-async-pool')
+
+const Errors = require('../../../app/js/Errors')
+
+describe('GcsPersistorTests', function() {
+  const filename = '/wombat/potato.tex'
+  const bucket = 'womBucket'
+  const key = 'monKey'
+  const destKey = 'donKey'
+  const objectSize = 5555
+  const genericError = new Error('guru meditation error')
+  const filesSize = 33
+  const md5 = 'ffffffff00000000ffffffff00000000'
+  const WriteStream = 'writeStream'
+
+  let Metrics,
+    Logger,
+    Storage,
+    Fs,
+    GcsNotFoundError,
+    Meter,
+    MeteredStream,
+    ReadStream,
+    Stream,
+    GcsBucket,
+    GcsFile,
+    GcsPersistor,
+    FileNotFoundError,
+    Hash,
+    settings,
+    crypto,
+    files
+
+  beforeEach(function() {
+    settings = {
+      filestore: {
+        backend: 'gcs',
+        stores: {
+          user_files: 'user_files'
+        },
+        gcs: {
+          directoryKeyRegex: /^[0-9a-fA-F]{24}\/[0-9a-fA-F]{24}/
+        }
+      }
+    }
+
+    files = [
+      {
+        metadata: { size: 11, md5Hash: '/////wAAAAD/////AAAAAA==' },
+        delete: sinon.stub()
+      },
+      {
+        metadata: { size: 22, md5Hash: '/////wAAAAD/////AAAAAA==' },
+        delete: sinon.stub()
+      }
+    ]
+
+    ReadStream = {
+      pipe: sinon.stub().returns('readStream'),
+      on: sinon
+        .stub()
+        .withArgs('end')
+        .yields(),
+      removeListener: sinon.stub()
+    }
+
+    Stream = {
+      pipeline: sinon.stub().yields()
+    }
+
+    Metrics = {
+      count: sinon.stub()
+    }
+
+    GcsFile = {
+      delete: sinon.stub().resolves(),
+      createReadStream: sinon.stub().returns(ReadStream),
+      getMetadata: sinon.stub().resolves([files[0].metadata]),
+      createWriteStream: sinon.stub().returns(WriteStream),
+      copy: sinon.stub().resolves(),
+      exists: sinon.stub().resolves([true])
+    }
+
+    GcsBucket = {
+      file: sinon.stub().returns(GcsFile),
+      getFiles: sinon.stub().resolves([files])
+    }
+
+    Storage = class {
+      constructor() {
+        this.interceptors = []
+      }
+    }
+    Storage.prototype.bucket = sinon.stub().returns(GcsBucket)
+
+    GcsNotFoundError = new Error('File not found')
+    GcsNotFoundError.code = 404
+
+    Fs = {
+      createReadStream: sinon.stub().returns(ReadStream)
+    }
+
+    FileNotFoundError = new Error('File not found')
+    FileNotFoundError.code = 'ENOENT'
+
+    MeteredStream = {
+      type: 'metered',
+      on: sinon.stub(),
+      bytes: objectSize
+    }
+    MeteredStream.on.withArgs('finish').yields()
+    MeteredStream.on.withArgs('readable').yields()
+    Meter = sinon.stub().returns(MeteredStream)
+
+    Hash = {
+      end: sinon.stub(),
+      read: sinon.stub().returns(md5),
+      setEncoding: sinon.stub()
+    }
+    crypto = {
+      createHash: sinon.stub().returns(Hash)
+    }
+
+    Logger = {
+      warn: sinon.stub()
+    }
+
+    GcsPersistor = SandboxedModule.require(modulePath, {
+      requires: {
+        '@google-cloud/storage': { Storage },
+        'settings-sharelatex': settings,
+        'logger-sharelatex': Logger,
+        'tiny-async-pool': asyncPool,
+        './Errors': Errors,
+        fs: Fs,
+        'stream-meter': Meter,
+        stream: Stream,
+        'metrics-sharelatex': Metrics,
+        crypto
+      },
+      globals: { console, Buffer }
+    })
+  })
+
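// Sketch (illustrative, not part of the test file): the md5 fixture above is
// the hex digest whose base64 form appears as md5Hash in the files fixture,
// because GCS metadata reports MD5 checksums in base64.
const hexToBase64 = hex => Buffer.from(hex, 'hex').toString('base64')
// hexToBase64('ffffffff00000000ffffffff00000000') => '/////wAAAAD/////AAAAAA=='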
+  describe('getFileStream', function() {
+    describe('when called with valid parameters', function() {
+      let stream
+
+      beforeEach(async function() {
+        stream = await GcsPersistor.promises.getFileStream(bucket, key)
+      })
+
+      it('returns a metered stream', function() {
+        expect(stream).to.equal(MeteredStream)
+      })
+
+      it('fetches the right key from the right bucket', function() {
+        expect(Storage.prototype.bucket).to.have.been.calledWith(bucket)
+        expect(GcsBucket.file).to.have.been.calledWith(key)
+        expect(GcsFile.createReadStream).to.have.been.called
+      })
+
+      it('pipes the stream through the meter', function() {
+        expect(Stream.pipeline).to.have.been.calledWith(
+          ReadStream,
+          MeteredStream
+        )
+      })
+
+      it('records an ingress metric', function() {
+        expect(Metrics.count).to.have.been.calledWith('gcs.ingress', objectSize)
+      })
+    })
+
+    describe('when called with a byte range', function() {
+      let stream
+
+      beforeEach(async function() {
+        stream = await GcsPersistor.promises.getFileStream(bucket, key, {
+          start: 5,
+          end: 10
+        })
+      })
+
+      it('returns a metered stream', function() {
+        expect(stream).to.equal(MeteredStream)
+      })
+
+      it('passes the byte range on to GCS', function() {
+        expect(GcsFile.createReadStream).to.have.been.calledWith({
+          start: 5,
+          end: 11 // we increment the end because Google's 'end' is exclusive
+        })
+      })
+    })
+
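// Sketch (illustrative): the mapping asserted above. Callers pass inclusive
// byte offsets; per the test's comment, GCS treats 'end' as exclusive, so the
// persistor is expected to add one before calling createReadStream.
const toGcsRange = opts => ({ start: opts.start, end: opts.end + 1 })
// toGcsRange({ start: 5, end: 10 }) => { start: 5, end: 11 }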
+    describe("when the file doesn't exist", function() {
+      let error, stream
+
+      beforeEach(async function() {
+        ReadStream.on = sinon.stub()
+        ReadStream.on.withArgs('error').yields(GcsNotFoundError)
+        try {
+          stream = await GcsPersistor.promises.getFileStream(bucket, key)
+        } catch (err) {
+          error = err
+        }
+      })
+
+      it('does not return a stream', function() {
+        expect(stream).not.to.exist
+      })
+
+      it('throws a NotFoundError', function() {
+        expect(error).to.be.an.instanceOf(Errors.NotFoundError)
+      })
+
+      it('wraps the error', function() {
+        expect(error.cause).to.exist
+      })
+
+      it('stores the bucket and key in the error', function() {
+        expect(error.info).to.include({ bucketName: bucket, key: key })
+      })
+    })
+
+    describe('when GCS encounters an unknown error', function() {
+      let error, stream
+
+      beforeEach(async function() {
+        ReadStream.on = sinon.stub()
+        ReadStream.on.withArgs('error').yields(genericError)
+        try {
+          stream = await GcsPersistor.promises.getFileStream(bucket, key)
+        } catch (err) {
+          error = err
+        }
+      })
+
+      it('does not return a stream', function() {
+        expect(stream).not.to.exist
+      })
+
+      it('throws a ReadError', function() {
+        expect(error).to.be.an.instanceOf(Errors.ReadError)
+      })
+
+      it('wraps the error', function() {
+        expect(error.cause).to.exist
+      })
+
+      it('stores the bucket and key in the error', function() {
+        expect(error.info).to.include({ bucketName: bucket, key: key })
+      })
+    })
+  })
+
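// Sketch (illustrative only; the module's real wrapping helper is not shown
// in this diff): the not-found tests assert this error surface, with the
// original GCS error kept as `cause` and the request context as `info`.
function wrapNotFound(gcsError, bucketName, key) {
  const error = new Errors.NotFoundError('file not found in GCS')
  error.cause = gcsError
  error.info = { bucketName, key }
  return error
}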
+  describe('getFileSize', function() {
+    describe('when called with valid parameters', function() {
+      let size
+
+      beforeEach(async function() {
+        size = await GcsPersistor.promises.getFileSize(bucket, key)
+      })
+
+      it('should return the object size', function() {
+        expect(size).to.equal(files[0].metadata.size)
+      })
+
+      it('should pass the bucket and key to GCS', function() {
+        expect(Storage.prototype.bucket).to.have.been.calledWith(bucket)
+        expect(GcsBucket.file).to.have.been.calledWith(key)
+        expect(GcsFile.getMetadata).to.have.been.called
+      })
+    })
+
+    describe('when the object is not found', function() {
+      let error
+
+      beforeEach(async function() {
+        GcsFile.getMetadata = sinon.stub().rejects(GcsNotFoundError)
+        try {
+          await GcsPersistor.promises.getFileSize(bucket, key)
+        } catch (err) {
+          error = err
+        }
+      })
+
+      it('should return a NotFoundError', function() {
+        expect(error).to.be.an.instanceOf(Errors.NotFoundError)
+      })
+
+      it('should wrap the error', function() {
+        expect(error.cause).to.equal(GcsNotFoundError)
+      })
+    })
+
+    describe('when GCS returns an error', function() {
+      let error
+
+      beforeEach(async function() {
+        GcsFile.getMetadata = sinon.stub().rejects(genericError)
+        try {
+          await GcsPersistor.promises.getFileSize(bucket, key)
+        } catch (err) {
+          error = err
+        }
+      })
+
+      it('should return a ReadError', function() {
+        expect(error).to.be.an.instanceOf(Errors.ReadError)
+      })
+
+      it('should wrap the error', function() {
+        expect(error.cause).to.equal(genericError)
+      })
+    })
+  })
+
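// Sketch (illustrative): getMetadata() resolves an array, which is why the
// stub above resolves [files[0].metadata]; a size lookup would destructure it.
// `storage` is assumed to be a Storage instance, not taken from the source.
async function getFileSize(bucketName, objectKey) {
  const [metadata] = await storage.bucket(bucketName).file(objectKey).getMetadata()
  return metadata.size
}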
+  describe('sendStream', function() {
+    describe('with valid parameters', function() {
+      beforeEach(async function() {
+        return GcsPersistor.promises.sendStream(bucket, key, ReadStream)
+      })
+
+      it('should upload the stream', function() {
+        expect(Storage.prototype.bucket).to.have.been.calledWith(bucket)
+        expect(GcsBucket.file).to.have.been.calledWith(key)
+        expect(GcsFile.createWriteStream).to.have.been.called
+      })
+
+      it('should not try to create a resumable upload', function() {
+        expect(GcsFile.createWriteStream).to.have.been.calledWith({
+          resumable: false
+        })
+      })
+
+      it('should meter the stream', function() {
+        expect(Stream.pipeline).to.have.been.calledWith(
+          ReadStream,
+          MeteredStream
+        )
+      })
+
+      it('should pipe the metered stream to GCS', function() {
+        expect(Stream.pipeline).to.have.been.calledWith(
+          MeteredStream,
+          WriteStream
+        )
+      })
+
+      it('should record an egress metric', function() {
+        expect(Metrics.count).to.have.been.calledWith('gcs.egress', objectSize)
+      })
+
+      it('calculates the md5 hash of the file', function() {
+        expect(Stream.pipeline).to.have.been.calledWith(ReadStream, Hash)
+      })
+    })
+
+    describe('when a hash is supplied', function() {
+      beforeEach(async function() {
+        return GcsPersistor.promises.sendStream(
+          bucket,
+          key,
+          ReadStream,
+          'aaaaaaaabbbbbbbbaaaaaaaabbbbbbbb'
+        )
+      })
+
+      it('should not calculate the md5 hash of the file', function() {
+        expect(Stream.pipeline).not.to.have.been.calledWith(
+          sinon.match.any,
+          Hash
+        )
+      })
+
+      it('sends the hash in base64', function() {
+        expect(GcsFile.createWriteStream).to.have.been.calledWith({
+          validation: 'md5',
+          metadata: {
+            md5Hash: 'qqqqqru7u7uqqqqqu7u7uw=='
+          },
+          resumable: false
+        })
+      })
+
+      it('does not fetch the md5 hash of the uploaded file', function() {
+        expect(GcsFile.getMetadata).not.to.have.been.called
+      })
+    })
+
+    describe('when the upload fails', function() {
+      let error
+      beforeEach(async function() {
+        Stream.pipeline
+          .withArgs(MeteredStream, WriteStream, sinon.match.any)
+          .yields(genericError)
+        try {
+          await GcsPersistor.promises.sendStream(bucket, key, ReadStream)
+        } catch (err) {
+          error = err
+        }
+      })
+
+      it('throws a WriteError', function() {
+        expect(error).to.be.an.instanceOf(Errors.WriteError)
+      })
+
+      it('wraps the error', function() {
+        expect(error.cause).to.equal(genericError)
+      })
+    })
+  })
+
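// Sketch (illustrative): the write options asserted above. When a hex md5 is
// supplied, it is sent up front in base64 so GCS can validate the upload, and
// resumable uploads are disabled in every case.
function writeOptions(hexMd5) {
  const options = { resumable: false }
  if (hexMd5) {
    options.validation = 'md5'
    options.metadata = { md5Hash: Buffer.from(hexMd5, 'hex').toString('base64') }
  }
  return options
}
// writeOptions('aaaaaaaabbbbbbbbaaaaaaaabbbbbbbb').metadata.md5Hash
//   => 'qqqqqru7u7uqqqqqu7u7uw=='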
+  describe('sendFile', function() {
+    describe('with valid parameters', function() {
+      beforeEach(async function() {
+        return GcsPersistor.promises.sendFile(bucket, key, filename)
+      })
+
+      it('should create a read stream for the file', function() {
+        expect(Fs.createReadStream).to.have.been.calledWith(filename)
+      })
+
+      it('should create a write stream', function() {
+        expect(Storage.prototype.bucket).to.have.been.calledWith(bucket)
+        expect(GcsBucket.file).to.have.been.calledWith(key)
+        expect(GcsFile.createWriteStream).to.have.been.called
+      })
+
+      it('should upload the stream via the meter', function() {
+        expect(Stream.pipeline).to.have.been.calledWith(
+          ReadStream,
+          MeteredStream
+        )
+        expect(Stream.pipeline).to.have.been.calledWith(
+          MeteredStream,
+          WriteStream
+        )
+      })
+    })
+  })
+
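// Sketch (illustrative): the assertions above are consistent with sendFile
// being a thin wrapper that opens a read stream and defers to sendStream;
// the wrapper shown here is an assumption, not the module source.
async function sendFile(bucketName, objectKey, fsPath) {
  return sendStream(bucketName, objectKey, fs.createReadStream(fsPath))
}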
+  describe('copyFile', function() {
+    const destinationFile = 'destFile'
+
+    beforeEach(function() {
+      GcsBucket.file.withArgs(destKey).returns(destinationFile)
+    })
+
+    describe('with valid parameters', function() {
+      beforeEach(async function() {
+        return GcsPersistor.promises.copyFile(bucket, key, destKey)
+      })
+
+      it('should copy the object', function() {
+        expect(Storage.prototype.bucket).to.have.been.calledWith(bucket)
+        expect(GcsBucket.file).to.have.been.calledWith(key)
+        expect(GcsFile.copy).to.have.been.calledWith(destinationFile)
+      })
+    })
+
+    describe('when the file does not exist', function() {
+      let error
+
+      beforeEach(async function() {
+        GcsFile.copy = sinon.stub().rejects(GcsNotFoundError)
+        try {
+          await GcsPersistor.promises.copyFile(bucket, key, destKey)
+        } catch (err) {
+          error = err
+        }
+      })
+
+      it('should throw a NotFoundError', function() {
+        expect(error).to.be.an.instanceOf(Errors.NotFoundError)
+      })
+    })
+  })
+
+  describe('deleteFile', function() {
+    describe('with valid parameters', function() {
+      beforeEach(async function() {
+        return GcsPersistor.promises.deleteFile(bucket, key)
+      })
+
+      it('should delete the object', function() {
+        expect(Storage.prototype.bucket).to.have.been.calledWith(bucket)
+        expect(GcsBucket.file).to.have.been.calledWith(key)
+        expect(GcsFile.delete).to.have.been.called
+      })
+    })
+
+    describe('when the file does not exist', function() {
+      let error
+
+      beforeEach(async function() {
+        GcsFile.delete = sinon.stub().rejects(GcsNotFoundError)
+        try {
+          await GcsPersistor.promises.deleteFile(bucket, key)
+        } catch (err) {
+          error = err
+        }
+      })
+
+      it('should not throw an error', function() {
+        expect(error).not.to.exist
+      })
+    })
+  })
+
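// Sketch (illustrative): a 404 from delete() is swallowed, as asserted above,
// since the object already being absent is the desired end state. `wrapError`
// stands in for the module's real error-wrapping helper.
async function deleteFile(bucketName, objectKey) {
  try {
    await storage.bucket(bucketName).file(objectKey).delete()
  } catch (err) {
    if (err.code !== 404) {
      throw wrapError(err)
    }
  }
}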
+  describe('deleteDirectory', function() {
+    const directoryName = `${ObjectId()}/${ObjectId()}`
+    describe('with valid parameters', function() {
+      beforeEach(async function() {
+        return GcsPersistor.promises.deleteDirectory(bucket, directoryName)
+      })
+
+      it('should list the objects in the directory', function() {
+        expect(Storage.prototype.bucket).to.have.been.calledWith(bucket)
+        expect(GcsBucket.getFiles).to.have.been.calledWith({
+          directory: directoryName
+        })
+      })
+
+      it('should delete the files', function() {
+        expect(GcsFile.delete).to.have.been.calledTwice
+      })
+    })
+
+    describe('when there is an error listing the objects', function() {
+      let error
+
+      beforeEach(async function() {
+        GcsBucket.getFiles = sinon.stub().rejects(genericError)
+        try {
+          await GcsPersistor.promises.deleteDirectory(bucket, directoryName)
+        } catch (err) {
+          error = err
+        }
+      })
+
+      it('should generate a WriteError', function() {
+        expect(error).to.be.an.instanceOf(Errors.WriteError)
+      })
+
+      it('should wrap the error', function() {
+        expect(error.cause).to.equal(genericError)
+      })
+    })
+  })
+
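// Sketch (illustrative): list-then-delete with bounded concurrency via
// tiny-async-pool, which the module requires above; the limit of 10 is an
// assumption, not taken from the source.
async function deleteDirectory(bucketName, directory) {
  const [files] = await storage.bucket(bucketName).getFiles({ directory })
  await asyncPool(10, files, file => file.delete())
}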
+  describe('directorySize', function() {
+    describe('with valid parameters', function() {
+      let size
+
+      beforeEach(async function() {
+        size = await GcsPersistor.promises.directorySize(bucket, key)
+      })
+
+      it('should list the objects in the directory', function() {
+        expect(Storage.prototype.bucket).to.have.been.calledWith(bucket)
+        expect(GcsBucket.getFiles).to.have.been.calledWith({ directory: key })
+      })
+
+      it('should return the directory size', function() {
+        expect(size).to.equal(filesSize)
+      })
+    })
+
+    describe('when there are no files', function() {
+      let size
+
+      beforeEach(async function() {
+        GcsBucket.getFiles.resolves([[]])
+        size = await GcsPersistor.promises.directorySize(bucket, key)
+      })
+
+      it('should list the objects in the directory', function() {
+        expect(Storage.prototype.bucket).to.have.been.calledWith(bucket)
+        expect(GcsBucket.getFiles).to.have.been.calledWith({ directory: key })
+      })
+
+      it('should return zero', function() {
+        expect(size).to.equal(0)
+      })
+    })
+
+    describe('when there is an error listing the objects', function() {
+      let error
+
+      beforeEach(async function() {
+        GcsBucket.getFiles.rejects(genericError)
+        try {
+          await GcsPersistor.promises.directorySize(bucket, key)
+        } catch (err) {
+          error = err
+        }
+      })
+
+      it('should generate a ReadError', function() {
+        expect(error).to.be.an.instanceOf(Errors.ReadError)
+      })
+
+      it('should wrap the error', function() {
+        expect(error.cause).to.equal(genericError)
+      })
+    })
+  })
+
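// Sketch (illustrative): directory size as the sum of the listed objects'
// metadata sizes; with the fixtures above, 11 + 22 = 33 (filesSize).
async function directorySize(bucketName, directory) {
  const [files] = await storage.bucket(bucketName).getFiles({ directory })
  return files.reduce((sum, file) => sum + file.metadata.size, 0)
}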
+  describe('checkIfFileExists', function() {
+    describe('when the file exists', function() {
+      let exists
+
+      beforeEach(async function() {
+        exists = await GcsPersistor.promises.checkIfFileExists(bucket, key)
+      })
+
+      it('should ask the file if it exists', function() {
+        expect(Storage.prototype.bucket).to.have.been.calledWith(bucket)
+        expect(GcsBucket.file).to.have.been.calledWith(key)
+        expect(GcsFile.exists).to.have.been.called
+      })
+
+      it('should return that the file exists', function() {
+        expect(exists).to.equal(true)
+      })
+    })
+
+    describe('when the file does not exist', function() {
+      let exists
+
+      beforeEach(async function() {
+        GcsFile.exists = sinon.stub().resolves([false])
+        exists = await GcsPersistor.promises.checkIfFileExists(bucket, key)
+      })
+
+      it('should get the object header', function() {
+        expect(Storage.prototype.bucket).to.have.been.calledWith(bucket)
+        expect(GcsBucket.file).to.have.been.calledWith(key)
+        expect(GcsFile.exists).to.have.been.called
+      })
+
+      it('should return that the file does not exist', function() {
+        expect(exists).to.equal(false)
+      })
+    })
+
+    describe('when there is an error', function() {
+      let error
+
+      beforeEach(async function() {
+        GcsFile.exists = sinon.stub().rejects(genericError)
+        try {
+          await GcsPersistor.promises.checkIfFileExists(bucket, key)
+        } catch (err) {
+          error = err
+        }
+      })
+
+      it('should generate a ReadError', function() {
+        expect(error).to.be.an.instanceOf(Errors.ReadError)
+      })
+
+      it('should wrap the error', function() {
+        expect(error.cause).to.equal(genericError)
+      })
+    })
+  })
+})
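// Sketch (illustrative): the @google-cloud/storage client resolves exists()
// with a one-element array, which is why the stubs above resolve [true] and
// [false]; a lookup would destructure the result.
async function checkIfFileExists(bucketName, objectKey) {
  const [exists] = await storage.bucket(bucketName).file(objectKey).exists()
  return exists
}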
@@ -583,48 +583,6 @@ describe('S3PersistorTests', function() {
         })
       })
     })
 
-    describe('when the file does not exist', function() {
-      let error
-
-      beforeEach(async function() {
-        Fs.createReadStream = sinon.stub().throws(FileNotFoundError)
-        try {
-          await S3Persistor.promises.sendFile(bucket, key, filename)
-        } catch (err) {
-          error = err
-        }
-      })
-
-      it('returns a NotFoundError', function() {
-        expect(error).to.be.an.instanceOf(Errors.NotFoundError)
-      })
-
-      it('wraps the error', function() {
-        expect(error.cause).to.equal(FileNotFoundError)
-      })
-    })
-
-    describe('when reading the file throws an error', function() {
-      let error
-
-      beforeEach(async function() {
-        Fs.createReadStream = sinon.stub().throws(genericError)
-        try {
-          await S3Persistor.promises.sendFile(bucket, key, filename)
-        } catch (err) {
-          error = err
-        }
-      })
-
-      it('returns a ReadError', function() {
-        expect(error).to.be.an.instanceOf(Errors.ReadError)
-      })
-
-      it('wraps the error', function() {
-        expect(error.cause).to.equal(genericError)
-      })
-    })
   })
 
   describe('copyFile', function() {
@@ -675,25 +633,6 @@ describe('S3PersistorTests', function() {
         })
       })
     })
 
-    describe('when the file does not exist', function() {
-      let error
-
-      beforeEach(async function() {
-        S3Client.deleteObject = sinon.stub().returns({
-          promise: sinon.stub().rejects(S3NotFoundError)
-        })
-        try {
-          await S3Persistor.promises.deleteFile(bucket, key)
-        } catch (err) {
-          error = err
-        }
-      })
-
-      it('should throw a NotFoundError', function() {
-        expect(error).to.be.an.instanceOf(Errors.NotFoundError)
-      })
-    })
   })
 
   describe('deleteDirectory', function() {