Merge pull request #106 from overleaf/spd-single-pipe

Use single pipeline when calculating md5

Commit 345696b067
8 changed files with 111 additions and 140 deletions
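The shape of the change, sketched from the diffs below (this summary is editorial, not part of the original commit message): instead of attaching a stream-meter and a separate md5 pipeline to the same source stream, each persistor now routes the data through a single ObserverStream Transform that counts bytes and, when asked, hashes them on the way past.

// Before: several consumers attached to the same source
//   const hashPromise = PersistorHelper.calculateStreamMd5(readStream)          // md5 pipeline
//   const meteredStream = PersistorHelper.getMeteredStream(readStream, metric)  // meter pipeline
//   await pipeline(meteredStream, uploadStream)                                 // upload
//
// After: one Transform observes everything inside a single pipeline
//   const observer = new PersistorHelper.ObserverStream({ metric, hash: 'md5' })
//   await pipeline(readStream, observer, uploadStream)
//   const md5 = observer.getHash()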
services/filestore/app/js/GcsPersistor.js

@@ -65,17 +65,15 @@ async function sendFile(bucketName, key, fsPath) {
 async function sendStream(bucketName, key, readStream, sourceMd5) {
   try {
-    let hashPromise
+    // egress from us to gcs
+    const observeOptions = { metric: 'gcs.egress' }
 
-    // if there is no supplied md5 hash, we calculate the hash as the data passes through
     if (!sourceMd5) {
-      hashPromise = PersistorHelper.calculateStreamMd5(readStream)
+      // if there is no supplied md5 hash, we calculate the hash as the data passes through
+      observeOptions.hash = 'md5'
     }
 
-    const meteredStream = PersistorHelper.getMeteredStream(
-      readStream,
-      'gcs.egress' // egress from us to gcs
-    )
+    const observer = new PersistorHelper.ObserverStream(observeOptions)
 
     const writeOptions = {
       // disabling of resumable uploads is recommended by Google:
@@ -94,12 +92,12 @@ async function sendStream(bucketName, key, readStream, sourceMd5) {
       .file(key)
       .createWriteStream(writeOptions)
 
-    await pipeline(meteredStream, uploadStream)
+    await pipeline(readStream, observer, uploadStream)
 
     // if we didn't have an md5 hash, we should compare our computed one with Google's
     // as we couldn't tell GCS about it beforehand
-    if (hashPromise) {
-      sourceMd5 = await hashPromise
+    if (!sourceMd5) {
+      sourceMd5 = observer.getHash()
       // throws on mismatch
       await PersistorHelper.verifyMd5(GcsPersistor, bucketName, key, sourceMd5)
     }
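A note on ordering (a minimal sketch using the names from this diff; ObserverStream itself is defined in PersistorHelper.js below): getHash() ends in hash.digest('hex'), which may only be called once and only gives the full digest after every chunk has passed, so the hash is read strictly after the awaited pipeline resolves.

const observer = new PersistorHelper.ObserverStream({
  metric: 'gcs.egress',
  hash: 'md5'
})
await pipeline(readStream, observer, uploadStream) // all bytes have flowed
const md5 = observer.getHash() // hex digest of everything that passed through
await PersistorHelper.verifyMd5(GcsPersistor, bucketName, key, md5) // throws on mismatch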
@@ -124,14 +122,15 @@ async function getFileStream(bucketName, key, _opts = {}) {
       .file(key)
       .createReadStream(opts)
 
-    const meteredStream = PersistorHelper.getMeteredStream(
-      stream,
-      'gcs.ingress' // ingress to us from gcs
-    )
+    // ingress to us from gcs
+    const observer = new PersistorHelper.ObserverStream({
+      metric: 'gcs.ingress'
+    })
+    pipeline(stream, observer)
 
     try {
       await PersistorHelper.waitForStreamReady(stream)
-      return meteredStream
+      return observer
     } catch (err) {
       throw PersistorHelper.wrapError(
         err,
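Here pipeline(stream, observer) is deliberately not awaited: the observer itself is returned as the readable, and bytes only flow (and count toward gcs.ingress) as the caller consumes it. A hypothetical caller, assuming a promisified wrapper in the style of the GcsPersistor.promises.sendStream used by the tests below:

const fs = require('fs')
// fileStream is the ObserverStream; reading from it pulls data through
// the un-awaited pipeline inside getFileStream
const fileStream = await GcsPersistor.promises.getFileStream(bucket, key)
await pipeline(fileStream, fs.createWriteStream('/tmp/copy')) // illustrative sink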
services/filestore/app/js/PersistorHelper.js

@@ -1,17 +1,54 @@
 const crypto = require('crypto')
-const metrics = require('metrics-sharelatex')
-const meter = require('stream-meter')
 const Stream = require('stream')
 const logger = require('logger-sharelatex')
+const metrics = require('metrics-sharelatex')
 const { WriteError, ReadError, NotFoundError } = require('./Errors')
 const { promisify } = require('util')
 
 const pipeline = promisify(Stream.pipeline)
 
+// Observes data that passes through and computes some metadata for it
+// - specifically, it computes the number of bytes transferred, and optionally
+//   computes a cryptographic hash based on the 'hash' option. e.g., pass
+//   { hash: 'md5' } to compute the md5 hash of the stream
+// - if 'metric' is supplied as an option, this metric will be incremented by
+//   the number of bytes transferred
+class ObserverStream extends Stream.Transform {
+  constructor(options) {
+    super(options)
+
+    this.bytes = 0
+
+    if (options.hash) {
+      this.hash = crypto.createHash(options.hash)
+    }
+    if (options.metric) {
+      const onEnd = () => {
+        metrics.count(options.metric, this.bytes)
+      }
+      this.once('error', onEnd)
+      this.once('end', onEnd)
+    }
+  }
+
+  _transform(chunk, encoding, done) {
+    if (this.hash) {
+      this.hash.update(chunk)
+    }
+    this.bytes += chunk.length
+    this.push(chunk)
+    done()
+  }
+
+  getHash() {
+    return this.hash && this.hash.digest('hex')
+  }
+}
+
 module.exports = {
+  ObserverStream,
   calculateStreamMd5,
   verifyMd5,
-  getMeteredStream,
   waitForStreamReady,
   wrapError,
   hexToBase64,
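A self-contained sketch of the pattern the new class implements, trimmed to Node core modules only (the real ObserverStream above also flushes a byte-count metric via metrics-sharelatex on 'end' and 'error'); runnable on Node 12+:

const crypto = require('crypto')
const Stream = require('stream')
const { promisify } = require('util')

const pipeline = promisify(Stream.pipeline)

// Same shape as ObserverStream above, minus the metrics hook
class HashingObserver extends Stream.Transform {
  constructor(options) {
    super(options)
    this.bytes = 0
    if (options.hash) {
      this.hash = crypto.createHash(options.hash)
    }
  }

  _transform(chunk, encoding, done) {
    if (this.hash) {
      this.hash.update(chunk)
    }
    this.bytes += chunk.length
    this.push(chunk)
    done()
  }

  getHash() {
    return this.hash && this.hash.digest('hex')
  }
}

async function demo() {
  const source = Stream.Readable.from(['hello ', 'world'])
  // a throwaway sink standing in for an upload stream
  const sink = new Stream.Writable({
    write(chunk, encoding, callback) {
      callback()
    }
  })
  const observer = new HashingObserver({ hash: 'md5' })
  await pipeline(source, observer, sink)
  console.log(observer.bytes) // 11
  console.log(observer.getHash()) // '5eb63bbbe01eeed093cb22bb8f5acdc3'
}

demo().catch(console.error)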
@@ -19,6 +56,7 @@ module.exports = {
 }
 
 // returns a promise which resolves with the md5 hash of the stream
+// - consumes the stream
 function calculateStreamMd5(stream) {
   const hash = crypto.createHash('md5')
   hash.setEncoding('hex')
@@ -53,23 +91,6 @@ async function verifyMd5(persistor, bucket, key, sourceMd5, destMd5 = null) {
   }
 }
 
-// returns the next stream in the pipeline, and calls the callback with the byte count
-// when the stream finishes or receives an error
-function getMeteredStream(stream, metricName) {
-  const meteredStream = meter()
-
-  pipeline(stream, meteredStream)
-    .then(() => {
-      metrics.count(metricName, meteredStream.bytes)
-    })
-    .catch(() => {
-      // on error, just send how many bytes we received before the stream stopped
-      metrics.count(metricName, meteredStream.bytes)
-    })
-
-  return meteredStream
-}
-
 // resolves when a stream is 'readable', or rejects if the stream throws an error
 // before that happens - this lets us handle protocol-level errors before trying
 // to read them
services/filestore/app/js/S3Persistor.js

@@ -10,13 +10,15 @@ const PersistorHelper = require('./PersistorHelper')
 const fs = require('fs')
 const S3 = require('aws-sdk/clients/s3')
 const { URL } = require('url')
-const { callbackify } = require('util')
+const Stream = require('stream')
+const { promisify, callbackify } = require('util')
 const {
   WriteError,
   ReadError,
   NotFoundError,
   SettingsError
 } = require('./Errors')
+const pipeline = promisify(Stream.pipeline)
 
 const S3Persistor = {
   sendFile: callbackify(sendFile),
@@ -51,26 +53,25 @@ async function sendFile(bucketName, key, fsPath) {
 
 async function sendStream(bucketName, key, readStream, sourceMd5) {
   try {
-    // if there is no supplied md5 hash, we calculate the hash as the data passes through
-    let hashPromise
+    // egress from us to S3
+    const observeOptions = { metric: 's3.egress' }
     let b64Hash
 
     if (sourceMd5) {
       b64Hash = PersistorHelper.hexToBase64(sourceMd5)
     } else {
-      hashPromise = PersistorHelper.calculateStreamMd5(readStream)
+      // if there is no supplied md5 hash, we calculate the hash as the data passes through
+      observeOptions.hash = 'md5'
     }
 
-    const meteredStream = PersistorHelper.getMeteredStream(
-      readStream,
-      's3.egress' // egress from us to s3
-    )
+    const observer = new PersistorHelper.ObserverStream(observeOptions)
+    pipeline(readStream, observer)
 
     // if we have an md5 hash, pass this to S3 to verify the upload
     const uploadOptions = {
       Bucket: bucketName,
       Key: key,
-      Body: meteredStream
+      Body: observer
     }
     if (b64Hash) {
       uploadOptions.ContentMD5 = b64Hash
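Unlike the GCS path, the pipeline here is not awaited: aws-sdk's upload() takes the observer as the request Body and consumes it itself, so data flows once the SDK starts reading. Roughly (the s3Client name and .promise() call are assumptions about the surrounding aws-sdk v2 code, which this diff does not show):

const observer = new PersistorHelper.ObserverStream(observeOptions)
pipeline(readStream, observer) // not awaited; upload() pulls from observer
await s3Client
  .upload({ Bucket: bucketName, Key: key, Body: observer })
  .promise()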
@@ -92,8 +93,8 @@ async function sendStream(bucketName, key, readStream, sourceMd5) {
 
     // if we didn't have an md5 hash, we should compare our computed one with S3's
     // as we couldn't tell S3 about it beforehand
-    if (hashPromise) {
-      sourceMd5 = await hashPromise
+    if (!sourceMd5) {
+      sourceMd5 = observer.getHash()
       // throws on mismatch
       await PersistorHelper.verifyMd5(
         S3Persistor,
@@ -128,14 +129,13 @@ async function getFileStream(bucketName, key, opts) {
       .getObject(params)
       .createReadStream()
 
-    const meteredStream = PersistorHelper.getMeteredStream(
-      stream,
-      's3.ingress' // ingress to us from s3
-    )
+    // ingress from S3 to us
+    const observer = new PersistorHelper.ObserverStream({ metric: 's3.ingress' })
+    pipeline(stream, observer)
 
     try {
       await PersistorHelper.waitForStreamReady(stream)
-      return meteredStream
+      return observer
     } catch (err) {
       throw PersistorHelper.wrapError(
         err,
services/filestore/package-lock.json (generated, 8 lines changed)
@@ -5382,14 +5382,6 @@
         "stubs": "^3.0.0"
       }
     },
-    "stream-meter": {
-      "version": "1.0.4",
-      "resolved": "https://registry.npmjs.org/stream-meter/-/stream-meter-1.0.4.tgz",
-      "integrity": "sha512-4sOEtrbgFotXwnEuzzsQBYEV1elAeFSO8rSGeTwabuX1RRn/kEq9JVH7I0MRBhKVRR0sJkr0M0QCH7yOLf9fhQ==",
-      "requires": {
-        "readable-stream": "^2.1.4"
-      }
-    },
     "stream-shift": {
       "version": "1.0.1",
       "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.1.tgz",
services/filestore/package.json

@@ -35,7 +35,6 @@
     "request-promise-native": "^1.0.8",
     "settings-sharelatex": "^1.1.0",
     "stream-buffers": "~0.2.5",
-    "stream-meter": "^1.0.4",
     "tiny-async-pool": "^1.1.0"
   },
   "devDependencies": {
services/filestore/test/unit/js/FSPersistorTests.js

@@ -3,6 +3,7 @@ const chai = require('chai')
 const { expect } = chai
 const SandboxedModule = require('sandboxed-module')
 const Errors = require('../../../app/js/Errors')
+const StreamModule = require('stream')
 
 chai.use(require('sinon-chai'))
 chai.use(require('chai-as-promised'))

@@ -38,7 +39,10 @@ describe('FSPersistorTests', function() {
       stat: sinon.stub().yields(null, stat)
     }
     glob = sinon.stub().yields(null, globs)
-    stream = { pipeline: sinon.stub().yields() }
+    stream = {
+      pipeline: sinon.stub().yields(),
+      Transform: StreamModule.Transform
+    }
     LocalFileWriter = {
       promises: {
         writeStream: sinon.stub().resolves(tempFile),
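Why the stubbed 'stream' module gains a real Transform: PersistorHelper now runs class ObserverStream extends Stream.Transform at require time, so the sandboxed module must supply a genuine constructor, and since the observer is an anonymous instance created inside the persistor, tests match it by type instead of identity. An illustrative assertion (sourceStream is a placeholder; the concrete versions appear in the GCS and S3 test diffs below):

stream = {
  pipeline: sinon.stub().yields(),
  Transform: StreamModule.Transform // real class, so `extends` and instanceOf work
}
expect(stream.pipeline).to.have.been.calledWith(
  sourceStream, // placeholder for the test's stubbed source stream
  sinon.match.instanceOf(StreamModule.Transform)
)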
@@ -48,6 +52,7 @@ describe('FSPersistorTests', function() {
     Hash = {
       end: sinon.stub(),
       read: sinon.stub().returns(md5),
+      digest: sinon.stub().returns(md5),
       setEncoding: sinon.stub()
     }
     crypto = {
@@ -62,7 +67,6 @@ describe('FSPersistorTests', function() {
         stream,
         crypto,
         // imported by PersistorHelper but otherwise unused here
-        'stream-meter': {},
         'logger-sharelatex': {},
         'metrics-sharelatex': {}
       },
services/filestore/test/unit/js/GcsPersistorTests.js

@@ -5,6 +5,7 @@ const modulePath = '../../../app/js/GcsPersistor.js'
 const SandboxedModule = require('sandboxed-module')
 const { ObjectId } = require('mongodb')
 const asyncPool = require('tiny-async-pool')
+const StreamModule = require('stream')
 
 const Errors = require('../../../app/js/Errors')
 

@@ -13,7 +14,6 @@ describe('GcsPersistorTests', function() {
   const bucket = 'womBucket'
   const key = 'monKey'
   const destKey = 'donKey'
-  const objectSize = 5555
   const genericError = new Error('guru meditation error')
   const filesSize = 33
   const md5 = 'ffffffff00000000ffffffff00000000'

@@ -24,8 +24,6 @@ describe('GcsPersistorTests', function() {
     Storage,
     Fs,
     GcsNotFoundError,
-    Meter,
-    MeteredStream,
     ReadStream,
     Stream,
     GcsBucket,
@@ -71,7 +69,8 @@ describe('GcsPersistorTests', function() {
     }
 
     Stream = {
-      pipeline: sinon.stub().yields()
+      pipeline: sinon.stub().yields(),
+      Transform: StreamModule.Transform
     }
 
     Metrics = {

@@ -109,18 +108,10 @@ describe('GcsPersistorTests', function() {
     FileNotFoundError = new Error('File not found')
     FileNotFoundError.code = 'ENOENT'
 
-    MeteredStream = {
-      type: 'metered',
-      on: sinon.stub(),
-      bytes: objectSize
-    }
-    MeteredStream.on.withArgs('finish').yields()
-    MeteredStream.on.withArgs('readable').yields()
-    Meter = sinon.stub().returns(MeteredStream)
-
     Hash = {
       end: sinon.stub(),
       read: sinon.stub().returns(md5),
+      digest: sinon.stub().returns(md5),
       setEncoding: sinon.stub()
     }
     crypto = {
@@ -139,7 +130,6 @@ describe('GcsPersistorTests', function() {
       'tiny-async-pool': asyncPool,
       './Errors': Errors,
       fs: Fs,
-      'stream-meter': Meter,
       stream: Stream,
       'metrics-sharelatex': Metrics,
       crypto

@@ -157,7 +147,7 @@ describe('GcsPersistorTests', function() {
     })
 
     it('returns a metered stream', function() {
-      expect(stream).to.equal(MeteredStream)
+      expect(stream).to.be.instanceOf(StreamModule.Transform)
     })
 
     it('fetches the right key from the right bucket', function() {
@@ -169,13 +159,9 @@ describe('GcsPersistorTests', function() {
     it('pipes the stream through the meter', function() {
       expect(Stream.pipeline).to.have.been.calledWith(
         ReadStream,
-        MeteredStream
+        sinon.match.instanceOf(StreamModule.Transform)
       )
     })
-
-    it('records an ingress metric', function() {
-      expect(Metrics.count).to.have.been.calledWith('gcs.ingress', objectSize)
-    })
   })
 
   describe('when called with a byte range', function() {

@@ -189,7 +175,7 @@ describe('GcsPersistorTests', function() {
     })
 
     it('returns a metered stream', function() {
-      expect(stream).to.equal(MeteredStream)
+      expect(stream).to.be.instanceOf(StreamModule.Transform)
    })
 
     it('passes the byte range on to GCS', function() {
@@ -341,26 +327,16 @@ describe('GcsPersistorTests', function() {
       })
     })
 
-    it('should meter the stream', function() {
+    it('should meter the stream and pass it to GCS', function() {
       expect(Stream.pipeline).to.have.been.calledWith(
         ReadStream,
-        MeteredStream
-      )
-    })
-
-    it('should pipe the metered stream to GCS', function() {
-      expect(Stream.pipeline).to.have.been.calledWith(
-        MeteredStream,
+        sinon.match.instanceOf(StreamModule.Transform),
         WriteStream
       )
     })
 
-    it('should record an egress metric', function() {
-      expect(Metrics.count).to.have.been.calledWith('gcs.egress', objectSize)
-    })
-
     it('calculates the md5 hash of the file', function() {
-      expect(Stream.pipeline).to.have.been.calledWith(ReadStream, Hash)
+      expect(Hash.digest).to.have.been.called
     })
   })
@@ -375,10 +351,7 @@ describe('GcsPersistorTests', function() {
     })
 
     it('should not calculate the md5 hash of the file', function() {
-      expect(Stream.pipeline).not.to.have.been.calledWith(
-        sinon.match.any,
-        Hash
-      )
+      expect(Hash.digest).not.to.have.been.called
     })
 
     it('sends the hash in base64', function() {
|
||||||
let error
|
let error
|
||||||
beforeEach(async function() {
|
beforeEach(async function() {
|
||||||
Stream.pipeline
|
Stream.pipeline
|
||||||
.withArgs(MeteredStream, WriteStream, sinon.match.any)
|
.withArgs(
|
||||||
|
ReadStream,
|
||||||
|
sinon.match.instanceOf(StreamModule.Transform),
|
||||||
|
WriteStream,
|
||||||
|
sinon.match.any
|
||||||
|
)
|
||||||
.yields(genericError)
|
.yields(genericError)
|
||||||
try {
|
try {
|
||||||
await GcsPersistor.promises.sendStream(bucket, key, ReadStream)
|
await GcsPersistor.promises.sendStream(bucket, key, ReadStream)
|
||||||
|
@ -438,10 +416,7 @@ describe('GcsPersistorTests', function() {
|
||||||
it('should upload the stream via the meter', function() {
|
it('should upload the stream via the meter', function() {
|
||||||
expect(Stream.pipeline).to.have.been.calledWith(
|
expect(Stream.pipeline).to.have.been.calledWith(
|
||||||
ReadStream,
|
ReadStream,
|
||||||
MeteredStream
|
sinon.match.instanceOf(StreamModule.Transform),
|
||||||
)
|
|
||||||
expect(Stream.pipeline).to.have.been.calledWith(
|
|
||||||
MeteredStream,
|
|
||||||
WriteStream
|
WriteStream
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
|
|
|
services/filestore/test/unit/js/S3PersistorTests.js

@@ -3,6 +3,7 @@ const chai = require('chai')
 const { expect } = chai
 const modulePath = '../../../app/js/S3Persistor.js'
 const SandboxedModule = require('sandboxed-module')
+const StreamModule = require('stream')
 
 const Errors = require('../../../app/js/Errors')
 
|
@ -32,8 +33,6 @@ describe('S3PersistorTests', function() {
|
||||||
Logger,
|
Logger,
|
||||||
S3,
|
S3,
|
||||||
Fs,
|
Fs,
|
||||||
Meter,
|
|
||||||
MeteredStream,
|
|
||||||
ReadStream,
|
ReadStream,
|
||||||
Stream,
|
Stream,
|
||||||
S3Persistor,
|
S3Persistor,
|
||||||
|
@@ -63,7 +62,8 @@ describe('S3PersistorTests', function() {
     }
 
     Stream = {
-      pipeline: sinon.stub().yields()
+      pipeline: sinon.stub().yields(),
+      Transform: StreamModule.Transform
     }
 
     EmptyPromise = {
@@ -89,14 +89,6 @@ describe('S3PersistorTests', function() {
       createReadStream: sinon.stub().returns(ReadStream)
     }
 
-    MeteredStream = {
-      type: 'metered',
-      on: sinon.stub(),
-      bytes: objectSize
-    }
-    MeteredStream.on.withArgs('finish').yields()
-    Meter = sinon.stub().returns(MeteredStream)
-
     S3NotFoundError = new Error('not found')
     S3NotFoundError.code = 'NoSuchKey'
 
@@ -136,6 +128,7 @@ describe('S3PersistorTests', function() {
     Hash = {
       end: sinon.stub(),
       read: sinon.stub().returns(md5),
+      digest: sinon.stub().returns(md5),
       setEncoding: sinon.stub()
     }
     crypto = {
@@ -153,7 +146,6 @@ describe('S3PersistorTests', function() {
       'logger-sharelatex': Logger,
       './Errors': Errors,
       fs: Fs,
-      'stream-meter': Meter,
       stream: Stream,
       'metrics-sharelatex': Metrics,
       crypto
@@ -171,7 +163,7 @@ describe('S3PersistorTests', function() {
     })
 
     it('returns a metered stream', function() {
-      expect(stream).to.equal(MeteredStream)
+      expect(stream).to.be.instanceOf(StreamModule.Transform)
     })
 
     it('sets the AWS client up with credentials from settings', function() {
@@ -188,13 +180,9 @@ describe('S3PersistorTests', function() {
     it('pipes the stream through the meter', function() {
       expect(Stream.pipeline).to.have.been.calledWith(
         S3ReadStream,
-        MeteredStream
+        sinon.match.instanceOf(StreamModule.Transform)
      )
     })
-
-    it('records an ingress metric', function() {
-      expect(Metrics.count).to.have.been.calledWith('s3.ingress', objectSize)
-    })
   })
 
   describe('when called with a byte range', function() {
@@ -208,7 +196,7 @@ describe('S3PersistorTests', function() {
     })
 
     it('returns a metered stream', function() {
-      expect(stream).to.equal(MeteredStream)
+      expect(stream).to.be.instanceOf(Stream.Transform)
     })
 
     it('passes the byte range on to S3', function() {
@@ -242,7 +230,7 @@ describe('S3PersistorTests', function() {
     })
 
     it('returns a metered stream', function() {
-      expect(stream).to.equal(MeteredStream)
+      expect(stream).to.be.instanceOf(Stream.Transform)
     })
 
     it('sets the AWS client up with the alternative credentials', function() {
@@ -457,7 +445,7 @@ describe('S3PersistorTests', function() {
       expect(S3Client.upload).to.have.been.calledWith({
         Bucket: bucket,
         Key: key,
-        Body: MeteredStream
+        Body: sinon.match.instanceOf(Stream.Transform)
       })
     })
 
@@ -470,16 +458,12 @@ describe('S3PersistorTests', function() {
       it('should meter the stream', function() {
         expect(Stream.pipeline).to.have.been.calledWith(
           ReadStream,
-          MeteredStream
+          sinon.match.instanceOf(Stream.Transform)
         )
       })
 
-      it('should record an egress metric', function() {
-        expect(Metrics.count).to.have.been.calledWith('s3.egress', objectSize)
-      })
-
       it('calculates the md5 hash of the file', function() {
-        expect(Stream.pipeline).to.have.been.calledWith(ReadStream, Hash)
+        expect(Hash.digest).to.have.been.called
       })
     })
 
@@ -494,17 +478,14 @@ describe('S3PersistorTests', function() {
       })
 
       it('should not calculate the md5 hash of the file', function() {
-        expect(Stream.pipeline).not.to.have.been.calledWith(
-          sinon.match.any,
-          Hash
-        )
+        expect(Hash.digest).not.to.have.been.called
       })
 
       it('sends the hash in base64', function() {
         expect(S3Client.upload).to.have.been.calledWith({
           Bucket: bucket,
           Key: key,
-          Body: MeteredStream,
+          Body: sinon.match.instanceOf(StreamModule.Transform),
           ContentMD5: 'qqqqqru7u7uqqqqqu7u7uw=='
         })
       })
@@ -555,12 +536,12 @@ describe('S3PersistorTests', function() {
       it('should meter the download', function() {
         expect(Stream.pipeline).to.have.been.calledWith(
           S3ReadStream,
-          MeteredStream
+          sinon.match.instanceOf(Stream.Transform)
         )
       })
 
       it('should calculate the md5 hash from the file', function() {
-        expect(Stream.pipeline).to.have.been.calledWith(MeteredStream, Hash)
+        expect(Hash.digest).to.have.been.called
       })
     })
   })
@@ -579,7 +560,7 @@ describe('S3PersistorTests', function() {
       expect(S3Client.upload).to.have.been.calledWith({
         Bucket: bucket,
         Key: key,
-        Body: MeteredStream
+        Body: sinon.match.instanceOf(StreamModule.Transform)
       })
     })
   })