From c59a3db4e8fb21e13b53a885fc469187f3e5c819 Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Thu, 2 May 2019 02:04:59 +0200 Subject: [PATCH 01/20] [FSPersistorManager] fix the stream opening for node10+ Attaching a `readable` listener causes the stream to hang otherwise. Signed-off-by: Jakob Ackermann --- .../app/coffee/FSPersistorManager.coffee | 22 ++++++------- .../coffee/FSPersistorManagerTests.coffee | 33 +++++++++---------- 2 files changed, 25 insertions(+), 30 deletions(-) diff --git a/services/filestore/app/coffee/FSPersistorManager.coffee b/services/filestore/app/coffee/FSPersistorManager.coffee index 353e2ef099..7e2875fc28 100644 --- a/services/filestore/app/coffee/FSPersistorManager.coffee +++ b/services/filestore/app/coffee/FSPersistorManager.coffee @@ -41,20 +41,18 @@ module.exports = callback(err) # opts may be {start: Number, end: Number} - getFileStream: (location, name, opts, _callback = (err, res)->) -> - callback = _.once _callback + getFileStream: (location, name, opts, callback = (err, res)->) -> filteredName = filterName name logger.log location:location, name:filteredName, "getting file" - sourceStream = fs.createReadStream "#{location}/#{filteredName}", opts - sourceStream.on 'error', (err) -> - logger.err err:err, location:location, name:name, "Error reading from file" - if err.code == 'ENOENT' - return callback new Errors.NotFoundError(err.message), null - else - return callback err, null - sourceStream.on 'readable', () -> - # This can be called multiple times, but the callback wrapper - # ensures the callback is only called once + fs.open "#{location}/#{filteredName}", 'r', (err, fd) -> + if err? + logger.err err:err, location:location, name:name, "Error reading from file" + if err.code == 'ENOENT' + return callback new Errors.NotFoundError(err.message), null + else + return callback err, null + opts.fd = fd + sourceStream = fs.createReadStream null, opts return callback null, sourceStream diff --git a/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee b/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee index 7df7adfd8b..4cfde16f70 100644 --- a/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee @@ -19,6 +19,7 @@ describe "FSPersistorManagerTests", -> rmdir:sinon.stub() exists:sinon.stub() readdir:sinon.stub() + open:sinon.stub() openSync:sinon.stub() fstatSync:sinon.stub() closeSync:sinon.stub() @@ -103,20 +104,21 @@ describe "FSPersistorManagerTests", -> @opts = {} it "should use correct file location", (done) -> - @Fs.createReadStream.returns({on: ->}) @FSPersistorManager.getFileStream @location, @name1, @opts, (err,res) => - @Fs.createReadStream.calledWith("#{@location}/#{@name1Filtered}").should.equal true + @Fs.open.calledWith("#{@location}/#{@name1Filtered}").should.equal true done() describe "with start and end options", -> beforeEach -> - @opts = {start: 0, end: 8} + @fd = 2019 + @opts_in = {start: 0, end: 8} + @opts = {start: 0, end: 8, fd: @fd} + @Fs.open.callsArgWith(2, null, @fd) it 'should pass the options to createReadStream', (done) -> - @Fs.createReadStream.returns({on: ->}) - @FSPersistorManager.getFileStream @location, @name1, @opts, (err,res)=> - @Fs.createReadStream.calledWith("#{@location}/#{@name1Filtered}", @opts).should.equal true + @FSPersistorManager.getFileStream @location, @name1, @opts_in, (err,res)=> + @Fs.createReadStream.calledWith(null, @opts).should.equal true done() describe "error conditions", -> @@ 
-125,12 +127,10 @@ describe "FSPersistorManagerTests", -> beforeEach -> @fakeCode = 'ENOENT' - @Fs.createReadStream.returns( - on: (key, callback) => - err = new Error() - err.code = @fakeCode - callback(err, null) - ) + err = new Error() + err.code = @fakeCode + @Fs.open.callsArgWith(2, err, null) + it "should give a NotFoundError", (done) -> @FSPersistorManager.getFileStream @location, @name1, @opts, (err,res)=> expect(res).to.equal null @@ -142,12 +142,9 @@ describe "FSPersistorManagerTests", -> beforeEach -> @fakeCode = 'SOMETHINGHORRIBLE' - @Fs.createReadStream.returns( - on: (key, callback) => - err = new Error() - err.code = @fakeCode - callback(err, null) - ) + err = new Error() + err.code = @fakeCode + @Fs.open.callsArgWith(2, err, null) it "should give an Error", (done) -> @FSPersistorManager.getFileStream @location, @name1, @opts, (err,res)=> From 32a54a7e375917406ab0aebc26eb84b3ca69d059 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Timoth=C3=A9e=20Alby?= Date: Tue, 7 May 2019 17:22:35 +0200 Subject: [PATCH 02/20] Update README.md --- services/filestore/README.md | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/services/filestore/README.md b/services/filestore/README.md index cd126ab053..2772b71494 100644 --- a/services/filestore/README.md +++ b/services/filestore/README.md @@ -1,11 +1,9 @@ -filestore-sharelatex +overleaf/filestore ==================== An API for CRUD operations on binary files stored in S3 -[![Build Status](https://travis-ci.org/sharelatex/filestore-sharelatex.png?branch=master)](https://travis-ci.org/sharelatex/filestore-sharelatex) - -filestore acts as a proxy between the CLSIs and (currently) Amazon S3 storage, presenting a RESTful HTTP interface to the CLSIs on port 3009 by default. Urls are mapped to node functions in https://github.com/sharelatex/filestore-sharelatex/blob/master/app.coffee . URLs are of the form: +filestore acts as a proxy between the CLSIs and (currently) Amazon S3 storage, presenting a RESTful HTTP interface to the CLSIs on port 3009 by default. Urls are mapped to node functions in https://github.com/overleaf/filestore/blob/master/app.coffee . URLs are of the form: * `/project/:project_id/file/:file_id` * `/template/:template_id/v/:version/:format` @@ -22,4 +20,4 @@ License The code in this repository is released under the GNU AFFERO GENERAL PUBLIC LICENSE, version 3. A copy can be found in the `LICENSE` file. -Copyright (c) ShareLaTeX, 2014. +Copyright (c) Overleaf, 2014-2019. 
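As a rough illustration of the REST routes listed in the README above, a client could exercise them along these lines (a sketch only: the default port 3009 is assumed, the project and file ids are made-up placeholders, and error handling is reduced to a bare throw):

    request = require "request"

    projectId = "aaaabbbbccccddddeeeeffff"   # placeholder project id
    fileId    = "111122223333444455556666"   # placeholder file id
    fileUrl   = "http://localhost:3009/project/#{projectId}/file/#{fileId}"

    # create (or overwrite) the file by POSTing its contents
    request.post { uri: fileUrl, body: "file contents" }, (err, res) ->
      throw err if err?
      console.log "upload status:", res.statusCode

      # read it back with a plain GET
      request.get fileUrl, (err, res, body) ->
        throw err if err?
        console.log "downloaded #{body.length} bytes"
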
From f081546ec0de3564719cc62fb37e29a038091c2c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Timoth=C3=A9e=20Alby?= Date: Tue, 7 May 2019 17:37:07 +0100 Subject: [PATCH 03/20] update Git URL in Jenkinsfile --- services/filestore/Jenkinsfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/filestore/Jenkinsfile b/services/filestore/Jenkinsfile index 35bd318ab2..dd741ce239 100644 --- a/services/filestore/Jenkinsfile +++ b/services/filestore/Jenkinsfile @@ -4,10 +4,10 @@ pipeline { agent any environment { - GIT_PROJECT = "filestore-sharelatex" + GIT_PROJECT = "filestore" JENKINS_WORKFLOW = "filestore-sharelatex" TARGET_URL = "${env.JENKINS_URL}blue/organizations/jenkins/${JENKINS_WORKFLOW}/detail/$BRANCH_NAME/$BUILD_NUMBER/pipeline" - GIT_API_URL = "https://api.github.com/repos/sharelatex/${GIT_PROJECT}/statuses/$GIT_COMMIT" + GIT_API_URL = "https://api.github.com/repos/overleaf/${GIT_PROJECT}/statuses/$GIT_COMMIT" } triggers { From 3575c89d03216d537ba1e2fbdb0d05e3e7d944e0 Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Thu, 13 Jun 2019 16:57:49 -0400 Subject: [PATCH 04/20] Return file size on HEAD request This will be used by the file preview feature when it gets partial content. --- services/filestore/app.coffee | 13 +-- .../app/coffee/FSPersistorManager.coffee | 15 +++- .../app/coffee/FileController.coffee | 24 ++++-- .../filestore/app/coffee/FileHandler.coffee | 3 + .../app/coffee/S3PersistorManager.coffee | 26 +++++- .../acceptance/coffee/SendingFileTest.coffee | 9 ++- .../coffee/FSPersistorManagerTests.coffee | 29 +++++++ .../unit/coffee/FileControllerTests.coffee | 37 ++++++++- .../test/unit/coffee/FileHandlerTests.coffee | 1 - .../coffee/S3PersistorManagerTests.coffee | 81 ++++++++++++------- 10 files changed, 191 insertions(+), 47 deletions(-) diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index 3e927a2b51..6c21686826 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -63,22 +63,23 @@ app.use (req, res, next) -> Metrics.injectMetricsRoute(app) +app.head "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.getFileHead app.get "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.getFile app.post "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.insertFile +app.put "/project/:project_id/file/:file_id", keyBuilder.userFileKey, bodyParser.json(), fileController.copyFile +app.del "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.deleteFile -app.put "/project/:project_id/file/:file_id", keyBuilder.userFileKey, bodyParser.json(), fileController.copyFile -app.del "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.deleteFile - +app.head "/template/:template_id/v/:version/:format", keyBuilder.templateFileKey, fileController.getFileHead app.get "/template/:template_id/v/:version/:format", keyBuilder.templateFileKey, fileController.getFile app.get "/template/:template_id/v/:version/:format/:sub_type", keyBuilder.templateFileKey, fileController.getFile app.post "/template/:template_id/v/:version/:format", keyBuilder.templateFileKey, fileController.insertFile +app.head "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, fileController.getFileHead app.get "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, fileController.getFile app.post "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, fileController.insertFile - -app.put 
"/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, bodyParser.json(), fileController.copyFile -app.del "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, fileController.deleteFile +app.put "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, bodyParser.json(), fileController.copyFile +app.del "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, fileController.deleteFile app.get "/project/:project_id/size", keyBuilder.publicProjectKey, fileController.directorySize diff --git a/services/filestore/app/coffee/FSPersistorManager.coffee b/services/filestore/app/coffee/FSPersistorManager.coffee index 353e2ef099..b33d0d85bf 100644 --- a/services/filestore/app/coffee/FSPersistorManager.coffee +++ b/services/filestore/app/coffee/FSPersistorManager.coffee @@ -1,5 +1,6 @@ logger = require("logger-sharelatex") fs = require("fs") +path = require("path") LocalFileWriter = require("./LocalFileWriter") Errors = require('./Errors') rimraf = require("rimraf") @@ -35,7 +36,7 @@ module.exports = if err? logger.err location:location, target:target, fsPath:fsPath, err:err, "something went wrong writing stream to disk" return callback err - @sendFile location, target, fsPath, (err) -> + @sendFile location, target, fsPath, (err) -> # delete the temporary file created above and return the original error LocalFileWriter.deleteFile fsPath, () -> callback(err) @@ -57,6 +58,18 @@ module.exports = # ensures the callback is only called once return callback null, sourceStream + getFileSize: (location, filename, callback) -> + fullPath = path.join(location, filterName(filename)) + fs.stat fullPath, (err, stats) -> + if err? + if err.code == 'ENOENT' + logger.log({location:location, filename:filename}, "file not found") + callback(new Errors.NotFoundError(err.message)) + else + logger.err({err:err, location:location, filename:filename}, "failed to stat file") + callback(err) + return + callback(null, stats.size) copyFile: (location, fromName, toName, callback = (err)->)-> filteredFromName=filterName fromName diff --git a/services/filestore/app/coffee/FileController.coffee b/services/filestore/app/coffee/FileController.coffee index 60dcc207b7..f98dbd1e49 100644 --- a/services/filestore/app/coffee/FileController.coffee +++ b/services/filestore/app/coffee/FileController.coffee @@ -21,7 +21,7 @@ module.exports = FileController = style: style, } metrics.inc "getFile" - logger.log key:key, bucket:bucket, format:format, style: style, "reciving request to get file" + logger.log key:key, bucket:bucket, format:format, style: style, "receiving request to get file" if req.headers.range? range = FileController._get_range(req.headers.range) options.start = range.start @@ -41,10 +41,24 @@ module.exports = FileController = logger.log key:key, bucket:bucket, format:format, style:style, "sending file to response" fileStream.pipe res + getFileHead: (req, res) -> + {key, bucket} = req + metrics.inc("getFileSize") + logger.log({ key: key, bucket: bucket }, "receiving request to get file metadata") + FileHandler.getFileSize bucket, key, (err, fileSize) -> + if err? 
+ if err instanceof Errors.NotFoundError + res.status(404).end() + else + res.status(500).end() + return + res.set("Content-Length", fileSize) + res.status(200).end() + insertFile: (req, res)-> metrics.inc "insertFile" {key, bucket} = req - logger.log key:key, bucket:bucket, "reciving request to insert file" + logger.log key:key, bucket:bucket, "receiving request to insert file" FileHandler.insertFile bucket, key, req, (err)-> if err? logger.log err: err, key: key, bucket: bucket, "error inserting file" @@ -57,7 +71,7 @@ module.exports = FileController = {key, bucket} = req oldProject_id = req.body.source.project_id oldFile_id = req.body.source.file_id - logger.log key:key, bucket:bucket, oldProject_id:oldProject_id, oldFile_id:oldFile_id, "reciving request to copy file" + logger.log key:key, bucket:bucket, oldProject_id:oldProject_id, oldFile_id:oldFile_id, "receiving request to copy file" PersistorManager.copyFile bucket, "#{oldProject_id}/#{oldFile_id}", key, (err)-> if err? if err instanceof Errors.NotFoundError @@ -71,7 +85,7 @@ module.exports = FileController = deleteFile: (req, res)-> metrics.inc "deleteFile" {key, bucket} = req - logger.log key:key, bucket:bucket, "reciving request to delete file" + logger.log key:key, bucket:bucket, "receiving request to delete file" FileHandler.deleteFile bucket, key, (err)-> if err? logger.log err:err, key:key, bucket:bucket, "something went wrong deleting file" @@ -90,7 +104,7 @@ module.exports = FileController = directorySize: (req, res)-> metrics.inc "projectSize" {project_id, bucket} = req - logger.log project_id:project_id, bucket:bucket, "reciving request to project size" + logger.log project_id:project_id, bucket:bucket, "receiving request to project size" FileHandler.getDirectorySize bucket, project_id, (err, size)-> if err? logger.log err: err, project_id: project_id, bucket: bucket, "error inserting file" diff --git a/services/filestore/app/coffee/FileHandler.coffee b/services/filestore/app/coffee/FileHandler.coffee index 6189c8a906..cb8d78a0fe 100644 --- a/services/filestore/app/coffee/FileHandler.coffee +++ b/services/filestore/app/coffee/FileHandler.coffee @@ -31,6 +31,9 @@ module.exports = FileHandler = else @_getConvertedFile bucket, key, opts, callback + getFileSize: (bucket, key, callback) -> + PersistorManager.getFileSize(bucket, key, callback) + _getStandardFile: (bucket, key, opts, callback)-> PersistorManager.getFileStream bucket, key, opts, (err, fileStream)-> if err? and !(err instanceof Errors.NotFoundError) diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index 2b183730d6..50486a0ce3 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -91,12 +91,36 @@ module.exports = else if res.statusCode not in [200, 206] logger.log bucketName:bucketName, key:key, "error getting file from s3: #{res.statusCode}" return callback new Error("Got non-200 response from S3: #{res.statusCode}"), null - else + else return callback null, res s3Stream.on 'error', (err) -> logger.err err:err, bucketName:bucketName, key:key, "error getting file stream from s3" callback err + getFileSize: (bucketName, key, callback) -> + logger.log({ bucketName: bucketName, key: key }, "getting file size from S3") + s3.headObject { Bucket: bucketName, Key: key }, (err, data) -> + if err? 
+ if err.statusCode in [403, 404] + # S3 returns a 403 instead of a 404 when the user doesn't have + # permission to list the bucket contents. + logger.log({ + bucketName: bucketName, + key: key + }, "file not found in s3") + callback( + new Errors.NotFoundError("File not found in S3: #{bucketName}:#{key}") + ) + else + logger.err({ + bucketName: bucketName, + key: key, + err: err + }, "error performing S3 HeadObject") + callback(err) + return + callback(null, data.ContentLength) + copyFile: (bucketName, sourceKey, destKey, callback)-> logger.log bucketName:bucketName, sourceKey:sourceKey, destKey: destKey, "copying file in s3" source = bucketName + '/' + sourceKey diff --git a/services/filestore/test/acceptance/coffee/SendingFileTest.coffee b/services/filestore/test/acceptance/coffee/SendingFileTest.coffee index cee06e82a3..b77afb866b 100644 --- a/services/filestore/test/acceptance/coffee/SendingFileTest.coffee +++ b/services/filestore/test/acceptance/coffee/SendingFileTest.coffee @@ -32,7 +32,7 @@ describe "Filestore", -> - it "should send a 200 for status endpoing", (done)-> + it "should send a 200 for status endpoint", (done)-> request "#{@filestoreUrl}/status", (err, response, body)-> response.statusCode.should.equal 200 body.indexOf("filestore").should.not.equal -1 @@ -59,6 +59,13 @@ describe "Filestore", -> response.statusCode.should.equal 404 done() + it "should return the file size on a HEAD request", (done) -> + expectedLength = Buffer.byteLength(@constantFileContent) + request.head @fileUrl, (err, res) => + expect(res.statusCode).to.equal(200) + expect(res.headers['content-length']).to.equal(expectedLength.toString()) + done() + it "should be able get the file back", (done)-> @timeout(1000 * 10) request.get @fileUrl, (err, response, body)=> diff --git a/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee b/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee index 7df7adfd8b..a08fc86682 100644 --- a/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee @@ -22,6 +22,7 @@ describe "FSPersistorManagerTests", -> openSync:sinon.stub() fstatSync:sinon.stub() closeSync:sinon.stub() + stat:sinon.stub() @Rimraf = sinon.stub() @LocalFileWriter = writeStream: sinon.stub() @@ -156,7 +157,35 @@ describe "FSPersistorManagerTests", -> expect(err instanceof Error).to.equal true done() + describe "getFileSize", -> + it "should return the file size", (done) -> + expectedFileSize = 75382 + @Fs.stat.yields(new Error("fs.stat got unexpected arguments")) + @Fs.stat.withArgs("#{@location}/#{@name1Filtered}") + .yields(null, { size: expectedFileSize }) + @FSPersistorManager.getFileSize @location, @name1, (err, fileSize) => + if err? 
+ return done(err) + expect(fileSize).to.equal(expectedFileSize) + done() + + it "should throw a NotFoundError if the file does not exist", (done) -> + error = new Error() + error.code = "ENOENT" + @Fs.stat.yields(error) + + @FSPersistorManager.getFileSize @location, @name1, (err, fileSize) => + expect(err).to.be.instanceof(@Errors.NotFoundError) + done() + + it "should rethrow any other error", (done) -> + error = new Error() + @Fs.stat.yields(error) + + @FSPersistorManager.getFileSize @location, @name1, (err, fileSize) => + expect(err).to.equal(error) + done() describe "copyFile", -> beforeEach -> diff --git a/services/filestore/test/unit/coffee/FileControllerTests.coffee b/services/filestore/test/unit/coffee/FileControllerTests.coffee index 09398b8c01..821aadb68d 100644 --- a/services/filestore/test/unit/coffee/FileControllerTests.coffee +++ b/services/filestore/test/unit/coffee/FileControllerTests.coffee @@ -20,6 +20,7 @@ describe "FileController", -> user_files:"user_files" @FileHandler = getFile: sinon.stub() + getFileSize: sinon.stub() deleteFile: sinon.stub() insertFile: sinon.stub() getDirectorySize: sinon.stub() @@ -49,7 +50,8 @@ describe "FileController", -> file_id:@file_id headers: {} @res = - setHeader: -> + set: sinon.stub().returnsThis() + status: sinon.stub().returnsThis() @fileStream = {} describe "getFile", -> @@ -89,6 +91,39 @@ describe "FileController", -> done() @controller.getFile @req, @res + describe "getFileHead", -> + it "should return the file size in a Content-Length header", (done) -> + expectedFileSize = 84921 + @FileHandler.getFileSize.yields( + new Error("FileHandler.getFileSize: unexpected arguments") + ) + @FileHandler.getFileSize.withArgs(@bucket, @key).yields(null, expectedFileSize) + + @res.end = () => + expect(@res.status.lastCall.args[0]).to.equal(200) + expect(@res.set.calledWith("Content-Length", expectedFileSize)).to.equal(true) + done() + + @controller.getFileHead(@req, @res) + + it "should return a 404 is the file is not found", (done) -> + @FileHandler.getFileSize.yields(new @Errors.NotFoundError()) + + @res.end = () => + expect(@res.status.lastCall.args[0]).to.equal(404) + done() + + @controller.getFileHead(@req, @res) + + it "should return a 500 on internal errors", (done) -> + @FileHandler.getFileSize.yields(new Error()) + + @res.end = () => + expect(@res.status.lastCall.args[0]).to.equal(500) + done() + + @controller.getFileHead(@req, @res) + describe "insertFile", -> it "should send bucket name key and res to PersistorManager", (done)-> diff --git a/services/filestore/test/unit/coffee/FileHandlerTests.coffee b/services/filestore/test/unit/coffee/FileHandlerTests.coffee index 50b8a17524..754366195e 100644 --- a/services/filestore/test/unit/coffee/FileHandlerTests.coffee +++ b/services/filestore/test/unit/coffee/FileHandlerTests.coffee @@ -108,7 +108,6 @@ describe "FileHandler", -> @handler._getConvertedFile.called.should.equal true done() - describe "_getStandardFile", -> beforeEach -> diff --git a/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee index 1eee09dc29..2554461fd7 100644 --- a/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee @@ -26,11 +26,13 @@ describe "S3PersistorManagerTests", -> @knox = createClient: sinon.stub().returns(@stubbedKnoxClient) @stubbedS3Client = - copyObject:sinon.stub() + copyObject: sinon.stub() + headObject: sinon.stub() @awsS3 = 
sinon.stub().returns @stubbedS3Client @LocalFileWriter = writeStream: sinon.stub() deleteFile: sinon.stub() + @request = sinon.stub() @requires = "knox": @knox "aws-sdk/clients/s3": @awsS3 @@ -39,15 +41,16 @@ describe "S3PersistorManagerTests", -> "logger-sharelatex": log:-> err:-> + "request": @request "./Errors": @Errors = NotFoundError: sinon.stub() @key = "my/key" @bucketName = "my-bucket" @error = "my errror" + @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires describe "getFileStream", -> beforeEach -> - @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires @opts = {} it "should use correct key", (done)-> @@ -74,7 +77,6 @@ describe "S3PersistorManagerTests", -> describe "with supplied auth", -> beforeEach -> - @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires @credentials = auth_key: "that_key" auth_secret: "that_secret" @@ -156,10 +158,45 @@ describe "S3PersistorManagerTests", -> @Errors.NotFoundError.called.should.equal false done() + describe "getFileSize", -> + it "should obtain the file size from S3", (done) -> + expectedFileSize = 123 + @stubbedS3Client.headObject.yields(new Error( + "s3Client.headObject got unexpected arguments" + )) + @stubbedS3Client.headObject.withArgs({ + Bucket: @bucketName + Key: @key + }).yields(null, { ContentLength: expectedFileSize }) + + @S3PersistorManager.getFileSize @bucketName, @key, (err, fileSize) => + if err? + return done(err) + expect(fileSize).to.equal(expectedFileSize) + done() + + [403, 404].forEach (statusCode) -> + it "should throw NotFoundError when S3 responds with #{statusCode}", (done) -> + error = new Error() + error.statusCode = statusCode + @stubbedS3Client.headObject.yields(error) + + @S3PersistorManager.getFileSize @bucketName, @key, (err, fileSize) => + expect(err).to.be.an.instanceof(@Errors.NotFoundError) + done() + + it "should rethrow any other error", (done) -> + error = new Error() + @stubbedS3Client.headObject.yields(error) + @stubbedS3Client.headObject.yields(error) + + @S3PersistorManager.getFileSize @bucketName, @key, (err, fileSize) => + expect(err).to.equal(error) + done() + describe "sendFile", -> beforeEach -> - @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires @stubbedKnoxClient.putFile.returns on:-> it "should put file with knox", (done)-> @@ -183,7 +220,6 @@ describe "S3PersistorManagerTests", -> @fsPath = "to/some/where" @origin = on:-> - @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires @S3PersistorManager.sendFile = sinon.stub().callsArgWith(3) it "should send stream to LocalFileWriter", (done)-> @@ -211,7 +247,6 @@ describe "S3PersistorManagerTests", -> beforeEach -> @sourceKey = "my/key" @destKey = "my/dest/key" - @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires it "should use AWS SDK to copy file", (done)-> @stubbedS3Client.copyObject.callsArgWith(1, @error) @@ -229,9 +264,6 @@ describe "S3PersistorManagerTests", -> describe "deleteDirectory", -> - beforeEach -> - @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires - it "should list the contents passing them onto multi delete", (done)-> data = Contents: [{Key:"1234"}, {Key: "456"}] @@ -244,9 +276,7 @@ describe "S3PersistorManagerTests", -> describe "deleteFile", -> it "should use correct options", (done)-> - @request = sinon.stub().callsArgWith(1) - @requires["request"] = @request - @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires + 
@request.callsArgWith(1) @S3PersistorManager.deleteFile @bucketName, @key, (err)=> opts = @request.args[0][0] @@ -257,9 +287,7 @@ describe "S3PersistorManagerTests", -> done() it "should return the error", (done)-> - @request = sinon.stub().callsArgWith(1, @error) - @requires["request"] = @request - @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires + @request.callsArgWith(1, @error) @S3PersistorManager.deleteFile @bucketName, @key, (err)=> err.should.equal @error @@ -268,9 +296,7 @@ describe "S3PersistorManagerTests", -> describe "checkIfFileExists", -> it "should use correct options", (done)-> - @request = sinon.stub().callsArgWith(1, null, statusCode:200) - @requires["request"] = @request - @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires + @request.callsArgWith(1, null, statusCode:200) @S3PersistorManager.checkIfFileExists @bucketName, @key, (err)=> opts = @request.args[0][0] @@ -281,25 +307,21 @@ describe "S3PersistorManagerTests", -> done() it "should return true for a 200", (done)-> - @request = sinon.stub().callsArgWith(1, null, statusCode:200) - @requires["request"] = @request - @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires + @request.callsArgWith(1, null, statusCode:200) + @S3PersistorManager.checkIfFileExists @bucketName, @key, (err, exists)=> exists.should.equal true done() it "should return false for a non 200", (done)-> - @request = sinon.stub().callsArgWith(1, null, statusCode:404) - @requires["request"] = @request - @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires + @request.callsArgWith(1, null, statusCode:404) + @S3PersistorManager.checkIfFileExists @bucketName, @key, (err, exists)=> exists.should.equal false done() it "should return the error", (done)-> - @request = sinon.stub().callsArgWith(1, @error, {}) - @requires["request"] = @request - @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires + @request.callsArgWith(1, @error, {}) @S3PersistorManager.checkIfFileExists @bucketName, @key, (err)=> err.should.equal @error @@ -307,13 +329,10 @@ describe "S3PersistorManagerTests", -> describe "directorySize", -> - beforeEach -> - @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires - it "should sum directory files size", (done) -> data = Contents: [ {Size: 1024}, {Size: 2048} ] @stubbedKnoxClient.list.callsArgWith(1, null, data) @S3PersistorManager.directorySize @bucketName, @key, (err, totalSize)=> totalSize.should.equal 3072 - done() \ No newline at end of file + done() From f2521a29b9d8035782647cfbf6f84c6794837ae3 Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Tue, 18 Jun 2019 08:25:14 -0400 Subject: [PATCH 05/20] Use AWS SDK for getFileStream() The AWS SDK has a retry strategy to deal with rate limiting or transient unavailability of S3. We hope it will reduce our error rates. 
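For context, the retry behaviour comes from the AWS SDK's client configuration rather than anything filestore implements itself; a minimal sketch of tuning it when constructing the client (the maxRetries value and credentials below are assumed placeholders, not something this patch sets):

    S3 = require "aws-sdk/clients/s3"

    # illustrative only: the SDK retries throttled and 5xx responses with
    # exponential backoff; the credentials here are placeholders
    s3 = new S3
      credentials:
        accessKeyId: "placeholder-key"
        secretAccessKey: "placeholder-secret"
      maxRetries: 3
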
--- .../app/coffee/S3PersistorManager.coffee | 62 +++-- .../coffee/S3PersistorManagerTests.coffee | 228 +++++++++--------- 2 files changed, 155 insertions(+), 135 deletions(-) diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index 50486a0ce3..940ba90a95 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -26,12 +26,22 @@ buildDefaultOptions = (bucketName, method, key)-> uri:"https://#{bucketName}.s3.amazonaws.com/#{key}" } -s3 = new awsS3({ +defaultS3Client = new awsS3({ credentials: accessKeyId: settings.filestore.s3.key, secretAccessKey: settings.filestore.s3.secret }) +getS3Client = (credentials) -> + if credentials? + return new awsS3({ + credentials: + accessKeyId: credentials.auth_key + secretAccessKey: credentials.auth_secret + }) + else + return defaultS3Client + module.exports = sendFile: (bucketName, key, fsPath, callback)-> @@ -71,34 +81,39 @@ module.exports = # opts may be {start: Number, end: Number} getFileStream: (bucketName, key, opts, callback = (err, res)->)-> opts = opts || {} - headers = {} - if opts.start? and opts.end? - headers['Range'] = "bytes=#{opts.start}-#{opts.end}" - callback = _.once callback + callback = _.once(callback) logger.log bucketName:bucketName, key:key, "getting file from s3" - s3Client = knox.createClient - key: opts.credentials?.auth_key || settings.filestore.s3.key - secret: opts.credentials?.auth_secret || settings.filestore.s3.secret - bucket: bucketName - s3Stream = s3Client.get(key, headers) - s3Stream.end() - s3Stream.on 'response', (res) -> - if res.statusCode in [403, 404] + + s3 = getS3Client(opts.credentials) + s3Params = { + Bucket: bucketName + Key: key + } + if opts.start? and opts.end? + s3Params['Range'] = "bytes=#{opts.start}-#{opts.end}" + request = s3.getObject(s3Params) + + request.on 'httpHeaders', (statusCode, headers, response, statusMessage) => + if statusCode in [403, 404] # S3 returns a 403 instead of a 404 when the user doesn't have # permission to list the bucket contents. - logger.log bucketName:bucketName, key:key, "file not found in s3" - return callback new Errors.NotFoundError("File not found in S3: #{bucketName}:#{key}"), null - else if res.statusCode not in [200, 206] - logger.log bucketName:bucketName, key:key, "error getting file from s3: #{res.statusCode}" - return callback new Error("Got non-200 response from S3: #{res.statusCode}"), null - else - return callback null, res - s3Stream.on 'error', (err) -> - logger.err err:err, bucketName:bucketName, key:key, "error getting file stream from s3" - callback err + logger.log({ bucketName: bucketName, key: key }, "file not found in s3") + return callback(new Errors.NotFoundError("File not found in S3: #{bucketName}:#{key}"), null) + if statusCode not in [200, 206] + logger.log({bucketName: bucketName, key: key }, "error getting file from s3: #{statusCode}") + return callback(new Error("Got non-200 response from S3: #{statusCode} #{statusMessage}"), null) + stream = response.httpResponse.getUnbufferedStream() + callback(null, stream) + + request.on 'error', (err) => + logger.err({ err: err, bucketName: bucketName, key: key }, "error getting file stream from s3") + callback(err) + + request.send() getFileSize: (bucketName, key, callback) -> logger.log({ bucketName: bucketName, key: key }, "getting file size from S3") + s3 = getS3Client() s3.headObject { Bucket: bucketName, Key: key }, (err, data) -> if err? 
if err.statusCode in [403, 404] @@ -125,6 +140,7 @@ module.exports = logger.log bucketName:bucketName, sourceKey:sourceKey, destKey: destKey, "copying file in s3" source = bucketName + '/' + sourceKey # use the AWS SDK instead of knox due to problems with error handling (https://github.com/Automattic/knox/issues/114) + s3 = getS3Client() s3.copyObject {Bucket: bucketName, Key: destKey, CopySource: source}, (err) -> if err? if err.code is 'NoSuchKey' diff --git a/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee index 2554461fd7..5244fcb8f2 100644 --- a/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee @@ -17,18 +17,27 @@ describe "S3PersistorManagerTests", -> key: "this_key" stores: user_files:"sl_user_files" - @stubbedKnoxClient = + @knoxClient = putFile:sinon.stub() copyFile:sinon.stub() list: sinon.stub() deleteMultiple: sinon.stub() get: sinon.stub() @knox = - createClient: sinon.stub().returns(@stubbedKnoxClient) - @stubbedS3Client = + createClient: sinon.stub().returns(@knoxClient) + @s3EventHandlers = {} + @s3Request = + on: sinon.stub().callsFake (event, callback) => + @s3EventHandlers[event] = callback + send: sinon.stub() + @s3Response = + httpResponse: + getUnbufferedStream: sinon.stub() + @s3Client = copyObject: sinon.stub() headObject: sinon.stub() - @awsS3 = sinon.stub().returns @stubbedS3Client + getObject: sinon.stub().returns(@s3Request) + @awsS3 = sinon.stub().returns(@s3Client) @LocalFileWriter = writeStream: sinon.stub() deleteFile: sinon.stub() @@ -50,121 +59,116 @@ describe "S3PersistorManagerTests", -> @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires describe "getFileStream", -> - beforeEach -> - @opts = {} + describe "success", -> + beforeEach () -> + @expectedStream = { expectedStream: true } + @s3Request.send.callsFake () => + @s3EventHandlers.httpHeaders(200, {}, @s3Response, "OK") + @s3Response.httpResponse.getUnbufferedStream.returns(@expectedStream) - it "should use correct key", (done)-> - @stubbedKnoxClient.get.returns( - on:-> - end:-> - ) - @S3PersistorManager.getFileStream @bucketName, @key, @opts, (err)=> # empty callback - @stubbedKnoxClient.get.calledWith(@key).should.equal true - done() + it "returns a stream", (done) -> + @S3PersistorManager.getFileStream @bucketName, @key, {}, (err, stream) => + if err? + return done(err) + expect(stream).to.equal(@expectedStream) + done() - it "should use default auth", (done)-> - @stubbedKnoxClient.get.returns( - on:-> - end:-> - ) - @S3PersistorManager.getFileStream @bucketName, @key, @opts, (err)=> # empty callback - clientParams = - key: @settings.filestore.s3.key - secret: @settings.filestore.s3.secret - bucket: @bucketName - @knox.createClient.calledWith(clientParams).should.equal true - done() + it "sets the AWS client up with credentials from settings", (done) -> + @S3PersistorManager.getFileStream @bucketName, @key, {}, (err, stream) => + if err? 
+ return done(err) + expect(@awsS3.lastCall.args).to.deep.equal([{ + credentials: + accessKeyId: @settings.filestore.s3.key + secretAccessKey: @settings.filestore.s3.secret + }]) + done() - describe "with supplied auth", -> - beforeEach -> - @credentials = - auth_key: "that_key" - auth_secret: "that_secret" - @opts = - credentials: @credentials + it "fetches the right key from the right bucket", (done) -> + @S3PersistorManager.getFileStream @bucketName, @key, {}, (err, stream) => + if err? + return done(err) + expect(@s3Client.getObject.lastCall.args).to.deep.equal([{ + Bucket: @bucketName, + Key: @key + }]) + done() - it "should use supplied auth", (done)-> - @stubbedKnoxClient.get.returns( - on:-> - end:-> - ) - @S3PersistorManager.getFileStream @bucketName, @key, @opts, (err)=> # empty callback - clientParams = - key: @credentials.auth_key - secret: @credentials.auth_secret - bucket: @bucketName - @knox.createClient.calledWith(clientParams).should.equal true - done() + it "accepts alternative credentials", (done) -> + accessKeyId = "that_key" + secret = "that_secret" + opts = { + credentials: + auth_key: accessKeyId + auth_secret: secret + } + @S3PersistorManager.getFileStream @bucketName, @key, opts, (err, stream) => + if err? + return done(err) + expect(@awsS3.lastCall.args).to.deep.equal([{ + credentials: + accessKeyId: accessKeyId + secretAccessKey: secret + }]) + expect(stream).to.equal(@expectedStream) + done() - describe "with start and end options", -> - beforeEach -> - @opts = - start: 0 - end: 8 - it "should pass headers to the knox.Client.get()", (done) -> - @stubbedKnoxClient.get.returns( - on:-> - end:-> - ) - @S3PersistorManager.getFileStream @bucketName, @key, @opts, (err)=> # empty callback - @stubbedKnoxClient.get.calledWith(@key, {'Range': 'bytes=0-8'}).should.equal true - done() - - describe "error conditions", -> + it "accepts byte range", (done) -> + start = 0 + end = 8 + opts = { start: start, end: end } + @S3PersistorManager.getFileStream @bucketName, @key, opts, (err, stream) => + if err? 
+ return done(err) + expect(@s3Client.getObject.lastCall.args).to.deep.equal([{ + Bucket: @bucketName + Key: @key + Range: "bytes=#{start}-#{end}" + }]) + expect(stream).to.equal(@expectedStream) + done() + describe "errors", -> describe "when the file doesn't exist", -> - beforeEach -> - @fakeResponse = - statusCode: 404 - @stubbedKnoxClient.get.returns( - on: (key, callback) => - if key == 'response' - callback(@fakeResponse) - end: -> - ) + @s3Request.send.callsFake () => + @s3EventHandlers.httpHeaders(404, {}, @s3Response, "Not found") - it "should produce a NotFoundError", (done) -> - @S3PersistorManager.getFileStream @bucketName, @key, @opts, (err, stream)=> # empty callback - expect(stream).to.equal null - expect(err).to.not.equal null - expect(err instanceof @Errors.NotFoundError).to.equal true + it "returns a NotFoundError that indicates the bucket and key", (done) -> + @S3PersistorManager.getFileStream @bucketName, @key, {}, (err, stream) => + expect(err).to.be.instanceof(@Errors.NotFoundError) + errMsg = @Errors.NotFoundError.lastCall.args[0] + expect(errMsg).to.match(new RegExp(".*#{@bucketName}.*")) + expect(errMsg).to.match(new RegExp(".*#{@key}.*")) done() - it "should have bucket and key in the Error message", (done) -> - @S3PersistorManager.getFileStream @bucketName, @key, @opts, (err, stream)=> # empty callback - error_message = @Errors.NotFoundError.lastCall.args[0] - expect(error_message).to.not.equal null - error_message.should.match(new RegExp(".*#{@bucketName}.*")) - error_message.should.match(new RegExp(".*#{@key}.*")) + describe "when S3 encounters an unkown error", -> + beforeEach -> + @s3Request.send.callsFake () => + @s3EventHandlers.httpHeaders(500, {}, @s3Response, "Internal server error") + + it "returns an error", (done) -> + @S3PersistorManager.getFileStream @bucketName, @key, {}, (err, stream) => + expect(err).to.be.instanceof(Error) done() - describe "when the S3 service produces an error", -> + describe "when the S3 request errors out before receiving HTTP headers", -> beforeEach -> - @fakeResponse = - statusCode: 500 - @stubbedKnoxClient.get.returns( - on: (key, callback) => - if key == 'response' - callback(@fakeResponse) - end: -> - ) + @s3Request.send.callsFake () => + @s3EventHandlers.error(new Error("connection failed")) - it "should produce an error", (done) -> - @S3PersistorManager.getFileStream @bucketName, @key, @opts, (err, stream)=> # empty callback - expect(stream).to.equal null - expect(err).to.not.equal null - expect(err instanceof Error).to.equal true - @Errors.NotFoundError.called.should.equal false + it "returns an error", (done) -> + @S3PersistorManager.getFileStream @bucketName, @key, {}, (err, stream) => + expect(err).to.be.instanceof(Error) done() describe "getFileSize", -> it "should obtain the file size from S3", (done) -> expectedFileSize = 123 - @stubbedS3Client.headObject.yields(new Error( + @s3Client.headObject.yields(new Error( "s3Client.headObject got unexpected arguments" )) - @stubbedS3Client.headObject.withArgs({ + @s3Client.headObject.withArgs({ Bucket: @bucketName Key: @key }).yields(null, { ContentLength: expectedFileSize }) @@ -179,7 +183,7 @@ describe "S3PersistorManagerTests", -> it "should throw NotFoundError when S3 responds with #{statusCode}", (done) -> error = new Error() error.statusCode = statusCode - @stubbedS3Client.headObject.yields(error) + @s3Client.headObject.yields(error) @S3PersistorManager.getFileSize @bucketName, @key, (err, fileSize) => expect(err).to.be.an.instanceof(@Errors.NotFoundError) @@ 
-187,8 +191,8 @@ describe "S3PersistorManagerTests", -> it "should rethrow any other error", (done) -> error = new Error() - @stubbedS3Client.headObject.yields(error) - @stubbedS3Client.headObject.yields(error) + @s3Client.headObject.yields(error) + @s3Client.headObject.yields(error) @S3PersistorManager.getFileSize @bucketName, @key, (err, fileSize) => expect(err).to.equal(error) @@ -197,21 +201,21 @@ describe "S3PersistorManagerTests", -> describe "sendFile", -> beforeEach -> - @stubbedKnoxClient.putFile.returns on:-> + @knoxClient.putFile.returns on:-> it "should put file with knox", (done)-> @LocalFileWriter.deleteFile.callsArgWith(1) - @stubbedKnoxClient.putFile.callsArgWith(2, @error) + @knoxClient.putFile.callsArgWith(2, @error) @S3PersistorManager.sendFile @bucketName, @key, @fsPath, (err)=> - @stubbedKnoxClient.putFile.calledWith(@fsPath, @key).should.equal true + @knoxClient.putFile.calledWith(@fsPath, @key).should.equal true err.should.equal @error done() it "should delete the file and pass the error with it", (done)-> @LocalFileWriter.deleteFile.callsArgWith(1) - @stubbedKnoxClient.putFile.callsArgWith(2, @error) + @knoxClient.putFile.callsArgWith(2, @error) @S3PersistorManager.sendFile @bucketName, @key, @fsPath, (err)=> - @stubbedKnoxClient.putFile.calledWith(@fsPath, @key).should.equal true + @knoxClient.putFile.calledWith(@fsPath, @key).should.equal true err.should.equal @error done() @@ -249,15 +253,15 @@ describe "S3PersistorManagerTests", -> @destKey = "my/dest/key" it "should use AWS SDK to copy file", (done)-> - @stubbedS3Client.copyObject.callsArgWith(1, @error) + @s3Client.copyObject.callsArgWith(1, @error) @S3PersistorManager.copyFile @bucketName, @sourceKey, @destKey, (err)=> err.should.equal @error - @stubbedS3Client.copyObject.calledWith({Bucket: @bucketName, Key: @destKey, CopySource: @bucketName + '/' + @key}).should.equal true + @s3Client.copyObject.calledWith({Bucket: @bucketName, Key: @destKey, CopySource: @bucketName + '/' + @key}).should.equal true done() it "should return a NotFoundError object if the original file does not exist", (done)-> NoSuchKeyError = {code: "NoSuchKey"} - @stubbedS3Client.copyObject.callsArgWith(1, NoSuchKeyError) + @s3Client.copyObject.callsArgWith(1, NoSuchKeyError) @S3PersistorManager.copyFile @bucketName, @sourceKey, @destKey, (err)=> expect(err instanceof @Errors.NotFoundError).to.equal true done() @@ -267,10 +271,10 @@ describe "S3PersistorManagerTests", -> it "should list the contents passing them onto multi delete", (done)-> data = Contents: [{Key:"1234"}, {Key: "456"}] - @stubbedKnoxClient.list.callsArgWith(1, null, data) - @stubbedKnoxClient.deleteMultiple.callsArgWith(1) + @knoxClient.list.callsArgWith(1, null, data) + @knoxClient.deleteMultiple.callsArgWith(1) @S3PersistorManager.deleteDirectory @bucketName, @key, (err)=> - @stubbedKnoxClient.deleteMultiple.calledWith(["1234","456"]).should.equal true + @knoxClient.deleteMultiple.calledWith(["1234","456"]).should.equal true done() describe "deleteFile", -> @@ -332,7 +336,7 @@ describe "S3PersistorManagerTests", -> it "should sum directory files size", (done) -> data = Contents: [ {Size: 1024}, {Size: 2048} ] - @stubbedKnoxClient.list.callsArgWith(1, null, data) + @knoxClient.list.callsArgWith(1, null, data) @S3PersistorManager.directorySize @bucketName, @key, (err, totalSize)=> totalSize.should.equal 3072 done() From f865762c293b3746a36ebd6df7284c327b271954 Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Wed, 19 Jun 2019 13:25:18 +0100 Subject: [PATCH 06/20] update 
logger and metrics --- services/filestore/Makefile | 6 +- services/filestore/buildscript.txt | 2 +- services/filestore/docker-compose.ci.yml | 6 +- services/filestore/docker-compose.yml | 7 +- services/filestore/npm-shrinkwrap.json | 515 ++++++++++++++++------- services/filestore/package.json | 4 +- 6 files changed, 381 insertions(+), 159 deletions(-) diff --git a/services/filestore/Makefile b/services/filestore/Makefile index db33518816..75286c139a 100644 --- a/services/filestore/Makefile +++ b/services/filestore/Makefile @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.12 +# Version: 1.1.21 BUILD_NUMBER ?= local BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) @@ -26,7 +26,9 @@ test: test_unit test_acceptance test_unit: @[ ! -d test/unit ] && echo "filestore has no unit tests" || $(DOCKER_COMPOSE) run --rm test_unit -test_acceptance: test_clean test_acceptance_pre_run # clear the database before each acceptance test run +test_acceptance: test_clean test_acceptance_pre_run test_acceptance_run + +test_acceptance_run: @[ ! -d test/acceptance ] && echo "filestore has no acceptance tests" || $(DOCKER_COMPOSE) run --rm test_acceptance test_clean: diff --git a/services/filestore/buildscript.txt b/services/filestore/buildscript.txt index 0ba90519b3..51452eb242 100644 --- a/services/filestore/buildscript.txt +++ b/services/filestore/buildscript.txt @@ -5,4 +5,4 @@ filestore --dependencies=mongo,redis --docker-repos=gcr.io/overleaf-ops --build-target=docker ---script-version=1.1.12 +--script-version=1.1.21 diff --git a/services/filestore/docker-compose.ci.yml b/services/filestore/docker-compose.ci.yml index e7ac6e84a7..d2bcca9ec6 100644 --- a/services/filestore/docker-compose.ci.yml +++ b/services/filestore/docker-compose.ci.yml @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.12 +# Version: 1.1.21 version: "2" @@ -10,6 +10,8 @@ services: image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER user: node command: npm run test:unit:_run + environment: + NODE_ENV: test test_acceptance: @@ -21,7 +23,7 @@ services: MONGO_HOST: mongo POSTGRES_HOST: postgres MOCHA_GREP: ${MOCHA_GREP} - ENABLE_CONVERSIONS: "true" + NODE_ENV: test depends_on: - mongo - redis diff --git a/services/filestore/docker-compose.yml b/services/filestore/docker-compose.yml index 60d387bf51..234b93e236 100644 --- a/services/filestore/docker-compose.yml +++ b/services/filestore/docker-compose.yml @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. 
# Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.12 +# Version: 1.1.21 version: "2" @@ -13,6 +13,7 @@ services: working_dir: /app environment: MOCHA_GREP: ${MOCHA_GREP} + NODE_ENV: test command: npm run test:unit user: node @@ -27,7 +28,8 @@ services: MONGO_HOST: mongo POSTGRES_HOST: postgres MOCHA_GREP: ${MOCHA_GREP} - ENABLE_CONVERSIONS: "true" + LOG_LEVEL: ERROR + NODE_ENV: test user: node depends_on: - mongo @@ -50,3 +52,4 @@ services: mongo: image: mongo:3.4 + diff --git a/services/filestore/npm-shrinkwrap.json b/services/filestore/npm-shrinkwrap.json index 7caf791bae..66670c6e69 100644 --- a/services/filestore/npm-shrinkwrap.json +++ b/services/filestore/npm-shrinkwrap.json @@ -3,19 +3,19 @@ "version": "0.1.4", "dependencies": { "@google-cloud/common": { - "version": "0.27.0", - "from": "@google-cloud/common@>=0.27.0 <0.28.0", - "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.27.0.tgz" + "version": "0.32.1", + "from": "@google-cloud/common@>=0.32.0 <0.33.0", + "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.32.1.tgz" }, "@google-cloud/debug-agent": { - "version": "3.0.1", + "version": "3.2.0", "from": "@google-cloud/debug-agent@>=3.0.0 <4.0.0", - "resolved": "https://registry.npmjs.org/@google-cloud/debug-agent/-/debug-agent-3.0.1.tgz", + "resolved": "https://registry.npmjs.org/@google-cloud/debug-agent/-/debug-agent-3.2.0.tgz", "dependencies": { "coffeescript": { - "version": "2.3.2", + "version": "2.4.1", "from": "coffeescript@>=2.0.0 <3.0.0", - "resolved": "https://registry.npmjs.org/coffeescript/-/coffeescript-2.3.2.tgz" + "resolved": "https://registry.npmjs.org/coffeescript/-/coffeescript-2.4.1.tgz" } } }, @@ -29,15 +29,47 @@ "from": "@google-cloud/common@>=0.26.0 <0.27.0", "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.26.2.tgz" }, + "@google-cloud/promisify": { + "version": "0.3.1", + "from": "@google-cloud/promisify@>=0.3.0 <0.4.0", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-0.3.1.tgz" + }, + "arrify": { + "version": "1.0.1", + "from": "arrify@>=1.0.1 <2.0.0", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz" + }, + "gcp-metadata": { + "version": "0.9.3", + "from": "gcp-metadata@>=0.9.0 <0.10.0", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-0.9.3.tgz" + }, + "google-auth-library": { + "version": "2.0.2", + "from": "google-auth-library@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-2.0.2.tgz", + "dependencies": { + "gcp-metadata": { + "version": "0.7.0", + "from": "gcp-metadata@>=0.7.0 <0.8.0", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-0.7.0.tgz" + } + } + }, "nan": { - "version": "2.12.1", + "version": "2.14.0", "from": "nan@>=2.11.1 <3.0.0", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.12.1.tgz" + "resolved": "https://registry.npmjs.org/nan/-/nan-2.14.0.tgz" }, "readable-stream": { - "version": "3.1.1", + "version": "3.4.0", "from": "readable-stream@>=2.0.0 <3.0.0||>=3.0.0 <4.0.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.1.1.tgz" + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.4.0.tgz" + }, + "semver": { + "version": "5.7.0", + "from": "semver@>=5.5.0 <6.0.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.0.tgz" }, "string_decoder": { "version": "1.2.0", @@ -45,37 +77,27 @@ "resolved": 
"https://registry.npmjs.org/string_decoder/-/string_decoder-1.2.0.tgz" }, "through2": { - "version": "3.0.0", + "version": "3.0.1", "from": "through2@>=3.0.0 <4.0.0", - "resolved": "https://registry.npmjs.org/through2/-/through2-3.0.0.tgz" + "resolved": "https://registry.npmjs.org/through2/-/through2-3.0.1.tgz" } } }, "@google-cloud/projectify": { - "version": "0.3.2", - "from": "@google-cloud/projectify@>=0.3.0 <0.4.0", - "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-0.3.2.tgz" + "version": "0.3.3", + "from": "@google-cloud/projectify@>=0.3.3 <0.4.0", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-0.3.3.tgz" }, "@google-cloud/promisify": { - "version": "0.3.1", - "from": "@google-cloud/promisify@>=0.3.0 <0.4.0", - "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-0.3.1.tgz" + "version": "0.4.0", + "from": "@google-cloud/promisify@>=0.4.0 <0.5.0", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-0.4.0.tgz" }, "@google-cloud/trace-agent": { - "version": "3.5.2", + "version": "3.6.1", "from": "@google-cloud/trace-agent@>=3.2.0 <4.0.0", - "resolved": "https://registry.npmjs.org/@google-cloud/trace-agent/-/trace-agent-3.5.2.tgz", + "resolved": "https://registry.npmjs.org/@google-cloud/trace-agent/-/trace-agent-3.6.1.tgz", "dependencies": { - "@google-cloud/common": { - "version": "0.30.2", - "from": "@google-cloud/common@>=0.30.0 <0.31.0", - "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.30.2.tgz" - }, - "google-auth-library": { - "version": "3.0.1", - "from": "google-auth-library@>=3.0.0 <4.0.0", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-3.0.1.tgz" - }, "uuid": { "version": "3.3.2", "from": "uuid@>=3.0.1 <4.0.0", @@ -134,14 +156,46 @@ "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz" }, "@sindresorhus/is": { - "version": "0.13.0", - "from": "@sindresorhus/is@>=0.13.0 <0.14.0", - "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-0.13.0.tgz" + "version": "0.15.0", + "from": "@sindresorhus/is@>=0.15.0 <0.16.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-0.15.0.tgz" + }, + "@sinonjs/commons": { + "version": "1.4.0", + "from": "@sinonjs/commons@>=1.2.0 <2.0.0", + "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-1.4.0.tgz", + "dev": true + }, + "@sinonjs/formatio": { + "version": "3.2.1", + "from": "@sinonjs/formatio@>=3.0.0 <4.0.0", + "resolved": "https://registry.npmjs.org/@sinonjs/formatio/-/formatio-3.2.1.tgz", + "dev": true, + "dependencies": { + "@sinonjs/samsam": { + "version": "3.3.2", + "from": "@sinonjs/samsam@>=3.1.0 <4.0.0", + "resolved": "https://registry.npmjs.org/@sinonjs/samsam/-/samsam-3.3.2.tgz", + "dev": true + } + } + }, + "@sinonjs/samsam": { + "version": "2.1.3", + "from": "@sinonjs/samsam@>=2.1.2 <3.0.0", + "resolved": "https://registry.npmjs.org/@sinonjs/samsam/-/samsam-2.1.3.tgz", + "dev": true + }, + "@sinonjs/text-encoding": { + "version": "0.7.1", + "from": "@sinonjs/text-encoding@>=0.7.1 <0.8.0", + "resolved": "https://registry.npmjs.org/@sinonjs/text-encoding/-/text-encoding-0.7.1.tgz", + "dev": true }, "@types/caseless": { - "version": "0.12.1", + "version": "0.12.2", "from": "@types/caseless@*", - "resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.1.tgz" + "resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.2.tgz" }, "@types/console-log-level": { "version": "1.4.0", @@ 
-164,9 +218,9 @@ "resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.0.tgz" }, "@types/node": { - "version": "10.12.20", + "version": "12.0.8", "from": "@types/node@*", - "resolved": "https://registry.npmjs.org/@types/node/-/node-10.12.20.tgz" + "resolved": "https://registry.npmjs.org/@types/node/-/node-12.0.8.tgz" }, "@types/request": { "version": "2.48.1", @@ -183,6 +237,11 @@ "from": "@types/tough-cookie@*", "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-2.3.5.tgz" }, + "abort-controller": { + "version": "3.0.0", + "from": "abort-controller@>=3.0.0 <4.0.0", + "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz" + }, "accept-encoding": { "version": "0.1.0", "from": "accept-encoding@>=0.1.0 <0.2.0", @@ -194,29 +253,35 @@ "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.5.tgz" }, "acorn": { - "version": "5.7.3", - "from": "acorn@>=5.0.3 <6.0.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-5.7.3.tgz" + "version": "6.1.1", + "from": "acorn@>=6.0.0 <7.0.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.1.1.tgz" }, "agent-base": { - "version": "4.2.1", + "version": "4.3.0", "from": "agent-base@>=4.1.0 <5.0.0", - "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.2.1.tgz" + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.3.0.tgz" }, "ajv": { - "version": "6.7.0", + "version": "6.10.0", "from": "ajv@>=6.5.5 <7.0.0", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.7.0.tgz" + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.10.0.tgz" }, "array-flatten": { "version": "1.1.1", "from": "array-flatten@1.1.1", "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz" }, + "array-from": { + "version": "2.1.1", + "from": "array-from@>=2.1.1 <3.0.0", + "resolved": "https://registry.npmjs.org/array-from/-/array-from-2.1.1.tgz", + "dev": true + }, "arrify": { - "version": "1.0.1", - "from": "arrify@>=1.0.1 <2.0.0", - "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz" + "version": "2.0.1", + "from": "arrify@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz" }, "asn1": { "version": "0.2.4", @@ -228,6 +293,12 @@ "from": "assert-plus@>=1.0.0 <2.0.0", "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz" }, + "assertion-error": { + "version": "1.1.0", + "from": "assertion-error@>=1.1.0 <2.0.0", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", + "dev": true + }, "async": { "version": "0.2.10", "from": "async@>=0.2.10 <0.3.0", @@ -236,7 +307,14 @@ "async-listener": { "version": "0.6.10", "from": "async-listener@>=0.6.0 <0.7.0", - "resolved": "https://registry.npmjs.org/async-listener/-/async-listener-0.6.10.tgz" + "resolved": "https://registry.npmjs.org/async-listener/-/async-listener-0.6.10.tgz", + "dependencies": { + "semver": { + "version": "5.7.0", + "from": "semver@>=5.3.0 <6.0.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.0.tgz" + } + } }, "asynckit": { "version": "0.4.0", @@ -271,9 +349,9 @@ "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.8.0.tgz" }, "axios": { - "version": "0.18.0", + "version": "0.18.1", "from": "axios@>=0.18.0 <0.19.0", - "resolved": "https://registry.npmjs.org/axios/-/axios-0.18.0.tgz" + "resolved": "https://registry.npmjs.org/axios/-/axios-0.18.1.tgz" }, "balanced-match": { "version": "1.0.0", @@ -301,9 +379,9 @@ "resolved": 
"https://registry.npmjs.org/bignumber.js/-/bignumber.js-7.2.1.tgz" }, "bindings": { - "version": "1.4.0", + "version": "1.5.0", "from": "bindings@>=1.2.1 <2.0.0", - "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.4.0.tgz" + "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz" }, "bintrees": { "version": "1.0.1", @@ -346,14 +424,15 @@ "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz" }, "builtin-modules": { - "version": "3.0.0", + "version": "3.1.0", "from": "builtin-modules@>=3.0.0 <4.0.0", - "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.0.0.tgz" + "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.1.0.tgz" }, "bunyan": { "version": "1.5.1", "from": "bunyan@1.5.1", - "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.5.1.tgz" + "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.5.1.tgz", + "dev": true }, "bytes": { "version": "3.0.0", @@ -365,10 +444,22 @@ "from": "caseless@>=0.3.0 <0.4.0", "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.3.0.tgz" }, + "chai": { + "version": "4.2.0", + "from": "chai@4.2.0", + "resolved": "https://registry.npmjs.org/chai/-/chai-4.2.0.tgz", + "dev": true + }, + "check-error": { + "version": "1.0.2", + "from": "check-error@>=1.0.2 <2.0.0", + "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz", + "dev": true + }, "coffee-script": { - "version": "1.12.4", - "from": "coffee-script@1.12.4", - "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.12.4.tgz" + "version": "1.6.0", + "from": "coffee-script@1.6.0", + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz" }, "combined-stream": { "version": "0.0.7", @@ -381,9 +472,9 @@ "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz" }, "console-log-level": { - "version": "1.4.0", + "version": "1.4.1", "from": "console-log-level@>=1.4.0 <2.0.0", - "resolved": "https://registry.npmjs.org/console-log-level/-/console-log-level-1.4.0.tgz" + "resolved": "https://registry.npmjs.org/console-log-level/-/console-log-level-1.4.1.tgz" }, "content-disposition": { "version": "0.5.2", @@ -435,10 +526,16 @@ "from": "debug@2.6.9", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz" }, + "deep-eql": { + "version": "3.0.1", + "from": "deep-eql@>=3.0.1 <4.0.0", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-3.0.1.tgz", + "dev": true + }, "delay": { - "version": "4.1.0", + "version": "4.3.0", "from": "delay@>=4.0.1 <5.0.0", - "resolved": "https://registry.npmjs.org/delay/-/delay-4.1.0.tgz" + "resolved": "https://registry.npmjs.org/delay/-/delay-4.3.0.tgz" }, "delayed-stream": { "version": "0.0.5", @@ -455,16 +552,23 @@ "from": "destroy@>=1.0.4 <1.1.0", "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.0.4.tgz" }, + "diff": { + "version": "3.5.0", + "from": "diff@>=3.5.0 <4.0.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz", + "dev": true + }, "dtrace-provider": { "version": "0.6.0", "from": "dtrace-provider@>=0.6.0 <0.7.0", "resolved": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.6.0.tgz", + "dev": true, "optional": true }, "duplexify": { - "version": "3.6.1", + "version": "3.7.1", "from": "duplexify@>=3.6.0 <4.0.0", - "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-3.6.1.tgz", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-3.7.1.tgz", "dependencies": { "readable-stream": { 
"version": "2.3.6", @@ -484,9 +588,9 @@ "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz" }, "ecdsa-sig-formatter": { - "version": "1.0.10", - "from": "ecdsa-sig-formatter@1.0.10", - "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.10.tgz" + "version": "1.0.11", + "from": "ecdsa-sig-formatter@1.0.11", + "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz" }, "ee-first": { "version": "1.1.1", @@ -514,9 +618,9 @@ "resolved": "https://registry.npmjs.org/ent/-/ent-2.2.0.tgz" }, "es6-promise": { - "version": "4.2.5", + "version": "4.2.8", "from": "es6-promise@>=4.0.3 <5.0.0", - "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.5.tgz" + "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.8.tgz" }, "es6-promisify": { "version": "5.0.0", @@ -538,6 +642,11 @@ "from": "etag@>=1.8.1 <1.9.0", "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz" }, + "event-target-shim": { + "version": "5.0.1", + "from": "event-target-shim@>=5.0.0 <6.0.0", + "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz" + }, "events": { "version": "1.1.1", "from": "events@1.1.1", @@ -640,9 +749,9 @@ "resolved": "https://registry.npmjs.org/findit2/-/findit2-2.2.3.tgz" }, "follow-redirects": { - "version": "1.6.1", - "from": "follow-redirects@>=1.3.0 <2.0.0", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.6.1.tgz", + "version": "1.5.10", + "from": "follow-redirects@1.5.10", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.5.10.tgz", "dependencies": { "debug": { "version": "3.1.0", @@ -689,14 +798,20 @@ "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz" }, "gaxios": { - "version": "1.2.7", - "from": "gaxios@>=1.0.4 <2.0.0", - "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-1.2.7.tgz" + "version": "1.8.4", + "from": "gaxios@>=1.2.1 <2.0.0", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-1.8.4.tgz" }, "gcp-metadata": { - "version": "0.9.3", - "from": "gcp-metadata@>=0.9.0 <0.10.0", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-0.9.3.tgz" + "version": "1.0.0", + "from": "gcp-metadata@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-1.0.0.tgz" + }, + "get-func-name": { + "version": "2.0.0", + "from": "get-func-name@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz", + "dev": true }, "getpass": { "version": "0.1.7", @@ -715,21 +830,21 @@ "optional": true }, "google-auth-library": { - "version": "2.0.2", - "from": "google-auth-library@>=2.0.0 <3.0.0", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-2.0.2.tgz", + "version": "3.1.2", + "from": "google-auth-library@>=3.1.1 <4.0.0", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-3.1.2.tgz", "dependencies": { - "gcp-metadata": { - "version": "0.7.0", - "from": "gcp-metadata@>=0.7.0 <0.8.0", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-0.7.0.tgz" + "semver": { + "version": "5.7.0", + "from": "semver@>=5.5.0 <6.0.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.0.tgz" } } }, "google-p12-pem": { - "version": "1.0.3", + "version": "1.0.4", "from": "google-p12-pem@>=1.0.0 <2.0.0", - "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-1.0.3.tgz" + "resolved": 
"https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-1.0.4.tgz" }, "graceful-fs": { "version": "4.1.11", @@ -737,14 +852,14 @@ "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.11.tgz" }, "gtoken": { - "version": "2.3.2", - "from": "gtoken@>=2.3.0 <3.0.0", - "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-2.3.2.tgz", + "version": "2.3.3", + "from": "gtoken@>=2.3.2 <3.0.0", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-2.3.3.tgz", "dependencies": { "mime": { - "version": "2.4.0", + "version": "2.4.4", "from": "mime@>=2.2.0 <3.0.0", - "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.0.tgz" + "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.4.tgz" } } }, @@ -809,9 +924,9 @@ "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz" }, "ms": { - "version": "2.1.1", + "version": "2.1.2", "from": "ms@>=2.1.1 <3.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz" + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz" } } }, @@ -846,9 +961,9 @@ "resolved": "https://registry.npmjs.org/is/-/is-3.3.0.tgz" }, "is-buffer": { - "version": "1.1.6", - "from": "is-buffer@>=1.1.5 <2.0.0", - "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz" + "version": "2.0.3", + "from": "is-buffer@>=2.0.2 <3.0.0", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.3.tgz" }, "is-typedarray": { "version": "1.0.0", @@ -905,15 +1020,21 @@ "from": "jsprim@>=1.2.2 <2.0.0", "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz" }, + "just-extend": { + "version": "4.0.2", + "from": "just-extend@>=4.0.2 <5.0.0", + "resolved": "https://registry.npmjs.org/just-extend/-/just-extend-4.0.2.tgz", + "dev": true + }, "jwa": { - "version": "1.2.0", - "from": "jwa@>=1.2.0 <2.0.0", - "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.2.0.tgz" + "version": "1.4.1", + "from": "jwa@>=1.4.1 <2.0.0", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz" }, "jws": { - "version": "3.2.1", + "version": "3.2.2", "from": "jws@>=3.1.5 <4.0.0", - "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.1.tgz" + "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz" }, "klaw": { "version": "1.3.1", @@ -932,31 +1053,54 @@ } } }, + "lodash": { + "version": "4.17.11", + "from": "lodash@>=4.17.11 <5.0.0", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.11.tgz", + "dev": true + }, + "lodash.get": { + "version": "4.4.2", + "from": "lodash.get@>=4.4.2 <5.0.0", + "resolved": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz", + "dev": true + }, "lodash.pickby": { "version": "4.6.0", "from": "lodash.pickby@>=4.6.0 <5.0.0", "resolved": "https://registry.npmjs.org/lodash.pickby/-/lodash.pickby-4.6.0.tgz" }, "logger-sharelatex": { - "version": "1.6.0", - "from": "logger-sharelatex@1.6.0", - "resolved": "https://registry.npmjs.org/logger-sharelatex/-/logger-sharelatex-1.6.0.tgz", + "version": "1.7.0", + "from": "logger-sharelatex@1.7.0", + "resolved": "https://registry.npmjs.org/logger-sharelatex/-/logger-sharelatex-1.7.0.tgz", "dependencies": { + "bunyan": { + "version": "1.8.12", + "from": "bunyan@1.8.12", + "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.12.tgz" + }, "caseless": { "version": "0.12.0", "from": "caseless@>=0.12.0 <0.13.0", "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz" }, "combined-stream": { - "version": "1.0.7", + "version": "1.0.8", "from": "combined-stream@>=1.0.6 <1.1.0", - "resolved": 
"https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.7.tgz" + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz" }, "delayed-stream": { "version": "1.0.0", "from": "delayed-stream@>=1.0.0 <1.1.0", "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz" }, + "dtrace-provider": { + "version": "0.8.7", + "from": "dtrace-provider@>=0.8.0 <0.9.0", + "resolved": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.8.7.tgz", + "optional": true + }, "forever-agent": { "version": "0.6.1", "from": "forever-agent@>=0.6.1 <0.7.0", @@ -994,6 +1138,12 @@ } } }, + "lolex": { + "version": "3.1.0", + "from": "lolex@>=3.0.0 <4.0.0", + "resolved": "https://registry.npmjs.org/lolex/-/lolex-3.1.0.tgz", + "dev": true + }, "long": { "version": "4.0.0", "from": "long@>=4.0.0 <5.0.0", @@ -1035,15 +1185,10 @@ "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz" }, "metrics-sharelatex": { - "version": "2.1.1", - "from": "metrics-sharelatex@2.1.1", - "resolved": "https://registry.npmjs.org/metrics-sharelatex/-/metrics-sharelatex-2.1.1.tgz", + "version": "2.2.0", + "from": "metrics-sharelatex@2.2.0", + "resolved": "https://registry.npmjs.org/metrics-sharelatex/-/metrics-sharelatex-2.2.0.tgz", "dependencies": { - "coffee-script": { - "version": "1.6.0", - "from": "coffee-script@1.6.0", - "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz" - }, "underscore": { "version": "1.6.0", "from": "underscore@>=1.6.0 <1.7.0", @@ -1123,6 +1268,12 @@ "from": "module-details-from-path@>=1.0.3 <2.0.0", "resolved": "https://registry.npmjs.org/module-details-from-path/-/module-details-from-path-1.0.3.tgz" }, + "moment": { + "version": "2.24.0", + "from": "moment@>=2.10.6 <3.0.0", + "resolved": "https://registry.npmjs.org/moment/-/moment-2.24.0.tgz", + "optional": true + }, "ms": { "version": "2.0.0", "from": "ms@2.0.0", @@ -1165,15 +1316,41 @@ "from": "negotiator@0.6.1", "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.1.tgz" }, + "nise": { + "version": "1.5.0", + "from": "nise@>=1.4.6 <2.0.0", + "resolved": "https://registry.npmjs.org/nise/-/nise-1.5.0.tgz", + "dev": true, + "dependencies": { + "isarray": { + "version": "0.0.1", + "from": "isarray@0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "dev": true + }, + "lolex": { + "version": "4.1.0", + "from": "lolex@>=4.1.0 <5.0.0", + "resolved": "https://registry.npmjs.org/lolex/-/lolex-4.1.0.tgz", + "dev": true + }, + "path-to-regexp": { + "version": "1.7.0", + "from": "path-to-regexp@>=1.7.0 <2.0.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.7.0.tgz", + "dev": true + } + } + }, "node-fetch": { - "version": "2.3.0", - "from": "node-fetch@>=2.2.0 <3.0.0", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.3.0.tgz" + "version": "2.6.0", + "from": "node-fetch@>=2.3.0 <3.0.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.0.tgz" }, "node-forge": { - "version": "0.7.6", - "from": "node-forge@>=0.7.4 <0.8.0", - "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.7.6.tgz" + "version": "0.8.4", + "from": "node-forge@>=0.8.0 <0.9.0", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.8.4.tgz" }, "node-transloadit": { "version": "0.0.4", @@ -1228,14 +1405,14 @@ "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz" }, "p-limit": { - "version": "2.1.0", - "from": "p-limit@>=2.0.0 <3.0.0", - "resolved": 
"https://registry.npmjs.org/p-limit/-/p-limit-2.1.0.tgz" + "version": "2.2.0", + "from": "p-limit@>=2.2.0 <3.0.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.2.0.tgz" }, "p-try": { - "version": "2.0.0", + "version": "2.2.0", "from": "p-try@>=2.0.0 <3.0.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.0.0.tgz" + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz" }, "parse-duration": { "version": "0.1.1", @@ -1243,9 +1420,9 @@ "resolved": "https://registry.npmjs.org/parse-duration/-/parse-duration-0.1.1.tgz" }, "parse-ms": { - "version": "2.0.0", + "version": "2.1.0", "from": "parse-ms@>=2.0.0 <3.0.0", - "resolved": "https://registry.npmjs.org/parse-ms/-/parse-ms-2.0.0.tgz" + "resolved": "https://registry.npmjs.org/parse-ms/-/parse-ms-2.1.0.tgz" }, "parseurl": { "version": "1.3.2", @@ -1267,6 +1444,12 @@ "from": "path-to-regexp@0.1.7", "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz" }, + "pathval": { + "version": "1.1.0", + "from": "pathval@>=1.1.0 <2.0.0", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.0.tgz", + "dev": true + }, "performance-now": { "version": "2.1.0", "from": "performance-now@>=2.1.0 <3.0.0", @@ -1293,14 +1476,21 @@ "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.0.tgz" }, "prom-client": { - "version": "11.2.1", + "version": "11.5.1", "from": "prom-client@>=11.1.3 <12.0.0", - "resolved": "https://registry.npmjs.org/prom-client/-/prom-client-11.2.1.tgz" + "resolved": "https://registry.npmjs.org/prom-client/-/prom-client-11.5.1.tgz" }, "protobufjs": { "version": "6.8.8", "from": "protobufjs@>=6.8.6 <6.9.0", - "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.8.8.tgz" + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.8.8.tgz", + "dependencies": { + "@types/node": { + "version": "10.14.9", + "from": "@types/node@>=10.1.0 <11.0.0", + "resolved": "https://registry.npmjs.org/@types/node/-/node-10.14.9.tgz" + } + } }, "proxy-addr": { "version": "2.0.4", @@ -1308,9 +1498,9 @@ "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.4.tgz" }, "psl": { - "version": "1.1.31", + "version": "1.1.32", "from": "psl@>=1.1.24 <2.0.0", - "resolved": "https://registry.npmjs.org/psl/-/psl-1.1.31.tgz" + "resolved": "https://registry.npmjs.org/psl/-/psl-1.1.32.tgz" }, "punycode": { "version": "1.3.2", @@ -1333,9 +1523,9 @@ "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz" }, "raven": { - "version": "1.2.1", - "from": "raven@>=1.1.3 <2.0.0", - "resolved": "https://registry.npmjs.org/raven/-/raven-1.2.1.tgz", + "version": "1.1.3", + "from": "raven@1.1.3", + "resolved": "https://registry.npmjs.org/raven/-/raven-1.1.3.tgz", "dependencies": { "uuid": { "version": "3.0.0", @@ -1398,9 +1588,21 @@ } }, "require-in-the-middle": { - "version": "3.1.0", - "from": "require-in-the-middle@>=3.0.0 <4.0.0", - "resolved": "https://registry.npmjs.org/require-in-the-middle/-/require-in-the-middle-3.1.0.tgz" + "version": "4.0.0", + "from": "require-in-the-middle@>=4.0.0 <5.0.0", + "resolved": "https://registry.npmjs.org/require-in-the-middle/-/require-in-the-middle-4.0.0.tgz", + "dependencies": { + "debug": { + "version": "4.1.1", + "from": "debug@>=4.1.1 <5.0.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz" + }, + "ms": { + "version": "2.1.2", + "from": "ms@^2.1.1", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz" + } + } }, "require-like": { "version": "0.1.2", @@ -1409,9 
+1611,9 @@ "dev": true }, "resolve": { - "version": "1.10.0", - "from": "resolve@>=1.5.0 <2.0.0", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.10.0.tgz" + "version": "1.11.0", + "from": "resolve@>=1.10.0 <2.0.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.11.0.tgz" }, "response": { "version": "0.14.0", @@ -1468,9 +1670,9 @@ "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz" }, "semver": { - "version": "5.6.0", - "from": "semver@>=5.5.0 <6.0.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.6.0.tgz" + "version": "6.1.1", + "from": "semver@>=6.0.0 <7.0.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.1.1.tgz" }, "send": { "version": "0.16.2", @@ -1511,6 +1713,12 @@ "from": "shimmer@>=1.2.0 <2.0.0", "resolved": "https://registry.npmjs.org/shimmer/-/shimmer-1.2.1.tgz" }, + "sinon": { + "version": "7.1.1", + "from": "sinon@7.1.1", + "resolved": "https://registry.npmjs.org/sinon/-/sinon-7.1.1.tgz", + "dev": true + }, "sntp": { "version": "0.1.4", "from": "sntp@>=0.1.0 <0.2.0", @@ -1583,10 +1791,11 @@ "from": "string_decoder@>=0.10.0 <0.11.0", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" }, - "symbol-observable": { - "version": "1.2.0", - "from": "symbol-observable@>=1.2.0 <2.0.0", - "resolved": "https://registry.npmjs.org/symbol-observable/-/symbol-observable-1.2.0.tgz" + "supports-color": { + "version": "5.5.0", + "from": "supports-color@>=5.5.0 <6.0.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "dev": true }, "tdigest": { "version": "0.1.1", @@ -1649,6 +1858,12 @@ "from": "tweetnacl@>=0.14.0 <0.15.0", "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz" }, + "type-detect": { + "version": "4.0.8", + "from": "type-detect@>=4.0.5 <5.0.0", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "dev": true + }, "type-is": { "version": "1.6.16", "from": "type-is@>=1.6.16 <1.7.0", diff --git a/services/filestore/package.json b/services/filestore/package.json index a9163666cd..bcd8011f10 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -29,8 +29,8 @@ "fs-extra": "^1.0.0", "heapdump": "^0.3.2", "knox": "~0.9.1", - "logger-sharelatex": "^1.6.0", - "metrics-sharelatex": "^2.1.1", + "logger-sharelatex": "^1.7.0", + "metrics-sharelatex": "^2.2.0", "mocha": "5.2.0", "node-transloadit": "0.0.4", "node-uuid": "~1.4.1", From 013400d7a4e5cc1380604c2d0141e7df96c17961 Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Wed, 19 Jun 2019 14:04:57 +0100 Subject: [PATCH 07/20] Re-add environment variable for conversions --- services/filestore/docker-compose.ci.yml | 1 + services/filestore/docker-compose.yml | 1 + 2 files changed, 2 insertions(+) diff --git a/services/filestore/docker-compose.ci.yml b/services/filestore/docker-compose.ci.yml index d2bcca9ec6..765c79aac5 100644 --- a/services/filestore/docker-compose.ci.yml +++ b/services/filestore/docker-compose.ci.yml @@ -22,6 +22,7 @@ services: REDIS_HOST: redis MONGO_HOST: mongo POSTGRES_HOST: postgres + ENABLE_CONVERSIONS: "true" MOCHA_GREP: ${MOCHA_GREP} NODE_ENV: test depends_on: diff --git a/services/filestore/docker-compose.yml b/services/filestore/docker-compose.yml index 234b93e236..e282295b14 100644 --- a/services/filestore/docker-compose.yml +++ b/services/filestore/docker-compose.yml @@ -28,6 +28,7 @@ services: MONGO_HOST: mongo POSTGRES_HOST: postgres MOCHA_GREP: ${MOCHA_GREP} + ENABLE_CONVERSIONS: "true" 
LOG_LEVEL: ERROR NODE_ENV: test user: node From 7d900b57bfe1c0ebaa9fa03f5f18f1f74b101402 Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Wed, 19 Jun 2019 12:58:17 -0400 Subject: [PATCH 08/20] Fix createUnbufferedStream() function call In 49a21155f642670dfea264ac73fb60241f37cb87, I managed to incorrectly write the `createUnbufferedStream()` function from the AWS SDK as `getUnbufferedStream()` and to consistently use that naming in the unit tests. This commit fixes that. I have tested again on S3. --- services/filestore/app/coffee/S3PersistorManager.coffee | 2 +- .../filestore/test/unit/coffee/S3PersistorManagerTests.coffee | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index 940ba90a95..8f1b080efd 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -102,7 +102,7 @@ module.exports = if statusCode not in [200, 206] logger.log({bucketName: bucketName, key: key }, "error getting file from s3: #{statusCode}") return callback(new Error("Got non-200 response from S3: #{statusCode} #{statusMessage}"), null) - stream = response.httpResponse.getUnbufferedStream() + stream = response.httpResponse.createUnbufferedStream() callback(null, stream) request.on 'error', (err) => diff --git a/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee index 5244fcb8f2..8a1b2e4d49 100644 --- a/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee @@ -32,7 +32,7 @@ describe "S3PersistorManagerTests", -> send: sinon.stub() @s3Response = httpResponse: - getUnbufferedStream: sinon.stub() + createUnbufferedStream: sinon.stub() @s3Client = copyObject: sinon.stub() headObject: sinon.stub() @@ -64,7 +64,7 @@ describe "S3PersistorManagerTests", -> @expectedStream = { expectedStream: true } @s3Request.send.callsFake () => @s3EventHandlers.httpHeaders(200, {}, @s3Response, "OK") - @s3Response.httpResponse.getUnbufferedStream.returns(@expectedStream) + @s3Response.httpResponse.createUnbufferedStream.returns(@expectedStream) it "returns a stream", (done) -> @S3PersistorManager.getFileStream @bucketName, @key, {}, (err, stream) => From 47985892136faeac8d6c7dcc3add79ef55a3274d Mon Sep 17 00:00:00 2001 From: Nate Stemen Date: Fri, 25 Oct 2019 11:51:58 -0400 Subject: [PATCH 09/20] replace private link with public one --- services/filestore/.github/PULL_REQUEST_TEMPLATE.md | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/services/filestore/.github/PULL_REQUEST_TEMPLATE.md b/services/filestore/.github/PULL_REQUEST_TEMPLATE.md index ed25ee83c1..12bb2eeb3f 100644 --- a/services/filestore/.github/PULL_REQUEST_TEMPLATE.md +++ b/services/filestore/.github/PULL_REQUEST_TEMPLATE.md @@ -1,4 +1,7 @@ - + + + + ### Description From aba0d14eddcdecde52aa04332eb1a985701265e4 Mon Sep 17 00:00:00 2001 From: Nate Stemen Date: Fri, 25 Oct 2019 11:52:54 -0400 Subject: [PATCH 10/20] bump build script to 1.1.24 --- services/filestore/Makefile | 4 ++-- services/filestore/buildscript.txt | 4 +++- services/filestore/docker-compose.ci.yml | 2 +- services/filestore/docker-compose.yml | 2 +- 4 files changed, 7 insertions(+), 5 deletions(-) diff --git a/services/filestore/Makefile b/services/filestore/Makefile index 75286c139a..e83a0696e0 100644 --- 
a/services/filestore/Makefile +++ b/services/filestore/Makefile @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.21 +# Version: 1.1.24 BUILD_NUMBER ?= local BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) @@ -35,7 +35,7 @@ test_clean: $(DOCKER_COMPOSE) down -v -t 0 test_acceptance_pre_run: - @[ ! -f test/acceptance/scripts/pre-run ] && echo "filestore has no pre acceptance tests task" || $(DOCKER_COMPOSE) run --rm test_acceptance test/acceptance/scripts/pre-run + @[ ! -f test/acceptance/js/scripts/pre-run ] && echo "filestore has no pre acceptance tests task" || $(DOCKER_COMPOSE) run --rm test_acceptance test/acceptance/js/scripts/pre-run build: docker build --pull --tag ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ --tag gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ diff --git a/services/filestore/buildscript.txt b/services/filestore/buildscript.txt index 51452eb242..dc8c156383 100644 --- a/services/filestore/buildscript.txt +++ b/services/filestore/buildscript.txt @@ -5,4 +5,6 @@ filestore --dependencies=mongo,redis --docker-repos=gcr.io/overleaf-ops --build-target=docker ---script-version=1.1.21 +--script-version=1.1.24 +--env-pass-through= +--public-repo=True diff --git a/services/filestore/docker-compose.ci.yml b/services/filestore/docker-compose.ci.yml index 765c79aac5..e5557b7e91 100644 --- a/services/filestore/docker-compose.ci.yml +++ b/services/filestore/docker-compose.ci.yml @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.21 +# Version: 1.1.24 version: "2" diff --git a/services/filestore/docker-compose.yml b/services/filestore/docker-compose.yml index e282295b14..b7fd2afc71 100644 --- a/services/filestore/docker-compose.yml +++ b/services/filestore/docker-compose.yml @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.21 +# Version: 1.1.24 version: "2" From 2ec38068aa6542bf66b37a8a771e7af9383c9fb7 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 6 Dec 2019 14:35:13 +0000 Subject: [PATCH 11/20] add comments about aws-sdk and s3 backends --- services/filestore/app/coffee/AWSSDKPersistorManager.coffee | 5 +++++ services/filestore/app/coffee/S3PersistorManager.coffee | 4 ++++ 2 files changed, 9 insertions(+) diff --git a/services/filestore/app/coffee/AWSSDKPersistorManager.coffee b/services/filestore/app/coffee/AWSSDKPersistorManager.coffee index 5be80506f5..168fc68d54 100644 --- a/services/filestore/app/coffee/AWSSDKPersistorManager.coffee +++ b/services/filestore/app/coffee/AWSSDKPersistorManager.coffee @@ -1,3 +1,8 @@ +# This module is not used in production, which currently uses +# S3PersistorManager. The intention is to migrate S3PersistorManager to use the +# latest aws-sdk and delete this module so that PersistorManager would load the +# same backend for both the 's3' and 'aws-sdk' options. 
+ logger = require "logger-sharelatex" aws = require "aws-sdk" _ = require "underscore" diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index 8f1b080efd..ca74bdb013 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -1,3 +1,7 @@ +# This module is the one which is used in production. It needs to be migrated +# to use aws-sdk throughout, see the comments in AWSSDKPersistorManager for +# details. The knox library is unmaintained and has bugs. + http = require('http') http.globalAgent.maxSockets = 300 https = require('https') From 86b9e4b53a349a808755b155b5050743db9127c1 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Thu, 5 Dec 2019 13:51:27 +0000 Subject: [PATCH 12/20] Rename request -> s3Request to prevent overwriting main import --- services/filestore/app/coffee/S3PersistorManager.coffee | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index ca74bdb013..4c33fd84d2 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -95,9 +95,9 @@ module.exports = } if opts.start? and opts.end? s3Params['Range'] = "bytes=#{opts.start}-#{opts.end}" - request = s3.getObject(s3Params) + s3Request = s3.getObject(s3Params) - request.on 'httpHeaders', (statusCode, headers, response, statusMessage) => + s3Request.on 'httpHeaders', (statusCode, headers, response, statusMessage) => if statusCode in [403, 404] # S3 returns a 403 instead of a 404 when the user doesn't have # permission to list the bucket contents. 
@@ -109,11 +109,11 @@ module.exports = stream = response.httpResponse.createUnbufferedStream() callback(null, stream) - request.on 'error', (err) => + s3Request.on 'error', (err) => logger.err({ err: err, bucketName: bucketName, key: key }, "error getting file stream from s3") callback(err) - request.send() + s3Request.send() getFileSize: (bucketName, key, callback) -> logger.log({ bucketName: bucketName, key: key }, "getting file size from S3") From 96457597ac30dc23cc723072ae0fbb45c8701f81 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Thu, 5 Dec 2019 13:55:08 +0000 Subject: [PATCH 13/20] Add fake s3 server and initial config --- services/filestore/config/settings.defaults.coffee | 1 + services/filestore/docker-compose.ci.yml | 11 +++++++++++ services/filestore/docker-compose.yml | 12 ++++++++++++ 3 files changed, 24 insertions(+) diff --git a/services/filestore/config/settings.defaults.coffee b/services/filestore/config/settings.defaults.coffee index 550cfd2694..29010e85ee 100644 --- a/services/filestore/config/settings.defaults.coffee +++ b/services/filestore/config/settings.defaults.coffee @@ -16,6 +16,7 @@ settings = s3: key: process.env['AWS_KEY'] secret: process.env['AWS_SECRET'] + endpoint: process.env['AWS_S3_ENDPOINT'] stores: user_files: process.env['AWS_S3_USER_FILES_BUCKET_NAME'] template_files: process.env['AWS_S3_TEMPLATE_FILES_BUCKET_NAME'] diff --git a/services/filestore/docker-compose.ci.yml b/services/filestore/docker-compose.ci.yml index e5557b7e91..790109f70b 100644 --- a/services/filestore/docker-compose.ci.yml +++ b/services/filestore/docker-compose.ci.yml @@ -25,9 +25,16 @@ services: ENABLE_CONVERSIONS: "true" MOCHA_GREP: ${MOCHA_GREP} NODE_ENV: test + AWS_KEY: fake + AWS_SECRET: fake + AWS_S3_USER_FILES_BUCKET_NAME: fake_user_files + AWS_S3_TEMPLATE_FILES_BUCKET_NAME: fake_template_files + AWS_S3_PUBLIC_FILES_BUCKET_NAME: fake_public_files + AWS_S3_ENDPOINT: http://fakes3:9090 depends_on: - mongo - redis + - fakes3 user: node command: npm run test:acceptance:_run @@ -46,3 +53,7 @@ services: mongo: image: mongo:3.4 + + fakes3: + image: adobe/s3mock + command: --initialBuckets=fake_user_files,fake_template_files,fake_public_files diff --git a/services/filestore/docker-compose.yml b/services/filestore/docker-compose.yml index b7fd2afc71..cd03a9daf4 100644 --- a/services/filestore/docker-compose.yml +++ b/services/filestore/docker-compose.yml @@ -31,10 +31,17 @@ services: ENABLE_CONVERSIONS: "true" LOG_LEVEL: ERROR NODE_ENV: test + AWS_KEY: fake + AWS_SECRET: fake + AWS_S3_USER_FILES_BUCKET_NAME: fake_user_files + AWS_S3_TEMPLATE_FILES_BUCKET_NAME: fake_template_files + AWS_S3_PUBLIC_FILES_BUCKET_NAME: fake_public_files + AWS_S3_ENDPOINT: http://fakes3:9090 user: node depends_on: - mongo - redis + - fakes3 command: npm run test:acceptance @@ -53,4 +60,9 @@ services: mongo: image: mongo:3.4 + fakes3: + image: adobe/s3mock + environment: + - initialBuckets=fake_user_files,fake_template_files,fake_public_files + From c01603b1e71afedeb2471c358e296c3677412a56 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Thu, 5 Dec 2019 14:25:25 +0000 Subject: [PATCH 14/20] Support custom S3 endpoints --- .../app/coffee/S3PersistorManager.coffee | 60 ++++++++++++------- 1 file changed, 37 insertions(+), 23 deletions(-) diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index 4c33fd84d2..5ee9e25865 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ 
b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -16,10 +16,15 @@ LocalFileWriter = require("./LocalFileWriter") Errors = require("./Errors") _ = require("underscore") awsS3 = require "aws-sdk/clients/s3" +URL = require('url') thirtySeconds = 30 * 1000 buildDefaultOptions = (bucketName, method, key)-> + if settings.filestore.s3.endpoint + endpoint = "#{settings.filestore.s3.endpoint}/#{bucketName}" + else + endpoint = "https://#{bucketName}.s3.amazonaws.com" return { aws: key: settings.filestore.s3.key @@ -27,32 +32,47 @@ buildDefaultOptions = (bucketName, method, key)-> bucket: bucketName method: method timeout: thirtySeconds - uri:"https://#{bucketName}.s3.amazonaws.com/#{key}" + uri:"#{endpoint}/#{key}" } -defaultS3Client = new awsS3({ - credentials: - accessKeyId: settings.filestore.s3.key, - secretAccessKey: settings.filestore.s3.secret -}) +getS3Options = (credentials) -> + options = + credentials: + accessKeyId: credentials.auth_key + secretAccessKey: credentials.auth_secret + + if settings.filestore.s3.endpoint + options.endpoint = settings.filestore.s3.endpoint + options.sslEnabled = false + + return options + +defaultS3Client = new awsS3(getS3Options({ + auth_key: settings.filestore.s3.key, + auth_secret: settings.filestore.s3.secret +})) getS3Client = (credentials) -> if credentials? - return new awsS3({ - credentials: - accessKeyId: credentials.auth_key - secretAccessKey: credentials.auth_secret - }) + return new awsS3(getS3Options(credentials)) else return defaultS3Client +getKnoxClient = (bucketName) => + options = + key: settings.filestore.s3.key + secret: settings.filestore.s3.secret + bucket: bucketName + if settings.filestore.s3.endpoint + endpoint = URL.parse(settings.filestore.s3.endpoint) + options.endpoint = endpoint.hostname + options.port = endpoint.port + return knox.createClient(options) + module.exports = sendFile: (bucketName, key, fsPath, callback)-> - s3Client = knox.createClient - key: settings.filestore.s3.key - secret: settings.filestore.s3.secret - bucket: bucketName + s3Client = getKnoxClient(bucketName) putEventEmiter = s3Client.putFile fsPath, key, (err, res)-> if err? logger.err err:err, bucketName:bucketName, key:key, fsPath:fsPath,"something went wrong uploading file to s3" @@ -171,10 +191,7 @@ module.exports = _callback = () -> logger.log key: key, bucketName: bucketName, "deleting directory" - s3Client = knox.createClient - key: settings.filestore.s3.key - secret: settings.filestore.s3.secret - bucket: bucketName + s3Client = getKnoxClient(bucketName) s3Client.list prefix:key, (err, data)-> if err? logger.err err:err, bucketName:bucketName, key:key, "something went wrong listing prefix in aws" @@ -200,10 +217,7 @@ module.exports = directorySize:(bucketName, key, callback)-> logger.log bucketName:bucketName, key:key, "get project size in s3" - s3Client = knox.createClient - key: settings.filestore.s3.key - secret: settings.filestore.s3.secret - bucket: bucketName + s3Client = getKnoxClient(bucketName) s3Client.list prefix:key, (err, data)-> if err? 
logger.err err:err, bucketName:bucketName, key:key, "something went wrong listing prefix in aws" From 1d1106bc679e344653c5feef96dca4a3124d6a38 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Thu, 5 Dec 2019 15:25:35 +0000 Subject: [PATCH 15/20] Add metric for s3 egress --- .../app/coffee/S3PersistorManager.coffee | 5 +++ services/filestore/docker-compose.ci.yml | 1 + services/filestore/docker-compose.yml | 1 + .../acceptance/coffee/SendingFileTest.coffee | 33 ++++++++++++++----- 4 files changed, 32 insertions(+), 8 deletions(-) diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index 5ee9e25865..14ab1bffe5 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -9,6 +9,7 @@ https.globalAgent.maxSockets = 300 settings = require("settings-sharelatex") request = require("request") logger = require("logger-sharelatex") +metrics = require("metrics-sharelatex") fs = require("fs") knox = require("knox") path = require("path") @@ -73,7 +74,9 @@ module.exports = sendFile: (bucketName, key, fsPath, callback)-> s3Client = getKnoxClient(bucketName) + uploaded = 0 putEventEmiter = s3Client.putFile fsPath, key, (err, res)-> + metrics.count 's3.egress', uploaded if err? logger.err err:err, bucketName:bucketName, key:key, fsPath:fsPath,"something went wrong uploading file to s3" return callback(err) @@ -88,6 +91,8 @@ module.exports = putEventEmiter.on "error", (err)-> logger.err err:err, bucketName:bucketName, key:key, fsPath:fsPath, "error emmited on put of file" callback err + putEventEmiter.on "progress", (progress)-> + uploaded = progress.written sendStream: (bucketName, key, readStream, callback)-> logger.log bucketName:bucketName, key:key, "sending file to s3" diff --git a/services/filestore/docker-compose.ci.yml b/services/filestore/docker-compose.ci.yml index 790109f70b..d7b97ac011 100644 --- a/services/filestore/docker-compose.ci.yml +++ b/services/filestore/docker-compose.ci.yml @@ -25,6 +25,7 @@ services: ENABLE_CONVERSIONS: "true" MOCHA_GREP: ${MOCHA_GREP} NODE_ENV: test + USE_PROM_METRICS: "true" AWS_KEY: fake AWS_SECRET: fake AWS_S3_USER_FILES_BUCKET_NAME: fake_user_files diff --git a/services/filestore/docker-compose.yml b/services/filestore/docker-compose.yml index cd03a9daf4..c2c2ed565d 100644 --- a/services/filestore/docker-compose.yml +++ b/services/filestore/docker-compose.yml @@ -31,6 +31,7 @@ services: ENABLE_CONVERSIONS: "true" LOG_LEVEL: ERROR NODE_ENV: test + USE_PROM_METRICS: "true" AWS_KEY: fake AWS_SECRET: fake AWS_S3_USER_FILES_BUCKET_NAME: fake_user_files diff --git a/services/filestore/test/acceptance/coffee/SendingFileTest.coffee b/services/filestore/test/acceptance/coffee/SendingFileTest.coffee index b77afb866b..fb03697cb0 100644 --- a/services/filestore/test/acceptance/coffee/SendingFileTest.coffee +++ b/services/filestore/test/acceptance/coffee/SendingFileTest.coffee @@ -10,8 +10,14 @@ request = require("request") settings = require("settings-sharelatex") FilestoreApp = require "./FilestoreApp" -describe "Filestore", -> +getMetric = (filestoreUrl, metric, cb) -> + request.get "#{filestoreUrl}/metrics", (err, res) -> + expect(res.statusCode).to.equal 200 + metricRegex = new RegExp("^#{metric}{[^}]+} ([0-9]+)$", "m") + cb(parseInt(metricRegex.exec(res.body)?[1] || '0')) + +describe "Filestore", -> before (done)-> @localFileReadPath = "/tmp/filestore_acceptence_tests_file_read.txt" @localFileWritePath = 
"/tmp/filestore_acceptence_tests_file_write.txt" @@ -27,10 +33,10 @@ describe "Filestore", -> beforeEach (done)-> FilestoreApp.ensureRunning => - fs.unlink @localFileWritePath, -> - done() - - + fs.unlink @localFileWritePath, => + getMetric @filestoreUrl, 's3_egress', (metric) => + @previousEgress = metric + done() it "should send a 200 for status endpoint", (done)-> request "#{@filestoreUrl}/status", (err, response, body)-> @@ -59,6 +65,11 @@ describe "Filestore", -> response.statusCode.should.equal 404 done() + it 'should record an egress metric for the upload', (done) -> + getMetric @filestoreUrl, 's3_egress', (metric) => + expect(metric - @previousEgress).to.equal @constantFileContent.length + done() + it "should return the file size on a HEAD request", (done) -> expectedLength = Buffer.byteLength(@constantFileContent) request.head @fileUrl, (err, res) => @@ -128,11 +139,17 @@ describe "Filestore", -> @file_id = Math.random() @fileUrl = "#{@filestoreUrl}/project/acceptence_tests/file/#{@file_id}" @localFileReadPath = __dirname + '/../../fixtures/test.pdf' + fs.stat @localFileReadPath, (err, stat) => + @localFileSize = stat.size + writeStream = request.post(@fileUrl) - writeStream = request.post(@fileUrl) + writeStream.on "end", done + fs.createReadStream(@localFileReadPath).pipe writeStream - writeStream.on "end", done - fs.createReadStream(@localFileReadPath).pipe writeStream + it 'should record an egress metric for the upload', (done) -> + getMetric @filestoreUrl, 's3_egress', (metric) => + expect(metric - @previousEgress).to.equal @localFileSize + done() it "should be able get the file back", (done)-> @timeout(1000 * 10) From 48aa14159163217deb75181e4d7435f325e163e5 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Thu, 5 Dec 2019 15:46:14 +0000 Subject: [PATCH 16/20] Add metric for s3 ingress --- .../app/coffee/S3PersistorManager.coffee | 3 ++ .../acceptance/coffee/SendingFileTest.coffee | 38 ++++++++++++++++--- .../coffee/S3PersistorManagerTests.coffee | 1 + 3 files changed, 37 insertions(+), 5 deletions(-) diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index 14ab1bffe5..b575001fb3 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -132,6 +132,9 @@ module.exports = logger.log({bucketName: bucketName, key: key }, "error getting file from s3: #{statusCode}") return callback(new Error("Got non-200 response from S3: #{statusCode} #{statusMessage}"), null) stream = response.httpResponse.createUnbufferedStream() + stream.on 'data', (data) -> + metrics.count 's3.ingress', data.byteLength + callback(null, stream) s3Request.on 'error', (err) => diff --git a/services/filestore/test/acceptance/coffee/SendingFileTest.coffee b/services/filestore/test/acceptance/coffee/SendingFileTest.coffee index fb03697cb0..cd1fa167f5 100644 --- a/services/filestore/test/acceptance/coffee/SendingFileTest.coffee +++ b/services/filestore/test/acceptance/coffee/SendingFileTest.coffee @@ -9,6 +9,7 @@ fs = require("fs") request = require("request") settings = require("settings-sharelatex") FilestoreApp = require "./FilestoreApp" +async = require('async') getMetric = (filestoreUrl, metric, cb) -> @@ -33,10 +34,19 @@ describe "Filestore", -> beforeEach (done)-> FilestoreApp.ensureRunning => - fs.unlink @localFileWritePath, => - getMetric @filestoreUrl, 's3_egress', (metric) => - @previousEgress = metric - done() + async.parallel [ + (cb) => + fs.unlink 
@localFileWritePath, () -> + cb() + (cb) => + getMetric @filestoreUrl, 's3_egress', (metric) => + @previousEgress = metric + cb() + (cb) => + getMetric @filestoreUrl, 's3_ingress', (metric) => + @previousIngress = metric + cb() + ], done it "should send a 200 for status endpoint", (done)-> request "#{@filestoreUrl}/status", (err, response, body)-> @@ -83,7 +93,14 @@ describe "Filestore", -> body.should.equal @constantFileContent done() - it "should be able to get back the first 8 bytes of the file", (done) -> + it "should record an ingress metric when downloading the file", (done)-> + @timeout(1000 * 10) + request.get @fileUrl, () => + getMetric @filestoreUrl, 's3_ingress', (metric) => + expect(metric - @previousIngress).to.equal @constantFileContent.length + done() + + it "should be able to get back the first 9 bytes of the file", (done) -> @timeout(1000 * 10) options = uri: @fileUrl @@ -93,6 +110,17 @@ describe "Filestore", -> body.should.equal 'hello wor' done() + it "should record an ingress metric for a partial download", (done)-> + @timeout(1000 * 10) + options = + uri: @fileUrl + headers: + 'Range': 'bytes=0-8' + request.get options, ()=> + getMetric @filestoreUrl, 's3_ingress', (metric) => + expect(metric - @previousIngress).to.equal 9 + done() + it "should be able to get back bytes 4 through 10 of the file", (done) -> @timeout(1000 * 10) options = diff --git a/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee index 8a1b2e4d49..a5ab5c2932 100644 --- a/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee @@ -62,6 +62,7 @@ describe "S3PersistorManagerTests", -> describe "success", -> beforeEach () -> @expectedStream = { expectedStream: true } + @expectedStream.on = sinon.stub() @s3Request.send.callsFake () => @s3EventHandlers.httpHeaders(200, {}, @s3Response, "OK") @s3Response.httpResponse.createUnbufferedStream.returns(@expectedStream) From 6f326d56505f53106908b607d139729176fa2c93 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 9 Dec 2019 17:41:20 +0000 Subject: [PATCH 17/20] Use SSL setting based on url protocol --- services/filestore/app/coffee/S3PersistorManager.coffee | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index b575001fb3..89522a4643 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -43,8 +43,9 @@ getS3Options = (credentials) -> secretAccessKey: credentials.auth_secret if settings.filestore.s3.endpoint + endpoint = URL.parse(settings.filestore.s3.endpoint) options.endpoint = settings.filestore.s3.endpoint - options.sslEnabled = false + options.sslEnabled = endpoint.protocol == 'https' return options From cf684dcd9865bd6f9e49b6b8ceb7c2c2f6aa8d0b Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Tue, 10 Dec 2019 11:38:19 +0000 Subject: [PATCH 18/20] Fix fakes3 configuration in ci yml --- services/filestore/docker-compose.ci.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/services/filestore/docker-compose.ci.yml b/services/filestore/docker-compose.ci.yml index d7b97ac011..c1929c03ca 100644 --- a/services/filestore/docker-compose.ci.yml +++ b/services/filestore/docker-compose.ci.yml @@ -57,4 +57,6 @@ services: fakes3: image: adobe/s3mock - command: 
--initialBuckets=fake_user_files,fake_template_files,fake_public_files + environment: + - initialBuckets=fake_user_files,fake_template_files,fake_public_files + From 237c4113cd0b4158b8445d19d4d0d1b4a9fa07af Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Tue, 10 Dec 2019 16:11:44 +0000 Subject: [PATCH 19/20] Ensure fakes3 is healthy before running tests --- services/filestore/docker-compose.ci.yml | 13 +++++++++---- services/filestore/docker-compose.yml | 13 +++++++++---- 2 files changed, 18 insertions(+), 8 deletions(-) diff --git a/services/filestore/docker-compose.ci.yml b/services/filestore/docker-compose.ci.yml index c1929c03ca..42c6ae37b5 100644 --- a/services/filestore/docker-compose.ci.yml +++ b/services/filestore/docker-compose.ci.yml @@ -3,7 +3,7 @@ # https://github.com/sharelatex/sharelatex-dev-environment # Version: 1.1.24 -version: "2" +version: "2.1" services: test_unit: @@ -33,9 +33,12 @@ services: AWS_S3_PUBLIC_FILES_BUCKET_NAME: fake_public_files AWS_S3_ENDPOINT: http://fakes3:9090 depends_on: - - mongo - - redis - - fakes3 + mongo: + condition: service_healthy + redis: + condition: service_healthy + fakes3: + condition: service_healthy user: node command: npm run test:acceptance:_run @@ -59,4 +62,6 @@ services: image: adobe/s3mock environment: - initialBuckets=fake_user_files,fake_template_files,fake_public_files + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:9090"] diff --git a/services/filestore/docker-compose.yml b/services/filestore/docker-compose.yml index c2c2ed565d..65f18f4d78 100644 --- a/services/filestore/docker-compose.yml +++ b/services/filestore/docker-compose.yml @@ -3,7 +3,7 @@ # https://github.com/sharelatex/sharelatex-dev-environment # Version: 1.1.24 -version: "2" +version: "2.1" services: test_unit: @@ -40,9 +40,12 @@ services: AWS_S3_ENDPOINT: http://fakes3:9090 user: node depends_on: - - mongo - - redis - - fakes3 + mongo: + condition: service_healthy + redis: + condition: service_healthy + fakes3: + condition: service_healthy command: npm run test:acceptance @@ -65,5 +68,7 @@ services: image: adobe/s3mock environment: - initialBuckets=fake_user_files,fake_template_files,fake_public_files + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:9090"] From 56b38af678c8061eaddc88a5b211c4a5c57537b3 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Tue, 10 Dec 2019 17:43:34 +0000 Subject: [PATCH 20/20] Explicitly wait for S3 in acceptance tests --- .../acceptance/coffee/FilestoreApp.coffee | 23 ++++++++++++++++++- .../acceptance/coffee/SendingFileTest.coffee | 4 +++- 2 files changed, 25 insertions(+), 2 deletions(-) diff --git a/services/filestore/test/acceptance/coffee/FilestoreApp.coffee b/services/filestore/test/acceptance/coffee/FilestoreApp.coffee index 818e90ec6f..1b4cc38834 100644 --- a/services/filestore/test/acceptance/coffee/FilestoreApp.coffee +++ b/services/filestore/test/acceptance/coffee/FilestoreApp.coffee @@ -2,6 +2,9 @@ app = require('../../../app') require("logger-sharelatex").logger.level("info") logger = require("logger-sharelatex") Settings = require("settings-sharelatex") +request = require('request') + +S3_TRIES = 30 module.exports = running: false @@ -21,4 +24,22 @@ module.exports = logger.log("filestore running in dev mode") for callback in @callbacks - callback() \ No newline at end of file + callback() + + waitForS3: (callback, tries) -> + return callback() unless Settings.filestore.s3?.endpoint + tries = 1 unless tries + + request.get "#{Settings.filestore.s3.endpoint}/", (err, response) => + 
console.log(err, response?.statusCode, tries) + if !err && [200, 404].includes(response?.statusCode) + return callback() + + if tries == S3_TRIES + return callback('timed out waiting for S3') + + setTimeout( + () => + @waitForS3 callback, tries + 1 + 1000 + ) diff --git a/services/filestore/test/acceptance/coffee/SendingFileTest.coffee b/services/filestore/test/acceptance/coffee/SendingFileTest.coffee index cd1fa167f5..4e9443fd88 100644 --- a/services/filestore/test/acceptance/coffee/SendingFileTest.coffee +++ b/services/filestore/test/acceptance/coffee/SendingFileTest.coffee @@ -29,8 +29,10 @@ describe "Filestore", -> "there are 3 lines in all" ].join("\n") - fs.writeFile(@localFileReadPath, @constantFileContent, done) @filestoreUrl = "http://localhost:#{settings.internal.filestore.port}" + fs.writeFile @localFileReadPath, @constantFileContent, (err) -> + return done(err) if err + FilestoreApp.waitForS3(done) beforeEach (done)-> FilestoreApp.ensureRunning =>