[misc] add support for offline s3 tests

update the build scripts to 1.3.4
Jakob Ackermann 2020-01-24 18:31:56 +01:00
parent 265e30df3d
commit b0b96ee014
11 changed files with 69 additions and 43 deletions

Dockerfile

@@ -1,7 +1,7 @@
 # This file was auto-generated, do not edit it directly.
 # Instead run bin/update_build_scripts from
 # https://github.com/sharelatex/sharelatex-dev-environment
-# Version: 1.3.3
+# Version: 1.3.4
 FROM node:6.11.2 as app

Jenkinsfile

@@ -45,9 +45,7 @@ pipeline {
     stage('Acceptance Tests') {
       steps {
-        withCredentials([usernamePassword(credentialsId: 'S3_DOCSTORE_TEST_AWS_KEYS', passwordVariable: 'AWS_SECRET_ACCESS_KEY', usernameVariable: 'AWS_ACCESS_KEY_ID')]) {
-          sh 'AWS_BUCKET="sl-acceptance-tests" AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_acceptance'
-        }
+        sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_acceptance'
       }
     }

Makefile

@@ -1,7 +1,7 @@
 # This file was auto-generated, do not edit it directly.
 # Instead run bin/update_build_scripts from
 # https://github.com/sharelatex/sharelatex-dev-environment
-# Version: 1.3.3
+# Version: 1.3.4
 BUILD_NUMBER ?= local
 BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD)
@@ -11,9 +11,6 @@ DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \
 	BRANCH_NAME=$(BRANCH_NAME) \
 	PROJECT_NAME=$(PROJECT_NAME) \
 	MOCHA_GREP=${MOCHA_GREP} \
-	AWS_BUCKET=${AWS_BUCKET} \
-	AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID} \
-	AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY} \
 	docker-compose ${DOCKER_COMPOSE_FLAGS}
 clean:

app/coffee/MongoAWS.coffee

@@ -14,6 +14,8 @@ createStream = (streamConstructor, project_id, doc_id, pack_id) ->
   AWS_CONFIG =
     accessKeyId: settings.trackchanges.s3.key
     secretAccessKey: settings.trackchanges.s3.secret
+    endpoint: settings.trackchanges.s3.endpoint
+    s3ForcePathStyle: settings.trackchanges.s3.pathStyle
   return streamConstructor new AWS.S3(AWS_CONFIG), {
     "Bucket": settings.trackchanges.stores.doc_history,

buildscript.txt

@@ -1,10 +1,10 @@
 track-changes
 --public-repo=True
 --language=coffeescript
---env-add=
+--env-add=AWS_BUCKET=bucket
 --node-version=6.11.2
---acceptance-creds=aws
+--acceptance-creds=None
---dependencies=mongo,redis
+--dependencies=mongo,redis,s3
 --docker-repos=gcr.io/overleaf-ops
 --env-pass-through=
---script-version=1.3.3
+--script-version=1.3.4

config/settings.defaults.coffee

@@ -38,6 +38,8 @@ module.exports =
     s3:
       key: process.env['AWS_ACCESS_KEY_ID']
       secret: process.env['AWS_SECRET_ACCESS_KEY']
+      endpoint: process.env['AWS_S3_ENDPOINT']
+      pathStyle: process.env['AWS_S3_PATH_STYLE'] == 'true'
     stores:
       doc_history: process.env['AWS_BUCKET']
   continueOnError: process.env['TRACK_CHANGES_CONTINUE_ON_ERROR'] or false
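
Note the explicit string comparison for pathStyle: environment variables are always strings, and any non-empty string (including "false") is truthy in JavaScript, so passing the raw value through as s3ForcePathStyle would effectively always enable path-style URLs. Comparing against 'true' yields a real boolean. A quick illustration (not from the commit):

    # env vars arrive as strings; only an exact 'true' should enable the flag
    console.log !!'false'            # true  -- why raw pass-through is wrong
    console.log 'false' == 'true'    # false -- the comparison used above
    console.log 'true' == 'true'     # true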

docker-compose.ci.yml

@@ -1,7 +1,7 @@
 # This file was auto-generated, do not edit it directly.
 # Instead run bin/update_build_scripts from
 # https://github.com/sharelatex/sharelatex-dev-environment
-# Version: 1.3.3
+# Version: 1.3.4
 version: "2.1"
@@ -22,16 +22,20 @@ services:
       REDIS_HOST: redis
       MONGO_HOST: mongo
       POSTGRES_HOST: postgres
-      AWS_ACCESS_KEY_ID: ${AWS_ACCESS_KEY_ID}
-      AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY}
-      AWS_BUCKET: ${AWS_BUCKET}
+      AWS_S3_ENDPOINT: http://s3:9090
+      AWS_S3_PATH_STYLE: 'true'
+      AWS_ACCESS_KEY_ID: fake
+      AWS_SECRET_ACCESS_KEY: fake
       MOCHA_GREP: ${MOCHA_GREP}
       NODE_ENV: test
+      AWS_BUCKET: bucket
     depends_on:
       mongo:
         condition: service_healthy
       redis:
         condition: service_healthy
+      s3:
+        condition: service_healthy
     user: node
     command: npm run test:acceptance:_run
@@ -48,3 +52,9 @@ services:
   mongo:
     image: mongo:3.6
+  s3:
+    image: adobe/s3mock
+    environment:
+      - initialBuckets=fake_user_files,fake_template_files,fake_public_files,bucket
+    healthcheck:
+      test: ["CMD", "curl", "-f", "http://localhost:9090"]

docker-compose.yml

@@ -1,7 +1,7 @@
 # This file was auto-generated, do not edit it directly.
 # Instead run bin/update_build_scripts from
 # https://github.com/sharelatex/sharelatex-dev-environment
-# Version: 1.3.3
+# Version: 1.3.4
 version: "2.1"
@@ -27,18 +27,22 @@ services:
       REDIS_HOST: redis
       MONGO_HOST: mongo
       POSTGRES_HOST: postgres
-      AWS_ACCESS_KEY_ID: ${AWS_ACCESS_KEY_ID}
-      AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY}
-      AWS_BUCKET: ${AWS_BUCKET}
+      AWS_S3_ENDPOINT: http://s3:9090
+      AWS_S3_PATH_STYLE: 'true'
+      AWS_ACCESS_KEY_ID: fake
+      AWS_SECRET_ACCESS_KEY: fake
       MOCHA_GREP: ${MOCHA_GREP}
       LOG_LEVEL: ERROR
       NODE_ENV: test
+      AWS_BUCKET: bucket
     user: node
     depends_on:
       mongo:
         condition: service_healthy
       redis:
         condition: service_healthy
+      s3:
+        condition: service_healthy
     command: npm run test:acceptance
   redis:
@@ -47,3 +51,9 @@ services:
   mongo:
     image: mongo:3.6
+  s3:
+    image: adobe/s3mock
+    environment:
+      - initialBuckets=fake_user_files,fake_template_files,fake_public_files,bucket
+    healthcheck:
+      test: ["CMD", "curl", "-f", "http://localhost:9090"]

package.json

@@ -9,7 +9,7 @@
   "scripts": {
     "compile:app": "([ -e app/coffee ] && coffee -m $COFFEE_OPTIONS -o app/js -c app/coffee || echo 'No CoffeeScript folder to compile') && ( [ -e app.coffee ] && coffee -m $COFFEE_OPTIONS -c app.coffee || echo 'No CoffeeScript app to compile')",
     "start": "npm run compile:app && node $NODE_APP_OPTIONS app.js",
-    "test:acceptance:_run": "AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY AWS_BUCKET=$AWS_BUCKET AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID mocha --recursive --reporter spec --timeout 30000 --exit $@ test/acceptance/js",
+    "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 30000 --exit $@ test/acceptance/js",
     "test:acceptance": "npm run compile:app && npm run compile:acceptance_tests && npm run test:acceptance:_run -- --grep=$MOCHA_GREP",
     "test:unit:_run": "mocha --recursive --reporter spec --exit $@ test/unit/js",
     "test:unit": "npm run compile:app && npm run compile:unit_tests && npm run test:unit:_run -- --grep=$MOCHA_GREP",

test/acceptance/coffee/ArchivingUpdatesTests.coffee

@@ -17,7 +17,7 @@ MockWebApi = require "./helpers/MockWebApi"
 describe "Archiving updates", ->
   before (done) ->
     if Settings?.trackchanges?.s3?.key.length < 1
-      message = "s3 keys not setup, this test setup will fail"
+      message = new Error("s3 keys not setup, this test setup will fail")
       return done(message)
     @now = Date.now()
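
This last change fixes the failure path rather than the setup itself: mocha expects done() to receive an Error (or nothing), and recent versions reject a bare string with a "done() invoked with non-Error" complaint; an Error instance also carries a stack trace into the reporter output. Sketch of the pattern (illustrative only; s3Configured stands in for the real settings check):

    before (done) ->
      # pass an Error instance, never a bare string, to fail a hook cleanly
      return done(new Error("s3 keys not setup")) if not s3Configured
      done()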

test/acceptance/coffee/helpers/TrackChangesClient.coffee

@@ -6,6 +6,15 @@ rclient = require("redis-sharelatex").createClient(Settings.redis.history) # Onl
 Keys = Settings.redis.history.key_schema
 {db, ObjectId} = require "../../../../app/js/mongojs"
+aws = require "aws-sdk"
+s3 = new aws.S3(
+  accessKeyId: Settings.trackchanges.s3.key
+  secretAccessKey: Settings.trackchanges.s3.secret
+  endpoint: Settings.trackchanges.s3.endpoint
+  s3ForcePathStyle: Settings.trackchanges.s3.pathStyle
+)
+S3_BUCKET = Settings.trackchanges.stores.doc_history

 module.exports = TrackChangesClient =
   flushAndGetCompressedUpdates: (project_id, doc_id, callback = (error, updates) ->) ->
     TrackChangesClient.flushDoc project_id, doc_id, (error) ->
@@ -91,32 +100,30 @@ module.exports = TrackChangesClient =
       response.statusCode.should.equal 204
       callback(error)

-  buildS3Options: (content, key)->
-    return {
-      aws:
-        key: Settings.trackchanges.s3.key
-        secret: Settings.trackchanges.s3.secret
-        bucket: Settings.trackchanges.stores.doc_history
-      timeout: 30 * 1000
-      json: content
-      uri:"https://#{Settings.trackchanges.stores.doc_history}.s3.amazonaws.com/#{key}"
-    }

   getS3Doc: (project_id, doc_id, pack_id, callback = (error, body) ->) ->
-    options = TrackChangesClient.buildS3Options(true, project_id+"/changes-"+doc_id+"/pack-"+pack_id)
-    options.encoding = null
-    request.get options, (err, res, body) ->
+    params =
+      Bucket: S3_BUCKET
+      Key: "#{project_id}/changes-#{doc_id}/pack-#{pack_id}"
+    s3.getObject params, (error, data) ->
       return callback(error) if error?
+      body = data.Body
       return callback(new Error("empty response from s3")) if not body?
       zlib.gunzip body, (err, result) ->
         return callback(err) if err?
         callback(null, JSON.parse(result.toString()))

   removeS3Doc: (project_id, doc_id, callback = (error, res, body) ->) ->
-    options = TrackChangesClient.buildS3Options(true, "?prefix=" + project_id + "/changes-" +doc_id)
-    request.get options, (error, res, body) ->
-      keys = body.match /[0-9a-f]{24}\/changes-[0-9a-f]{24}\/pack-[0-9a-f]{24}/g
-      async.eachSeries keys, (key, cb) ->
-        options = TrackChangesClient.buildS3Options(true, key)
-        request.del options, cb
-      , callback
+    params =
+      Bucket: S3_BUCKET
+      Prefix: "#{project_id}/changes-#{doc_id}"
+    s3.listObjects params, (error, data) ->
+      return callback(error) if error?
+      params =
+        Bucket: S3_BUCKET
+        Delete:
+          Objects: data.Contents.map((s3object) -> {Key: s3object.Key})
+      s3.deleteObjects params, callback
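
Two caveats on the reworked removeS3Doc, both harmless at test scale but worth knowing: listObjects returns at most 1000 keys per call, and deleteObjects rejects an empty Objects list, so a doc with zero packs would make the cleanup fail rather than no-op. A usage sketch of the helper (hypothetical ids):

    TrackChangesClient.removeS3Doc project_id, doc_id, (error) ->
      throw error if error?
      console.log "removed all packs for doc #{doc_id} from the mock bucket"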