// CI pipeline: build, test, publish Docker images to GCR, and record the
// latest build number in S3 for the deployment process to pick up.

// Scheduled-build spec: run a daily build on master; an empty string means
// no scheduled builds for other branches (they build on SCM changes only).
String cron_string = BRANCH_NAME == "master" ? "@daily" : ""

pipeline {
    agent any

    triggers {
        // Poll the SCM every minute for new commits.
        pollSCM('* * * * *')
        // Additional scheduled build: "@daily" on master, disabled elsewhere.
        cron(cron_string)
    }

    stages {
        stage('Build') {
            steps {
                sh 'make build'
            }
        }

        stage('Unit Tests') {
            steps {
                sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_unit'
            }
        }

        stage('Acceptance Tests') {
            steps {
                sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_acceptance'
            }
        }

        stage('Package and publish build') {
            steps {
                // Authenticate to the private GCR registry with a JSON
                // service-account key held as a Jenkins file credential.
                withCredentials([file(credentialsId: 'gcr.io_csh-gcdm-test', variable: 'DOCKER_REPO_KEY_PATH')]) {
                    sh 'docker login -u _json_key --password-stdin https://gcr.io/csh-gcdm-test < ${DOCKER_REPO_KEY_PATH}'
                }
                sh 'DOCKER_REPO=gcr.io/csh-gcdm-test make publish'
                // Drop the registry session so credentials don't linger on the agent.
                sh 'docker logout https://gcr.io/csh-gcdm-test'
            }
        }

        stage('Publish build number') {
            steps {
                sh 'echo ${BRANCH_NAME}-${BUILD_NUMBER} > build_number.txt'
                withAWS(credentials:'S3_CI_BUILDS_AWS_KEYS', region:"${S3_REGION_BUILD_ARTEFACTS}") {
                    // The deployment process uses this file to figure out the latest build
                    s3Upload(file:'build_number.txt', bucket:"${S3_BUCKET_BUILD_ARTEFACTS}", path:"${JOB_NAME}/latest")
                }
            }
        }
    }

    post {
        // Clean up compose containers and build artefacts regardless of outcome.
        always {
            sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_clean'
            sh 'make clean'
        }

        // Email an alert on build failure.
        failure {
            mail(from: "${EMAIL_ALERT_FROM}",
                 to: "${EMAIL_ALERT_TO}",
                 subject: "Jenkins build failed: ${JOB_NAME}:${BUILD_NUMBER}",
                 body: "Build: ${BUILD_URL}")
        }
    }

    // The options directive is for configuration that applies to the whole job.
    options {
        // we'd like to make sure remove old builds, so we don't fill up our storage!
        buildDiscarder(logRotator(numToKeepStr:'50'))

        // And we'd really like to be sure that this build doesn't hang forever, so let's time it out after:
        timeout(time: 30, unit: 'MINUTES')
    }
}