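// Schedule a daily build on master only; other branches build when SCM polling detects changes.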
String cron_string = BRANCH_NAME == "master" ? "@daily" : ""
pipeline {
  agent any
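
  // Values used to report build status for each commit back to GitHub.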
  environment {
    GIT_PROJECT = "spelling"
    JENKINS_WORKFLOW = "spelling-sharelatex"
    TARGET_URL = "${env.JENKINS_URL}blue/organizations/jenkins/${JENKINS_WORKFLOW}/detail/$BRANCH_NAME/$BUILD_NUMBER/pipeline"
    GIT_API_URL = "https://api.github.com/repos/overleaf/${GIT_PROJECT}/statuses/$GIT_COMMIT"
  }
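
  // Poll the repository every minute for new commits; cron_string adds the scheduled daily build on master.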
  triggers {
    pollSCM('* * * * *')
    cron(cron_string)
  }

  stages {
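    // Report a "pending" commit status to GitHub before the build proper starts.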
    stage('Install') {
      steps {
        withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) {
          sh "curl $GIT_API_URL \
            --data '{ \
            \"state\" : \"pending\", \
            \"target_url\": \"$TARGET_URL\", \
            \"description\": \"Your build is underway\", \
            \"context\": \"ci/jenkins\" }' \
            -u $GH_AUTH_USERNAME:$GH_AUTH_PASSWORD"
        }
      }
    }
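
    // The actual build steps are delegated to the project Makefile.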
    stage('Build') {
      steps {
        sh 'make build'
      }
    }

    // Should be enabled once the Node version is updated to >= 8
    // stage('Linting') {
    //   steps {
    //     sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make format'
    //     sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make lint'
    //   }
    // }
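
    // Test targets run through docker-compose, using the CI override file.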
    stage('Unit Tests') {
      steps {
        sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_unit'
      }
    }

    stage('Acceptance Tests') {
      steps {
        sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_acceptance'
      }
    }
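
    // Package the build output as a tarball and push the Docker image to the gcr.io/overleaf-ops registry.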
    stage('Package and docker push') {
      steps {
        sh 'echo ${BUILD_NUMBER} > build_number.txt'
        sh 'touch build.tar.gz' // Avoid tar warning about files changing during read
        sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make tar'

        withCredentials([file(credentialsId: 'gcr.io_overleaf-ops', variable: 'DOCKER_REPO_KEY_PATH')]) {
          sh 'docker login -u _json_key --password-stdin https://gcr.io/overleaf-ops < ${DOCKER_REPO_KEY_PATH}'
        }
        sh 'DOCKER_REPO=gcr.io/overleaf-ops make publish'
        sh 'docker logout https://gcr.io/overleaf-ops'
      }
    }
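
    // Upload the tarball and a "latest" build-number marker to S3 for the deployment process to pick up.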
    stage('Publish to s3') {
      steps {
        sh 'echo ${BRANCH_NAME}-${BUILD_NUMBER} > build_number.txt'
        withAWS(credentials:'S3_CI_BUILDS_AWS_KEYS', region:"${S3_REGION_BUILD_ARTEFACTS}") {
          s3Upload(file:'build.tar.gz', bucket:"${S3_BUCKET_BUILD_ARTEFACTS}", path:"${JOB_NAME}/${BUILD_NUMBER}.tar.gz")
        }
        withAWS(credentials:'S3_CI_BUILDS_AWS_KEYS', region:"${S3_REGION_BUILD_ARTEFACTS}") {
          // The deployment process uses this file to figure out the latest build
          s3Upload(file:'build_number.txt', bucket:"${S3_BUCKET_BUILD_ARTEFACTS}", path:"${JOB_NAME}/latest")
        }
      }
    }
  }
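
  // Post-build actions: always clean up, report the final commit status to GitHub, and send an email alert on failure.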
  post {
    always {
      sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_clean'
      sh 'make clean'
    }

    success {
      withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) {
        sh "curl $GIT_API_URL \
          --data '{ \
          \"state\" : \"success\", \
          \"target_url\": \"$TARGET_URL\", \
          \"description\": \"Your build succeeded!\", \
          \"context\": \"ci/jenkins\" }' \
          -u $GH_AUTH_USERNAME:$GH_AUTH_PASSWORD"
      }
    }

    failure {
      mail(from: "${EMAIL_ALERT_FROM}",
           to: "${EMAIL_ALERT_TO}",
           subject: "Jenkins build failed: ${JOB_NAME}:${BUILD_NUMBER}",
           body: "Build: ${BUILD_URL}")
      withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) {
        sh "curl $GIT_API_URL \
          --data '{ \
          \"state\" : \"failure\", \
          \"target_url\": \"$TARGET_URL\", \
          \"description\": \"Your build failed\", \
          \"context\": \"ci/jenkins\" }' \
          -u $GH_AUTH_USERNAME:$GH_AUTH_PASSWORD"
      }
    }
  }

  // The options directive is for configuration that applies to the whole job.
  options {
    // we'd like to make sure we remove old builds, so we don't fill up our storage!
    buildDiscarder(logRotator(numToKeepStr:'50'))

    // And we'd really like to be sure that this build doesn't hang forever, so let's time it out after:
    timeout(time: 30, unit: 'MINUTES')
  }
}