CLOUD-869 Enhance Jenkinsfile to skip builds for specified non-trigger files #1752

Open · wants to merge 3 commits into main
10 changes: 10 additions & 0 deletions .e2eignore
@@ -0,0 +1,10 @@
docs/**
code-of-conduct.md
CONTRIBUTING.md
README.md
.gitattributes
.gitignore
LICENSE
operator.png
kubernetes.svg
.e2eignore
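
The ignore patterns above are consumed by the new checkE2EIgnoreFiles() step in the Jenkinsfile diff below: each entry is turned into a regular expression by plain string replacement, and the e2e stages run unless every changed file matches at least one pattern. A minimal standalone Groovy sketch of that matching step, using hypothetical file names that are not part of this PR:

// Same conversion as checkE2EIgnoreFiles(): '**' -> '.*' first, then every remaining '*' -> '[^/]*'.
// Note the '*' produced by the first replacement is rewritten again, so 'docs/**' becomes 'docs/.[^/]*',
// which only matches files directly under docs/, not in nested subdirectories.
def excludedFiles = ['docs/**', 'README.md', '.gitignore']   // subset of .e2eignore
def changedFiles  = ['docs/operator.md', 'README.md']        // hypothetical `git diff --name-only` output

def excludedFilesRegex = excludedFiles.collect { it.replace('**', '.*').replace('*', '[^/]*') }
assert excludedFilesRegex[0] == 'docs/.[^/]*'

// Tests run unless every changed file is covered by an ignore pattern.
def needToRunTests = !changedFiles.every { changed ->
    excludedFilesRegex.any { regex -> changed ==~ regex }
}
assert !needToRunTests   // both changed files are ignorable, so the e2e run would be skipped
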
173 changes: 123 additions & 50 deletions Jenkinsfile
@@ -5,14 +5,15 @@ tests=[]
void createCluster(String CLUSTER_SUFFIX) {
withCredentials([string(credentialsId: 'GCP_PROJECT_ID', variable: 'GCP_PROJECT'), file(credentialsId: 'gcloud-key-file', variable: 'CLIENT_SECRET_FILE')]) {
sh """
NODES_NUM=3
export KUBECONFIG=/tmp/$CLUSTER_NAME-${CLUSTER_SUFFIX}
ret_num=0
while [ \${ret_num} -lt 15 ]; do
ret_val=0
gcloud auth activate-service-account --key-file $CLIENT_SECRET_FILE
gcloud config set project $GCP_PROJECT
gcloud container clusters list --filter $CLUSTER_NAME-${CLUSTER_SUFFIX} --zone $region --format='csv[no-heading](name)' | xargs gcloud container clusters delete --zone $region --quiet || true
gcloud container clusters create --zone $region $CLUSTER_NAME-${CLUSTER_SUFFIX} --cluster-version=1.28 --machine-type=n1-standard-4 --preemptible --num-nodes=3 --network=jenkins-vpc --subnetwork=jenkins-${CLUSTER_SUFFIX} --no-enable-autoupgrade --cluster-ipv4-cidr=/21 --labels delete-cluster-after-hours=6 --enable-ip-alias --workload-pool=cloud-dev-112233.svc.id.goog && \
gcloud container clusters create --zone $region $CLUSTER_NAME-${CLUSTER_SUFFIX} --cluster-version=1.28 --machine-type=n1-standard-4 --preemptible --disk-size 30 --num-nodes=\$NODES_NUM --network=jenkins-vpc --subnetwork=jenkins-${CLUSTER_SUFFIX} --no-enable-autoupgrade --cluster-ipv4-cidr=/21 --labels delete-cluster-after-hours=6 --enable-ip-alias --workload-pool=cloud-dev-112233.svc.id.goog && \
kubectl create clusterrolebinding cluster-admin-binding --clusterrole cluster-admin --user jenkins@"$GCP_PROJECT".iam.gserviceaccount.com || ret_val=\$?
if [ \${ret_val} -eq 0 ]; then break; fi
ret_num=\$((ret_num + 1))
@@ -71,19 +72,6 @@ void deleteOldClusters(String FILTER) {
}
}

void pushArtifactFile(String FILE_NAME) {
echo "Push $FILE_NAME file to S3!"

withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AMI/OVF', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
sh """
touch ${FILE_NAME}
S3_PATH=s3://percona-jenkins-artifactory/\$JOB_NAME/\$(git rev-parse --short HEAD)
aws s3 ls \$S3_PATH/${FILE_NAME} || :
aws s3 cp --quiet ${FILE_NAME} \$S3_PATH/${FILE_NAME} || :
"""
}
}

void pushLogFile(String FILE_NAME) {
def LOG_FILE_PATH="e2e-tests/logs/${FILE_NAME}.log"
def LOG_FILE_NAME="${FILE_NAME}.log"
@@ -97,13 +85,15 @@ void pushLogFile(String FILE_NAME) {
}
}

void popArtifactFile(String FILE_NAME) {
echo "Try to get $FILE_NAME file from S3!"
void pushArtifactFile(String FILE_NAME) {
echo "Push $FILE_NAME file to S3!"

withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AMI/OVF', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
sh """
touch ${FILE_NAME}
S3_PATH=s3://percona-jenkins-artifactory/\$JOB_NAME/\$(git rev-parse --short HEAD)
aws s3 cp --quiet \$S3_PATH/${FILE_NAME} ${FILE_NAME} || :
aws s3 ls \$S3_PATH/${FILE_NAME} || :
aws s3 cp --quiet ${FILE_NAME} \$S3_PATH/${FILE_NAME} || :
"""
}
}
@@ -140,6 +130,25 @@ void markPassedTests() {
}
}

void printKubernetesStatus(String LOCATION, String CLUSTER_SUFFIX) {
sh """
export KUBECONFIG=/tmp/$CLUSTER_NAME-$CLUSTER_SUFFIX
echo "========== KUBERNETES STATUS $LOCATION TEST =========="
gcloud container clusters list|grep -E "NAME|$CLUSTER_NAME-$CLUSTER_SUFFIX "
echo
kubectl get nodes
echo
kubectl top nodes
echo
kubectl get pods --all-namespaces
echo
kubectl top pod --all-namespaces
echo
kubectl get events --field-selector type!=Normal --all-namespaces
echo "======================================================"
"""
}

TestsReport = '| Test name | Status |\r\n| ------------- | ------------- |'
TestsReportXML = '<testsuite name=\\"PSMDB\\">\n'

@@ -161,6 +170,10 @@ void makeReport() {
}
TestsReport = TestsReport + "\r\n| We run $startedTestAmount out of $wholeTestAmount|"
TestsReportXML = TestsReportXML + '</testsuite>\n'

sh """
echo "${TestsReportXML}" > TestsReport.xml
"""
}

void clusterRunner(String cluster) {
@@ -202,7 +215,7 @@ void runTest(Integer TEST_ID) {
export DEBUG_TESTS=1
fi
export KUBECONFIG=/tmp/$CLUSTER_NAME-$clusterSuffix
./e2e-tests/$testName/run
time bash e2e-tests/$testName/run
"""
}

@@ -211,6 +224,7 @@ void runTest(Integer TEST_ID) {
return true
}
catch (exc) {
printKubernetesStatus("AFTER","$clusterSuffix")
if (retryCount >= 1 || currentBuild.nextBuild != null) {
currentBuild.result = 'FAILURE'
return true
@@ -228,9 +242,62 @@
}
}

def skipBranchBuilds = true
needToRunTests = true
void checkE2EIgnoreFiles() {
def e2eignoreFile = ".e2eignore"
if (fileExists(e2eignoreFile)) {
def excludedFiles = readFile(e2eignoreFile).split('\n').collect{it.trim()}
def lastProcessedCommitFile="last-processed-commit.txt"
def lastProcessedCommitHash = ""

def build = currentBuild.previousBuild
while (build != null) {
if (build.result == 'SUCCESS') {
try {
echo "Found a previous successful build: $build.number"
copyArtifacts(projectName: env.JOB_NAME, selector: specific("$build.number"), filter: "$lastProcessedCommitFile")
lastProcessedCommitHash = readFile("$lastProcessedCommitFile").trim()
echo "lastProcessedCommitHash: $lastProcessedCommitHash"
break
} catch (Exception e) {
echo "No $lastProcessedCommitFile found in build $build.number. Checking earlier builds."
}
} else {
echo "Build $build.number was not successful. Checking earlier builds."
}
build = build.previousBuild
}

if (lastProcessedCommitHash == "") {
echo "This is the first run. Using merge base as the starting point for the diff."
changedFiles = sh(script: "git diff --name-only \$(git merge-base HEAD origin/$CHANGE_TARGET)", returnStdout: true).trim().split('\n').findAll{it}
} else {
echo "Processing changes since last processed commit: $lastProcessedCommitHash"
changedFiles = sh(script: "git diff --name-only $lastProcessedCommitHash HEAD", returnStdout: true).trim().split('\n').findAll{it}
}

echo "Excluded files: $excludedFiles"
echo "Changed files: $changedFiles"

def excludedFilesRegex = excludedFiles.collect{it.replace("**", ".*").replace("*", "[^/]*")}
needToRunTests = !changedFiles.every{changed -> excludedFilesRegex.any{regex -> changed ==~ regex}}

if (needToRunTests) {
echo "Some changed files are outside of the e2eignore list. Proceeding with execution."
} else {
echo "All changed files are e2eignore files. Aborting pipeline execution."
}

sh """
echo \$(git rev-parse HEAD) > $lastProcessedCommitFile
"""
archiveArtifacts "$lastProcessedCommitFile"
}
}

def isPRJob = false
if (env.CHANGE_URL) {
skipBranchBuilds = false
isPRJob = true
}

pipeline {
@@ -249,12 +316,23 @@
}
options {
disableConcurrentBuilds(abortPrevious: true)
copyArtifactPermission("$JOB_NAME/PR-*")
}
stages {
stage('Check Ignore Files') {
when {
expression {
isPRJob
}
}
steps {
checkE2EIgnoreFiles()
}
}
stage('Prepare') {
when {
expression {
!skipBranchBuilds
isPRJob && needToRunTests
}
}
steps {
@@ -305,7 +383,7 @@ EOF
stage('Build docker image') {
when {
expression {
!skipBranchBuilds
isPRJob && needToRunTests
}
}
steps {
@@ -334,7 +412,7 @@ EOF
stage('GoLicenseDetector test') {
when {
expression {
!skipBranchBuilds
isPRJob && needToRunTests
}
}
steps {
@@ -362,7 +440,7 @@ EOF
stage('GoLicense test') {
when {
expression {
!skipBranchBuilds
isPRJob && needToRunTests
}
}
steps {
@@ -387,7 +465,6 @@ EOF
| sort \
| uniq \
> golicense-new || true

diff -u e2e-tests/license/compare/golicense golicense-new
"""
}
@@ -396,7 +473,7 @@ EOF
stage('Run tests for operator') {
when {
expression {
!skipBranchBuilds
isPRJob && needToRunTests
}
}
options {
@@ -406,7 +483,7 @@ EOF
stage('cluster1') {
steps {
clusterRunner('cluster1')
}
}
}
stage('cluster2') {
steps {
@@ -468,36 +545,32 @@ EOF
catch (exc) {
slackSend channel: '#cloud-dev-ci', color: '#FF0000', message: "[${JOB_NAME}]: build ${currentBuild.result}, ${BUILD_URL} owner: @${AUTHOR_NAME}"
}

}

if (env.CHANGE_URL && currentBuild.nextBuild == null) {
for (comment in pullRequest.comments) {
println("Author: ${comment.user}, Comment: ${comment.body}")
if (comment.user.equals('JNKPercona')) {
println("delete comment")
comment.delete()
if (needToRunTests) {
if (isPRJob && currentBuild.nextBuild == null) {
for (comment in pullRequest.comments) {
println("Author: ${comment.user}, Comment: ${comment.body}")
if (comment.user.equals('JNKPercona')) {
println("delete comment")
comment.delete()
}
}
makeReport()
step([$class: 'JUnitResultArchiver', testResults: '*.xml', healthScaleFactor: 1.0])
archiveArtifacts '*.xml'

unstash 'IMAGE'
def IMAGE = sh(returnStdout: true, script: "cat results/docker/TAG").trim()
TestsReport = TestsReport + "\r\n\r\ncommit: ${env.CHANGE_URL}/commits/${env.GIT_COMMIT}\r\nimage: `${IMAGE}`\r\n"
pullRequest.comment(TestsReport)
}
makeReport()
deleteOldClusters("$CLUSTER_NAME")
sh """
echo "${TestsReportXML}" > TestsReport.xml
sudo docker system prune --volumes -af
"""
step([$class: 'JUnitResultArchiver', testResults: '*.xml', healthScaleFactor: 1.0])
archiveArtifacts '*.xml'

unstash 'IMAGE'
def IMAGE = sh(returnStdout: true, script: "cat results/docker/TAG").trim()
TestsReport = TestsReport + "\r\n\r\ncommit: ${env.CHANGE_URL}/commits/${env.GIT_COMMIT}\r\nimage: `${IMAGE}`\r\n"
pullRequest.comment(TestsReport)
}
deleteDir()
}
deleteOldClusters("$CLUSTER_NAME")
sh """
sudo docker system prune --volumes -af
sudo rm -rf *
"""
deleteDir()
}
}
}
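
The same skip decision can be previewed locally before pushing. The script below is a hypothetical helper, not part of this PR; it assumes it is run from the repository root with origin/main as the merge target, and it approximates the pipeline's merge-base diff with git's three-dot range:

// Hypothetical local dry run of the .e2eignore skip logic (assumption: origin/main is the merge target).
def excludedFiles = new File('.e2eignore').readLines().collect { it.trim() }.findAll { it }
def diffOutput    = ['git', 'diff', '--name-only', 'origin/main...HEAD'].execute().text
def changedFiles  = diffOutput.split('\n').collect { it.trim() }.findAll { it }

def excludedFilesRegex = excludedFiles.collect { it.replace('**', '.*').replace('*', '[^/]*') }
def needToRunTests = !changedFiles.every { changed -> excludedFilesRegex.any { regex -> changed ==~ regex } }
println(needToRunTests ? 'e2e tests would run' : 'all changed files are ignored, so e2e tests would be skipped')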