forked from ARM-software/SCP-firmware
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Jenkinsfile
169 lines (149 loc) · 6.95 KB
/
Jenkinsfile
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
/*
 * Expand a map of matrix axes into the full cartesian product of
 * configurations.
 *
 * Input:  [axisName: [optionA, optionB, ...], ...]
 * Output: a list of maps, one per combination, e.g.
 *         [[axisName: optionA, other: x], [axisName: optionB, other: x], ...]
 *
 * Marked @NonCPS because Groovy GDK collection methods like
 * `combinations()` are not CPS-transformable by the Jenkins pipeline
 * interpreter.
 */
@NonCPS
List generateMatrix(Map matrixAxes) {
    /* One list per axis, each holding single-entry [axis: option] maps. */
    List perAxisOptions = matrixAxes.collect { axisName, options ->
        options.collect { option -> [(axisName): option] }
    }

    /*
     * Cross-product the per-axis lists, then merge each combination's
     * single-entry maps into one configuration map.
     */
    perAxisOptions.combinations()*.sum()
}
/*
 * Declarative pipeline: builds the CI container image, then runs the
 * legacy test suite and a generated build-and-test matrix in parallel.
 */
pipeline {
    agent {
        label 'docker'
    }

    stages {
        stage('Build container') {
            steps {
                /*
                 * Using `--target` with the Dockerfile agent currently bugs out
                 * on Jenkins, producing an error along the lines of:
                 *
                 *     Cannot retrieve .Id from 'docker inspect ...'
                 *
                 * Apparently it doesn't like multi-stage builds, so until
                 * we're on a version of the Docker workflow plugin that
                 * doesn't exhibit this bug, we just have to build the image
                 * ourselves.
                 *
                 * See:
                 * - https://github.com/jenkinsci/docker-workflow-plugin/pull/149
                 * - https://github.com/jenkinsci/docker-workflow-plugin/pull/162
                 * - https://github.com/jenkinsci/docker-workflow-plugin/pull/180
                 *
                 * Once the Docker workflow plugin has been updated, we should
                 * be able to get rid of this stage and use:
                 *
                 *     agent {
                 *         dockerfile {
                 *             dir 'docker'
                 *             filename 'Dockerfile'
                 *             additionalBuildArgs '--target ci'
                 *         }
                 *     }
                 */

                /*
                 * Tag the image with the build number so parallel builds on
                 * the same node do not trample each other's images. The
                 * UID/GID build args let the container user match the
                 * Jenkins host user for shared-workspace file ownership.
                 */
                sh """ \
                    docker build -t scp-firmware:build-${currentBuild.number} \
                        --build-arg JENKINS_UID=1000 \
                        --build-arg JENKINS_GID=36293 \
                        --target=jenkins docker
                """
            }
        }

        stage('Run tests') {
            parallel {
                stage('Run legacy tests') {
                    agent {
                        docker {
                            image "scp-firmware:build-${currentBuild.number}"
                            args '-e ARMLMD_LICENSE_FILE'
                        }
                    }

                    steps {
                        sh '/usr/local/bin/init'
                        sh 'python3 ./tools/ci.py'
                    }
                }

                stage('Build and test') {
                    /*
                     * We are on an old enough version of the Jenkins pipeline
                     * workflow plugin that we do not have support for matrices.
                     *
                     * Without this support, we need to generate the stages as
                     * part of a scripted stage.
                     *
                     * See:
                     * - https://www.jenkins.io/blog/2019/11/22/welcome-to-the-matrix/
                     * - https://www.jenkins.io/blog/2019/12/02/matrix-building-with-scripted-pipeline/
                     * - https://stackoverflow.com/questions/60829465/jenkins-unknown-stage-section-matrix-in-declarative-pipeline
                     *
                     * Once the plugin has been updated, we can adopt a proper
                     * matrix.
                     */

                    steps {
                        script {
                            /*
                             * This is, admittedly, not particularly clean, but
                             * should require very little adjustment. The
                             * premise is relatively simple: generate a list of
                             * all the possible matrix combinations, create a
                             * a list of closures returning a pipeline stage for
                             * each combination, then execute them in parallel.
                             */

                            /* 2 generators x 4 build types = 8 configurations. */
                            def axes = [
                                generator: [ 'Ninja', 'Unix Makefiles' ],
                                buildType: [ 'Debug', 'Release', 'MinSizeRel',
                                             'RelWithDebInfo' ]
                            ]

                            /*
                             * Map each configuration to a named closure; the
                             * `parallel` step below runs them concurrently,
                             * each on its own 'docker' node inside the CI
                             * image built above.
                             */
                            def tasks = generateMatrix(axes).collectEntries { config ->
                                ["Build and test (${config})", {
                                    node('docker') {
                                        docker
                                            .image("scp-firmware:build-${currentBuild.number}")
                                            .inside('-e ARMLMD_LICENSE_FILE')
                                        {
                                            /*
                                             * If you need to adjust the
                                             * behaviour of the generated stages
                                             * then this is probably where you
                                             * want to do it.
                                             */

                                            stage("Build ${config}") {
                                                checkout scm

                                                /*
                                                 * Unfortunately, we don't have
                                                 * the CMake build plugin
                                                 * available to us, so we'll
                                                 * have to make do with shell
                                                 * scripts for now.
                                                 */
                                                sh '/usr/local/bin/init'
                                                sh """ \
                                                    cmake \
                                                        -G "${config.generator}" \
                                                        -DCMAKE_BUILD_TYPE="${config.buildType}" \
                                                        .
                                                """
                                                sh 'cmake --build .'
                                            }

                                            stage("Check ${config}") {
                                                /*
                                                 * FIX: the `cmake` line was
                                                 * missing its trailing `\`
                                                 * continuation, so the shell
                                                 * ran `cmake` (usage error)
                                                 * and `--build . --target
                                                 * check` as two separate
                                                 * commands, and the check
                                                 * target never actually ran.
                                                 */
                                                sh """ \
                                                    cmake \
                                                        --build . \
                                                        --target check
                                                """
                                            }
                                        }
                                    }
                                }]
                            }

                            parallel tasks
                        }
                    }
                }
            }
        }
    }
}