forked from tiiuae/ghaf-jenkins-pipeline
-
Notifications
You must be signed in to change notification settings - Fork 0
/
utils.groovy
433 lines (401 loc) · 16 KB
/
utils.groovy
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
#!/usr/bin/env groovy
// SPDX-FileCopyrightText: 2022-2024 TII (SSRC) and the Ghaf contributors
// SPDX-License-Identifier: Apache-2.0
import groovy.json.JsonOutput
////////////////////////////////////////////////////////////////////////////////
// Trim a flakeref into a form that is safe to use in artifact storage URLs.
//
// Examples:
//   .#packages.x86_64-linux.doc  ==> packages.x86_64-linux.doc
//   .#hydraJobs.doc.x86_64-linux ==> hydraJobs.doc.x86_64-linux
//   .#doc                        ==> doc
//   github:tiiuae/ghaf#doc       ==> doc
//   doc                          ==> doc
def flakeref_trim(String flakeref) {
  // 'def' keeps the temporary out of the shared script binding: this helper
  // is invoked from parallel stages, and binding variables are shared state.
  // Drop everything up to and including the last '#':
  def trimmed = flakeref.replaceAll(/^.*#/, '')
  // Drop leading whitespace and dots:
  trimmed = trimmed.replaceAll(/^\s*\.*/, '')
  // Replace any remaining non-whitelisted characters with '_':
  return trimmed.replaceAll(/[^a-zA-Z0-9-_.]/, '_')
}
// Invoke rclone with the given command-line options.
// The jenkins-artifacts WebDAV remote is configured via RCLONE_WEBDAV_*
// environment variables, pointing rclone at a local unix socket proxy.
def run_rclone(String opts) {
  def script = """
    export RCLONE_WEBDAV_UNIX_SOCKET_PATH=/run/rclone-jenkins-artifacts.sock
    export RCLONE_WEBDAV_URL=http://localhost
    rclone ${opts}
  """
  sh script
}
// Copy build outputs from ${subdir}/${target} to the remote artifact
// storage under env.ARTIFACTS_REMOTE_PATH, and add an 'Artifacts' link to
// the build description (only once per build).
// Silently skips (with a warning) if 'subdir' or ARTIFACTS_REMOTE_PATH is unset.
def archive_artifacts(String subdir, String target="") {
  if (!subdir) {
    println "Warning: skipping archive, subdir not set"
    return
  }
  // Archive artifacts to env.ARTIFACTS_REMOTE_PATH
  if (!env.ARTIFACTS_REMOTE_PATH) {
    println "Warning: skipping archive, ARTIFACTS_REMOTE_PATH not set"
    return
  }
  run_rclone("copy -L ${subdir}/${target} :webdav:/${env.ARTIFACTS_REMOTE_PATH}/${target}")
  // Add a link to Artifacts on the build description if it isn't added yet.
  // 'def' keeps these locals out of the shared script binding: this function
  // runs concurrently from parallel stages (see create_parallel_stages).
  def href = "/artifacts/${env.ARTIFACTS_REMOTE_PATH}/"
  def artifacts_anchor = "<a href=\"${href}\">📦 Artifacts</a>"
  if (!currentBuild.description) {
    // Set the description if it wasn't set earlier
    currentBuild.description = "${artifacts_anchor}"
  } else if (!currentBuild.description.contains(" Artifacts</a>")) {
    // If the description is set, but does not contain the Artifacts link
    // yet, place the Artifacts link on the top of the description
    currentBuild.description = "${artifacts_anchor}${currentBuild.description}"
  }
}
// Delete all stored artifacts under the given remote path.
// No-op (with a warning) when remote_path is empty.
def purge_artifacts(String remote_path) {
  if (remote_path) {
    run_rclone("purge :webdav:/${remote_path}")
  } else {
    println "Warning: skipping artifacts purge, remote_path not set"
  }
}
// Build the given flake target with 'nix build'.
// If 'subdir' is given, an out-link is created there, the resulting image
// (if any) is signed, and build outputs are archived to remote storage.
// Build start/end epoch timestamps are stored in the job environment under
// BEG_/END_ keys for later provenance generation (see provenance()).
// A failed build marks the step unstable and the build FAILURE, but does
// not stop pipeline execution; aborts (InterruptedException) propagate.
def nix_build(String flakeref, String subdir=null) {
  try {
    // 'def' on all locals: this function may run from parallel stages, and
    // undeclared variables would be shared through the script binding.
    def flakeref_trimmed = flakeref_trim(flakeref)
    // Produce build out-links only if subdir was specified
    def opts = subdir ? "--out-link ${subdir}/${flakeref_trimmed}" : "--no-link"
    // Store the build start time to job's environment
    env."BEG_${flakeref_trimmed}_${env.BUILD_TAG}" = (int) (new Date().getTime() / 1000l)
    sh "nix build ${flakeref} ${opts}"
    // If the build result is an image, produce a signature file.
    // Note: plain 'false' here — Groovy has no named arguments for methods;
    // the original "abort_on_error='false'" silently assigned a binding
    // variable and passed the value positionally.
    def img_relpath = subdir ? find_img_relpath(flakeref, subdir, 'false') : ""
    if (img_relpath) {
      def target_path = "${subdir}/${img_relpath}"
      def sig_path = "sig/${img_relpath}.sig"
      sign_file(target_path, sig_path, "INT-Ghaf-Devenv-Image")
      // Archive signature file alongside the target image
      archive_artifacts("sig")
    } else {
      println "Build result is not an image, skipping image signing"
    }
    // Store the build end time to job's environment
    env."END_${flakeref_trimmed}_${env.BUILD_TAG}" = (int) (new Date().getTime() / 1000l)
    // Archive possible build outputs from subdir directory
    if (subdir) {
      archive_artifacts(subdir)
    }
  } catch (InterruptedException e) {
    // Do not continue pipeline execution on abort.
    throw e
  } catch (Exception e) {
    // Otherwise, if the command fails, mark the current step unstable and set
    // the final build result to failed, but continue the pipeline execution.
    unstable("FAILED: ${flakeref}")
    currentBuild.result = "FAILURE"
    println "Error: ${e.toString()}"
  }
}
// Generate a SLSA provenance attestation for 'flakeref', sign it, and
// write it to ${outdir}/provenance.json (plus .sig).
// Reads the BEG_/END_ build timestamps that nix_build() stored in the job
// environment under the trimmed flakeref key.
// NOTE(review): this sets PROVENANCE_* in the shared job env — unlike the
// withEnv-scoped variant in create_parallel_stages, concurrent calls would
// collide; presumably only called sequentially via sbomnix().
def provenance(String flakeref, String outdir, String flakeref_trimmed) {
  env.PROVENANCE_BUILD_TYPE = "https://github.com/tiiuae/ghaf-infra/blob/ea938e90/slsa/v1.0/L1/buildtype.md"
  env.PROVENANCE_BUILDER_ID = "${env.JENKINS_URL}"
  env.PROVENANCE_INVOCATION_ID = "${env.BUILD_URL}"
  env.PROVENANCE_TIMESTAMP_BEGIN = env."BEG_${flakeref_trimmed}_${env.BUILD_TAG}"
  env.PROVENANCE_TIMESTAMP_FINISHED = env."END_${flakeref_trimmed}_${env.BUILD_TAG}"
  env.PROVENANCE_EXTERNAL_PARAMS = """
  {
    "target": {
      "name": "${flakeref}",
      "repository": "${env.TARGET_REPO}",
      "ref": "${env.TARGET_COMMIT}"
    },
    "workflow": {
      "name": "${env.JOB_NAME}",
      "repository": "${env.GIT_URL}",
      "ref": "${env.GIT_COMMIT}"
    },
    "job": "${env.JOB_NAME}",
    "jobParams": ${JsonOutput.toJson(params)},
    "buildRun": "${env.BUILD_ID}"
  }
  """
  // 'def' keeps these locals out of the shared script binding
  def opts = "--recursive --out ${outdir}/provenance.json"
  sh "provenance ${flakeref} ${opts}"
  // Sign the provenance
  def target_path = "${outdir}/provenance.json"
  sign_file(target_path, "${target_path}.sig", "INT-Ghaf-Devenv-Provenance")
}
// Run the requested supply-chain-security tool ('provenance', 'sbomnix'
// or 'vulnxscan') against 'flakeref' and archive the outputs.
// Unknown tool names fall through silently (only the archive step runs).
def sbomnix(String tool, String flakeref) {
  // 'def' keeps these locals out of the shared script binding
  def flakeref_trimmed = flakeref_trim(flakeref)
  // Sbomnix outputs are stored in directory hierarchy under 'scs/'
  def outdir = "scs/${flakeref_trimmed}/scs"
  sh "mkdir -p ${outdir}"
  if (tool == "provenance") {
    provenance(flakeref, outdir, flakeref_trimmed)
  } else if (tool == "sbomnix") {
    sh """
      cd ${outdir}
      sbomnix ${flakeref}
    """
  } else if (tool == "vulnxscan") {
    sh """
      vulnxscan ${flakeref} --out vulns.csv
      csvcut vulns.csv --not-columns sortcol | csvlook -I >${outdir}/vulns.txt
    """
  }
  archive_artifacts("scs")
}
// Find the relative path (under 'subdir') of the first image file
// (.img/.raw/.zst/.iso) produced for 'flakeref'.
// Returns "" when no image is found; if abort_on_error is the string
// 'true', a missing image additionally fails the step via shell exit 1.
def find_img_relpath(String flakeref, String subdir, String abort_on_error="true") {
  // 'def' keeps these locals out of the shared script binding: this helper
  // runs concurrently from parallel build stages.
  def flakeref_trimmed = flakeref_trim(flakeref)
  def img_relpath = sh(
    script: """
      cd ${subdir} && \
      find -L ${flakeref_trimmed} -regex '.*\\.\\(img\\|raw\\|zst\\|iso\\)\$' -print -quit
    """, returnStdout: true).trim()
  if (!img_relpath) {
    println "Warning: no image found from '${subdir}/${flakeref_trimmed}'"
    // Error out stopping the pipeline execution if abort_on_error was set
    sh "if [ '${abort_on_error}' = 'true' ]; then exit 1; fi"
  } else {
    println "Found flakeref '${flakeref}' image '${img_relpath}'"
  }
  return img_relpath
}
// Sign 'path' with the given certificate, writing the signature to 'sigfile'.
// Signing failures are deliberately non-fatal: a warning is printed and no
// sigfile is produced. Pipeline aborts are NOT swallowed.
def sign_file(String path, String sigfile, String cert="INT-Ghaf-Devenv-Common") {
  println "sign_file: ${path} ### ${cert} ### ${sigfile}"
  try {
    sh(
      // See the 'sign' command at: https://github.com/tiiuae/ci-yubi
      script: """
        mkdir -p \$(dirname '${sigfile}') || true
        sign --path=${path} --cert=${cert} --sigfile=${sigfile}
      """, returnStdout: true).trim()
  } catch (InterruptedException e) {
    // Consistent with nix_build(): never swallow a pipeline abort as a
    // mere signing warning.
    throw e
  } catch (Exception e) {
    println "Warning: signing failed: sigfile will not be generated for: ${path}"
  }
}
// Run hardware tests for 'flakeref' by triggering the 'ghaf-hw-test'
// pipeline on a test agent labeled 'device_config'.
// Skips (without failing) when no matching agents are online or when
// ARTIFACTS_REMOTE_PATH / JENKINS_URL are unset. A failed downstream test
// job marks this step unstable and the build FAILURE, and links the failed
// job in the build description; on success, test results are copied back
// and archived.
def ghaf_hw_test(String flakeref, String device_config, String testset='_boot_') {
  // 'def' keeps the node list out of the shared script binding
  def testagent_nodes = nodesByLabel(label: "$device_config", offline: false)
  if (!testagent_nodes) {
    println "Warning: Skipping HW test '$flakeref', no test agents online"
    unstable("No test agents online")
    return
  }
  if (!env.ARTIFACTS_REMOTE_PATH) {
    println "Warning: skipping HW test '$flakeref', ARTIFACTS_REMOTE_PATH not set"
    return
  }
  if (!env.JENKINS_URL) {
    println "Warning: skipping HW test '$flakeref', JENKINS_URL not set"
    return
  }
  // Compose the image URL; testagent will need this URL to download the image
  def imgdir = find_img_relpath(flakeref, 'archive')
  def remote_path = "artifacts/${env.ARTIFACTS_REMOTE_PATH}"
  def img_url = "${env.JENKINS_URL}/${remote_path}/${imgdir}"
  def build_url = "${env.JENKINS_URL}/job/${env.JOB_NAME}/${env.BUILD_ID}"
  def build_href = "<a href=\"${build_url}\">${env.JOB_NAME}#${env.BUILD_ID}</a>"
  def flakeref_trimmed = "${flakeref_trim(flakeref)}"
  def description = "Triggered by ${build_href}<br>(${flakeref_trimmed})"
  // Trigger a build in 'ghaf-hw-test' pipeline.
  // 'build' step is documented in https://plugins.jenkins.io/pipeline-build-step/
  def job = build(
    job: "ghaf-hw-test",
    propagate: false,
    parameters: [
      string(name: "LABEL", value: "$device_config"),
      string(name: "DEVICE_CONFIG_NAME", value: "$device_config"),
      string(name: "IMG_URL", value: "$img_url"),
      string(name: "DESC", value: "$description"),
      string(name: "TESTSET", value: "$testset"),
      string(name: "TARGET", value: "$flakeref_trimmed"),
    ],
    wait: true,
  )
  println "ghaf-hw-test result (${device_config}:${testset}): ${job.result}"
  // If the test job failed, mark the current step unstable and set
  // the final build result failed, but continue the pipeline execution.
  if (job.result != "SUCCESS") {
    unstable("FAILED: ${device_config} ${testset}")
    currentBuild.result = "FAILURE"
    // Add a link to failed test job(s) on the calling pipeline.
    // '?: '' ' avoids rendering a literal "null" when no description is set yet.
    def test_href = "<a href=\"${job.absoluteUrl}\">⛔ ${flakeref_trimmed}</a>"
    currentBuild.description = "${currentBuild.description ?: ''}<br>${test_href}"
    return
  }
  // Copy test results from agent to controller to 'test-results' directory
  copyArtifacts(
    projectName: "ghaf-hw-test",
    selector: specific("${job.number}"),
    target: "ghaf-hw-test/${flakeref_trimmed}/test-results",
  )
  // Archive the test results
  archive_artifacts("ghaf-hw-test", flakeref_trimmed)
}
// Evaluate the given package targets in parallel with nix-eval-jobs and
// fill in each target map's 'drvPath' and 'error' keys in place.
// Each entry of 'targets' must carry 'target' and 'system'
// ("x86_64-linux" or "aarch64-linux").
def nix_eval_jobs(List<Map> targets) {
  // transform target names into valid nix arrays to be plugged into the expression below
  def x86_targets = targets.findAll { it.system == "x86_64-linux" }.target
  x86_targets = x86_targets ? "\"${x86_targets.join('" "')}\"" : ""
  def aarch64_targets = targets.findAll { it.system == "aarch64-linux" }.target
  aarch64_targets = aarch64_targets ? "\"${aarch64_targets.join('" "')}\"" : ""
  // nix-eval-jobs is used to evaluate the targets in parallel and compute derivation paths.
  // nix expression is used to create an attset on the fly which is a subset of #packages,
  // but optimized to only include the targets we want to build
  sh """
    nix-eval-jobs --gc-roots-dir gcroots --force-recurse --expr ' \
      let \
        flake = builtins.getFlake ("git+file://" + toString ./.); \
        lib = (import flake.inputs.nixpkgs { }).lib; \
      in { \
        x86_64-linux = lib.getAttrs [ ${x86_targets} ] flake.packages.x86_64-linux; \
        aarch64-linux = lib.getAttrs [ ${aarch64_targets} ] flake.packages.aarch64-linux; \
      }' > jobs.json
    jq -s 'map({ (.attr): { drvPath, error } }) | add' < jobs.json > results.json
  """
  targets.each {
    // 'def' on closure locals: without it these leak into the shared
    // script binding as undeclared globals.
    def target = "${it.system}.${it.target}"
    def drvPath = sh(script: "jq -r '.\"${target}\".drvPath' < results.json", returnStdout: true).trim()
    def evalError = sh(script: "jq -r '.\"${target}\".error' < results.json", returnStdout: true).trim()
    it.drvPath = drvPath
    it.error = evalError == "null" ? null : evalError
  }
}
// Evaluate the given hydraJobs targets with nix-eval-jobs and fill in each
// target map's 'drvPath' and 'error' keys in place.
def nix_eval_hydrajobs(List<Map> targets) {
  def targetList = targets.findAll { it }.target
  targetList = targetList ? "\"${targetList.join('" "')}\"" : ""
  sh """
    nix-eval-jobs --gc-roots-dir gcroots --force-recurse --expr ' \
      let \
        flake = builtins.getFlake ("git+file://" + toString ./.); \
        lib = (import flake.inputs.nixpkgs { }).lib; \
      in lib.getAttrs [ ${targetList} ] flake.hydraJobs' > jobs.json
    jq -s 'map({ (.attr): { drvPath, error } }) | add' < jobs.json > results.json
  """
  targets.each {
    // 'def' on closure locals: without it these leak into the shared
    // script binding as undeclared globals.
    // NOTE(review): hydraJobs attrs are conventionally '<target>.<system>';
    // verify the '<system>.<target>' key used below actually matches the
    // attr names nix-eval-jobs emits into results.json — TODO confirm.
    def target = "${it.system}.${it.target}"
    def drvPath = sh(script: "jq -r '.\"${target}\".drvPath' < results.json", returnStdout: true).trim()
    def evalError = sh(script: "jq -r '.\"${target}\".error' < results.json", returnStdout: true).trim()
    it.drvPath = drvPath
    it.error = evalError == "null" ? null : evalError
  }
}
// Build a map of stage closures (keyed by display name) suitable for the
// Jenkins 'parallel' step — one entry per target. Each closure runs, in order:
//   Build -> Provenance/SBOM/Vulnxscan (if it.scs)
//         -> Archive (if it.archive)
//         -> HW Test (if testset != null and it.hwtest_device != null).
// 'targets' entries are maps as filled in by nix_eval_jobs()/nix_eval_hydrajobs():
// 'target', 'system', 'drvPath', 'error', plus flags 'archive', 'scs' and
// 'hwtest_device'. When 'failedTargets' is a list, names of targets whose
// build failed are appended to it.
def create_parallel_stages(List<Map> targets, String testset='_boot_bat_perf_', List failedTargets = null) {
  def target_jobs = [:]
  targets.each {
    // Declared per-iteration so each generated closure captures its own copy
    def timestampBegin = ""
    def timestampEnd = ""
    def displayName = "${it.target} (${it.system})"
    def targetAttr = "${it.system}.${it.target}"
    def scsdir = "scs/${targetAttr}/scs"
    def target = "${it.target}"
    target_jobs[displayName] = {
      stage("Build ${displayName}") {
        def opts = ""
        if (it.archive) {
          // Archived targets get an out-link so the image can be found later
          opts = "--out-link archive/${targetAttr}"
        } else {
          opts = "--no-link"
        }
        try {
          // Surface evaluation errors recorded by nix_eval_jobs() as a
          // build failure for this target
          if (it.error) {
            error("Error in evaluation! ${it.error}")
          }
          // Timestamps bracket the build for the provenance stage below
          timestampBegin = sh(script: "date +%s", returnStdout: true).trim()
          sh "nix build -L ${it.drvPath}\\^* ${opts}"
          timestampEnd = sh(script: "date +%s", returnStdout: true).trim()
          // only attempt signing if there is something to sign
          if (it.archive) {
            def img_relpath = find_img_relpath(targetAttr, "archive")
            sign_file("archive/${img_relpath}", "sig/${img_relpath}.sig", "INT-Ghaf-Devenv-Image")
          };
        } catch (InterruptedException e) {
          // Pipeline abort: do not continue execution
          throw e
        } catch (Exception e) {
          // Build failure: mark this step unstable and the build FAILURE,
          // but let the other parallel targets keep running
          unstable("FAILED: ${displayName}")
          currentBuild.result = "FAILURE"
          if (failedTargets != null) {
            failedTargets.add(target)
          }
          println "Error: ${e.toString()}"
        }
      }
      if (it.scs) {
        stage("Provenance ${displayName}") {
          // SLSA external parameters, serialized into the provenance tool's env
          def externalParams = """
          {
            "target": {
              "name": "${targetAttr}",
              "repository": "${env.TARGET_REPO}",
              "ref": "${env.TARGET_COMMIT}"
            },
            "workflow": {
              "name": "${env.JOB_NAME}",
              "repository": "${env.GIT_URL}",
              "ref": "${env.GIT_COMMIT}"
            },
            "job": "${env.JOB_NAME}",
            "jobParams": ${JsonOutput.toJson(params)},
            "buildRun": "${env.BUILD_ID}"
          }
          """
          // this environment block is only valid for the scope of this stage,
          // preventing timestamp collision when provenances are built in parallel
          withEnv([
            'PROVENANCE_BUILD_TYPE="https://github.com/tiiuae/ghaf-infra/blob/ea938e90/slsa/v1.0/L1/buildtype.md"',
            "PROVENANCE_BUILDER_ID=${env.JENKINS_URL}",
            "PROVENANCE_INVOCATION_ID=${env.BUILD_URL}",
            "PROVENANCE_TIMESTAMP_BEGIN=${timestampBegin}",
            "PROVENANCE_TIMESTAMP_FINISHED=${timestampEnd}",
            "PROVENANCE_EXTERNAL_PARAMS=${externalParams}"
          ]) {
            // catchError: SCS stage failures do not fail the whole build
            catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {
              def outpath = "${scsdir}/provenance.json"
              sh """
                mkdir -p ${scsdir}
                provenance ${it.drvPath} --recursive --out ${outpath}
              """
              sign_file(outpath, "sig/${outpath}.sig", "INT-Ghaf-Devenv-Provenance")
            }
          }
        }
        stage("SBOM ${displayName}") {
          catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {
            sh """
              mkdir -p ${scsdir}
              cd ${scsdir}
              sbomnix ${it.drvPath}
            """
          }
        }
        stage("Vulnxscan ${displayName}") {
          catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {
            sh """
              mkdir -p ${scsdir}
              vulnxscan ${it.drvPath} --out vulns.csv
              csvcut vulns.csv --not-columns sortcol | csvlook -I >${scsdir}/vulns.txt
            """
          }
        }
      }
      if (it.archive) {
        stage("Archive ${displayName}") {
          script {
            // Image + signature always; SCS outputs only when produced above
            archive_artifacts("archive", targetAttr)
            archive_artifacts("sig", targetAttr)
            if (it.scs) {
              archive_artifacts("scs", targetAttr)
            }
          }
        }
      }
      if (testset != null && it.hwtest_device != null) {
        stage("Test ${displayName}") {
          script {
            ghaf_hw_test(targetAttr, it.hwtest_device, testset)
          }
        }
      }
    }
  }
  return target_jobs
}
return this