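# The gnatsas-on-spark include provides the gnatsas static-analysis job
# mentioned below; the two gitlab-templates components add check-issue and
# pre-commit jobs in the "check" stage.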
include:
  - project: eng/codepeer/ci-registry
    file: /gnatsas-on-spark.yml
  - component: $CI_SERVER_FQDN/eng/gitlab-templates/check-issue@~latest
    inputs:
      stage: check
  - component: $CI_SERVER_FQDN/eng/gitlab-templates/pre-commit@~latest
    inputs:
      stage: check

# In this CI, pipelines are triggered when pushing to a merge request, when
# merging an MR to a protected branch (only for the gnatsas job), and on
# scheduled pipelines.
workflow:
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event" || ($CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_REF_PROTECTED == "true") || $CI_PIPELINE_SOURCE == "schedule"
      when: always
    # Run on pipelines created by selecting "Run pipeline" in the GitLab UI,
    # from the project’s "Build > Pipelines" section.
    - if: $CI_PIPELINE_SOURCE == "web"
    - when: never

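# The build job packs the spark2014 install tree into this archive and
# publishes it as an artifact; the test jobs unpack it via *deploy_package
# instead of rebuilding.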
variables:
  PACKAGE_BASE_NAME: spark2014.tar.gz
  PACKAGE_ABSOLUTE_NAME: $CI_PROJECT_DIR/$PACKAGE_BASE_NAME

stages:
  - build
  - test
  - check

###############
# Common bits #
###############
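
# ".basic-setup" and ".deploy_package_and_touch_fingerprints" are hidden keys:
# their command lists are reused through the YAML anchors &setup_repos and
# &deploy_package. A job script written as
#   script:
#     - *setup_repos
# behaves as if the commands below were spliced in at that point.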
.basic-setup: &setup_repos
  # If the package exists, move it to /tmp so as not to influence "anod vcs"
  - if [ -f $PACKAGE_ABSOLUTE_NAME ] ; then mv $PACKAGE_ABSOLUTE_NAME /tmp ; fi
  # Run generic_anod_ci to set up the sandbox; it writes the CI environment
  # to /tmp/ci_env.sh, which is displayed and then sourced.
  - generic_anod_ci $GENERIC_ANOD_CI_OPTIONS
  - cat /tmp/ci_env.sh
  - . /tmp/ci_env.sh
  # Tune anod to use our build & test plan
  - anod tune --plan $CI_PROJECT_DIR/plans/ci.plan
  # Go to the sandbox directory
  - cd $ANOD_DEFAULT_SANDBOX_DIR
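
# Unpacking at / restores the install tree at the exact absolute paths it had
# in the build job. Touching the *.assume-unchanged fingerprint files
# presumably tells anod that the corresponding build and install steps are up
# to date, so test jobs reuse the prebuilt package instead of rebuilding.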
.deploy_package_and_touch_fingerprints: &deploy_package
  # Unpack the package
  - tar zxf /tmp/$PACKAGE_BASE_NAME -C /
  # Tell anod that the package has already been built
  - mkdir -p fingerprints
  - COMPONENT=`anod eval spark2014 build_space_name --primitive build --qualifier=coverage,assertions`
  - touch fingerprints/x86_64-linux.$COMPONENT.download_bin.json.assume-unchanged
  - touch fingerprints/x86_64-linux.$COMPONENT.install.json.assume-unchanged
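
# Template for the spark2014 testsuite jobs below; each concrete job sets
# ANOD_ENTRY_POINT (the plan entry point passed to "anod run") and
# ANOD_QUALIFIERS (the qualifiers identifying the resulting test build space).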
.spark2014_test:
  services:
    - image:pe-base
    - cpu:16
  stage: test
  script:
    # Move the package out of the way, so it does not influence "anod vcs"
    - mv $PACKAGE_ABSOLUTE_NAME /tmp
    # Add sparklib and the internal testsuite, for which we want to use the
    # most recent sources for testing
    - GENERIC_ANOD_CI_OPTIONS="--add-dep eng/spark/sparklib
        --add-dep eng/spark/spark-internal-testsuite"
    - if [[ $CI_PIPELINE_SOURCE == "schedule" ]]; then GENERIC_ANOD_CI_OPTIONS="$GENERIC_ANOD_CI_OPTIONS --continuous-builder-mode"; fi
    # Set up the "anod vcs" checkouts as appropriate
    - *setup_repos
    # Remove gnat from vcs for testing; this is allowed to fail when gnat is
    # not present
    - anod vcs --remove gnat || true
    # Do not rebuild spark2014-doc
    - anod install spark2014-doc --latest
    - *deploy_package
    # Set the caching location
    - mkdir -p $CI_PROJECT_DIR/gnatprove_cache
    - export GNATPROVE_CACHE="file:$CI_PROJECT_DIR/gnatprove_cache"
    # Set the location of the sources for coverage
    - ANOD_BUILDSPACE_SOURCES=`anod eval spark2014-core build_space_name --primitive build --qualifier=coverage,assertions`
    - export COVERAGE_ROOT_DIR=$ANOD_DEFAULT_SANDBOX_DIR/x86_64-linux/$ANOD_BUILDSPACE_SOURCES/src
    - export COVERAGE_SOURCE_DIR=$CI_PROJECT_DIR
    # Test using anod
    - anod run $ANOD_ENTRY_POINT
    # Process the results
    - ANOD_BUILDSPACE=`anod eval spark2014 build_space_name --primitive test --qualifier=$ANOD_QUALIFIERS`
    - cp -r $ANOD_DEFAULT_SANDBOX_DIR/x86_64-linux/$ANOD_BUILDSPACE/results/new/ $CI_PROJECT_DIR/testsuite-results
    - cp -r $ANOD_DEFAULT_SANDBOX_DIR/x86_64-linux/$ANOD_BUILDSPACE/src/cobertura-report/ $CI_PROJECT_DIR/coverage
    - testsuite_reports
    # Publish the coverage HTML report; the target subdirectory depends on how
    # the pipeline was triggered
    - export HTMLTARGETDIR=$CI_COMMIT_BRANCH
    - if [[ $CI_PIPELINE_SOURCE == "merge_request_event" ]]; then export HTMLTARGETDIR=MRs/$CI_MERGE_REQUEST_IID ; fi
    - if [[ $CI_PIPELINE_SOURCE == "schedule" ]]; then export HTMLTARGETDIR=weekly ; fi
    - publish-pages $ANOD_DEFAULT_SANDBOX_DIR/x86_64-linux/$ANOD_BUILDSPACE/src/html-report --target-subdir $HTMLTARGETDIR --expires 30
  artifacts:
    when: always
    paths:
      - xunit-*.xml
      - testsuite-results
      - coverage/cobertura.xml
    reports:
      junit: xunit-*.xml
      coverage_report:
        coverage_format: cobertura
        path: coverage/cobertura.xml
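  # A constant key shares the gnatprove cache across branches and pipelines,
  # so proof results from earlier runs can be reused.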
  cache:
    - key: alwaysthesame
      paths:
        - gnatprove_cache

#########
# Build #
#########
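
# Build spark2014 with the assertions and coverage qualifiers and package the
# install tree for the test jobs.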
build:
  services:
    - image:pe-base
    - cpu:8
  stage: build
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event" || $CI_PIPELINE_SOURCE == "schedule"
      when: always
    - when: never
  script:
    - GENERIC_ANOD_CI_OPTIONS="--add-dep eng/spark/sparklib
        --add-dep eng/spark/spark-internal-testsuite
        --add-dep eng/toolchain/gnat
        --add-dep eng/spark/why3"
    - if [[ $CI_PIPELINE_SOURCE == "schedule" ]]; then GENERIC_ANOD_CI_OPTIONS="$GENERIC_ANOD_CI_OPTIONS --continuous-builder-mode"; fi
    - *setup_repos
    # Build using anod
    - anod run build
    # Create the package
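    # The leading slash is stripped so the archive holds paths relative to /;
    # unpacking with "tar -C /" in *deploy_package then restores the install
    # tree at the same absolute location.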
    - SB_WITHOUT_LEADING_SLASH=`echo $ANOD_DEFAULT_SANDBOX_DIR | cut -b2-`
    - PACKNAME=`anod info build spark2014 --show build_space --qualifier=assertions,coverage`
    - tar czf $PACKAGE_ABSOLUTE_NAME -C /
        $SB_WITHOUT_LEADING_SLASH/x86_64-linux/$PACKNAME/install
  artifacts:
    paths:
      - $PACKAGE_BASE_NAME

########
# Test #
########
spark2014:
  extends: .spark2014_test
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      when: always
    - when: never
  variables:
    ANOD_ENTRY_POINT: test
    ANOD_QUALIFIERS: assertions,coverage,cleanup-mode=none,cache
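
# Large tests; only triggered manually from an MR pipeline and allowed to
# fail.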
spark2014_large:
  extends: .spark2014_test
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      when: manual
    - when: never
  allow_failure: true
  variables:
    ANOD_ENTRY_POINT: test_large
    ANOD_QUALIFIERS: assertions,only_large,coverage,cleanup-mode=none,cache
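
# Same testsuite without the "cache" qualifier: manual on MR pipelines, always
# run on scheduled pipelines.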
spark2014_nocache:
  extends: .spark2014_test
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      when: manual
    - if: $CI_PIPELINE_SOURCE == "schedule"
      when: always
    - when: never
  allow_failure: true
  variables:
    ANOD_ENTRY_POINT: test_nocache
    ANOD_QUALIFIERS: assertions,coverage,cleanup-mode=none

#################
# Test of ACATS #
#################
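
# Run the ACATS testsuite against the prebuilt spark2014 package.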
acats:
  services:
    - image:pe-base
    - cpu:16
  stage: test
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      when: always
    - when: never
  script:
    # Set up the sandbox.
    # Add acats explicitly for this build.
    - GENERIC_ANOD_CI_OPTIONS="--add-dep eng/toolchain/acats"
    - *setup_repos
    # Deploy the installed package
    - *deploy_package
    # Test using anod
    - anod run test_acats
    # Process the results
    - testsuite_reports
  artifacts:
    paths:
      - xunit-*.xml
      - __results*
    reports:
      junit: xunit-*.xml

################
# Build of Doc #
################
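
# Build the documentation (presumably the SPARK Reference Manual and User's
# Guide, as PDF and HTML); runs only on MR pipelines that touch files under
# docs/.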
build_docs:
  stage: build
  services:
    - image:pe-base
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      changes:
        - docs/**/*
      when: always
    - when: never
  artifacts:
    when: always
    paths:
      - spark/pdf/spark2014_rm.pdf
      - spark/pdf/spark2014_ug.pdf
      - spark/html/lrm
      - spark/html/ug
  script:
    # Set up the "anod vcs" checkouts as appropriate
    - *setup_repos
    # Build using anod
    - anod build spark2014-doc
    - cp -r $ANOD_DEFAULT_SANDBOX_DIR/x86_64-linux/spark2014-doc/install/share/doc/spark $CI_PROJECT_DIR
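    # Publish the HTML docs; merge-request pipelines publish under a per-MR
    # subdirectory so each MR gets its own preview.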
    - export HTMLTARGETDIR=$CI_COMMIT_BRANCH
    - if [[ $CI_PIPELINE_SOURCE == "merge_request_event" ]]; then export HTMLTARGETDIR=MRs/$CI_MERGE_REQUEST_IID ; fi
    - publish-pages $CI_PROJECT_DIR/spark/html/lrm --target-subdir doc/lrm/$HTMLTARGETDIR --expires 30
    - publish-pages $CI_PROJECT_DIR/spark/html/ug --target-subdir doc/ug/$HTMLTARGETDIR --expires 30