-
Notifications
You must be signed in to change notification settings - Fork 72
173 lines (166 loc) · 6.01 KB
/
test-upstream.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
name: Nightly upstream testing

on:
  schedule:
    - cron: "0 0 * * *" # Daily "At 00:00" UTC for upstream dask testing
    - cron: "0 3 * * *" # Daily "At 03:00" UTC for upstream datafusion testing
  workflow_dispatch: # allows you to trigger the workflow run manually
    inputs:
      which_upstream:
        type: choice
        description: 'Library to update for upstream testing'
        required: false
        default: 'Dask'
        options:
          - Dask
          - DataFusion

# Required shell entrypoint to have properly activated conda environments
defaults:
  run:
    shell: bash -l {0}

env:
  # Resolve which upstream library this run targets:
  # manual dispatch uses the user's choice; scheduled runs map the
  # midnight cron to Dask and the 03:00 cron to DataFusion.
  which_upstream: >-
    ${{
      github.event_name == 'workflow_dispatch'
      && inputs.which_upstream
      || (
        github.event.schedule == '0 0 * * *'
        && 'Dask'
        || 'DataFusion'
      )
    }}
jobs:
  test-dev:
    name: "Test upstream dev (${{ matrix.os }}, python: ${{ matrix.python }}, distributed: ${{ matrix.distributed }})"
    runs-on: ${{ matrix.os }}
    env:
      CONDA_FILE: continuous_integration/environment-${{ matrix.python }}-dev.yaml
      DASK_SQL_DISTRIBUTED_TESTS: ${{ matrix.distributed }}
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, windows-latest, macos-latest]
        python: ["3.8", "3.9", "3.10"]
        distributed: [false]
        include:
          # run tests on a distributed client
          - os: "ubuntu-latest"
            python: "3.8"
            distributed: true
          - os: "ubuntu-latest"
            python: "3.10"
            distributed: true
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0 # Fetch all history for all branches and tags.
      - name: Set up Python
        uses: conda-incubator/[email protected]
        with:
          miniforge-variant: Mambaforge
          use-mamba: true
          python-version: ${{ matrix.python }}
          channel-priority: strict
          activate-environment: dask-sql
          environment-file: ${{ env.CONDA_FILE }}
      - name: Optionally update upstream cargo dependencies
        if: env.which_upstream == 'DataFusion'
        run: |
          bash continuous_integration/scripts/update-dependencies.sh
      - name: Build the Rust DataFusion bindings
        run: |
          maturin develop
      - name: Install hive testing dependencies
        if: matrix.os == 'ubuntu-latest'
        run: |
          mamba install -c conda-forge "sasl>=0.3.1"
          docker pull bde2020/hive:2.3.2-postgresql-metastore
          docker pull bde2020/hive-metastore-postgresql:2.3.0
      - name: Install upstream dev Dask
        if: env.which_upstream == 'Dask'
        run: |
          mamba install --no-channel-priority dask/label/dev::dask
      - name: Install pytest-reportlog
        run: |
          # TODO: add pytest-reportlog to testing environments if we move over to JSONL output
          mamba install pytest-reportlog
      - name: Test with pytest
        id: run_tests
        run: |
          pytest --report-log test-${{ matrix.os }}-py${{ matrix.python }}-results.jsonl --cov-report=xml -n auto tests --dist loadfile
      - name: Upload pytest results for failure
        # upload even when the test step failed, but not when it never ran
        if: |
          always()
          && steps.run_tests.outcome != 'skipped'
        uses: actions/upload-artifact@v3
        with:
          name: test-${{ matrix.os }}-py${{ matrix.python }}-results
          path: test-${{ matrix.os }}-py${{ matrix.python }}-results.jsonl

  import-dev:
    name: "Test importing with bare requirements and upstream dev"
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python
        uses: conda-incubator/[email protected]
        with:
          miniforge-variant: Mambaforge
          use-mamba: true
          # TODO: drop support for python 3.8, add support for python 3.11
          # https://github.com/dask-contrib/dask-sql/pull/1143
          python-version: "3.9"
          channel-priority: strict
      - name: Install Protoc
        uses: arduino/setup-protoc@v1
        with:
          version: '3.x'
          repo-token: ${{ secrets.GITHUB_TOKEN }}
      - name: Optionally update upstream cargo dependencies
        if: env.which_upstream == 'DataFusion'
        env:
          UPDATE_ALL_CARGO_DEPS: false
        run: |
          bash continuous_integration/scripts/update-dependencies.sh
      - name: Install dependencies and nothing else
        run: |
          pip install -e . -vv
          which python
          pip list
          mamba list
      - name: Install upstream dev Dask
        if: env.which_upstream == 'Dask'
        run: |
          python -m pip install git+https://github.com/dask/dask
          python -m pip install git+https://github.com/dask/distributed
      - name: Try to import dask-sql
        run: |
          python -c "import dask_sql; print('ok')"

  report-failures:
    name: Open issue for upstream dev failures
    needs: [test-dev, import-dev]
    # only file issues from the canonical repo, and only when a dev job failed
    if: |
      always()
      && (
        needs.test-dev.result == 'failure'
        || needs.import-dev.result == 'failure'
      )
      && github.repository == 'dask-contrib/dask-sql'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/download-artifact@v3
      - name: Prepare logs & issue label
        run: |
          # TODO: remove this if xarray-contrib/issue-from-pytest-log no longer needs a log-path
          if [ -f test-ubuntu-latest-py3.10-results/test-ubuntu-latest-py3.10-results.jsonl ]; then
            cp test-ubuntu-latest-py3.10-results/test-ubuntu-latest-py3.10-results.jsonl results.jsonl
          else
            touch results.jsonl
          fi
          # convert which_upstream to lowercase
          echo "which_upstream_lower=${which_upstream,,}" >> $GITHUB_ENV
      - name: Open or update issue on failure
        uses: xarray-contrib/[email protected]
        with:
          log-path: results.jsonl
          issue-title: ⚠️ Upstream CI ${{ env.which_upstream }} failed ⚠️
          issue-label: upstream-${{ env.which_upstream_lower }}