name: Nightly upstream testing
on:
  schedule:
    - cron: "0 0 * * *" # Daily "At 00:00" UTC for upstream dask testing
    - cron: "0 3 * * *" # Daily "At 03:00" UTC for upstream datafusion testing
  workflow_dispatch: # allows you to trigger the workflow run manually
    inputs:
      which_upstream:
        type: choice
        description: 'Library to update for upstream testing'
        required: false
        default: 'Dask'
        options:
          - Dask
          - DataFusion

# Required shell entrypoint to have properly activated conda environments
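# `bash -l` starts each step's shell as a login shell, so the conda
# initialization that setup-miniconda writes to the shell profile is sourced
# before the step body runs; `{0}` is the placeholder GitHub Actions replaces
# with the path of the temporary script generated from each `run` block.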
defaults:
  run:
    shell: bash -l {0}

env:
  which_upstream: >-
    ${{
      github.event_name == 'workflow_dispatch'
      && inputs.which_upstream
      || (
        github.event.schedule == '0 0 * * *'
        && 'Dask'
        || 'DataFusion'
      )
    }}
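# GitHub Actions expressions have no ternary operator, so `cond && a || b` is
# the conventional workaround: `&&` yields `a` when `cond` is true and `||`
# falls through to `b` otherwise. This only works because 'Dask' and
# 'DataFusion' are non-empty (truthy) strings; e.g. the 03:00 cron fails the
# schedule check above, so the expression evaluates to 'DataFusion'.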
jobs:
  test-dev:
    name: "Test upstream dev (${{ matrix.os }}, python: ${{ matrix.python }}, distributed: ${{ matrix.distributed }})"
    runs-on: ${{ matrix.os }}
    env:
      CONDA_FILE: continuous_integration/environment-${{ matrix.python }}-dev.yaml
      DASK_SQL_DISTRIBUTED_TESTS: ${{ matrix.distributed }}
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, windows-latest, macos-latest]
        python: ["3.8", "3.9", "3.10"]
        distributed: [false]
        include:
          # run tests on a distributed client
          - os: "ubuntu-latest"
            python: "3.8"
            distributed: true
          - os: "ubuntu-latest"
            python: "3.10"
            distributed: true
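    # The base matrix is 3 OSes x 3 Python versions, all against a local
    # (non-distributed) client; the two include entries re-run the oldest and
    # newest Python on ubuntu against a distributed client, for 11 jobs total.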
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0 # Fetch all history for all branches and tags.
      - name: Set up Python
        uses: conda-incubator/setup-miniconda@v2 # assumed major-version pin
        with:
          miniforge-variant: Mambaforge
          use-mamba: true
          python-version: ${{ matrix.python }}
          channel-priority: strict
          activate-environment: dask-sql
          environment-file: ${{ env.CONDA_FILE }}
      - name: Optionally update upstream cargo dependencies
        if: env.which_upstream == 'DataFusion'
        run: |
          bash continuous_integration/scripts/update-dependencies.sh
      - name: Build the Rust DataFusion bindings
        run: |
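          # `maturin develop` compiles the Rust bindings and installs the
          # resulting extension module directly into the active conda
          # environment (a debug build by default)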
          maturin develop
      - name: Install hive testing dependencies
        if: matrix.os == 'ubuntu-latest'
        run: |
          mamba install -c conda-forge "sasl>=0.3.1"
          docker pull bde2020/hive:2.3.2-postgresql-metastore
          docker pull bde2020/hive-metastore-postgresql:2.3.0
      - name: Install upstream dev Dask
        if: env.which_upstream == 'Dask'
        run: |
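          # `dask/label/dev::dask` is conda's channel::package syntax, pulling
          # the nightly dask build from the dask channel's dev label;
          # --no-channel-priority overrides the strict channel priority
          # configured when the environment was created above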
          mamba install --no-channel-priority dask/label/dev::dask
      - name: Install pytest-reportlog
        run: |
          # TODO: add pytest-reportlog to testing environments if we move over to JSONL output
          mamba install pytest-reportlog
      - name: Test with pytest
        id: run_tests
        run: |
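          # --report-log (from pytest-reportlog) writes one JSON record per
          # test to the .jsonl file consumed by the report-failures job below;
          # -n auto and --dist loadfile come from pytest-xdist and spread
          # whole test files across one worker per available core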
          pytest --report-log test-${{ matrix.os }}-py${{ matrix.python }}-results.jsonl --cov-report=xml -n auto tests --dist loadfile
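      # always() keeps this step running when the test step fails, which is
      # exactly when the report log is needed; the outcome check still skips
      # the upload if the tests never ran at all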
      - name: Upload pytest results for failure
        if: |
          always()
          && steps.run_tests.outcome != 'skipped'
        uses: actions/upload-artifact@v3
        with:
          name: test-${{ matrix.os }}-py${{ matrix.python }}-results
          path: test-${{ matrix.os }}-py${{ matrix.python }}-results.jsonl

  import-dev:
    name: "Test importing with bare requirements and upstream dev"
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python
        uses: conda-incubator/setup-miniconda@v2 # assumed major-version pin
        with:
          miniforge-variant: Mambaforge
          use-mamba: true
          # TODO: drop support for python 3.8, add support for python 3.11
          # https://github.com/dask-contrib/dask-sql/pull/1143
          python-version: "3.9"
          channel-priority: strict
      - name: Install Protoc
        uses: arduino/setup-protoc@v1
        with:
          version: '3.x'
          repo-token: ${{ secrets.GITHUB_TOKEN }}
      - name: Optionally update upstream cargo dependencies
        if: env.which_upstream == 'DataFusion'
        env:
          UPDATE_ALL_CARGO_DEPS: false
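        # UPDATE_ALL_CARGO_DEPS=false presumably limits the script to bumping
        # only the DataFusion crates rather than every entry in Cargo.lock;
        # this is an assumption from the variable name, as the behavior is
        # defined in continuous_integration/scripts/update-dependencies.sh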
        run: |
          bash continuous_integration/scripts/update-dependencies.sh
      - name: Install dependencies and nothing else
        run: |
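          # an editable install against only the package's own declared
          # requirements (no conda environment file), to verify the bare
          # requirement set is sufficient; the commands below just echo the
          # resulting environment for debugging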
          pip install -e . -vv
          which python
          pip list
          mamba list
      - name: Install upstream dev Dask
        if: env.which_upstream == 'Dask'
        run: |
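          # install dask and distributed straight from the tip of their main
          # branches on GitHub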
          python -m pip install git+https://github.com/dask/dask
          python -m pip install git+https://github.com/dask/distributed
      - name: Try to import dask-sql
        run: |
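          # minimal smoke test: succeeds only if dask_sql, including its
          # compiled Rust module, imports cleanly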
          python -c "import dask_sql; print('ok')"

  report-failures:
    name: Open issue for upstream dev failures
    needs: [test-dev, import-dev]
    if: |
      always()
      && (
        needs.test-dev.result == 'failure'
        || needs.import-dev.result == 'failure'
      )
      && github.repository == 'dask-contrib/dask-sql'
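    # A failed dependency normally causes dependent jobs to be skipped, so
    # always() is required for this job to observe the failures at all; the
    # repository check keeps scheduled runs on forks from opening issues.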
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/download-artifact@v3
      - name: Prepare logs & issue label
        run: |
          # TODO: remove this if xarray-contrib/issue-from-pytest-log no longer needs a log-path
          if [ -f test-ubuntu-latest-py3.10-results/test-ubuntu-latest-py3.10-results.jsonl ]; then
            cp test-ubuntu-latest-py3.10-results/test-ubuntu-latest-py3.10-results.jsonl results.jsonl
          else
            touch results.jsonl
          fi
          # convert which_upstream to lowercase
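          # `${which_upstream,,}` is bash 4+ parameter expansion for
          # lowercasing; appending to $GITHUB_ENV exposes the result as an
          # environment variable to the remaining steps in this job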
          echo "which_upstream_lower=${which_upstream,,}" >> $GITHUB_ENV
      - name: Open or update issue on failure
        uses: xarray-contrib/issue-from-pytest-log@v1 # assumed major-version pin
        with:
          log-path: results.jsonl
          issue-title: ⚠️ Upstream CI ${{ env.which_upstream }} failed ⚠️
          issue-label: upstream-${{ env.which_upstream_lower }}