rename resampling option in xarrayReader #1182

Workflow file for this run

name: CI
# Run on every pull request, and on pushes to main or to version tags
on:
  push:
    branches:
      - main
    tags:
      - '*'
  pull_request:
env:
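  # Version used for the single-run steps (pre-commit, coverage upload) and for publishing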
  LATEST_PY_VERSION: '3.10'
jobs:
  tests:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version:
          - '3.8'
          - '3.9'
          - '3.10'
          - '3.11'
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          python -m pip install -e ".[test]"
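      # Lint with pre-commit only once, on the latest Python version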
      - name: Run pre-commit
        if: ${{ matrix.python-version == env.LATEST_PY_VERSION }}
        run: |
          python -m pip install pre-commit
          pre-commit run --all-files
      - name: Run tests
        run: python -m pytest --cov rio_tiler --cov-report xml --cov-report term-missing -s -vv
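      # Upload coverage to Codecov only once, from the latest Python job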
      - name: Upload Results
        if: ${{ matrix.python-version == env.LATEST_PY_VERSION }}
        uses: codecov/codecov-action@v1
        with:
          file: ./coverage.xml
          flags: unittests
          name: ${{ matrix.python-version }}
          fail_ci_if_error: false
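
  # Track pytest-benchmark results over time and alert on performance regressions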
  benchmark:
    needs: [tests]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.11'
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          python -m pip install -e ".[benchmark]"
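      # --benchmark-json output feeds the store-and-compare step below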
      - name: Run Benchmark
        run: python -m pytest tests/benchmarks/benchmarks.py --benchmark-only --benchmark-columns 'min, max, mean, median' --benchmark-sort 'min' --benchmark-json output.json
      - name: Store and Compare benchmark result
        uses: benchmark-action/github-action-benchmark@v1
        with:
          name: rio-tiler Benchmarks
          tool: 'pytest'
          output-file-path: output.json
          alert-threshold: '150%'
          comment-on-alert: true
          fail-on-alert: false
          # GitHub API token to make a commit comment
          github-token: ${{ secrets.GITHUB_TOKEN }}
          gh-pages-branch: 'gh-benchmarks'
          # Push results to the `gh-benchmarks` branch only on main
          auto-push: ${{ github.ref == 'refs/heads/main' }}
          benchmark-data-dir-path: dev/benchmarks
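
  # Benchmark request behavior using the tilebench extra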
  benchmark-requests:
    needs: [tests]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.11'
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          python -m pip install -e ".[tilebench]"
      - name: Run Benchmark
        run: python -m pytest tests/benchmarks/requests.py -s -vv
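
  # Build and publish to PyPI when a version tag is pushed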
  publish:
    needs: [tests]
    runs-on: ubuntu-latest
    if: startsWith(github.event.ref, 'refs/tags') || github.event_name == 'release'
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: ${{ env.LATEST_PY_VERSION }}
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          python -m pip install hatch
          python -m hatch build
      - name: Set tag version
        id: tag
        run: |
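          # Strip the ref prefix to get the bare tag, e.g. refs/tags/4.1.0 -> 4.1.0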
echo "version=${GITHUB_REF#refs/*/}"
echo "version=${GITHUB_REF#refs/*/}" >> $GITHUB_OUTPUT
      - name: Set module version
        id: module
        run: |
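          # Record the version declared in the package metadata via hatch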
          hatch --quiet version
          echo "version=$(hatch --quiet version)" >> $GITHUB_OUTPUT
      - name: Build and publish
        if: ${{ steps.tag.outputs.version == steps.module.outputs.version }}
        env:
          HATCH_INDEX_USER: ${{ secrets.PYPI_USERNAME }}
          HATCH_INDEX_AUTH: ${{ secrets.PYPI_PASSWORD }}
        run: |
          python -m hatch publish