[ENH] start bidsifying output #155
Workflow file for this run

---
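# CI: check commit-message skip flags, cache the fmriprep derivatives of ds000017,
# build and install-test the package, run the pytest suite with coverage, and
# build and test the Docker image.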
on:
  push:
    branches:
      - main
      - maint/*
    tags:
      - '*'
  pull_request:
    branches:
      - main
      - maint/*
defaults:
  run:
    shell: bash
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
permissions:
  contents: read
jobs:
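  # Expose the head commit message so downstream jobs can check for run flags
  # (e.g. the 'full_test' keyword).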
  check_skip_flags:
    name: Check skip flags
    runs-on: ubuntu-latest
    outputs:
      head-commit-message: ${{ steps.get_head_commit_message.outputs.headCommitMsg }}
    steps:
      - name: Get repo
        uses: actions/checkout@v4
        with:
          ref: ${{ github.event.pull_request.head.sha }}
      - name: Print head git commit message
        id: get_head_commit_message
        run: echo "headCommitMsg=$(git show -s --format=%s)" >> $GITHUB_OUTPUT
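  # Download the fmriprep derivative of ds000017 once and cache it for the test jobs.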
  download-test-data:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - uses: actions/setup-python@v5
      - name: install tox
        run: pip install tox
      - uses: actions/cache@v3
        id: cache
        env:
          cache-name: ds000017
        with:
          path: /home/runner/work/giga_connectome/giga_connectome/giga_connectome/data/test_data
          key: ${{ env.cache-name }}
      - if: ${{ steps.cache.outputs.cache-hit != 'true' }}
        name: Download fmriprep derivative of ds000017
        id: download
        run: tox -e test_data
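  # Build the sdist, wheel, and a git archive, and upload them as artifacts.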
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - uses: actions/setup-python@v5
        with:
          python-version: 3
      - run: pip install --upgrade build twine
      - name: Build sdist and wheel
        run: python -m build
      - run: twine check dist/*
      - name: Upload sdist and wheel artifacts
        uses: actions/upload-artifact@v3
        with:
          name: dist
          path: dist/
      - name: Build git archive
        run: mkdir archive && git archive -v -o archive/archive.tgz HEAD
      - name: Upload git archive artifact
        uses: actions/upload-artifact@v3
        with:
          name: archive
          path: archive/
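  # Check that each distribution format (wheel, sdist, git archive) installs cleanly.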
  test-package:
    runs-on: ubuntu-latest
    needs: [build]
    strategy:
      matrix:
        package: [wheel, sdist, archive]
    steps:
      - name: Download sdist and wheel artifacts
        if: matrix.package != 'archive'
        uses: actions/download-artifact@v3
        with:
          name: dist
          path: dist/
      - name: Download git archive artifact
        if: matrix.package == 'archive'
        uses: actions/download-artifact@v3
        with:
          name: archive
          path: archive/
      - uses: actions/setup-python@v5
        with:
          python-version: 3
      - name: Display Python version
        run: python -c "import sys; print(sys.version)"
      - name: Update pip
        run: pip install --upgrade pip
      - name: Install wheel
        if: matrix.package == 'wheel'
        run: pip install dist/*.whl
      - name: Install sdist
        if: matrix.package == 'sdist'
        run: pip install dist/*.tar.gz
      - name: Install archive
        if: matrix.package == 'archive'
        run: pip install archive/archive.tgz
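  # Run the pytest suite with coverage on all supported Python versions.
  # The full suite (including smoke tests) runs only when the head commit message
  # contains 'full_test' or the pull request has been merged.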
  test-coverage:
    runs-on: ubuntu-latest
    needs: [build, download-test-data, check_skip_flags]
    strategy:
      matrix:
        python-version: ['3.8', '3.9', '3.10', '3.11', '3.12']
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
      - name: Restore cached data ds000017
        id: cache
        uses: actions/cache/restore@v3
        with:
          path: /home/runner/work/giga_connectome/giga_connectome/giga_connectome/data/test_data
          key: ds000017
      - name: Install build dependencies
        run: python -m pip install --upgrade pip
      - name: Install task package
        run: pip install -e .[test]
      - name: Test with pytest - skip smoke tests
        if: ${{ !contains(needs.check_skip_flags.outputs.head-commit-message, 'full_test') }}
        run: |
          pytest -m "not smoke" --cov=giga_connectome --cov-report=xml --doctest-modules -v --pyargs giga_connectome
      - name: Full test - run all the tests to generate an accurate coverage report
        if: ${{ contains(needs.check_skip_flags.outputs.head-commit-message, 'full_test') || github.event.pull_request.merged }}
        run: pytest --cov=giga_connectome --cov-report=xml --doctest-modules -v --pyargs giga_connectome
      - uses: codecov/codecov-action@v3
        if: ${{ always() }}
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
  docker:
    # Build and test the Docker image.
    if: ${{ contains(needs.check_skip_flags.outputs.head-commit-message, 'full_test') }}
    runs-on: ubuntu-latest
    needs: [download-test-data, check_skip_flags]
    steps:
      - uses: actions/checkout@v4
      - name: Build the Docker image
        run: |
          user_name="bids"
          docker build . --file Dockerfile --tag ${user_name}/giga_connectome
          mkdir -p "${HOME}/docker"
          docker save "${user_name}/giga_connectome" > "${HOME}/docker/image.tar"
      - name: Restore cached data ds000017
        id: cache
        uses: actions/cache/restore@v3
        with:
          path: /home/runner/work/giga_connectome/giga_connectome/giga_connectome/data/test_data
          key: ds000017
      - name: Test the Docker image
        run: |
          docker load -i ${HOME}/docker/image.tar
          docker run --rm --read-only \
            -v /home/runner/work/giga_connectome/giga_connectome/giga_connectome/data/test_data:/test_data \
            -v ./outputs:/outputs \
            -v ./outputs/working_dir:/work \
            bids/giga_connectome \
            /test_data/ds000017-fmriprep22.0.1-downsampled-nosurface \
            /outputs \
            participant \
            -w /work \
            --participant_label 1 \
            --reindex-bids
      - name: Upload output artifact
        uses: actions/upload-artifact@v3
        with:
          name: connectome
          path: ./outputs/
      - name: Upload docker image artifact
        uses: actions/upload-artifact@v3
        with:
          name: docker-image
          # shell variables are not expanded in `with:` inputs, so use the absolute runner home path
          path: /home/runner/docker/image.tar
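  # Publish to PyPI on tagged pushes (currently disabled).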
  # publish:
  #   runs-on: ubuntu-latest
  #   needs: [test-package, test-coverage]
  #   if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
  #   steps:
  #     - uses: actions/download-artifact@v3
  #       with:
  #         name: dist
  #         path: dist/
  #     - uses: pypa/gh-action-pypi-publish@release/v1
  #       with:
  #         user: __token__
  #         password: ${{ secrets.PYPI_API_TOKEN }}