Merge branch 'main' into evaluation

cmdoret authored Oct 25, 2024
2 parents 5d0ac48 + 8052c5e commit 8b3564a
Showing 12 changed files with 261 additions and 175 deletions.
83 changes: 83 additions & 0 deletions .github/workflows/algorithm-evaluation.yml
@@ -0,0 +1,83 @@
name: Execute algorithm and evaluate metrics

on:
  push:
    branches: [ "main" ]
    paths:
      - 'algorithms/*.yaml'
      - '.github/workflows/algorithm-evaluation.yml'
  pull_request:
    branches: [ "main" ]
    paths:
      - 'algorithms/*.yaml'
      - '.github/workflows/algorithm-evaluation.yml'

jobs:
  execute:
    runs-on: ubuntu-latest
    steps:
      - name: Find updated algorithm
        id: find-algorithm
        uses: tj-actions/changed-files@v44
        with:
          files: algorithms/**.yaml

      - uses: actions/checkout@v4

      - name: Execute algorithm container
        run: |
          # iterate over all datasets in S3
          for recording in $(aws s3api list-objects --bucket ${AWS_BUCKET} --prefix datasets --output text --query 'Contents[].[Key]' | grep '.*edf'); do
            if [ ${{ steps.find-algorithm.outputs.any_changed }} == "false" ]; then
              break
            fi
            # save each recording (dataset slice) to a temp file
            aws s3 cp s3://${AWS_BUCKET}/${recording} ./data/tmp.edf
            # Run inference on recording for every updated algorithm
            for algo in ${{ steps.find-algorithm.outputs.all_changed_files }}; do
              IMAGE=$(grep '^image: ' $algo | sed 's/^image: \+//' | tr -d \'\")
              ALGO_NAME=$(echo "$IMAGE" | iconv -t ascii//TRANSLIT | sed -r s/[^a-zA-Z0-9]+/-/g | sed -r s/^-+\|-+$//g | tr A-Z a-z)
              mkdir -p ./predictions
              chmod -R 777 ./predictions/
              echo "Running inference for $ALGO_NAME"
              docker run \
                -e INPUT=tmp.edf \
                -e OUTPUT=tmp.tsv \
                -v ./predictions:/output:rw \
                -v ./data:/data:ro \
                "${IMAGE}"
              # Upload predictions to S3
              subpath=${recording#*/}
              prediction=${subpath%_eeg.edf}_events.tsv
              aws s3 cp \
                ./predictions/tmp.tsv \
                "s3://${AWS_BUCKET}/submissions/${ALGO_NAME}/${prediction}"
              rm ./data/tmp.edf ./predictions/tmp.tsv
            done
          done
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_KEY }}
          AWS_DEFAULT_REGION: ${{ secrets.AWS_REGION }}
          AWS_BUCKET: ${{ secrets.AWS_BUCKET }}

  evaluate:
    runs-on: ubuntu-latest
    needs: [execute]
    container:
      image: ghcr.io/${{ github.repository }}-evaluator:main
      credentials:
        username: ${{ github.actor }}
        password: ${{ secrets.github_token }}
    steps:
      - name: Evaluate algorithm predictions
        run: python __main__.py
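Aside: the least obvious part of the `execute` job is how an S3 object key is rewritten into the prediction path via two Bash parameter expansions. A minimal sketch, using a hypothetical object key and algorithm slug (real values depend on the bucket layout and the image name in the algorithm YAML):

```bash
#!/usr/bin/env bash
# Hypothetical inputs; the workflow derives these from S3 and the algorithm YAML.
recording="datasets/sub-01/ses-01/sub-01_ses-01_eeg.edf"
ALGO_NAME="gotman"
AWS_BUCKET="example-bucket"

# ${recording#*/} strips the shortest prefix up to the first "/",
# i.e. drops the leading "datasets/" component.
subpath=${recording#*/}                    # sub-01/ses-01/sub-01_ses-01_eeg.edf

# ${subpath%_eeg.edf} strips the "_eeg.edf" suffix; "_events.tsv" is appended.
prediction=${subpath%_eeg.edf}_events.tsv  # sub-01/ses-01/sub-01_ses-01_events.tsv

echo "s3://${AWS_BUCKET}/submissions/${ALGO_NAME}/${prediction}"
# -> s3://example-bucket/submissions/gotman/sub-01/ses-01/sub-01_ses-01_events.tsv
```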
82 changes: 82 additions & 0 deletions .github/workflows/build-website.yml
@@ -0,0 +1,82 @@
name: Build website
on:
  workflow_run:
    workflows: ["Execute algorithm and evaluate metrics"]
    branches: [main]
    types:
      - completed

  workflow_dispatch:

# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
permissions:
  contents: read
  pages: write
  id-token: write

jobs:
  download:
    runs-on: ubuntu-latest
    steps:
      - name: Download results
        run: |
          aws s3 cp \
            s3://${AWS_BUCKET}/results/results.json \
            website/data/sampleEval.json
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_KEY }}
          AWS_DEFAULT_REGION: ${{ secrets.AWS_REGION }}
          AWS_BUCKET: ${{ secrets.AWS_BUCKET }}

      - name: upload artifact
        uses: actions/upload-artifact@v4
        with:
          name: web-data
          path: website/data/

  build:
    needs: download
    runs-on: ubuntu-latest
    container:
      image: ghcr.io/${{ github.repository }}-site-builder:main
      credentials:
        username: ${{ github.actor }}
        password: ${{ secrets.github_token }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: download artifact
        uses: actions/download-artifact@v4
        with:
          name: web-data
          path: website/data/

      - name: Build website
        run: python script.py

  deploy:
    needs: build
    environment:
      name: github-pages
      url: ${{ steps.deployment.outputs.page_url }}
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup Pages
        uses: actions/configure-pages@v5

      - name: Upload artifact
        uses: actions/upload-pages-artifact@v3
        with:
          path: 'website/public'

      - name: Deploy to GitHub Pages
        id: deployment
        uses: actions/deploy-pages@v4
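The metrics file flows through this pipeline as a build artifact: `download` fetches it from S3 into `website/data/`, `build` consumes it inside the site-builder container, and `deploy` publishes the rendered `website/public`. The first two steps can be reproduced locally with the same commands the jobs run (assuming AWS credentials and `AWS_BUCKET` are already set in the environment):

```bash
# Mirrors the `download` job
aws s3 cp "s3://${AWS_BUCKET}/results/results.json" website/data/sampleEval.json

# Mirrors the `build` job (run inside the site-builder container in CI)
python script.py
```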
2 changes: 1 addition & 1 deletion .github/workflows/pr-check.yml
@@ -1,4 +1,4 @@
-name: PR checks fon algorithm submission
+name: PR checks on algorithm submission

on:
  pull_request:
3 changes: 2 additions & 1 deletion .gitignore
@@ -4,6 +4,7 @@ __pycache__/
*$py.class
.DS_Store
*.csv
+website/data/*
# C extensions
*.so

@@ -153,4 +154,4 @@ dmypy.json
.pytype/

# Cython debug symbols
-cython_debug/
+cython_debug/
2 changes: 1 addition & 1 deletion README.md
@@ -19,7 +19,7 @@ flowchart LR
  subgraph S3
    EDF[edf]
    TSVr[TSV ref]
-   TSVh[TSV s3hyp]
+   TSVh[TSV hyp]
  end
  subgraph Github Actions CI
2 changes: 1 addition & 1 deletion algorithms/gotman.yaml
@@ -13,7 +13,7 @@ date-released: "1982-01-01"
abstract: >
  During prolonged EEG monitoring of epileptic patients, the continuous EEG
  tracing may be replaced by a selective recording of ictal and interictal
- epileptic activity. We have described previously methods for the EEG
+ epileptic activity. We have previously described methods for the EEG
  recording of seizures with overt clinical manifestations and for the automatic
  detection of spikes. This paper describes a method for the automatic detection
  of seizures in the EEG, independently of the presence of clinical signs; it is
26 changes: 5 additions & 21 deletions config/template.Dockerfile
@@ -3,11 +3,6 @@
ARG PYTHON_VERSION=3.12
FROM python:${PYTHON_VERSION}-slim as base

-# Read aws creds
-ENV AWS_ACCESS_KEY
-ENV AWS_SECRET_KEY
-ENV AWS_BUCKET
-
# Prevents Python from writing pyc files.
ENV PYTHONDONTWRITEBYTECODE=1
# Keeps Python from buffering stdout and stderr to avoid situations where
@@ -30,40 +25,29 @@ RUN adduser \
# Install S3 dependencies
RUN apt-get update -y && \
    apt-get install -y \
-    s3fs \
-    libfuse-dev \
-    libcurl4-openssl-dev \
-    libxml2-dev \
-    libssl-dev \
-    mime-support \
-    automake \
-    libtool \
    wget \
    tar \
    git \
    unzip && \
    apt-get clean

RUN pip --no-cache-dir install --upgrade awscli
-RUN mkdir -p /mnt/s3


# Download dependencies as a separate step to take advantage of Docker's caching.
# Leverage a cache mount to /root/.cache/pip to speed up subsequent builds.
# Leverage a bind mount to algo/ to avoid having to copy the sources into
# this layer.
RUN --mount=type=cache,target=/root/.cache/pip \
    --mount=type=bind,source=algo/,target=algo/ \
-    python -m pip install ./algo
+    python -m pip install numpy # <-- install your algorithm

# Switch to the non-privileged user to run the application.
USER appuser

VOLUME ["/data"]
VOLUME ["/output"]

# Define input / output files
-ENV INPUT_FILE=""
-ENV OUTPUT_FILE=""
+ENV INPUT=""
+ENV OUTPUT=""
# Run the application
-# NOTE: edit the second command
-CMD s3fs ${AWS_BUCKET} /mnt/s3; python3 -m algo "/mnt/s3/input/${INPUT_FILE}" "/output/${OUTPUT_FILE}"
+CMD python3 -m algo "/data/${INPUT}" "/output/${OUTPUT}"
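To sanity-check an image built from this template before opening a PR, one can mirror the `docker run` invocation from the evaluation workflow. A sketch, assuming a local recording at `./data/input.edf` and an image tagged `my-algo` (both names are illustrative, not from the repository):

```bash
# Build the image from the template (tag is illustrative)
docker build -t my-algo -f template.Dockerfile .

# Run it the way the CI workflow does: the recording is mounted read-only
# under /data and the prediction is written to the /output volume.
mkdir -p predictions
docker run \
  -e INPUT=input.edf \
  -e OUTPUT=predictions.tsv \
  -v "$(pwd)/data:/data:ro" \
  -v "$(pwd)/predictions:/output:rw" \
  my-algo
```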
70 changes: 0 additions & 70 deletions website/data/sampleEval.json

This file was deleted.

