Add EventNet #4

name: Execute algorithm and evaluate metrics

on:
  push:
    branches: [ "main" ]
    paths:
      - 'algorithms/*.yaml'
      - '.github/workflows/2-algorithm-evaluation.yml'
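
# Two jobs: "execute" runs each updated algorithm's container over every EEG
# recording in the S3 bucket; "evaluate" then scores the resulting predictions
# against the reference annotations.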
jobs:
  execute:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
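          # Full git history so changed-files can diff against the previous commit.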
          fetch-depth: 0
      - name: Configure Git Safe Directory
        run: git config --global --add safe.directory /__w/szcore/szcore
      - name: Find updated algorithm
        id: find-algorithm
        uses: tj-actions/changed-files@v45
        with:
          files: algorithms/**.yaml
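      # changed-files exposes `any_changed` and the space-separated list
      # `all_changed_files`, which the steps below iterate over.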
      - name: Execute algorithm container
        if: steps.find-algorithm.outputs.any_changed == 'true'
        run: |
          # Iterate over all EEG recordings (*.edf) under datasets/ in S3
          for recording in $(aws s3api list-objects --bucket "${AWS_BUCKET}" --prefix datasets --output text --query 'Contents[].[Key]' | grep '\.edf$'); do
            # Save the recording (one dataset slice) to a temp file
            aws s3 cp "s3://${AWS_BUCKET}/${recording}" ./data/tmp.edf
            # Run inference on the recording for every updated algorithm
            for algo in ${{ steps.find-algorithm.outputs.all_changed_files }}; do
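              # Pull the `image:` field from the algorithm YAML, then slugify it
              # (ASCII, lowercase, hyphen-separated) for use as the container
              # name and S3 prefix.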
              IMAGE=$(grep '^image: ' "$algo" | sed 's/^image: \+//' | tr -d \'\")
              ALGO_NAME=$(echo "$IMAGE" | iconv -t ascii//TRANSLIT | sed -r 's/[^a-zA-Z0-9]+/-/g' | sed -r 's/^-+|-+$//g' | tr A-Z a-z)
              # Create the prediction directory and pre-create the output file
              # so any container user can write to it.
              mkdir -p ./predictions
              touch ./predictions/tmp.tsv
              chmod 666 ./predictions/tmp.tsv
echo "Running inference for $ALGO_NAME" | |
              docker run \
                -e INPUT=tmp.edf \
                -e OUTPUT=tmp.tsv \
                -v ./predictions:/output:rw \
                -v ./data:/data:ro \
                --network none \
                --name "${ALGO_NAME}" \
                "${IMAGE}"
              # Upload predictions to S3
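              # ${recording#*/} strips the leading "datasets/" key prefix;
              # ${subpath%_eeg.edf}_events.tsv swaps the recording suffix for
              # the events-file suffix used by the reference annotations.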
              subpath=${recording#*/}
              prediction=${subpath%_eeg.edf}_events.tsv
              aws s3 cp \
                ./predictions/tmp.tsv \
                "s3://${AWS_BUCKET}/submissions/${ALGO_NAME}/${prediction}"
              # Remove this algorithm's output and its stopped container
              rm ./predictions/tmp.tsv
              docker rm "${ALGO_NAME}"
            done
            # Remove the recording before fetching the next one
            rm ./data/tmp.edf
          done
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_KEY }}
          AWS_DEFAULT_REGION: ${{ secrets.AWS_REGION }}
          AWS_BUCKET: ${{ secrets.AWS_BUCKET }}
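
  # evaluate runs after execute so the predictions it scores are the ones the
  # updated algorithms just produced.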
  evaluate:
    runs-on: ubuntu-latest
    needs: [execute]
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Configure Git Safe Directory
        run: git config --global --add safe.directory /__w/szcore/szcore
      - name: Find updated algorithm
        id: find-algorithm
        uses: tj-actions/changed-files@v45
        with:
          files: algorithms/**.yaml
      - name: Log in to GitHub Container Registry
        run: |
          echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
      - name: Evaluate algorithm predictions
        if: steps.find-algorithm.outputs.any_changed == 'true'
        run: |
          mkdir -p ./submissions ./results
          # Download the reference annotations (TSV event files) for all datasets
          aws s3 sync "s3://${AWS_BUCKET}/datasets/" ./datasets --exclude "*" --include "*.tsv"
          # Run the evaluation for every updated algorithm
          for algo in ${{ steps.find-algorithm.outputs.all_changed_files }}; do
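            # Re-derive the same image slug as in the execute job so the S3
            # submission and result paths line up.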
            IMAGE=$(grep '^image: ' "$algo" | sed 's/^image: \+//' | tr -d \'\")
            ALGO_NAME=$(echo "$IMAGE" | iconv -t ascii//TRANSLIT | sed -r 's/[^a-zA-Z0-9]+/-/g' | sed -r 's/^-+|-+$//g' | tr A-Z a-z)
            # Download the algorithm's hypothesis annotations
            aws s3 sync "s3://${AWS_BUCKET}/submissions/${ALGO_NAME}/" "./submissions/${ALGO_NAME}"
            for dataset in ./datasets/*; do
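              # ${dataset##*/} expands to the dataset's basename (its directory name)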
              # Pre-create a writable results file for this dataset
              touch ./results/${dataset##*/}.json
              chmod 666 ./results/${dataset##*/}.json
              # Run evaluation
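              # The evaluator image is assumed to read the reference from /ref
              # and the hypothesis from /hyp, writing the metrics file named by
              # $OUT into /results.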
              docker run \
                -e OUT="${dataset##*/}.json" \
                -v ./datasets/${dataset##*/}:/ref:ro \
                -v ./submissions/${ALGO_NAME}/${dataset##*/}:/hyp:ro \
                -v ./results:/results:rw \
                ghcr.io/${{ github.repository }}-evaluator:main
              # Upload the result file
              aws s3 cp ./results/${dataset##*/}.json "s3://${AWS_BUCKET}/results/${ALGO_NAME}/${dataset##*/}.json"
            done
          done
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_KEY }}
          AWS_DEFAULT_REGION: ${{ secrets.AWS_REGION }}
          AWS_BUCKET: ${{ secrets.AWS_BUCKET }}