Skip to content

Commit

Permalink
ci: execution logic
Browse files Browse the repository at this point in the history
  • Loading branch information
cmdoret committed Oct 25, 2024
1 parent c93110b commit beb2e84
Showing 1 changed file with 12 additions and 15 deletions.
27 changes: 12 additions & 15 deletions .github/workflows/algorithm-evaluation.yml
Original file line number Diff line number Diff line change
Expand Up @@ -24,26 +24,31 @@ jobs:
# For every EDF recording slice stored under datasets/ in the bucket,
# run each changed algorithm container on it and upload the predictions.
for recording in $(aws s3api list-objects --bucket "${AWS_BUCKET}" --prefix datasets --output text --query 'Contents[].[Key]' | grep '.*edf'); do
  # save each recording (dataset slice) to a temp file
  aws s3 cp "s3://${AWS_BUCKET}/${recording}" ./data/tmp.edf
  # Run inference on recording for every updated algorithm
  for algo in ${{ steps.find-algorithm.outputs.all_changed_files }}; do
    IMAGE=$(grep '^image: ' "$algo" | sed 's/^image: \+//' | tr -d "'\"")
    # Slugify the image name: transliterate to ASCII, collapse runs of
    # non-alphanumerics to '-', strip leading/trailing '-', lowercase.
    # (Fixed: the original command substitution was missing its closing paren.)
    ALGO_NAME=$(echo "$IMAGE" | iconv -t ascii//TRANSLIT | sed -r 's/[^a-zA-Z0-9]+/-/g' | sed -r 's/^-+|-+$//g' | tr 'A-Z' 'a-z')
    # Fixed: '-e' lines need trailing '\' continuations, and docker bind
    # mounts require absolute host paths, so anchor them with $(pwd).
    docker run \
      -e INPUT=tmp.edf \
      -e OUTPUT=tmp.tsv \
      -v "$(pwd)/predictions:/output:rw" \
      -v "$(pwd)/data:/data:ro" \
      "${IMAGE}"
    # Upload predictions to S3, mirroring the dataset layout:
    # datasets/<sub>/<name>_eeg.edf -> submissions/<algo>/<sub>/<name>_events.tsv
    subpath=${recording#*/}
    prediction=${subpath%_eeg.edf}_events.tsv
    aws s3 cp \
      ./predictions/tmp.tsv \
      "s3://${AWS_BUCKET}/submissions/${ALGO_NAME}/${prediction}"
    # Clean up temp input/output before the next recording/algorithm pair
    rm ./data/tmp.edf ./predictions/tmp.tsv
  done
done
env:
Expand All @@ -64,11 +69,3 @@ jobs:

# Scores the predictions produced/uploaded by the inference step above.
# NOTE(review): assumes __main__.py reads the submissions from S3 — confirm.
- name: Evaluate algorithm predictions
run: python __main__.py



# NOTE(review): this job's only step has a `name` but no `run:` or `uses:`
# key — GitHub Actions will reject the workflow as invalid. The intended
# upload command is not visible here; add it (e.g. an `aws s3 cp` run step)
# or remove the job.
upload:
runs-on: ubuntu-latest
steps:
- name: upload to s3

0 comments on commit beb2e84

Please sign in to comment.