diff --git a/.github/workflows/docker-publish-algo-checks.yml b/.github/workflows/docker-publish-algo-checks.yml new file mode 100644 index 0000000..1021d75 --- /dev/null +++ b/.github/workflows/docker-publish-algo-checks.yml @@ -0,0 +1,97 @@ +name: Publish algorithm checker container + +# This workflow uses actions that are not certified by GitHub. +# They are provided by a third-party and are governed by +# separate terms of service, privacy policy, and support +# documentation. + +on: + push: + branches: [ "main" ] + paths: "config/algo-checks.Dockerfile" + pull_request: + branches: [ "main" ] + paths: "config/algo-checks.Dockerfile" + +env: + # Use docker.io for Docker Hub if empty + REGISTRY: ghcr.io + # github.repository as / + IMAGE_NAME: ${{ github.repository }}-algo-checks + +jobs: + build: + + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + # This is used to complete the identity challenge + # with sigstore/fulcio when running outside of PRs. + id-token: write + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + # Install the cosign tool except on PR + # https://github.com/sigstore/cosign-installer + - name: Install cosign + if: github.event_name != 'pull_request' + uses: sigstore/cosign-installer@59acb6260d9c0ba8f4a2f9d9b48431a222b68e20 #v3.5.0 + with: + cosign-release: 'v2.2.4' + + # Set up BuildKit Docker container builder to be able to build + # multi-platform images and export cache + # https://github.com/docker/setup-buildx-action + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226 # v3.0.0 + + # Login against a Docker registry except on PR + # https://github.com/docker/login-action + - name: Log into registry ${{ env.REGISTRY }} + if: github.event_name != 'pull_request' + uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ 
secrets.GITHUB_TOKEN }} + + # Extract metadata (tags, labels) for Docker + # https://github.com/docker/metadata-action + - name: Extract Docker metadata + id: meta + uses: docker/metadata-action@96383f45573cb7f253c731d3b3ab81c87ef81934 # v5.0.0 + with: + images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + + # Build and push Docker image with Buildx (don't push on PR) + # https://github.com/docker/build-push-action + - name: Build and push Docker image + id: build-and-push + uses: docker/build-push-action@0565240e2d4ab88bba5387d719585280857ece09 # v5.0.0 + with: + context: ./evaluation + push: ${{ github.event_name != 'pull_request' }} + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + cache-from: type=gha + cache-to: type=gha,mode=max + file: config/algo-checks.Dockerfile + + # Sign the resulting Docker image digest except on PRs. + # This will only write to the public Rekor transparency log when the Docker + # repository is public to avoid leaking data. If you would like to publish + # transparency data even for private images, pass --force to cosign below. + # https://github.com/sigstore/cosign + - name: Sign the published Docker image + if: ${{ github.event_name != 'pull_request' }} + env: + # https://docs.github.com/en/actions/security-guides/security-hardening-for-github-actions#using-an-intermediate-environment-variable + TAGS: ${{ steps.meta.outputs.tags }} + DIGEST: ${{ steps.build-and-push.outputs.digest }} + # This step uses the identity token to provision an ephemeral certificate + # against the sigstore community Fulcio instance. 
+ run: echo "${TAGS}" | xargs -I {} cosign sign --yes {}@${DIGEST} + diff --git a/.github/workflows/docker-publish-evaluator.yml b/.github/workflows/docker-publish-evaluator.yml new file mode 100644 index 0000000..eb70013 --- /dev/null +++ b/.github/workflows/docker-publish-evaluator.yml @@ -0,0 +1,98 @@ +name: Publish evaluator container + +# This workflow uses actions that are not certified by GitHub. +# They are provided by a third-party and are governed by +# separate terms of service, privacy policy, and support +# documentation. + +on: + push: + branches: [ "main" ] + paths: "evaluation/*" + pull_request: + branches: [ "main" ] + paths: "evaluation/*" + +env: + # Use docker.io for Docker Hub if empty + REGISTRY: ghcr.io + # github.repository as / + IMAGE_NAME: ${{ github.repository }}-evaluator + + +jobs: + build: + + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + # This is used to complete the identity challenge + # with sigstore/fulcio when running outside of PRs. 
+ id-token: write + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + # Install the cosign tool except on PR + # https://github.com/sigstore/cosign-installer + - name: Install cosign + if: github.event_name != 'pull_request' + uses: sigstore/cosign-installer@59acb6260d9c0ba8f4a2f9d9b48431a222b68e20 #v3.5.0 + with: + cosign-release: 'v2.2.4' + + # Set up BuildKit Docker container builder to be able to build + # multi-platform images and export cache + # https://github.com/docker/setup-buildx-action + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226 # v3.0.0 + + # Login against a Docker registry except on PR + # https://github.com/docker/login-action + - name: Log into registry ${{ env.REGISTRY }} + if: github.event_name != 'pull_request' + uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + # Extract metadata (tags, labels) for Docker + # https://github.com/docker/metadata-action + - name: Extract Docker metadata + id: meta + uses: docker/metadata-action@96383f45573cb7f253c731d3b3ab81c87ef81934 # v5.0.0 + with: + images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + + # Build and push Docker image with Buildx (don't push on PR) + # https://github.com/docker/build-push-action + - name: Build and push Docker image + id: build-and-push + uses: docker/build-push-action@0565240e2d4ab88bba5387d719585280857ece09 # v5.0.0 + with: + context: ./evaluation + push: ${{ github.event_name != 'pull_request' }} + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + cache-from: type=gha + cache-to: type=gha,mode=max + file: evaluation/Dockerfile + + # Sign the resulting Docker image digest except on PRs. + # This will only write to the public Rekor transparency log when the Docker + # repository is public to avoid leaking data. 
If you would like to publish + # transparency data even for private images, pass --force to cosign below. + # https://github.com/sigstore/cosign + - name: Sign the published Docker image + if: ${{ github.event_name != 'pull_request' }} + env: + # https://docs.github.com/en/actions/security-guides/security-hardening-for-github-actions#using-an-intermediate-environment-variable + TAGS: ${{ steps.meta.outputs.tags }} + DIGEST: ${{ steps.build-and-push.outputs.digest }} + # This step uses the identity token to provision an ephemeral certificate + # against the sigstore community Fulcio instance. + run: echo "${TAGS}" | xargs -I {} cosign sign --yes {}@${DIGEST} + diff --git a/.github/workflows/docker-publish-site-builder.yml b/.github/workflows/docker-publish-site-builder.yml new file mode 100644 index 0000000..7506051 --- /dev/null +++ b/.github/workflows/docker-publish-site-builder.yml @@ -0,0 +1,98 @@ +name: Publish website builder container + +# This workflow uses actions that are not certified by GitHub. +# They are provided by a third-party and are governed by +# separate terms of service, privacy policy, and support +# documentation. + +on: + push: + branches: [ "main" ] + paths: "website/*" + pull_request: + branches: [ "main" ] + paths: "website/*" + +env: + # Use docker.io for Docker Hub if empty + REGISTRY: ghcr.io + # github.repository as / + IMAGE_NAME: ${{ github.repository }}-site-builder + + +jobs: + build: + + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + # This is used to complete the identity challenge + # with sigstore/fulcio when running outside of PRs. 
+ id-token: write + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + # Install the cosign tool except on PR + # https://github.com/sigstore/cosign-installer + - name: Install cosign + if: github.event_name != 'pull_request' + uses: sigstore/cosign-installer@59acb6260d9c0ba8f4a2f9d9b48431a222b68e20 #v3.5.0 + with: + cosign-release: 'v2.2.4' + + # Set up BuildKit Docker container builder to be able to build + # multi-platform images and export cache + # https://github.com/docker/setup-buildx-action + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226 # v3.0.0 + + # Login against a Docker registry except on PR + # https://github.com/docker/login-action + - name: Log into registry ${{ env.REGISTRY }} + if: github.event_name != 'pull_request' + uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + # Extract metadata (tags, labels) for Docker + # https://github.com/docker/metadata-action + - name: Extract Docker metadata + id: meta + uses: docker/metadata-action@96383f45573cb7f253c731d3b3ab81c87ef81934 # v5.0.0 + with: + images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + + # Build and push Docker image with Buildx (don't push on PR) + # https://github.com/docker/build-push-action + - name: Build and push Docker image + id: build-and-push + uses: docker/build-push-action@0565240e2d4ab88bba5387d719585280857ece09 # v5.0.0 + with: + context: ./website + push: ${{ github.event_name != 'pull_request' }} + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + cache-from: type=gha + cache-to: type=gha,mode=max + file: website/Dockerfile + + # Sign the resulting Docker image digest except on PRs. + # This will only write to the public Rekor transparency log when the Docker + # repository is public to avoid leaking data. 
If you would like to publish + # transparency data even for private images, pass --force to cosign below. + # https://github.com/sigstore/cosign + - name: Sign the published Docker image + if: ${{ github.event_name != 'pull_request' }} + env: + # https://docs.github.com/en/actions/security-guides/security-hardening-for-github-actions#using-an-intermediate-environment-variable + TAGS: ${{ steps.meta.outputs.tags }} + DIGEST: ${{ steps.build-and-push.outputs.digest }} + # This step uses the identity token to provision an ephemeral certificate + # against the sigstore community Fulcio instance. + run: echo "${TAGS}" | xargs -I {} cosign sign --yes {}@${DIGEST} + diff --git a/.github/workflows/pr-check.yml b/.github/workflows/pr-check.yml new file mode 100644 index 0000000..b7f4908 --- /dev/null +++ b/.github/workflows/pr-check.yml @@ -0,0 +1,65 @@ +name: PR checks for algorithm submission + +on: + pull_request: + paths: + - 'algorithms/*.yaml' + - '.github/workflows/pr-check.yml' + push: + paths: + - 'algorithms/*.yaml' + - '.github/workflows/pr-check.yml' + +jobs: + + check_yaml: + runs-on: ubuntu-latest + container: + image: ghcr.io/${{ github.repository }}-algo-checks:main + credentials: + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + steps: + + - uses: actions/checkout@v4 + + - name: find algorithm files + run: | + { + echo 'FILELIST<<EOF' + git diff --name-only origin/main HEAD -- 'algorithms/*.yaml' + echo 'EOF' + } >> "$GITHUB_ENV" + + - name: Convert and validate using jsonschema + run: | + for algo in ${FILELIST}; do + yq -o json $algo > algo.json \ + && jsonschema-cli -i algo.json "config/schema.json" + done + rm algo.json + + check_image: + runs-on: ubuntu-latest + steps: + + - uses: actions/checkout@v4 + + - name: find algorithm files + run: | + { + echo 'FILELIST<<EOF' + git diff --name-only origin/main HEAD -- 'algorithms/*.yaml' + echo 'EOF' + } >> "$GITHUB_ENV" + + - name: Pull image + run: | + for algo in ${FILELIST}; do + IMAGE=$(grep '^image: ' $algo | sed 's/^image: \+//' | tr -d \'\") + if ! 
docker pull $IMAGE ; then + echo "Cannot pull image: $IMAGE" + exit 1 + fi + done + diff --git a/.gitignore b/.gitignore index e9f77ab..a1ca18a 100644 --- a/.gitignore +++ b/.gitignore @@ -153,4 +153,4 @@ dmypy.json .pytype/ # Cython debug symbols -cython_debug/ +cython_debug/ \ No newline at end of file diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..6a44f3e --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2024 ORD for the Sciences Hackathon, Team "zinalrothorn" + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/README.md b/README.md index e69de29..8d68eaf 100644 --- a/README.md +++ b/README.md @@ -0,0 +1,43 @@ +# SZcore + +## Background + +This repository hosts an open seizure detection benchmarking platform. +The aim is to provide an open source platform for the community to submit their seizure detection algorithms and provide automatic benchmark results on various high quality datasets. 
+ +## Usage + +Users can submit a containerized algorithm by opening a PR adding a yaml file in the `algorithms` directory. The yaml file should describe their algorithm following our schema (see example (LINK)) and point to a publicly available docker image based on our template (see config/template.Dockerfile). + +Once the PR is merged, this image is used to execute the algorithm on github actions, compute performance metrics and update a static website hosted on github pages. + +## Data flow + +```mermaid +flowchart LR + + subgraph S3 + EDF[edf] + TSVr[TSV ref] + TSVh[TSV s3hyp] + end + + subgraph Github Actions CI + EDF --> ALG{algo} + C[yaml] --> ALG + ALG --> TSVg[TSV gha] + TSVg --> CIu{CI upload} + TSVh --> F{eval} + F --> G[json] + G --> H{site builder} + CIu --> TSVh + TSVr --> F + end + subgraph Github pages + H --> I[html] + end +``` + +## Acknowledgement + +This project was developed as part of the [ORD for the Sciences hackathon](https://sdsc-hackathons.ch/) organized by [EPFL Open Science](https://www.epfl.ch/research/open-science/) and [SDSC](http://datascience.ch/) by the team "zinalrothorn", composed of [@EishaMazhar](https://github.com/EishaMazhar), [@danjjl](https://github.com/danjjl), [@esthertsw](https://github.com/esthertsw) and [@cmdoret](https://github.com/cmdoret) diff --git a/algorithms/gotman.yaml b/algorithms/gotman.yaml new file mode 100644 index 0000000..c529c12 --- /dev/null +++ b/algorithms/gotman.yaml @@ -0,0 +1,44 @@ +--- +# Description of a seizure detection algorithm +title: "Gotman - Automatic recognition of epileptic seizures in the EEG (1982)" +image: "ghcr.io/esl-epfl/gotman_1982:latest" +authors: + - family-names: Dan + given-names: Jonathan + orcid: "https://orcid.org/0000-0002-2338-572X" + - family-names: Samanos + given-names: Clément +version: 0.1 +date-released: "1982-01-01" +abstract: > + During prolonged EEG monitoring of epileptic patients, the continuous EEG + tracing may be replaced by a selective recording of 
ictal and interictal + epileptic activity. We have described previously methods for the EEG + recording of seizures with overt clinical manifestations and for the automatic + detection of spikes. This paper describes a method for the automatic detection + of seizures in the EEG, independently of the presence of clinical signs; it is + based on the decomposition of the EEG into elementary waves and the detection + of paroxysmal bursts of rhythmic activity having a frequency between 3 and 20 + c/sec. Simple procedures are used to measure the amplitude of waves relative + to the background, their duration and rhythmicity. The evaluation of the + method on 24 surface recordings (average duration 12.4 h) and 44 recordings + from intracerebral electrodes (average duration 18.7 h) indicated that it was + capable of recognizing numerous types of seizures. False detections due to + non-epileptiform rhythmic EEG bursts and to artefacts were quite frequent but + were not a serious problem because they did not unduly lengthen the EEG + tracing and they could be easily identified by the electroencephalographer. + The program can perform on-line and simultaneously the automatic recognition + of spikes and of seizures in 16 channels." +license: GPL-3.0 +repository: https://github.com/esl-epfl/gotman_1982 + +# List all datasets that were used to train this algorithm +Dataset: + - title: "Gotman 1982" + license: "https://doi.org/10.1016/0013-4694(82)90038-4" + identifiers: + - description: > + Private dataset of 24 scalp-EEG recordings with an average duration + of 12.4 h and 44 intracerebral recordings with an average duration of 18.7h. 
+ type: doi + value: "10.5281/zenodo.123456" diff --git a/config/algo-checks.Dockerfile b/config/algo-checks.Dockerfile new file mode 100644 index 0000000..1541b3f --- /dev/null +++ b/config/algo-checks.Dockerfile @@ -0,0 +1,15 @@ +# Dockerfile +# Provides dependencies to verify algorithm definition files +# and validate them against schema + +FROM alpine:3.20 + +# install dependencies +RUN apk add --no-cache \ + curl \ + cargo \ + yq + +ENV PATH="${PATH}:/root/.cargo/bin" + +RUN cargo install jsonschema-cli diff --git a/config/schema.json b/config/schema.json new file mode 100644 index 0000000..7a2fd71 --- /dev/null +++ b/config/schema.json @@ -0,0 +1,839 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema", + "additionalProperties": false, + "definitions": { + "dataset": { + "description": "A dataset used for training the algorithm.", + "properties": { + "title": { + "description": "The name of the dataset.", + "minLength": 1, + "type": "string" + }, + "license": { + "$ref": "#/definitions/license" + }, + "identifiers": { + "description": "The identifiers of dataset.", + "items": { + "$ref": "#/definitions/identifier" + }, + "minItems": 1, + "type": "array", + "uniqueItems": true + } + }, + "required": [ + "title" + ], + "type": "object" + }, + "date": { + "$comment": "Note to tool implementers: it is necessary to cast YAML 'date' objects to string objects when validating against this schema.", + "examples": [ + "1900-01-01", + "2020-12-31" + ], + "format": "date", + "pattern": "^[0-9]{4}-(0[1-9]|1[012])-(0[1-9]|[12][0-9]|3[01])$", + "type": "string" + }, + "doi": { + "description": "The DOI of the work (i.e., 10.5281/zenodo.1003150, not the resolver URL http://doi.org/10.5281/zenodo.1003150).", + "examples": [ + "10.5281/zenodo.1003150" + ], + "pattern": "^10\\.\\d{4,9}(\\.\\d+)?/[A-Za-z0-9:/_;\\-\\.\\(\\)\\[\\]\\\\]+$", + "type": "string" + }, + "email": { + "description": "An email address.", + "pattern": "^[\\S]+@[\\S]+\\.[\\S]{2,}$", + "type": 
"string" + }, + "entity": { + "additionalProperties": false, + "description": "An entity, i.e., an institution, team, research group, company, conference, etc., as opposed to a single natural person.", + "properties": { + "date-end": { + "$ref": "#/definitions/date", + "description": "The entity's ending date, e.g., when the entity is a conference." + }, + "date-start": { + "$ref": "#/definitions/date", + "description": "The entity's starting date, e.g., when the entity is a conference." + }, + "email": { + "$ref": "#/definitions/email", + "description": "The entity's email address." + }, + "location": { + "description": "The entity's location, e.g., when the entity is a conference.", + "minLength": 1, + "type": "string" + }, + "name": { + "description": "The entity's name.", + "minLength": 1, + "type": "string" + }, + "orcid": { + "$ref": "#/definitions/orcid", + "description": "The entity's orcid." + }, + "website": { + "$ref": "#/definitions/url", + "description": "The entity's website." 
+ } + }, + "required": [ + "name" + ], + "type": "object" + }, + "identifier": { + "anyOf": [ + { + "additionalProperties": false, + "properties": { + "description": { + "$ref": "#/definitions/identifier-description" + }, + "type": { + "enum": [ + "doi" + ], + "type": "string" + }, + "value": { + "$ref": "#/definitions/doi" + } + }, + "required": [ + "type", + "value" + ], + "type": "object" + }, + { + "additionalProperties": false, + "properties": { + "description": { + "$ref": "#/definitions/identifier-description" + }, + "type": { + "enum": [ + "url" + ], + "type": "string" + }, + "value": { + "$ref": "#/definitions/url" + } + }, + "required": [ + "type", + "value" + ], + "type": "object" + }, + { + "additionalProperties": false, + "properties": { + "description": { + "$ref": "#/definitions/identifier-description" + }, + "type": { + "enum": [ + "other" + ], + "type": "string" + }, + "value": { + "minLength": 1, + "type": "string" + } + }, + "required": [ + "type", + "value" + ], + "type": "object" + } + ], + "description": "An identifier for a work." + }, + "identifier-description": { + "description": "A description for a specific identifier value.", + "examples": [ + "The version DOI for this version, which has a relation childOf with the concept DOI specified in the doi field in the root of this file.", + "The identifier provided by Archival Repository, which points to this version of the software." 
+ ], + "minLength": 1, + "type": "string" + }, + "image": { + "pattern": "^.+/.+:.+$", + "type": "string" + }, + "license": { + "description": "An SPDX license identifier or a custom license URL.", + "oneOf": [ + { + "$ref": "#/definitions/license-enum", + "examples": [ + "Apache-2.0", + "MIT" + ] + }, + { + "$comment": "When there are multiple licenses, it is assumed their relationship is OR, not AND", + "examples": [ + [ + "Apache-2.0", + "MIT" + ], + [ + "GPL-3.0", + "GPL-3.0-or-later" + ] + ], + "items": { + "$ref": "#/definitions/license-enum" + }, + "minItems": 1, + "type": "array", + "uniqueItems": true + }, + { + "description": "The URL of the license text under which the software or dataset is licensed (only for non-standard licenses not included in the SPDX License List).", + "$ref": "#/definitions/url" + } + ] + }, + "license-enum": { + "$comment": "Use https://github.com/citation-file-format/get-spdx-licenses to update this enum in the future", + "description": "SPDX license list; releaseDate=2021-05-14; source=https://raw.githubusercontent.com/spdx/license-list-data/master/json/licenses.json", + "enum": [ + "0BSD", + "AAL", + "Abstyles", + "Adobe-2006", + "Adobe-Glyph", + "ADSL", + "AFL-1.1", + "AFL-1.2", + "AFL-2.0", + "AFL-2.1", + "AFL-3.0", + "Afmparse", + "AGPL-1.0", + "AGPL-1.0-only", + "AGPL-1.0-or-later", + "AGPL-3.0", + "AGPL-3.0-only", + "AGPL-3.0-or-later", + "Aladdin", + "AMDPLPA", + "AML", + "AMPAS", + "ANTLR-PD", + "ANTLR-PD-fallback", + "Apache-1.0", + "Apache-1.1", + "Apache-2.0", + "APAFML", + "APL-1.0", + "APSL-1.0", + "APSL-1.1", + "APSL-1.2", + "APSL-2.0", + "Artistic-1.0", + "Artistic-1.0-cl8", + "Artistic-1.0-Perl", + "Artistic-2.0", + "Bahyph", + "Barr", + "Beerware", + "BitTorrent-1.0", + "BitTorrent-1.1", + "blessing", + "BlueOak-1.0.0", + "Borceux", + "BSD-1-Clause", + "BSD-2-Clause", + "BSD-2-Clause-FreeBSD", + "BSD-2-Clause-NetBSD", + "BSD-2-Clause-Patent", + "BSD-2-Clause-Views", + "BSD-3-Clause", + 
"BSD-3-Clause-Attribution", + "BSD-3-Clause-Clear", + "BSD-3-Clause-LBNL", + "BSD-3-Clause-Modification", + "BSD-3-Clause-No-Nuclear-License", + "BSD-3-Clause-No-Nuclear-License-2014", + "BSD-3-Clause-No-Nuclear-Warranty", + "BSD-3-Clause-Open-MPI", + "BSD-4-Clause", + "BSD-4-Clause-Shortened", + "BSD-4-Clause-UC", + "BSD-Protection", + "BSD-Source-Code", + "BSL-1.0", + "BUSL-1.1", + "bzip2-1.0.5", + "bzip2-1.0.6", + "C-UDA-1.0", + "CAL-1.0", + "CAL-1.0-Combined-Work-Exception", + "Caldera", + "CATOSL-1.1", + "CC-BY-1.0", + "CC-BY-2.0", + "CC-BY-2.5", + "CC-BY-3.0", + "CC-BY-3.0-AT", + "CC-BY-3.0-US", + "CC-BY-4.0", + "CC-BY-NC-1.0", + "CC-BY-NC-2.0", + "CC-BY-NC-2.5", + "CC-BY-NC-3.0", + "CC-BY-NC-4.0", + "CC-BY-NC-ND-1.0", + "CC-BY-NC-ND-2.0", + "CC-BY-NC-ND-2.5", + "CC-BY-NC-ND-3.0", + "CC-BY-NC-ND-3.0-IGO", + "CC-BY-NC-ND-4.0", + "CC-BY-NC-SA-1.0", + "CC-BY-NC-SA-2.0", + "CC-BY-NC-SA-2.5", + "CC-BY-NC-SA-3.0", + "CC-BY-NC-SA-4.0", + "CC-BY-ND-1.0", + "CC-BY-ND-2.0", + "CC-BY-ND-2.5", + "CC-BY-ND-3.0", + "CC-BY-ND-4.0", + "CC-BY-SA-1.0", + "CC-BY-SA-2.0", + "CC-BY-SA-2.0-UK", + "CC-BY-SA-2.1-JP", + "CC-BY-SA-2.5", + "CC-BY-SA-3.0", + "CC-BY-SA-3.0-AT", + "CC-BY-SA-4.0", + "CC-PDDC", + "CC0-1.0", + "CDDL-1.0", + "CDDL-1.1", + "CDL-1.0", + "CDLA-Permissive-1.0", + "CDLA-Sharing-1.0", + "CECILL-1.0", + "CECILL-1.1", + "CECILL-2.0", + "CECILL-2.1", + "CECILL-B", + "CECILL-C", + "CERN-OHL-1.1", + "CERN-OHL-1.2", + "CERN-OHL-P-2.0", + "CERN-OHL-S-2.0", + "CERN-OHL-W-2.0", + "ClArtistic", + "CNRI-Jython", + "CNRI-Python", + "CNRI-Python-GPL-Compatible", + "Condor-1.1", + "copyleft-next-0.3.0", + "copyleft-next-0.3.1", + "CPAL-1.0", + "CPL-1.0", + "CPOL-1.02", + "Crossword", + "CrystalStacker", + "CUA-OPL-1.0", + "Cube", + "curl", + "D-FSL-1.0", + "diffmark", + "DOC", + "Dotseqn", + "DRL-1.0", + "DSDP", + "dvipdfm", + "ECL-1.0", + "ECL-2.0", + "eCos-2.0", + "EFL-1.0", + "EFL-2.0", + "eGenix", + "Entessa", + "EPICS", + "EPL-1.0", + "EPL-2.0", + "ErlPL-1.1", + 
"etalab-2.0", + "EUDatagrid", + "EUPL-1.0", + "EUPL-1.1", + "EUPL-1.2", + "Eurosym", + "Fair", + "Frameworx-1.0", + "FreeBSD-DOC", + "FreeImage", + "FSFAP", + "FSFUL", + "FSFULLR", + "FTL", + "GD", + "GFDL-1.1", + "GFDL-1.1-invariants-only", + "GFDL-1.1-invariants-or-later", + "GFDL-1.1-no-invariants-only", + "GFDL-1.1-no-invariants-or-later", + "GFDL-1.1-only", + "GFDL-1.1-or-later", + "GFDL-1.2", + "GFDL-1.2-invariants-only", + "GFDL-1.2-invariants-or-later", + "GFDL-1.2-no-invariants-only", + "GFDL-1.2-no-invariants-or-later", + "GFDL-1.2-only", + "GFDL-1.2-or-later", + "GFDL-1.3", + "GFDL-1.3-invariants-only", + "GFDL-1.3-invariants-or-later", + "GFDL-1.3-no-invariants-only", + "GFDL-1.3-no-invariants-or-later", + "GFDL-1.3-only", + "GFDL-1.3-or-later", + "Giftware", + "GL2PS", + "Glide", + "Glulxe", + "GLWTPL", + "gnuplot", + "GPL-1.0", + "GPL-1.0-only", + "GPL-1.0-or-later", + "GPL-1.0+", + "GPL-2.0", + "GPL-2.0-only", + "GPL-2.0-or-later", + "GPL-2.0-with-autoconf-exception", + "GPL-2.0-with-bison-exception", + "GPL-2.0-with-classpath-exception", + "GPL-2.0-with-font-exception", + "GPL-2.0-with-GCC-exception", + "GPL-2.0+", + "GPL-3.0", + "GPL-3.0-only", + "GPL-3.0-or-later", + "GPL-3.0-with-autoconf-exception", + "GPL-3.0-with-GCC-exception", + "GPL-3.0+", + "gSOAP-1.3b", + "HaskellReport", + "Hippocratic-2.1", + "HPND", + "HPND-sell-variant", + "HTMLTIDY", + "IBM-pibs", + "ICU", + "IJG", + "ImageMagick", + "iMatix", + "Imlib2", + "Info-ZIP", + "Intel", + "Intel-ACPI", + "Interbase-1.0", + "IPA", + "IPL-1.0", + "ISC", + "JasPer-2.0", + "JPNIC", + "JSON", + "LAL-1.2", + "LAL-1.3", + "Latex2e", + "Leptonica", + "LGPL-2.0", + "LGPL-2.0-only", + "LGPL-2.0-or-later", + "LGPL-2.0+", + "LGPL-2.1", + "LGPL-2.1-only", + "LGPL-2.1-or-later", + "LGPL-2.1+", + "LGPL-3.0", + "LGPL-3.0-only", + "LGPL-3.0-or-later", + "LGPL-3.0+", + "LGPLLR", + "Libpng", + "libpng-2.0", + "libselinux-1.0", + "libtiff", + "LiLiQ-P-1.1", + "LiLiQ-R-1.1", + "LiLiQ-Rplus-1.1", + 
"Linux-OpenIB", + "LPL-1.0", + "LPL-1.02", + "LPPL-1.0", + "LPPL-1.1", + "LPPL-1.2", + "LPPL-1.3a", + "LPPL-1.3c", + "MakeIndex", + "MirOS", + "MIT", + "MIT-0", + "MIT-advertising", + "MIT-CMU", + "MIT-enna", + "MIT-feh", + "MIT-Modern-Variant", + "MIT-open-group", + "MITNFA", + "Motosoto", + "mpich2", + "MPL-1.0", + "MPL-1.1", + "MPL-2.0", + "MPL-2.0-no-copyleft-exception", + "MS-PL", + "MS-RL", + "MTLL", + "MulanPSL-1.0", + "MulanPSL-2.0", + "Multics", + "Mup", + "NAIST-2003", + "NASA-1.3", + "Naumen", + "NBPL-1.0", + "NCGL-UK-2.0", + "NCSA", + "Net-SNMP", + "NetCDF", + "Newsletr", + "NGPL", + "NIST-PD", + "NIST-PD-fallback", + "NLOD-1.0", + "NLPL", + "Nokia", + "NOSL", + "Noweb", + "NPL-1.0", + "NPL-1.1", + "NPOSL-3.0", + "NRL", + "NTP", + "NTP-0", + "Nunit", + "O-UDA-1.0", + "OCCT-PL", + "OCLC-2.0", + "ODbL-1.0", + "ODC-By-1.0", + "OFL-1.0", + "OFL-1.0-no-RFN", + "OFL-1.0-RFN", + "OFL-1.1", + "OFL-1.1-no-RFN", + "OFL-1.1-RFN", + "OGC-1.0", + "OGDL-Taiwan-1.0", + "OGL-Canada-2.0", + "OGL-UK-1.0", + "OGL-UK-2.0", + "OGL-UK-3.0", + "OGTSL", + "OLDAP-1.1", + "OLDAP-1.2", + "OLDAP-1.3", + "OLDAP-1.4", + "OLDAP-2.0", + "OLDAP-2.0.1", + "OLDAP-2.1", + "OLDAP-2.2", + "OLDAP-2.2.1", + "OLDAP-2.2.2", + "OLDAP-2.3", + "OLDAP-2.4", + "OLDAP-2.5", + "OLDAP-2.6", + "OLDAP-2.7", + "OLDAP-2.8", + "OML", + "OpenSSL", + "OPL-1.0", + "OSET-PL-2.1", + "OSL-1.0", + "OSL-1.1", + "OSL-2.0", + "OSL-2.1", + "OSL-3.0", + "Parity-6.0.0", + "Parity-7.0.0", + "PDDL-1.0", + "PHP-3.0", + "PHP-3.01", + "Plexus", + "PolyForm-Noncommercial-1.0.0", + "PolyForm-Small-Business-1.0.0", + "PostgreSQL", + "PSF-2.0", + "psfrag", + "psutils", + "Python-2.0", + "Qhull", + "QPL-1.0", + "Rdisc", + "RHeCos-1.1", + "RPL-1.1", + "RPL-1.5", + "RPSL-1.0", + "RSA-MD", + "RSCPL", + "Ruby", + "SAX-PD", + "Saxpath", + "SCEA", + "Sendmail", + "Sendmail-8.23", + "SGI-B-1.0", + "SGI-B-1.1", + "SGI-B-2.0", + "SHL-0.5", + "SHL-0.51", + "SimPL-2.0", + "SISSL", + "SISSL-1.2", + "Sleepycat", + "SMLNJ", + "SMPPL", + 
"SNIA", + "Spencer-86", + "Spencer-94", + "Spencer-99", + "SPL-1.0", + "SSH-OpenSSH", + "SSH-short", + "SSPL-1.0", + "StandardML-NJ", + "SugarCRM-1.1.3", + "SWL", + "TAPR-OHL-1.0", + "TCL", + "TCP-wrappers", + "TMate", + "TORQUE-1.1", + "TOSL", + "TU-Berlin-1.0", + "TU-Berlin-2.0", + "UCL-1.0", + "Unicode-DFS-2015", + "Unicode-DFS-2016", + "Unicode-TOU", + "Unlicense", + "UPL-1.0", + "Vim", + "VOSTROM", + "VSL-1.0", + "W3C", + "W3C-19980720", + "W3C-20150513", + "Watcom-1.0", + "Wsuipa", + "WTFPL", + "wxWindows", + "X11", + "Xerox", + "XFree86-1.1", + "xinetd", + "Xnet", + "xpp", + "XSkat", + "YPL-1.0", + "YPL-1.1", + "Zed", + "Zend-2.0", + "Zimbra-1.3", + "Zimbra-1.4", + "Zlib", + "zlib-acknowledgement", + "ZPL-1.1", + "ZPL-2.0", + "ZPL-2.1" + ], + "type": "string" + }, + "orcid": { + "description": "Identifier for an author, see https://orcid.org.", + "format": "uri", + "pattern": "https://orcid\\.org/[0-9]{4}-[0-9]{4}-[0-9]{4}-[0-9]{3}[0-9X]{1}", + "type": "string" + }, + "person": { + "additionalProperties": false, + "description": "A person.", + "properties": { + "affiliation": { + "description": "The person's affilitation.", + "minLength": 1, + "type": "string" + }, + "email": { + "$ref": "#/definitions/email", + "description": "The person's email address." + }, + "family-names": { + "description": "The person's family names.", + "minLength": 1, + "type": "string" + }, + "given-names": { + "description": "The person's given names.", + "minLength": 1, + "type": "string" + }, + "name-particle": { + "description": "The person's name particle, e.g., a nobiliary particle or a preposition meaning 'of' or 'from' (for example 'von' in 'Alexander von Humboldt').", + "examples": [ + "von" + ], + "minLength": 1, + "type": "string" + }, + "name-suffix": { + "description": "The person's name-suffix, e.g. 'Jr.' for Sammy Davis Jr. 
or 'III' for Frank Edwin Wright III.", + "examples": [ + "Jr.", + "III" + ], + "minLength": 1, + "type": "string" + }, + "orcid": { + "$ref": "#/definitions/orcid", + "description": "The person's ORCID." + }, + "website": { + "$ref": "#/definitions/url", + "description": "The person's website." + } + }, + "type": "object" + }, + "url": { + "format": "uri", + "pattern": "^(https|http|ftp|sftp)://.+", + "type": "string" + }, + "version": { + "anyOf": [ + { + "minLength": 1, + "type": "string" + }, + { + "type": "number" + } + ] + } + }, + "properties": { + "abstract": { + "description": "A description of the software or dataset.", + "minLength": 1, + "type": "string" + }, + "authors": { + "description": "The author(s) of the software or dataset.", + "items": { + "anyOf": [ + { + "$ref": "#/definitions/person" + }, + { + "$ref": "#/definitions/entity" + } + ] + }, + "minItems": 1, + "type": "array", + "uniqueItems": true + }, + "Dataset": { + "description": "A dataset used for training the algorithm.", + "items": { + "$ref": "#/definitions/dataset" + }, + "minItems": 1, + "type": "array", + "uniqueItems": true + }, + "date-released": { + "$ref": "#/definitions/date", + "description": "The date the work has been released." + }, + "doi": { + "$ref": "#/definitions/doi" + }, + "image": { + "$ref": "#/definitions/image", + "description": "The URL to a docker image on an image registry." + }, + "license": { + "$ref": "#/definitions/license" + }, + "message": { + "default": "If you use this software, please cite it using the metadata from this file.", + "description": "A message to the human reader of the file to let them know what to do with the citation metadata.", + "examples": [ + "If you use this software, please cite it using the metadata from this file.", + "Please cite this software using these metadata.", + "Please cite this software using the metadata from 'preferred-citation'." 
+ ], + "minLength": 1, + "type": "string" + }, + "repository": { + "$ref": "#/definitions/url", + "description": "The URL of the software or dataset in a repository (when the repository is neither a source code repository nor a build artifact repository).", + "examples": [ + "https://edoc.hu-berlin.de/handle/18452/23016", + "https://ascl.net/2105.013" + ] + }, + "title": { + "description": "The name of the software or dataset.", + "minLength": 1, + "type": "string" + }, + "url": { + "$ref": "#/definitions/url", + "description": "The URL of a landing page/website for the software or dataset." + }, + "version": { + "$ref": "#/definitions/version", + "description": "The version of the software or dataset." + } + }, + "required": [ + "authors", + "image", + "title" + ], + "title": "Citation File Format", + "type": "object" +} diff --git a/evaluation/Dockerfile b/evaluation/Dockerfile index 2fe8d42..a040586 100644 --- a/evaluation/Dockerfile +++ b/evaluation/Dockerfile @@ -14,4 +14,4 @@ RUN pip install --no-cache-dir -r requirements.txt COPY . . # Command to run the evaluator.py script -CMD ["python", "__main__.py"] \ No newline at end of file +CMD ["python", "__main__.py"] diff --git a/website/.gitkeep b/website/.gitkeep deleted file mode 100644 index e69de29..0000000 diff --git a/website/Dockerfile b/website/Dockerfile new file mode 100644 index 0000000..55f4c6e --- /dev/null +++ b/website/Dockerfile @@ -0,0 +1,17 @@ +# Use an official Python runtime as a parent image +FROM python:3.12-slim + +# Set the working directory in the container +WORKDIR . + +# Copy the requirements.txt file into the container +COPY requirements.txt . + +# Install any needed packages specified in requirements.txt +RUN pip install --no-cache-dir -r requirements.txt + +# Copy the rest of the application code into the container +COPY . . 
+ +# Command to run the evaluator.py script +CMD ["python", "script.py"] diff --git a/website/data/sampleEval.json b/website/data/sampleEval.json new file mode 100644 index 0000000..3f2e99d --- /dev/null +++ b/website/data/sampleEval.json @@ -0,0 +1,70 @@ +[{ + "algo_id": "ANN", + "datasets": [ + { + "dataset": "chbmit", + "sample_results": { + "sensitivity": 1, + "precision": 1, + "f1": 1, + "fpRate": 0 + }, + "event_results": { + "sensitivity": 1, + "precision": 1, + "f1": 1, + "fpRate": 0 + } + }, + { + "dataset": "group2", + "sample_results": { + "sensitivity": 1, + "precision": 1, + "f1": 1, + "fpRate": 0 + }, + "event_results": { + "sensitivity": 1, + "precision": 1, + "f1": 1, + "fpRate": 0 + } + } + ] +}, +{ + "algo_id": "ANN", + "datasets": [ + { + "dataset": "chbmit", + "sample_results": { + "sensitivity": 1, + "precision": 1, + "f1": 1, + "fpRate": 0 + }, + "event_results": { + "sensitivity": 1, + "precision": 1, + "f1": 1, + "fpRate": 0 + } + }, + { + "dataset": "group2", + "sample_results": { + "sensitivity": 1, + "precision": 1, + "f1": 1, + "fpRate": 0 + }, + "event_results": { + "sensitivity": 1, + "precision": 1, + "f1": 1, + "fpRate": 0 + } + } + ] +}] \ No newline at end of file diff --git a/website/index.html b/website/index.html new file mode 100644 index 0000000..a805108 --- /dev/null +++ b/website/index.html @@ -0,0 +1,78 @@ + + + + + + Epilepsy Benchmarks + + + + + +

Epilepsy Benchmarks

+

Standardising benchmarking procedures across epilepsy models. Datasets, performance scores

+
+
+ +
+ + + + + + + +
+ +
+ Performance Metrics +
+ + +
+
+ + +
+
+ + +
+
+ + +
+
+ +
+ Scoring Type + +
+ + +
+ +
+ + +
+
+ +
+
+ +
+
+
+
+ + + + + \ No newline at end of file diff --git a/website/layout.py b/website/layout.py new file mode 100644 index 0000000..a0357c2 --- /dev/null +++ b/website/layout.py @@ -0,0 +1,82 @@ +def layout_with_figures(graph, datasets): + dropdown = """ + + + + + Epilepsy Benchmarks + + + + + +

Epilepsy Benchmarks

+

Standardising benchmarking procedures across epilepsy models. Datasets, performance scores

+
+
+ +
+ """ + for d in datasets: + val = d.lower() + title = d.title() + temp = f""" + + + """ + dropdown += temp + dropdown += """ +
+ +
+ Performance Metrics +
+ + +
+
+ + +
+
+ + +
+
+ + +
+
+ +
+ Scoring Type + +
+ + +
+ +
+ + +
+
+ +
+
+ + {graph} +
+
+ + + + + """ + return dropdown.format(graph=graph) \ No newline at end of file diff --git a/website/requirements.txt b/website/requirements.txt new file mode 100644 index 0000000..07caabd --- /dev/null +++ b/website/requirements.txt @@ -0,0 +1,12 @@ +numpy==2.1.2 +packaging==24.1 +pandas==2.2.3 +plotly==5.24.1 +python-dateutil==2.9.0.post0 +pytz==2024.2 +PyYAML==6.0.2 +setuptools==75.1.0 +six==1.16.0 +tenacity==9.0.0 +tzdata==2024.2 +wheel==0.44.0 diff --git a/website/script.py b/website/script.py new file mode 100644 index 0000000..8976d0c --- /dev/null +++ b/website/script.py @@ -0,0 +1,45 @@ +import plotly.graph_objects as go +import pandas as pd +from layout import layout_with_figures +import json, re + +path_to_eval = './data/sampleEval.json' +file = open(path_to_eval) + +metrics = ["Sensitivity", "Precision", "F1 Score", "fpRate"] # hardcoded + +eval = json.load(file) +data_for_df = [] +datasets = set() +for entry in eval: + algo_id = entry["algo_id"] + for dataset in entry["datasets"]: + dataset_name = dataset["dataset"] + datasets.add(dataset_name) + sample_results = dataset["sample_results"] + algo_html = "" + re.sub(r'[^a-zA-Z0-9]', '', algo_id) + "" + row = {"algo_id": algo_html, "dataset": dataset_name, **sample_results} + data_for_df.append(row) + +df = pd.DataFrame(data_for_df) +headers = ["Algorithm", "Dataset"] + list(df.columns[2:]) +table_data = [df[col].tolist() for col in df.columns] +# algorithms = [item["algo_id"] for item in eval] + +# Table plot +fig = go.Figure(data=[go.Table(header=dict(values=headers), + cells=dict(values=table_data))]) + + +# Generate the Plotly figure HTML as a string +plotly_html = fig.to_html(full_html=False, include_plotlyjs='cdn') + + +# Combine the custom HTML and the Plotly figure +complete_html = layout_with_figures(plotly_html, datasets) + +# Save everything into a single HTML file +with open("index.html", "w") as file: + file.write(complete_html) + +# Create second HTML file for algo details (from yaml)