diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 000000000..f45fee4b5 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,16 @@ +**/*.egg-info +**/__pycache__ +**/.*_cache +**/*.pyc +**/*.tar.gz +*.code-workspace +**/.*.ipynb +**/.ipynb* +.venv/ +build/ +export/ +.do-not-setup-on-localhost + +# Sphinx documentation +docs/html +screenshots/ diff --git a/.github/actions/create-dev-env/action.yml b/.github/actions/create-dev-env/action.yml deleted file mode 100644 index b8b3d3d40..000000000 --- a/.github/actions/create-dev-env/action.yml +++ /dev/null @@ -1,27 +0,0 @@ ---- -name: Build environment -description: Create build environment - -inputs: - architecture: - description: architecture to be run on - required: true - type: string - -runs: - using: composite - steps: - # actions/setup-python doesn't support Linux arm64 runners - # See: https://github.com/actions/setup-python/issues/108 - # python3 is manually preinstalled in the arm64 VM self-hosted runner - - name: Set Up Python 🐍 - if: ${{ inputs.architecture == 'amd64' }} - uses: actions/setup-python@v4 - with: - python-version: 3.x - - - name: Install Dev Dependencies πŸ“¦ - run: | - pip install --upgrade pip - pip install --upgrade -r docker/requirements-dev.txt - shell: bash diff --git a/.github/actions/integration-tests/action.yml b/.github/actions/integration-tests/action.yml deleted file mode 100644 index 6fe7ed55b..000000000 --- a/.github/actions/integration-tests/action.yml +++ /dev/null @@ -1,59 +0,0 @@ ---- -name: Downstream tests -description: Integration downstream tests the bulid image - -inputs: - architecture: - description: Image architecture - required: true - type: string - runsOn: - description: GitHub Actions Runner image - required: true - type: string - -runs: - using: composite - - steps: - - name: Set jupyter token env - run: echo "JUPYTER_TOKEN=$(openssl rand -hex 32)" >> $GITHUB_ENV - shell: bash - - - name: Run pytest to test image is working - run: TAG=newly-baked pytest tests_integration/test_image.py - shell: bash - - # The Firefox and its engine geckodrive need do be installed manually to run - # selenium tests. This only works on amd64 architecture and it should be enough only test this - # on one architecture. 
- - name: Install Firefox - if : ${{ inputs.architecture == 'amd64' }} - uses: browser-actions/setup-firefox@latest - with: - firefox-version: '96.0' - - - name: Install geckodriver - if : ${{ inputs.architecture == 'amd64' }} - run: | - wget -c https://github.com/mozilla/geckodriver/releases/download/v0.30.0/geckodriver-v0.30.0-linux64.tar.gz - tar xf geckodriver-v0.30.0-linux64.tar.gz -C /usr/local/bin - shell: bash - - - name: Run pytest for firefox - if : ${{ inputs.architecture == 'amd64' }} - run: TAG=newly-baked pytest --driver Firefox tests_integration/test_app.py - shell: bash - - - name: Run pytest for Chrome - if : ${{ inputs.architecture == 'amd64' }} - run: TAG=newly-baked pytest --driver Chrome tests_integration/test_app.py - shell: bash - - - name: Upload screenshots as artifacts - if : ${{ inputs.architecture == 'amd64' }} - uses: actions/upload-artifact@v3 - with: - name: Screenshots-CI-${{ inputs.architecture }} - path: screenshots/ - if-no-files-found: error diff --git a/.github/actions/load-image/action.yml b/.github/actions/load-image/action.yml deleted file mode 100644 index bbdedb8a3..000000000 --- a/.github/actions/load-image/action.yml +++ /dev/null @@ -1,31 +0,0 @@ ---- -name: Load Docker image -description: Download image tar and load it to docker - -inputs: - image: - description: Image name - required: true - type: string - architecture: - description: Image architecture - required: true - type: string - -runs: - using: composite - steps: - - name: Download built image πŸ“₯ - uses: actions/download-artifact@v3 - with: - name: ${{ inputs.image }}-${{ inputs.architecture }} - path: /tmp/aiidalab/ - - name: Load downloaded image to docker πŸ“₯ - run: | - docker load --input /tmp/aiidalab/${{ inputs.image }}-${{ inputs.architecture }}.tar - docker image ls --all - shell: bash - - name: Delete the file πŸ—‘οΈ - run: rm -f /tmp/aiidalab/${{ inputs.image }}-${{ inputs.architecture }}.tar - shell: bash - if: always() diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 000000000..ce0d77c8e --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,12 @@ +--- +version: 2 +updates: +# Maintain dependencies for GitHub Actions + - package-ecosystem: github-actions + directory: / + schedule: + interval: monthly + groups: + gha-dependencies: + patterns: + - '*' diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8ae8cae18..dc01c3400 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,7 +1,7 @@ --- # Run basic tests for this app -name: continuous-integration +name: CI on: [push, pull_request] @@ -11,42 +11,17 @@ concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: true -jobs: - - pre-commit: - # Adapted from: https://github.com/CasperWA/voila-optimade-client - - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v3 - - - name: Setup Python - uses: actions/setup-python@v4 - with: - python-version: '3.10' - cache: pip - cache-dependency-path: | - .pre-commit-config.yaml - **/setup.cfg - **/pyproject.toml - **/requirements*.txt - - - name: Install dependencies - run: python -m pip install pre-commit~=2.20 - - - name: Run pre-commit - run: pre-commit run --all-files || ( git status --short ; git diff ; exit 1 ) +env: + FORCE_COLOR: 1 +jobs: test-package: - needs: [pre-commit] - strategy: matrix: - tag: [latest] - python-version: ['3.8', '3.9', '3.10'] + python-version: ['3.9', '3.11'] + aiida-core-version: ['2.3', '2.6'] fail-fast: false runs-on: ubuntu-latest @@ -61,26 
+36,29 @@ jobs: steps: - name: Check out app - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - cache: pip - cache-dependency-path: | - **/setup.cfg - **/pyproject.toml - **/requirements*.txt + + - name: Install uv + uses: astral-sh/setup-uv@v1 + with: + version: 0.4.7 + - name: Install package - run: pip install -e .[dev] + run: uv pip install --system -e .[dev] aiida-core==${{ matrix.aiida-core-version }} - name: Run pytest - run: pytest -v tests --cov - env: - TAG: ${{ matrix.tag }} + # Only collect code coverage with aiida-core=2.3, to speed up tests + # with higher aiida versions that for some reason run slower, see: + # https://github.com/aiidalab/aiidalab-qe/issues/766 + run: pytest -v tests ${{ matrix.aiida-core-version == '2.3' && '--cov=aiidalab_qe' || '' }} - name: Upload coverage reports to Codecov - uses: codecov/codecov-action@v3 + uses: codecov/codecov-action@v4 with: flags: python-${{ matrix.python-version }} + token: ${{ secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/docker-build-test-upload.yml b/.github/workflows/docker-build-test-upload.yml index b6473b0f9..d9d691b1b 100644 --- a/.github/workflows/docker-build-test-upload.yml +++ b/.github/workflows/docker-build-test-upload.yml @@ -1,77 +1,102 @@ --- -name: Build a new image and then upload the image, tags and manifests to GitHub artifacts +name: Build Docker image + +on: + pull_request: + push: + branches: + - main + tags: + - v* + workflow_dispatch: env: - OWNER: ${{ github.repository_owner }} + FORCE_COLOR: 1 + IMAGE: ghcr.io/aiidalab/qe + BUILDKIT_PROGRESS: plain -on: - workflow_call: - inputs: - image: - description: Image name - required: true - type: string - architecture: - description: Image architecture, e.g. 
amd64, arm64 - required: true - type: string - runsOn: - description: GitHub Actions Runner image - required: true - type: string +# https://docs.github.com/en/actions/using-jobs/using-concurrency +concurrency: + # only cancel in-progress jobs or runs for the current workflow - matches against branch & tags + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true jobs: build-test-upload: - runs-on: ${{ inputs.runsOn }} + runs-on: ubuntu-latest continue-on-error: true steps: - name: Checkout Repo ⚑️ - uses: actions/checkout@v3 - - name: Create dev environment πŸ“¦ - uses: ./.github/actions/create-dev-env - with: - architecture: ${{ inputs.architecture }} + uses: actions/checkout@v4 - # Self-hosted runners share a state (whole VM) between runs - # Also, they might have running or stopped containers, - # which are not cleaned up by `docker system prun` - - name: Reset docker state and cleanup artifacts πŸ—‘οΈ - if: ${{ inputs.platform != 'x86_64' }} - run: | - docker kill $(docker ps --quiet) || true - docker rm $(docker ps --all --quiet) || true - docker system prune --all --force - rm -rf /tmp/aiidalab/ - shell: bash + - name: Login to Container Registry πŸ”‘ + uses: docker/login-action@v2 + if: ${{ !github.event.pull_request.head.repo.fork }} + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} - - name: Build image πŸ›  - working-directory: docker - run: docker buildx bake --set qe.platform=linux/${{ inputs.architecture }} -f docker-bake.hcl -f build.json --load - env: - # Use buildx - DOCKER_BUILDKIT: 1 - # Full logs for CI build - BUILDKIT_PROGRESS: plain - shell: bash + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 - - name: Run tests βœ… - uses: ./.github/actions/integration-tests + - name: Docker meta πŸ“ + id: meta + uses: docker/metadata-action@v5 with: - architecture: ${{ inputs.architecture }} - runsOn: ${{ inputs.runsOn }} + images: | + name=${{ env.IMAGE }} + tags: | + type=ref,event=pr + type=edge,enable={{is_default_branch}} + type=raw,value={{tag}},enable=${{ startsWith(github.ref, 'refs/tags/v') }} - - name: Save image as a tar for later use πŸ’Ύ - run: | - mkdir -p /tmp/aiidalab/ - docker save ${{ env.OWNER }}/${{ inputs.image }} -o /tmp/aiidalab/${{ inputs.image }}-${{ inputs.architecture }}.tar - shell: bash - if: always() + - name: Build and push image + id: build-upload + uses: docker/build-push-action@v5 + with: + tags: ${{ steps.meta.outputs.tags }} + load: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.repo.fork }} + push: ${{ ! (github.event_name == 'pull_request' && github.event.pull_request.head.repo.fork) }} + context: . 
+ platforms: linux/amd64 + cache-to: | + type=gha,scope=${{ github.workflow }},mode=min + cache-from: | + type=gha,scope=${{ github.workflow }} - - name: Upload image as artifact πŸ’Ύ - uses: actions/upload-artifact@v3 + - name: Set Up Python 🐍 + uses: actions/setup-python@v5 with: - name: ${{ inputs.image }}-${{ inputs.architecture }} - path: /tmp/aiidalab/${{ inputs.image }}-${{ inputs.architecture }}.tar - retention-days: 3 + python-version: 3.11 + + - name: Install Dev Dependencies πŸ“¦ + run: pip install -r requirements-docker.txt + + - name: Set jupyter token env + run: echo "JUPYTER_TOKEN=$(openssl rand -hex 32)" >> $GITHUB_ENV + + - name: Run pytest for Chrome + run: pytest -sv --driver Chrome tests_integration/ + env: + # We'd like to identify the image by its unique digest, i.e ghcr.io/aiidalab/qe@sha256: + # but that sadly does not work when the image is loaded to Docker locally and not published on ghcr.io + # as is the case for PRs from forks. Hence this super-ugly ternary expression... + # For forks, we take the image as ghcr.io/aiidalab/qe:pr-XXX + # which is stored in the steps.meta.outputs.tags variable + QE_IMAGE: >- + ${{ + github.event_name == 'pull_request' && + github.event.pull_request.head.repo.fork && + steps.meta.outputs.tags || + format('{0}@{1}', env.IMAGE, steps.build-upload.outputs.imageid) + }} + + - name: Upload screenshots as artifacts if: always() + uses: actions/upload-artifact@v4 + with: + name: Screenshots + path: screenshots/ + if-no-files-found: error diff --git a/.github/workflows/docker-merge-tags.yml b/.github/workflows/docker-merge-tags.yml deleted file mode 100644 index d99569f97..000000000 --- a/.github/workflows/docker-merge-tags.yml +++ /dev/null @@ -1,65 +0,0 @@ ---- -name: Download images tags from GitHub artifacts and create multi-platform manifests - -on: - workflow_call: - inputs: - image: - description: Image name - required: true - type: string - registry: - description: Docker registry, e.g. 
ghcr.io, docker.io - required: true - type: string - secrets: - REGISTRY_USERNAME: - required: true - REGISTRY_TOKEN: - required: true - - -jobs: - merge-tags: - runs-on: ubuntu-latest - - steps: - - name: Checkout Repo ⚑️ - uses: actions/checkout@v3 - - name: Create dev environment πŸ“¦ - uses: ./.github/actions/create-dev-env - with: - architecture: amd64 - - - name: Download amd64 tags file πŸ“₯ - uses: actions/download-artifact@v3 - with: - name: ${{ inputs.registry }}-${{ inputs.image }}-amd64-tags - path: /tmp/aiidalab - - name: Download arm64 tags file πŸ“₯ - uses: actions/download-artifact@v3 - with: - name: ${{ inputs.registry }}-${{ inputs.image }}-arm64-tags - path: /tmp/aiidalab - - - name: Login to Container Registry πŸ”‘ - uses: docker/login-action@v2 - with: - registry: ${{ inputs.registry }} - username: ${{ secrets.REGISTRY_USERNAME }} - password: ${{ secrets.REGISTRY_TOKEN }} - - - name: Merge tags for the images of different arch πŸ”€ - run: | - for arch_tag in $(cat /tmp/aiidalab/${{ inputs.image }}-amd64-tags.txt); do - tag=$(echo $arch_tag | sed "s/:amd64-/:/") - docker manifest create $tag --amend $arch_tag - docker manifest push $tag - done - - for arch_tag in $(cat /tmp/aiidalab/${{ inputs.image }}-arm64-tags.txt); do - tag=$(echo $arch_tag | sed "s/:arm64-/:/") - docker manifest create $tag --amend $arch_tag - docker manifest push $tag - done - shell: bash diff --git a/.github/workflows/docker-push.yml b/.github/workflows/docker-push.yml deleted file mode 100644 index a30fac7e3..000000000 --- a/.github/workflows/docker-push.yml +++ /dev/null @@ -1,93 +0,0 @@ ---- -name: Download Docker image and its tags from GitHub artifacts, apply them and push the image to container registry - -env: - OWNER: ${{ github.repository_owner }} - -on: - workflow_call: - inputs: - image: - description: Image name - required: true - type: string - architecture: - description: Image architecture - required: true - type: string - registry: - description: Docker registry - required: true - type: string - secrets: - REGISTRY_USERNAME: - required: true - REGISTRY_TOKEN: - required: true - -jobs: - tag-push: - runs-on: ubuntu-latest - - steps: - - name: Checkout Repo ⚑️ - uses: actions/checkout@v3 - - name: Create dev environment πŸ“¦ - uses: ./.github/actions/create-dev-env - with: - architecture: ${{ inputs.architecture }} - - name: Load image to Docker πŸ“₯ - uses: ./.github/actions/load-image - with: - image: ${{ inputs.image }} - architecture: ${{ inputs.architecture }} - - - name: Read build variables - working-directory: docker - id: build_vars - run: | - vars=$(cat build.json | jq -c '[.variable | to_entries[] | {"key": .key, "value": .value.default}] | from_entries') - echo "vars=$vars" >> "${GITHUB_OUTPUT}" - - - name: Docker meta πŸ“ - id: meta - uses: docker/metadata-action@v4 - env: ${{ fromJson(steps.build_vars.outputs.vars) }} - with: - images: | - name=${{ inputs.registry }}/${{ env.OWNER }}/${{ inputs.image }} - tags: | - type=edge,enable={{is_default_branch}} - type=sha,enable=${{ github.ref_type != 'tag' }} - type=ref,event=pr - type=match,pattern=v(\d{2}\.\d{2}.\d+.*),group=1 - type=raw,value={{tag}},enable=${{ startsWith(github.ref, 'refs/tags/v') }} - type=raw,value=qe-${{ env.QE_VERSION }},enable=${{ startsWith(github.ref, 'refs/tags/v') }} - - - name: Login to Container Registry πŸ”‘ - uses: docker/login-action@v2 - with: - registry: ${{ inputs.registry }} - username: ${{ secrets.REGISTRY_USERNAME }} - password: ${{ secrets.REGISTRY_TOKEN }} - - - name: Set tags for 
image and push πŸ·οΈπŸ“€πŸ’Ύ - run: | - declare -a arr=(${{ steps.meta.outputs.tags }}) - for tag in "${arr[@]}"; do - arch_tag=$(echo ${tag} | sed "s/:/:${{ inputs.architecture }}-/") - docker tag aiidalab/${{ inputs.image }}:newly-baked ${arch_tag} - docker push ${arch_tag} - - # write tag to file - mkdir -p /tmp/aiidalab/ - echo ${arch_tag} >> /tmp/aiidalab/${{ inputs.image }}-${{ inputs.architecture }}-tags.txt - done - shell: bash - - - name: Upload tags file πŸ“€ - uses: actions/upload-artifact@v3 - with: - name: ${{ inputs.registry }}-${{ inputs.image }}-${{ inputs.architecture }}-tags - path: /tmp/aiidalab/${{ inputs.image }}-${{ inputs.architecture }}-tags.txt - retention-days: 3 diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml deleted file mode 100644 index 2120e957b..000000000 --- a/.github/workflows/docker.yml +++ /dev/null @@ -1,140 +0,0 @@ ---- -name: Build, and push Docker Image - -on: - pull_request: - paths: - - .github/workflows/docker.yml - # We use local reusable workflows to make architecture clean an simple - # https://docs.github.com/en/actions/using-workflows/reusing-workflows - - .github/workflows/docker-build-test-upload.yml - - .github/workflows/docker-merge-tags.yml - - .github/workflows/docker-push.yml - - # We use local composite actions to combine multiple workflow steps within one action - # https://docs.github.com/en/actions/creating-actions/about-custom-actions#composite-actions - - .github/actions/create-dev-env/action.yml - - .github/actions/load-image/action.yml - - - src/** - - docker/** - - qe.ipynb - - setup.cfg - - pyproject.toml - - push: - branches: - - main - tags: - - v* - paths: - - .github/workflows/docker.yml - # We use local reusable workflows to make architecture clean an simple - # https://docs.github.com/en/actions/using-workflows/reusing-workflows - - .github/workflows/docker-build-test-upload.yml - - .github/workflows/docker-merge-tags.yml - - .github/workflows/docker-push.yml - - # We use local composite actions to combine multiple workflow steps within one action - # https://docs.github.com/en/actions/creating-actions/about-custom-actions#composite-actions - - .github/actions/create-dev-env/action.yml - - .github/actions/load-image/action.yml - - - src/** - - docker/** - - qe.ipynb - - setup.cfg - - pyproject.toml - workflow_dispatch: - schedule: - # Periodically build and test the image and keep `edge` always up-to-date - # Weekly, at 03:00 on Monday UTC time - - cron: 0 3 * * 1 - -# https://docs.github.com/en/actions/using-jobs/using-concurrency -concurrency: - # only cancel in-progress jobs or runs for the current workflow - matches against branch & tags - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -jobs: - amd64: - uses: ./.github/workflows/docker-build-test-upload.yml - with: - image: qe - architecture: amd64 - runsOn: ubuntu-latest - - arm64: - uses: ./.github/workflows/docker-build-test-upload.yml - with: - image: qe - architecture: arm64 - runsOn: ARM64 - - amd64-push-ghcr: - uses: ./.github/workflows/docker-push.yml - with: - image: qe - architecture: amd64 - registry: ghcr.io - secrets: - REGISTRY_USERNAME: ${{ github.actor }} - REGISTRY_TOKEN: ${{ secrets.GITHUB_TOKEN }} - needs: [amd64] - - arm64-push-ghcr: - uses: ./.github/workflows/docker-push.yml - with: - image: qe - architecture: arm64 - registry: ghcr.io - secrets: - REGISTRY_USERNAME: ${{ github.actor }} - REGISTRY_TOKEN: ${{ secrets.GITHUB_TOKEN }} - needs: [arm64] - - merge-tags-ghcr: - uses: 
./.github/workflows/docker-merge-tags.yml - with: - image: qe - registry: ghcr.io - secrets: - REGISTRY_USERNAME: ${{ github.actor }} - REGISTRY_TOKEN: ${{ secrets.GITHUB_TOKEN }} - needs: [amd64-push-ghcr, arm64-push-ghcr] - - amd64-push-dockerhub: - if: github.repository == 'aiidalab/aiidalab-qe' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/v')) - uses: ./.github/workflows/docker-push.yml - with: - image: qe - architecture: amd64 - registry: docker.io - secrets: - REGISTRY_USERNAME: ${{ secrets.DOCKER_USERNAME }} - REGISTRY_TOKEN: ${{ secrets.DOCKER_PASSWORD }} - needs: [amd64] - - arm64-push-dockerhub: - if: github.repository == 'aiidalab/aiidalab-qe' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/v')) - uses: ./.github/workflows/docker-push.yml - with: - image: qe - architecture: arm64 - registry: docker.io - secrets: - REGISTRY_USERNAME: ${{ secrets.DOCKER_USERNAME }} - REGISTRY_TOKEN: ${{ secrets.DOCKER_PASSWORD }} - needs: [arm64] - - merge-tags-dockerhub: - if: github.repository == 'aiidalab/aiidalab-qe' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/v')) - uses: ./.github/workflows/docker-merge-tags.yml - with: - image: qe - registry: docker.io - secrets: - REGISTRY_USERNAME: ${{ secrets.DOCKER_USERNAME }} - REGISTRY_TOKEN: ${{ secrets.DOCKER_PASSWORD }} - needs: [amd64-push-dockerhub, arm64-push-dockerhub] diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml deleted file mode 100644 index 462cd85bb..000000000 --- a/.github/workflows/release.yml +++ /dev/null @@ -1,61 +0,0 @@ ---- -name: Release - -on: - push: - tags: - - v* - branches: - - release/* - -jobs: - - build: - - runs-on: ubuntu-latest - - steps: - - - uses: actions/checkout@v2 - - - name: Set up Python 3.10 - uses: actions/setup-python@v2 - with: - python-version: '3.10' - - - name: Install pypa/build - run: python -m pip install build - - - name: Build a binary wheel for the aiidalab_qe module - run: >- - python -m - build - --sdist - --wheel - --outdir dist/ - - - name: Upload distribution artifact - uses: actions/upload-artifact@v2 - with: - name: release - path: dist/ - - publish: - - needs: [build] - runs-on: ubuntu-latest - - steps: - - uses: actions/download-artifact@v2 - name: Download distribution artifact - with: - name: release - path: dist/ - - - uses: softprops/action-gh-release@v0.1.14 - name: Create release - if: startsWith(github.ref, 'refs/tags/v') - with: - files: | - dist/* - generate_release_notes: true diff --git a/.gitignore b/.gitignore index 5515ed710..740d7b7a3 100644 --- a/.gitignore +++ b/.gitignore @@ -7,5 +7,8 @@ build/ export/ .do-not-setup-on-localhost +.mypy_cache/ +# Sphinx documentation +docs/html screenshots/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 5c82e17d9..7a5a29698 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -5,7 +5,7 @@ ci: repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.4.0 + rev: v4.6.0 hooks: - id: check-json - id: check-yaml @@ -19,41 +19,21 @@ repos: - id: yamlfmt exclude: tests - - repo: https://github.com/psf/black - rev: 23.9.1 - hooks: - - id: black - language_version: python3 # Should be a command that runs python3.6+ - - - repo: https://github.com/PyCQA/flake8 - rev: 6.1.0 - hooks: - - id: flake8 - args: [--count, --show-source, --statistics] - additional_dependencies: - - flake8-bugbear==22.7.1 - - - repo: https://github.com/pycqa/isort - rev: 5.12.0 - hooks: - - id: isort - args: 
[--profile, black, --filter-files] - - repo: https://github.com/sirosen/check-jsonschema - rev: 0.27.0 + rev: 0.28.6 hooks: - id: check-github-workflows - repo: https://github.com/kynan/nbstripout - rev: 0.6.1 + rev: 0.7.1 hooks: - id: nbstripout - - repo: https://github.com/nbQA-dev/nbQA - rev: 1.7.0 + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.6.1 hooks: - - id: nbqa-pyupgrade - args: [--py38-plus] - - id: nbqa-isort - args: [--profile=black] - - id: nbqa-black + - id: ruff + types_or: [python, pyi, jupyter] + args: [--fix] + - id: ruff-format + types_or: [python, pyi, jupyter] diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 4e458ea54..0533b11c5 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -10,19 +10,10 @@ build: os: ubuntu-22.04 tools: python: '3.11' - -# Build documentation in the "docs/" directory with Sphinx -sphinx: - configuration: docs/source/conf.py - -# Optionally build your docs in additional formats such as PDF and ePub -# formats: -# - pdf -# - epub - -# Optional but recommended, declare the Python requirements required -# to build your documentation -# See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html -python: - install: - - requirements: docs/requirements.txt + commands: + - asdf plugin add uv + - asdf install uv 0.2.13 + - asdf global uv 0.2.13 + - uv venv + - uv pip install -r docs/requirements.txt + - .venv/bin/python -m sphinx -W --keep-going -d _build/doctrees -D language=en -b html docs/source $READTHEDOCS_OUTPUT/html diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 000000000..6016d1aee --- /dev/null +++ b/Dockerfile @@ -0,0 +1,136 @@ +# syntax=docker/dockerfile:1 +ARG FULL_STACK_VER=2024.1023 +ARG UV_VER=0.4.7 +ARG QE_VER=7.2 +ARG QE_DIR=/opt/conda/envs/quantum-espresso-${QE_VER} +ARG HQ_VER=0.19.0 + +ARG UV_CACHE_DIR=/tmp/uv_cache +ARG QE_APP_SRC=/tmp/quantum-espresso +ARG HQ_COMPUTER="localhost-hq" + +FROM ghcr.io/astral-sh/uv:${UV_VER} AS uv + +# STAGE 1 +# Install QE into conda environment in /opt/conda +# This step is largely independent from the others and can run in parallel. +# However, it needs to be done before running `python -m aiidalab_qe install-qe`, +# otherwise QE gets installed into ~/.conda folder. +FROM ghcr.io/aiidalab/full-stack:${FULL_STACK_VER} AS qe_conda_env +ARG QE_VER +ARG QE_DIR + +USER ${NB_USER} +RUN mamba create -p ${QE_DIR} --yes qe=${QE_VER} && \ + mamba clean --all -f -y + +# STAGE 2 +# Install python dependencies needed to run aiidalab_qe CLI commands +# uv package cache from this stage is reused in the final stage as well. +FROM ghcr.io/aiidalab/full-stack:${FULL_STACK_VER} AS build_deps +ARG QE_DIR +ARG UV_CACHE_DIR +ARG QE_APP_SRC + +WORKDIR ${QE_APP_SRC} +COPY --chown=${NB_UID}:${NB_GID} src/ ${QE_APP_SRC}/src +COPY --chown=${NB_UID}:${NB_GID} setup.cfg pyproject.toml LICENSE README.md ${QE_APP_SRC} + +# Use uv instead of pip to speed up installation, per docs: +# https://github.com/astral-sh/uv/blob/main/docs/guides/docker.md#using-uv-temporarily +# Use the same constraint file as pip +ENV UV_CONSTRAINT=${PIP_CONSTRAINT} +RUN --mount=from=uv,source=/uv,target=/bin/uv \ + uv pip install --strict --system --cache-dir=${UV_CACHE_DIR} . 
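+
+# (Illustrative note, not part of the build logic: later stages reuse the uv
+#  cache populated above via a BuildKit bind mount, e.g.
+#    RUN --mount=from=build_deps,source=${UV_CACHE_DIR},target=${UV_CACHE_DIR},rw \
+#        uv pip install --system --cache-dir=${UV_CACHE_DIR} <package>
+#  where <package> is a placeholder; see the stages below for the real commands.)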
+
+# STAGE 3
+# - Prepare AiiDA profile and localhost computer
+# - Prepare hq computer using hyperqueue as scheduler
+# - Install QE codes and pseudopotentials
+# - Archive home folder
+FROM build_deps AS home_build
+ARG QE_DIR
+ARG HQ_VER
+ARG HQ_COMPUTER
+
+# Install hq binary
+RUN wget -c -O hq.tar.gz https://github.com/It4innovations/hyperqueue/releases/download/v${HQ_VER}/hq-v${HQ_VER}-linux-x64.tar.gz && \
+    tar xf hq.tar.gz -C /opt/conda/
+
+ENV PSEUDO_FOLDER=/tmp/pseudo
+RUN mkdir -p ${PSEUDO_FOLDER} && \
+    python -m aiidalab_qe download-pseudos --dest ${PSEUDO_FOLDER}
+
+ENV UV_CONSTRAINT=${PIP_CONSTRAINT}
+# Install aiida-hyperqueue
+# XXX: fix me after the aiida-hyperqueue release
+RUN --mount=from=uv,source=/uv,target=/bin/uv \
+    --mount=from=build_deps,source=${UV_CACHE_DIR},target=${UV_CACHE_DIR},rw \
+    uv pip install --system --strict --cache-dir=${UV_CACHE_DIR} \
+    "aiida-hyperqueue@git+https://github.com/aiidateam/aiida-hyperqueue"
+
+COPY ./before-notebook.d/* /usr/local/bin/before-notebook.d/
+
+ENV HQ_COMPUTER=$HQ_COMPUTER
+
+# TODO: Remove PGSQL and daemon log files, and other unneeded files
+RUN --mount=from=qe_conda_env,source=${QE_DIR},target=${QE_DIR} \
+    bash /usr/local/bin/before-notebook.d/20_start-postgresql.sh && \
+    bash /usr/local/bin/before-notebook.d/40_prepare-aiida.sh && \
+    bash /usr/local/bin/before-notebook.d/42_setup-hq-computer.sh && \
+    python -m aiidalab_qe install-qe --computer ${HQ_COMPUTER} && \
+    python -m aiidalab_qe install-pseudos --source ${PSEUDO_FOLDER} && \
+    verdi daemon stop && \
+    mamba run -n aiida-core-services pg_ctl stop && \
+    touch /home/${NB_USER}/.FLAG_HOME_INITIALIZED && \
+    cd /home/${NB_USER} && tar -cf /opt/conda/home.tar .
+
+# STAGE 4 - Final stage
+# - Install python dependencies
+# - Copy the QE conda environment
+# - Remove all content of the home folder
+# - Copy the whole repo content into the container
+# - Copy the home folder archive
+FROM ghcr.io/aiidalab/full-stack:${FULL_STACK_VER}
+ARG QE_DIR
+ARG QE_APP_SRC
+ARG UV_CACHE_DIR
+ARG HQ_COMPUTER
+USER ${NB_USER}
+
+WORKDIR /tmp
+# Install python dependencies, reusing the uv cache from the previous build step.
+# This also installs aiida-hyperqueue.
+# XXX: fix me after the aiida-hyperqueue release
+ENV UV_CONSTRAINT=${PIP_CONSTRAINT}
+RUN --mount=from=uv,source=/uv,target=/bin/uv \
+    --mount=from=build_deps,source=${UV_CACHE_DIR},target=${UV_CACHE_DIR},rw \
+    --mount=from=build_deps,source=${QE_APP_SRC},target=${QE_APP_SRC},rw \
+    uv pip install --strict --system --compile-bytecode --cache-dir=${UV_CACHE_DIR} ${QE_APP_SRC} "aiida-hyperqueue@git+https://github.com/aiidateam/aiida-hyperqueue"
+
+# Copy hq binary
+COPY --from=home_build /opt/conda/hq /usr/local/bin/
+
+COPY --from=qe_conda_env ${QE_DIR} ${QE_DIR}
+
+USER root
+
+COPY ./before-notebook.d/* /usr/local/bin/before-notebook.d/
+
+ENV HQ_COMPUTER=$HQ_COMPUTER
+
+# Remove content of $HOME
+# '-mindepth 1' ensures that we do not remove the home directory itself.
+RUN find /home/${NB_USER}/ -mindepth 1 -delete
+
+ENV QE_APP_FOLDER=/opt/conda/quantum-espresso
+COPY --chown=${NB_UID}:${NB_GID} . ${QE_APP_FOLDER}
+# Remove all untracked files and directories.
+RUN git clean -dffx || true
+
+ENV HOME_TAR="/opt/home.tar"
+COPY --from=home_build /opt/conda/home.tar "$HOME_TAR"
+
+USER ${NB_USER}
+WORKDIR "/home/${NB_USER}"
diff --git a/README.md b/README.md
index ac430b574..9172dd71e 100644
--- a/README.md
+++ b/README.md
@@ -5,8 +5,8 @@
 ## About

-This is a early-development implementation of an AiiDAlab application for Quantum ESPRESSO workflow.
-The app allows the execution of a workflow with Quantum ESPRESSO that includes the selection of an input structure, its relaxation, and the bands structure calculation.
+This is an AiiDAlab application for Quantum ESPRESSO workflows.
+The app allows the execution of a workflow with Quantum ESPRESSO that includes the selection of an input structure, its relaxation, the band structure calculation, and more!

 **The app is currently in an early development stage!**
@@ -32,15 +32,24 @@
 pytest -sv tests

 To run the integration tests, you need to build the Docker image first:

 ```
-cd docker/
-docker buildx bake -f build.json -f docker-bake.hcl --set "*.platform=linux/amd64" --load
+docker build . -t aiidalab/qe
 ```

 Then, you can run the integration tests with:

 ```bash
-JUPYTER_TOKEN=max TAG=newly-baked pytest --driver Chrome tests_integration -sv
-``````
+pytest --driver Chrome tests_integration
+```
+
+### Published Docker images
+
+Supported tags released on [GitHub Container Registry](https://ghcr.io/aiidalab):
+
+- `edge` – the latest commit on the default branch (`main`)
+- `latest` – the latest stable release
+- `$version` – the version of a specific release (e.g. `2022.1001`)
+
+Pull requests into the default branch are additionally released on ghcr.io with the `pr-###` tag to simplify the testing of development versions.

 ## For maintainers
@@ -50,21 +59,28 @@
 This will:

 1. Create a tagged release with bumped version and push it to the repository.
 2. Trigger a GitHub actions workflow that creates a GitHub release.

+For more details on release planning and management, see [the wiki](https://github.com/aiidalab/aiidalab-qe/wiki/Releases-management).
+
 Additional notes:

   - Use the `--dry` option to preview the release change.
   - The release tag (e.g. a/b/rc) is determined from the last release.
     Use the `--tag` option to switch the release tag.
+  - To make an "outdated" release (since the minor version is fixed to `2x.04.xx` and `2x.10.xx`), use e.g. `bumpver update --set-version v23.10.0rc4 --ignore-vcs-tag`.

 ## Acknowledgements

+We acknowledge support from:
+* the European Union's Horizon 2020 research and innovation programme (Grant No. 957189, [project BIG-MAP](https://www.big-map.eu)).
+* the [MARVEL National Centre for Competency in Research](https://nccr-marvel.ch/) funded by the [Swiss National Science Foundation](https://www.snf.ch/en).
+* the MARKETPLACE project funded by [Horizon 2020](https://ec.europa.eu/programmes/horizon2020/) under the H2020-NMBP-25-2017 call (Grant No. 760173).
+* the [MaX European Centre of Excellence](https://www.max-centre.eu/) funded by the Horizon 2020 EINFRA-5 program (Grant No. 676598).

-This project has received funding from the European Union’s [Horizon 2020 research and innovation programme](https://ec.europa.eu/programmes/horizon2020/en) under grant agreement [No 957189](https://cordis.europa.eu/project/id/957189). The project is part of BATTERY 2030+, the large-scale European research initiative for inventing the sustainable batteries of the future.
-Also supported by the [MARVEL National Centre for Competency in Research]() funded by the [Swiss National Science Foundation](), -the MARKETPLACE project funded by [Horizon 2020](https://ec.europa.eu/programmes/horizon2020/) under the H2020-NMBP-25-2017 call (Grant No. 760173), -as well as by the [MaX European Centre of Excellence]() funded by the Horizon 2020 EINFRA-5 program, Grant No. 676598. - -
+
MARVEL MaX MarketPlace
+
+ BIG-MAP + EU +
diff --git a/before-notebook.d/00_untar-home.sh b/before-notebook.d/00_untar-home.sh
new file mode 100644
index 000000000..d55902280
--- /dev/null
+++ b/before-notebook.d/00_untar-home.sh
@@ -0,0 +1,34 @@
+#!/bin/bash
+set -eux
+
+home="/home/${NB_USER}"
+
+# Untar the home archive file to restore the home directory if it is empty
+if [ ! -e $home/.FLAG_HOME_INITIALIZED ]; then
+  if [[ ! -f $HOME_TAR ]]; then
+    echo "File $HOME_TAR does not exist!"
+    exit 1
+  fi
+  if [[ ! -d ${QE_APP_FOLDER} ]]; then
+    echo "Folder $QE_APP_FOLDER does not exist!"
+    exit 1
+  fi
+
+  echo "Extracting $HOME_TAR to $home"
+  # NOTE: tar prints an error when deployed to k8s, but at the moment this does not cause any issue:
+  # tar: .: Cannot utime: Operation not permitted
+  # tar: .: Cannot change mode to rwxr-s---: Operation not permitted
+  tar -xf $HOME_TAR -C "$home"
+else
+  echo "$home folder is not empty!"
+  ls -lrta "$home"
+fi
+
+if [ -d $AIIDALAB_APPS/quantum-espresso ]; then
+  echo "Quantum ESPRESSO app already exists"
+else
+  echo "Copying directory '$QE_APP_FOLDER' to '$AIIDALAB_APPS'"
+  cp -r "$QE_APP_FOLDER" "$AIIDALAB_APPS"
+fi
+
+set +eux
diff --git a/before-notebook.d/42_setup-hq-computer.sh b/before-notebook.d/42_setup-hq-computer.sh
new file mode 100755
index 000000000..7031ced00
--- /dev/null
+++ b/before-notebook.d/42_setup-hq-computer.sh
@@ -0,0 +1,21 @@
+#!/bin/bash
+
+set -x
+
+# Set up the hq computer
+verdi computer show ${HQ_COMPUTER} || verdi computer setup \
+  --non-interactive \
+  --label "${HQ_COMPUTER}" \
+  --description "local computer with hyperqueue scheduler" \
+  --hostname "localhost" \
+  --transport core.local \
+  --scheduler hyperqueue \
+  --work-dir /home/${NB_USER}/aiida_run/ \
+  --mpirun-command "mpirun -np {num_cpus}"
+
+verdi computer configure core.local "${HQ_COMPUTER}" \
+  --non-interactive \
+  --safe-interval 5.0
+
+# Disable the localhost computer that is set up in the base image
+verdi computer disable localhost aiida@localhost
diff --git a/before-notebook.d/43_start-hq.sh b/before-notebook.d/43_start-hq.sh
new file mode 100644
index 000000000..c20a462e4
--- /dev/null
+++ b/before-notebook.d/43_start-hq.sh
@@ -0,0 +1,59 @@
+#!/bin/bash
+
+set -x
+
+# NOTE: this cgroup folder hierarchy is based on cgroupv2;
+# if the container is run on a system that only has cgroupv1, the image build procedure will fail.
+# Since the image is mostly meant for the demo server, where we know the machine and OS,
+# we assume cgroupv2 is available (> Kubernetes v1.25).
+# We only build the image for the demo server, so users are not required to have the new cgroup version.
+# But for developers, please update your cgroup version to v2.
+# See: https://kubernetes.io/docs/concepts/architecture/cgroups/#using-cgroupv2
+
+# Determine the memory available to the container at runtime
+MEMORY_LIMIT=$(cat /sys/fs/cgroup/memory.max)
+
+if [ "$MEMORY_LIMIT" = "max" ]; then
+  MEMORY_LIMIT=4096
+  echo "No memory limit set, using 4 GiB"
+else
+  MEMORY_LIMIT=$(echo "scale=0; $MEMORY_LIMIT / (1024 * 1024)" | bc)
+  echo "Memory Limit: ${MEMORY_LIMIT} MiB"
+fi
+
+# Compute the number of CPUs allocated to the container
+CPU_LIMIT=$(awk '{print $1}' /sys/fs/cgroup/cpu.max)
+CPU_PERIOD=$(awk '{print $2}' /sys/fs/cgroup/cpu.max)
+
+if [ "$CPU_PERIOD" -ne 0 ]; then
+  # e.g. a cpu.max of "200000 100000" corresponds to 2 CPUs
+  CPU_NUMBER=$(echo "scale=2; $CPU_LIMIT / $CPU_PERIOD" | bc)
+  echo "Number of CPUs allocated: $CPU_NUMBER"
+
+  # For the HQ setting, round down to an integer number of CPUs; the rest is left for system tasks
+  CPU_LIMIT=$(echo "scale=0; $CPU_LIMIT / $CPU_PERIOD" | bc)
+else
+  # If no limit is set (e.g. a local OCI runtime without a CPU limit), use all CPUs
+  CPU_LIMIT=$(nproc)
+  echo "No CPU limit set"
+fi
+
+# Start the hq server with a worker
+run-one-constantly hq server start 1>$HOME/.hq-stdout 2>$HOME/.hq-stderr &
+run-one-constantly hq worker start --cpus=${CPU_LIMIT} --resource "mem=sum(${MEMORY_LIMIT})" --no-detect-resources &
+
+# Reset the default memory_per_machine and default_mpiprocs_per_machine
+# c.set_default_mpiprocs_per_machine = ${CPU_LIMIT}
+# c.set_default_memory_per_machine = ${MEMORY_LIMIT}
+
+# As for the original localhost computer, set the job poll interval to 2.0 secs.
+# In addition, set the default mpiprocs and memory per machine.
+# TODO: this will run every time the container starts; we need a lock file to prevent it.
+job_poll_interval="2.0"
+computer_name=${HQ_COMPUTER}
+python -c "
+from aiida import load_profile; from aiida.orm import load_computer;
+load_profile();
+load_computer('${computer_name}').set_minimum_job_poll_interval(${job_poll_interval})
+load_computer('${computer_name}').set_default_mpiprocs_per_machine(${CPU_LIMIT})
+load_computer('${computer_name}').set_default_memory_per_machine(${MEMORY_LIMIT})
+"
diff --git a/bumpver_pre_commit.sh b/bumpver_pre_commit.sh
new file mode 100755
index 000000000..57986077f
--- /dev/null
+++ b/bumpver_pre_commit.sh
@@ -0,0 +1,59 @@
+#!/bin/bash
+
+# This script is configured to run automatically by bumpver
+# before it creates the release commit.
+# We check for common mistakes, such as making a release commit
+# on a wrong branch, or trying to push to a wrong remote.
+#
+# For now, only two checks are implemented:
+#
+# 1. Check that the current branch matches either release/* or support/*
+#
+# 2. Check that the remote 'origin' is pointing to the origin repository,
+#    and not a fork. Note however that this assumes that origin is the default remote
+#    where new branches are pushed. If the user configured a different default remote,
+#    this check will not save them in the current implementation.
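+#
+# A minimal sketch of the intended release flow (the version number below is
+# hypothetical; bumpver invokes this script automatically as its pre-commit hook):
+#
+#   git switch -c release/v24.10.0
+#   bumpver update --dry   # preview the release commit
+#   bumpver update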
+#
+# Future work:
+# - make sure the main branch is up-to-date with origin/main
+# - make sure the HEAD commit was branched off of the main branch,
+#   although this rule should only apply to release/* branches, not support/* branches
+#
+# Ideally, some of these checks would be handled by bumpver itself:
+# Restricting releases from branch: https://github.com/mbarkhau/bumpver/issues/198
+# Restricting releases to specified remote: https://github.com/mbarkhau/bumpver/issues/234
+
+set -euo pipefail
+
+ORIGIN="github\.com[:/]aiidalab/aiidalab-qe"
+
+error=0
+
+branch=$(git branch --show-current)
+
+# Explicitly disallow master/main branch
+if [[ $branch = "master" || $branch = "main" ]];then
+    echo "ERROR: You should not run bumpver from the main/master branch!"
+    echo "Make sure your main branch is up-to-date with origin ('git pull origin main')"
+    echo "and create a release branch first, e.g. 'git switch -c release/v2.0.0'"
+    error=1
+fi
+
+# Only allow release/* and support/* branches
+if [[ ! $branch =~ 'release/' && ! $branch =~ 'support/' ]];then
+    echo "ERROR: The branch name must start with release/ or support/"
+    error=1
+fi
+
+# TODO: We need to check which remote is actually configured for push!
+origin_url=$(git remote get-url --push --all origin)
+if [[ ! $origin_url =~ $ORIGIN ]];then
+    echo "ERROR: Wrong default repo remote set!"
+    echo "got: $origin_url"
+    echo "expected: $ORIGIN"
+    error=1
+fi
+
+if [[ $error != 0 ]];then
+    exit 1
+fi
diff --git a/delete.ipynb b/delete.ipynb
new file mode 100644
index 000000000..670a6561f
--- /dev/null
+++ b/delete.ipynb
@@ -0,0 +1,145 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# AiiDAlab QuantumESPRESSO App\n",
+    "\n",
+    "Caution! Deleting this job will also remove all associated nodes, including every calculation initiated by this job and their respective results. This action is irreversible.\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import urllib.parse as urlparse\n",
+    "\n",
+    "import ipywidgets as widgets\n",
+    "from IPython.display import Markdown, display\n",
+    "\n",
+    "from aiida import load_profile\n",
+    "from aiida.orm import load_node\n",
+    "from aiida.tools import delete_nodes\n",
+    "\n",
+    "# Load AiiDA profile\n",
+    "load_profile()\n",
+    "\n",
+    "# Parse the primary key from the Jupyter notebook URL\n",
+    "url = urlparse.urlsplit(jupyter_notebook_url) # noqa F821\n",
+    "query = urlparse.parse_qs(url.query)\n",
+    "pk = int(query[\"pk\"][0])\n",
+    "\n",
+    "\n",
+    "def display_node_details(pk):\n",
+    "    try:\n",
+    "        node = load_node(pk)\n",
+    "        print(f\"Node ID: {node.pk}\")\n",
+    "        print(f\"Node Type: {node.process_label}\")\n",
+    "        print(f\"Label: {node.label}\")\n",
+    "        print(f\"Description: {node.description}\")\n",
+    "        print(f\"Creation Time: {node.ctime}\")\n",
+    "    except Exception as e:\n",
+    "        print(f\"Error loading node: {e!s}\")\n",
+    "        return False\n",
+    "    return True\n",
+    "\n",
+    "\n",
+    "def delete_node(pk, dry_run=True):\n",
+    "    if dry_run:\n",
+    "        _, was_deleted = delete_nodes([pk], dry_run=True)\n",
+    "        if was_deleted:\n",
+    "            print(f\"Dry run: Node {pk} can be deleted.\")\n",
+    "        return\n",
+    "\n",
+    "    _, was_deleted = delete_nodes([pk], dry_run=False)\n",
+    "    if was_deleted:\n",
+    "        print(f\"Node {pk} deleted successfully.\")\n",
+    "\n",
+    "\n",
+    "def confirm_deletion(_):\n",
+    "    if delete_confirmation.value.lower() in (\"y\", \"yes\"):\n",
+    "        delete_node(pk, dry_run=False)\n",
+    "    else:\n",
+    "        print(\"Deletion aborted.\")\n",
+    "\n",
+    "\n",
+    "def find_linked_qeapp_jobs(root_node_pk, process_label=\"QeAppWorkChain\"):\n",
+    "    \"\"\"Query all linked nodes with process_label = QeAppWorkChain.\"\"\"\n",
+    "    from aiida.orm import Node, QueryBuilder\n",
+    "    from aiida.orm.nodes.process.workflow.workchain import WorkChainNode\n",
+    "\n",
+    "    qb = QueryBuilder()\n",
+    "    qb.append(WorkChainNode, filters={\"id\": root_node_pk}, tag=\"root\")\n",
+    "    qb.append(Node, with_incoming=\"root\", tag=\"calcjob\")\n",
+    "    # There seems to be a bug with `with_ancestors` in the QueryBuilder, so we have to use `with_incoming` instead.\n",
+    "    # For the moment, it's safe to use `with_incoming` since we check it every time we delete a QEApp job.\n",
+    "    qb.append(\n",
+    "        WorkChainNode,\n",
+    "        filters={\"attributes.process_label\": process_label},\n",
+    "        with_incoming=\"calcjob\",\n",
+    "    )\n",
+    "    results = qb.all()\n",
+    "    if len(results) == 0:\n",
+    "        return None\n",
+    "    return results\n",
+    "\n",
+    "\n",
+    "if display_node_details(pk):\n",
+    "    linked_qeapp_jobs = find_linked_qeapp_jobs(pk)\n",
+    "    if linked_qeapp_jobs:\n",
+    "        warning_html = f\"\"\"\n",
+    "
\n", + " Critical: Unable to delete the requested node due to dependencies.\n", + " There are {len(linked_qeapp_jobs)} QEApp jobs linked to this node. Please delete them first:\n", + "
    \n", + "\"\"\"\n", + " for node in linked_qeapp_jobs[0]:\n", + " warning_html += f\"\"\"{node.pk}
    \"\"\"\n", + " display(widgets.HTML(value=warning_html))\n", + " else:\n", + " # Ask for confirmation\n", + " nodes, _ = delete_nodes([pk], dry_run=True)\n", + " display(\n", + " Markdown(\n", + " f\"**YOU ARE ABOUT TO DELETE `{len(nodes)}` NODES! THIS CANNOT BE UNDONE!**\"\n", + " )\n", + " )\n", + " delete_confirmation = widgets.Text(\n", + " value=\"\",\n", + " placeholder='Type \"yes\" to confirm',\n", + " description=\"Confirm:\",\n", + " disabled=False,\n", + " )\n", + " confirm_button = widgets.Button(description=\"Delete Node\")\n", + " confirm_button.on_click(confirm_deletion)\n", + " display(delete_confirmation, confirm_button)\n", + "else:\n", + " print(\"No valid node found for deletion.\")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.13" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/docker/Dockerfile b/docker/Dockerfile deleted file mode 100644 index a92fecfef..000000000 --- a/docker/Dockerfile +++ /dev/null @@ -1,44 +0,0 @@ -# syntax=docker/dockerfile:1 -FROM base-image - -# Copy whole repo and pre-install the dependencies and app to the tmp folder. -# In the before notebook scripts the app will be re-installed by moving it to the app folder. -ENV PREINSTALL_APP_FOLDER ${CONDA_DIR}/aiidalab-qe -COPY --chown=${NB_UID}:${NB_GID} --from=src . ${PREINSTALL_APP_FOLDER} - -USER ${NB_USER} - -RUN cd ${PREINSTALL_APP_FOLDER} && \ - # Remove all untracked files and directories. For example the setup lock flag file. - git clean -fx && \ - # It is important to install from `aiidalab install` to mimic the exact installation operation as - # from the app store. - # The command wil first install the dependencies from list by parsing setup config files, - # (for `aiidalab/aiidalab<23.03.2` the `setup.py` should be in the root folder of the app https://github.com/aiidalab/aiidalab/pull/382). - # and then the app and restart the daemon in the end. - # But since the aiida profile not yet exists, the daemon restart will fail but it is not a problem. - # Because we only need the dependencies to be installed. - aiidalab install --yes --python ${CONDA_DIR}/bin/python "quantum-espresso@file://${PREINSTALL_APP_FOLDER}" && \ - fix-permissions "${CONDA_DIR}" && \ - fix-permissions "/home/${NB_USER}" - -# The app version is used for installing the app when first time the container is started. -ARG APP_VERSION -ENV APP_VERSION ${APP_VERSION} - -ARG QE_VERSION -ENV QE_VERSION ${QE_VERSION} -RUN mamba create -p /opt/conda/envs/quantum-espresso --yes \ - qe=${QE_VERSION} \ - && mamba clean --all -f -y && \ - fix-permissions "${CONDA_DIR}" && \ - fix-permissions "/home/${NB_USER}" - -# Download the QE pseudopotentials to the folder for afterware installation. 
-ENV PSEUDO_FOLDER ${CONDA_DIR}/pseudo -RUN mkdir -p ${PSEUDO_FOLDER} && \ - python -m aiidalab_qe download-pseudos --dest ${PSEUDO_FOLDER} - -COPY before-notebook.d/* /usr/local/bin/before-notebook.d/ - -WORKDIR "/home/${NB_USER}" diff --git a/docker/before-notebook.d/70_prepare-qe-executable.sh b/docker/before-notebook.d/70_prepare-qe-executable.sh deleted file mode 100644 index 41ba2d391..000000000 --- a/docker/before-notebook.d/70_prepare-qe-executable.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/bash -e - -# Debugging. -set -x - -# Copy quantum espresso env to user space. -mkdir -p /home/${NB_USER}/.conda/envs -if [ ! -d /home/${NB_USER}/.conda/envs/quantum-espresso-${QE_VERSION} ]; then - ln -s /opt/conda/envs/quantum-espresso /home/${NB_USER}/.conda/envs/quantum-espresso-${QE_VERSION} - - # Install qe so the progress bar not shown in the notebook when first time using app. - echo "Installing qe." - python -m aiidalab_qe install-qe -else - echo "Quantum ESPRESSO app is already installed." -fi diff --git a/docker/before-notebook.d/71_install-qeapp.sh b/docker/before-notebook.d/71_install-qeapp.sh deleted file mode 100644 index 849d92849..000000000 --- a/docker/before-notebook.d/71_install-qeapp.sh +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/bash -e - -# Debugging. -set -x - -# Install qeapp if it is not already installed. -if aiidalab list | grep -q quantum-espresso; then - echo "Quantum ESPRESSO app is already installed." -else - echo "Installing Quantum ESPRESSO app." - # Install by move the repo folder that is already in the image. - mv ${PREINSTALL_APP_FOLDER} /home/${NB_USER}/apps/quantum-espresso -fi - -# Install the pseudo libraries if not already installed. -if aiida-pseudo list | grep -q "no pseudo potential families"; then - echo "Installing pseudo potential families." - python -m aiidalab_qe install-pseudos --source ${PSEUDO_FOLDER} -else - echo "Pseudo potential families are already installed." -fi diff --git a/docker/build.json b/docker/build.json deleted file mode 100644 index 03141a752..000000000 --- a/docker/build.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "variable": { - "AIIDALAB_BASE_IMAGE": { - "default": "aiidalab/full-stack:latest" - }, - "QE_VERSION": { - "default": "7.2" - } - } -} diff --git a/docker/docker-bake.hcl b/docker/docker-bake.hcl deleted file mode 100644 index 97017844f..000000000 --- a/docker/docker-bake.hcl +++ /dev/null @@ -1,27 +0,0 @@ -# docker-bake.hcl for building QeApp images -group "default" { - targets = ["qe"] -} - -variable "QE_VERSION" { -} - -variable "BASE_IMAGE" { - default = "aiidalab/full-stack:latest" -} - -variable "ORGANIZATION" { - default = "aiidalab" -} - -target "qe" { - tags = ["${ORGANIZATION}/qe:newly-baked"] - context = "." - contexts = { - src = ".." 
- base-image = "docker-image://${BASE_IMAGE}" - } - args = { - "QE_VERSION" = "${QE_VERSION}" - } -} diff --git a/docker/requirements-dev.txt b/docker/requirements-dev.txt deleted file mode 100644 index 4c8d31491..000000000 --- a/docker/requirements-dev.txt +++ /dev/null @@ -1,11 +0,0 @@ -docker -requests -pre-commit -pytest -pytest-docker - -# test dependencies -pytest-selenium -pytest-html<4.0 -selenium~=4.9.0 -webdriver-manager diff --git a/docs/requirements.in b/docs/requirements.in index c3b945730..a53e994fb 100644 --- a/docs/requirements.in +++ b/docs/requirements.in @@ -1,3 +1,3 @@ sphinx~=4.5.0 sphinx-design~=0.4.1 -pydata-sphinx-theme==0.8.0 +pydata-sphinx-theme==0.13.3 diff --git a/docs/requirements.txt b/docs/requirements.txt index a72320b01..8c4e9752e 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,16 +1,20 @@ # -# This file is autogenerated by pip-compile with Python 3.11 +# This file is autogenerated by pip-compile with Python 3.9 # by the following command: # # pip-compile docs/requirements.in # +accessible-pygments==0.0.4 + # via pydata-sphinx-theme alabaster==0.7.13 # via sphinx babel==2.12.1 - # via sphinx + # via + # pydata-sphinx-theme + # sphinx beautifulsoup4==4.12.2 # via pydata-sphinx-theme -certifi==2023.5.7 +certifi==2024.7.4 # via requests charset-normalizer==3.1.0 # via requests @@ -18,21 +22,28 @@ docutils==0.17.1 # via # pydata-sphinx-theme # sphinx -idna==3.4 +idna==3.7 # via requests imagesize==1.4.1 # via sphinx -jinja2==3.1.2 +importlib-metadata==6.8.0 + # via sphinx +jinja2==3.1.4 # via sphinx markupsafe==2.1.3 # via jinja2 packaging==23.1 - # via sphinx -pydata-sphinx-theme==0.8.0 - # via -r docs/requirements.in + # via + # pydata-sphinx-theme + # sphinx +pydata-sphinx-theme==0.13.3 + # via -r requirements.in pygments==2.15.1 - # via sphinx -requests==2.31.0 + # via + # accessible-pygments + # pydata-sphinx-theme + # sphinx +requests==2.32.0 # via sphinx snowballstemmer==2.2.0 # via sphinx @@ -40,11 +51,11 @@ soupsieve==2.4.1 # via beautifulsoup4 sphinx==4.5.0 # via - # -r docs/requirements.in + # -r requirements.in # pydata-sphinx-theme # sphinx-design sphinx-design==0.4.1 - # via -r docs/requirements.in + # via -r requirements.in sphinxcontrib-applehelp==1.0.4 # via sphinx sphinxcontrib-devhelp==1.0.2 @@ -57,5 +68,9 @@ sphinxcontrib-qthelp==1.0.3 # via sphinx sphinxcontrib-serializinghtml==1.1.5 # via sphinx -urllib3==2.0.3 +typing-extensions==4.8.0 + # via pydata-sphinx-theme +urllib3==2.2.2 # via requests +zipp==3.19.1 + # via importlib-metadata diff --git a/docs/source/_static/images/Calandra_Bunau-PRB-205105-2013-gamma_func_plot.png b/docs/source/_static/images/Calandra_Bunau-PRB-205105-2013-gamma_func_plot.png new file mode 100644 index 000000000..f920cc010 Binary files /dev/null and b/docs/source/_static/images/Calandra_Bunau-PRB-205105-2013-gamma_func_plot.png differ diff --git a/docs/source/_static/images/Li2CO3_Example-C_K-edge-XCH_Only-Cropped.png b/docs/source/_static/images/Li2CO3_Example-C_K-edge-XCH_Only-Cropped.png new file mode 100644 index 000000000..92054cd7a Binary files /dev/null and b/docs/source/_static/images/Li2CO3_Example-C_K-edge-XCH_Only-Cropped.png differ diff --git a/docs/source/_static/images/Li2CO3_Example-O_K-edge-FCH_Only-Cropped.png b/docs/source/_static/images/Li2CO3_Example-O_K-edge-FCH_Only-Cropped.png new file mode 100644 index 000000000..b62ff68eb Binary files /dev/null and b/docs/source/_static/images/Li2CO3_Example-O_K-edge-FCH_Only-Cropped.png differ diff --git 
a/docs/source/_static/images/XAS_Plugin-Set_Adv_Options-Alt-Annotated-Cropped.png b/docs/source/_static/images/XAS_Plugin-Set_Adv_Options-Alt-Annotated-Cropped.png new file mode 100644 index 000000000..eb665ba1f Binary files /dev/null and b/docs/source/_static/images/XAS_Plugin-Set_Adv_Options-Alt-Annotated-Cropped.png differ diff --git a/docs/source/_static/images/XAS_Plugin_Result_Panel-Carbon-Annotated-Cropped.png b/docs/source/_static/images/XAS_Plugin_Result_Panel-Carbon-Annotated-Cropped.png new file mode 100644 index 000000000..e54106f16 Binary files /dev/null and b/docs/source/_static/images/XAS_Plugin_Result_Panel-Carbon-Annotated-Cropped.png differ diff --git a/docs/source/_static/images/XAS_Plugin_Result_Panel-Oxygen.png b/docs/source/_static/images/XAS_Plugin_Result_Panel-Oxygen.png new file mode 100644 index 000000000..4b6a238df Binary files /dev/null and b/docs/source/_static/images/XAS_Plugin_Result_Panel-Oxygen.png differ diff --git a/docs/source/_static/images/XAS_Plugin_Setting_Panel-Annotated-Cropped.png b/docs/source/_static/images/XAS_Plugin_Setting_Panel-Annotated-Cropped.png new file mode 100644 index 000000000..c46fbb27f Binary files /dev/null and b/docs/source/_static/images/XAS_Plugin_Setting_Panel-Annotated-Cropped.png differ diff --git a/docs/source/_static/images/XAS_Plugin_Structure_Panel-Li2CO3.png b/docs/source/_static/images/XAS_Plugin_Structure_Panel-Li2CO3.png new file mode 100644 index 000000000..c2eeaa889 Binary files /dev/null and b/docs/source/_static/images/XAS_Plugin_Structure_Panel-Li2CO3.png differ diff --git a/docs/source/_static/images/full_logo.png b/docs/source/_static/images/full_logo.png deleted file mode 100755 index 5e181e4f2..000000000 Binary files a/docs/source/_static/images/full_logo.png and /dev/null differ diff --git a/docs/source/_static/images/full_logo.svg b/docs/source/_static/images/full_logo.svg deleted file mode 100755 index 2077ae53d..000000000 --- a/docs/source/_static/images/full_logo.svg +++ /dev/null @@ -1 +0,0 @@ -The Quantum ESPRESSO App diff --git a/docs/source/_static/images/icon.svg b/docs/source/_static/images/icon.svg index 65422e2e8..513c5c201 100755 --- a/docs/source/_static/images/icon.svg +++ b/docs/source/_static/images/icon.svg @@ -1 +1 @@ - + diff --git a/docs/source/_static/images/qeapp_release_202404_job_list.gif b/docs/source/_static/images/qeapp_release_202404_job_list.gif new file mode 100644 index 000000000..445188b59 Binary files /dev/null and b/docs/source/_static/images/qeapp_release_202404_job_list.gif differ diff --git a/docs/source/_static/images/qeapp_release_202404_new_widgets.png b/docs/source/_static/images/qeapp_release_202404_new_widgets.png new file mode 100644 index 000000000..60e639988 Binary files /dev/null and b/docs/source/_static/images/qeapp_release_202404_new_widgets.png differ diff --git a/docs/source/_static/images/qeapp_release_202404_plugin_management.gif b/docs/source/_static/images/qeapp_release_202404_plugin_management.gif new file mode 100644 index 000000000..0e51b6da2 Binary files /dev/null and b/docs/source/_static/images/qeapp_release_202404_plugin_management.gif differ diff --git a/docs/source/_static/images/qeapp_release_202404_plugins.png b/docs/source/_static/images/qeapp_release_202404_plugins.png new file mode 100644 index 000000000..f64f74af2 Binary files /dev/null and b/docs/source/_static/images/qeapp_release_202404_plugins.png differ diff --git a/docs/source/_static/images/xps_etfa_dft.png b/docs/source/_static/images/xps_etfa_dft.png new file mode 100644 index 
000000000..d92fba9b3
Binary files /dev/null and b/docs/source/_static/images/xps_etfa_dft.png differ
diff --git a/docs/source/_static/images/xps_etfa_exp.jpg b/docs/source/_static/images/xps_etfa_exp.jpg
new file mode 100644
index 000000000..f95c01f6c
Binary files /dev/null and b/docs/source/_static/images/xps_etfa_exp.jpg differ
diff --git a/docs/source/_static/images/xps_step_1.png b/docs/source/_static/images/xps_step_1.png
new file mode 100644
index 000000000..bc278e97a
Binary files /dev/null and b/docs/source/_static/images/xps_step_1.png differ
diff --git a/docs/source/_static/images/xps_step_2_setting_tab.png b/docs/source/_static/images/xps_step_2_setting_tab.png
new file mode 100644
index 000000000..4476bc01a
Binary files /dev/null and b/docs/source/_static/images/xps_step_2_setting_tab.png differ
diff --git a/docs/source/_static/images/xps_step_3.png b/docs/source/_static/images/xps_step_3.png
new file mode 100644
index 000000000..10d9edff8
Binary files /dev/null and b/docs/source/_static/images/xps_step_3.png differ
diff --git a/docs/source/_static/images/xps_step_4_output.png b/docs/source/_static/images/xps_step_4_output.png
new file mode 100644
index 000000000..af0b33b00
Binary files /dev/null and b/docs/source/_static/images/xps_step_4_output.png differ
diff --git a/docs/source/_static/images/xps_step_4_pa_exp.png b/docs/source/_static/images/xps_step_4_pa_exp.png
new file mode 100644
index 000000000..d27a16aca
Binary files /dev/null and b/docs/source/_static/images/xps_step_4_pa_exp.png differ
diff --git a/docs/source/_static/images/xps_step_4_xps_tab.png b/docs/source/_static/images/xps_step_4_xps_tab.png
new file mode 100644
index 000000000..41d0c7855
Binary files /dev/null and b/docs/source/_static/images/xps_step_4_xps_tab.png differ
diff --git a/docs/source/_static/logo.png b/docs/source/_static/logo.png
new file mode 100644
index 000000000..a55c0e093
Binary files /dev/null and b/docs/source/_static/logo.png differ
diff --git a/docs/source/_static/logo_dark.png b/docs/source/_static/logo_dark.png
new file mode 100644
index 000000000..4af08ca22
Binary files /dev/null and b/docs/source/_static/logo_dark.png differ
diff --git a/docs/source/_static/logos b/docs/source/_static/logos
new file mode 120000
index 000000000..a954d2141
--- /dev/null
+++ b/docs/source/_static/logos
@@ -0,0 +1 @@
+../../../miscellaneous/logos
\ No newline at end of file
diff --git a/docs/source/blogs/index.rst b/docs/source/blogs/index.rst
new file mode 100644
index 000000000..5f46e381f
--- /dev/null
+++ b/docs/source/blogs/index.rst
@@ -0,0 +1,13 @@
+.. _blog:
+
+###################
+Blogs
+###################
+
+
+
+.. toctree::
+   :maxdepth: 1
+
+   release_202404
+   release_202404_details
diff --git a/docs/source/blogs/release_202404.rst b/docs/source/blogs/release_202404.rst
new file mode 100644
index 000000000..df2c0108c
--- /dev/null
+++ b/docs/source/blogs/release_202404.rst
@@ -0,0 +1,56 @@
+.. _blogs:release_202404:
+
+************************
+QEApp 2024.04 Release
+************************
+
+The AiiDAlab team and the broader developer community are proud to present the AiiDAlab Quantum ESPRESSO app (QEApp) version 2024.04!
+
+This version represents a significant milestone in the evolution of QEApp, delivering new features and improvements across many aspects of the app.
+ +What's New +================================ + +New Plugins +---------------------- +Following the introduction of the plugin interface in the previous release of QEApp, we now introduce several new plugins that expand its capabilities. +These plugins address diverse computational needs and significantly extend the range of properties offered to QEApp users. + +.. figure:: /_static/images/qeapp_release_202404_plugins.png + :align: center + :alt: New plugins for AiiDAlab QEApp 2024.04 + +Plugin Management Page +---------------------- +The new plugin management page simplifies how users find and manage their plugins, making it easier to customize and enhance their computational environment. + +.. figure:: /_static/images/qeapp_release_202404_plugin_management.gif + :align: center + :alt: Plugin management interface + +Job Search Page +---------------------- +To facilitate better job management, we now provide a new job search page that allows users to efficiently search and manage the computational jobs run by QEApp. + +.. figure:: /_static/images/qeapp_release_202404_job_list.gif + :align: center + :alt: New job management page + +New Widgets +---------------------- +This release also introduces several new widgets, including tools for DFT+U calculations, magnetic settings, custom computational resources, and custom pseudopotential selection, further enhancing the functionality and user interface of QEApp. + +.. figure:: /_static/images/qeapp_release_202404_new_widgets.png + :align: center + :alt: New widgets in AiiDAlab QEApp + +For an in-depth look at all the updates, please visit :ref:`blogs:release_202404_details`. + +Acknowledgments +================================ +We extend our heartfelt thanks to all contributors from the AiiDAlab community, to the AiiDA developers, and to our supporting `organizations `_. Your dedication and contributions make these advancements possible. + +Happy calculations! + +The AiiDAlab Team +May 6th, 2024 diff --git a/docs/source/blogs/release_202404_details.rst b/docs/source/blogs/release_202404_details.rst new file mode 100644 index 000000000..bd59bf0ff --- /dev/null +++ b/docs/source/blogs/release_202404_details.rst @@ -0,0 +1,75 @@ +..
_blogs:release_202404_details: + +****************************** +QEApp 2024.04 Release Details +****************************** + +New features: + +- Support using total magnetization by @AndresOrtegaGuerrero in `#512 `_ +- Add plugin: XPS by @superstar54 in `#518 `_ +- Add new computational resource widget by @superstar54 in `#566 `_ +- Support DFT+U by @AndresOrtegaGuerrero in `#577 `_ +- Add plugin: XAS Plugin by @PNOGillespie in `#580 `_ +- New BandsPdosWidget by @AndresOrtegaGuerrero in `#581 `_ +- Add van der Waals correction widget by @AndresOrtegaGuerrero in `#620 `_ +- Add fat bands and optimize the BandPdosWidget by @t-reents in `#624 `_ +- Update the logo by @edan-bainglass in `#635 `_ +- Add plugin management page by @superstar54 in `#646 `_ +- Add job search page by @superstar54 in `#657 `_ +- Add label and description for the job by @superstar54 in `#670 `_ +- Add convergence thresholds by @AndresOrtegaGuerrero in `#699 `_ +- Allow QEApp to use HubbardStructureData by @AndresOrtegaGuerrero in `#718 `_ +- Support using custom pseudopotentials by @unkcpz in `#435 `_ + +New registered plugins: + +- `aiida-bader `_ by @superstar54 +- `aiidalab-qe-muon `_ by @mikibonacci +- `aiidalab-qe-vibroscopy `_ by @mikibonacci and @AndresOrtegaGuerrero + + +User-friendliness improvements: + +- Remove the load_profile warning by moving it to the front by @unkcpz in `#599 `_ +- Add an adjustable intensity factor for XPS spectra by @superstar54 in `#642 `_ +- Hide the kill button when the process is done by @superstar54 in `#648 `_ +- Add HP code and LiCoO2 example by @superstar54 in `#655 `_ +- Add reminder text when users are selecting properties by @superstar54 in `#663 `_ + + +Development improvements: + +- Add aiida-core version to CI and actions by @superstar54 in `#643 `_ +- Regression test for the parameters generated by the app by @superstar54 in `#644 `_ +- New plugin API: add an update_inputs to the plugin's workchain_and_builder by @superstar54 in `#656 `_ +- Add bumpver pre-commit by @danielhollas in `#688 `_ +- Use a Python script to set up all codes in one shot by @superstar54 in `#706 `_ + +Bug fixes: + +- Remove force_parity from create_kpoints_from_distance by @AndresOrtegaGuerrero in `#598 `_ +- Include the XAS yaml files in the package data by @superstar54 in `#609 `_ +- Fix hexagonal 2D path order by @AndresOrtegaGuerrero in `#619 `_ +- Fix pw settings for molecules in XPS calculations by @superstar54 in `#625 `_ +- Check whether the code exists in the DEFAULT_PARAMETERS by @superstar54 in `#660 `_ +- Assign clean_workdir to the plugin's builder by @superstar54 in `#667 `_ +- Check if the code is installed and usable by @superstar54 in `#669 `_ +- Update page footer by @AndresOrtegaGuerrero in `#710 `_ + + +Documentation: + +- Add XPS to the how-to guides by @superstar54 in `#438 `_ +- Update acknowledgements by @superstar54 in `#608 `_ +- Update README.md for release management by @unkcpz in `#612 `_ +- Add documentation page for the XAS plugin by @PNOGillespie in `#614 `_ +- Link logos to the docs folder by @superstar54 in `#616 `_ +- Add lithium carbonate to the list of examples by @PNOGillespie in `#622 `_ +- Make docstrings and documentation consistent by @superstar54 in `#649 `_ + +Dependency updates: + +- Update the aiida-quantumespresso plugin to 4.5.0 by @unkcpz in `#601 `_ +- Bump aiidalab-widgets-base with the new ipyoptimade by @unkcpz in `#632 `_ +- Drop py3.8, which AWB has already deprecated, by @unkcpz in `#633 `_ diff --git a/docs/source/conf.py index 3d88c4283..49ec3f654 100644 --- a/docs/source/conf.py +++ 
b/docs/source/conf.py @@ -9,15 +9,12 @@ # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. -# -# import os -# import sys import time -# sys.path.insert(0, os.path.abspath('.')) - # -- Project information ----------------------------------------------------- +version = "v24.10.0a3" +release = f"{version}-dev" project = "Quantum ESPRESSO App" copyright_first_year = "2023" copyright_owners = "The AiiDAlab Team" @@ -28,9 +25,7 @@ if current_year == copyright_first_year else f"{copyright_first_year}-{current_year}" ) -copyright = "{}, {}. All rights reserved".format( - copyright_year_string, copyright_owners -) +copyright = f"{copyright_year_string}, {copyright_owners}. All rights reserved" # noqa: A001 # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. @@ -75,15 +70,20 @@ # html_theme = "pydata_sphinx_theme" -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -html_logo = "_static/images/full_logo.png" - # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ["_static"] +# Using this instead of `html_logo` to handle light/dark modes +# See https://pydata-sphinx-theme.readthedocs.io +html_theme_options = { + "logo": { + "image_light": "logo.png", + "image_dark": "logo_dark.png", + }, +} + # If true, links to the reST sources are added to the pages. html_show_sourcelink = False diff --git a/docs/source/development/architecture.rst b/docs/source/development/architecture.rst index dced079e8..5627b6c78 100644 --- a/docs/source/development/architecture.rst +++ b/docs/source/development/architecture.rst @@ -46,10 +46,10 @@ The dictionary has the following structure: }, "pseudos": {"Si": "eaef3352-2b0e-4205-b404-e6565a88aec8"}, }, - "pseudo_family": "SSSP/1.2/PBEsol/efficiency", + "pseudo_family": "SSSP/1.3/PBEsol/efficiency", "kpoints_distance": 0.5, }, - "bands": {"kpath_2d": "hexagonal"}, + "bands": {}, "pdos": {...}, "plugin_1": {...}, "plugin_2": {...}, diff --git a/docs/source/development/index.rst b/docs/source/development/index.rst index b81d49d9e..712b0d37e 100644 --- a/docs/source/development/index.rst +++ b/docs/source/development/index.rst @@ -11,3 +11,4 @@ This guide explains the architecture of the application and how to extend the fu architecture plugin + plugin_registry diff --git a/docs/source/development/plugin.rst b/docs/source/development/plugin.rst index 3dbd12692..5199e70cf 100644 --- a/docs/source/development/plugin.rst +++ b/docs/source/development/plugin.rst @@ -98,14 +98,15 @@ Besides, you need to override the following methods: } def set_panel_value(self, input_dict): - """Set a dictionary with the input parameters for the plugin.""" + """Set the value of the widgets in the panel from the input dictionary.
+ This method is called when the user wants to reload the panel from the previous calculation, + or reset the panel to the default values.""" self.scale.value = input_dict.get("scale", 0.05) self.npoint.value = input_dict.get("npoint", 5) def reset(self): """Reset the input fields.""" - self.scale.value = 0.05 - self.npoint.value = 5 + self.set_panel_value({"scale": 0.05, "npoint": 5}) Result ----------------------- @@ -206,10 +207,10 @@ The `parameters` passed to the `get_builder` function has the following structur }, "pseudos": {"Si": "eaef3352-2b0e-4205-b404-e6565a88aec8"}, }, - "pseudo_family": "SSSP/1.2/PBEsol/efficiency", + "pseudo_family": "SSSP/1.3/PBEsol/efficiency", "kpoints_distance": 0.5, }, - "bands": {"kpath_2d": "hexagonal"}, + "bands": {}, "pdos": {...}, "eos": {...}, "plugin_1": {...}, @@ -224,7 +225,7 @@ The ``get_builder`` function will return a ``builder`` for the ``EOSWorkChain``, def get_builder(codes, structure, parameters, **kwargs): protocol = parameters["workchain"].pop('protocol', "fast") - pw_code = codes.get("pw") + pw_code = codes.get("pw")['code'] overrides = { "pw": parameters["advanced"], } @@ -239,8 +240,21 @@ The ``get_builder`` function will return a ``builder`` for the ``EOSWorkChain``, overrides=overrides, **kwargs, ) + # update resources + update_resources(builder, codes) return builder +The `update_resources` function is used to set the computational resources of the builder. It populates +the `metadata` of each CalcJob submitted within the workchain. For example, in the EOSWorkChain case: + +.. code-block:: python + + from aiidalab_qe.plugins.utils import set_component_resources + + def update_resources(builder, codes): + set_component_resources(builder.pw, codes.get("pw")) + +This function can and should be adapted to each plugin's specific case. Then add the workchain and builder into the `workchain_and_builder` dict, so that the Quantum ESPRESSO app can load them. .. code-block:: python @@ -333,9 +347,9 @@ Here is the example of the built-in `pdos` plugins with codes `dos.x` and `projw .. code-block:: python - from aiidalab_widgets_base import ComputationalResourcesWidget + from aiidalab_qe.common.widgets import QEAppComputationalResourcesWidget - dos_code = ComputationalResourcesWidget( + dos_code = QEAppComputationalResourcesWidget( description="dos.x", default_calc_job_plugin="quantumespresso.dos", ) @@ -361,4 +375,8 @@ Further Reading The Quantum ESPRESSO app comes with several built-in plugins, which can be found in the ``aiidalab_qe.plugins`` folder. You can also use them as a starting point to create your own plugins. + +You can register your plugin to facilitate its discovery and use by the community. +Please refer to the :doc:`Plugin registry ` for more details. + .. _aiidalab-qe-plugin-demos: https://github.com/aiidalab/aiidalab-qe-plugin-demos diff --git a/docs/source/development/plugin_registry.rst b/docs/source/development/plugin_registry.rst new file mode 100644 index 000000000..a7855ff03 --- /dev/null +++ b/docs/source/development/plugin_registry.rst @@ -0,0 +1,55 @@ + + +Plugin Registry +========================================= + +If you are in the process of creating a new plugin or already have one developed, you are encouraged to register your plugin here to become part of the official AiiDAlab Quantum ESPRESSO app plugin ecosystem. + +Registering Your Plugin +----------------------- + +To include your plugin in the registry, follow these steps: + +1. Fork this `repository `_. + +2.
Add your plugin to the `plugins.yaml` file. Place your entry at the end of the file, following this example: + + .. code-block:: yaml + + Top-level key: + title: "Description to show on top" + description: "Quantum ESPRESSO plugin for XYZ by AiiDAlab." + author: "Alice Doe" + github: "https://github.com/alicedoe/aiidalab-qe-xyz" + documentation: "https://aiidalab-qe-xyz.readthedocs.io/" + pip: "aiidalab-qe-xyz==version-of-the-code" + post-install: "post-install-command" + +3. Submit a Pull Request. Direct it to `this repository's Pull Requests section `_. + +Plugin Entry Requirements +------------------------- + +**Required Keys** + +- **Top-level key:** The plugin's distribution name, which should be lowercase and prefixed by ``aiidalab-`` or ``aiida-``. For example, ``aiidalab-qe-coolfeature`` or ``aiidalab-neutron``. +- **title:** Brief title to show on top of the plugin entry. It should name the main properties that can be computed with the plugin. +- **description:** A brief description of your plugin. It can provide more detail than the title. + +**Optional Keys** + +- **github:** If provided, this should be the URL to the plugin's GitHub homepage. + +At least one of ``github`` or ``pip`` is required. ``pip`` installation is preferred if both are provided, and "==version-of-the-code" can be omitted (though pinning the version is strongly suggested, to ensure compatibility). + +- **pip:** The PyPI package name for your plugin, useful for installation via pip. Example: ``aiida-quantum``. +- **documentation:** The URL to your plugin's online documentation, such as ReadTheDocs. +- **author:** The developer of the plugin. +- **post-install:** A post-install command-line interface (CLI) command, which should be defined inside your plugin if you need it. For example, in the ``aiidalab-qe-vibroscopy`` plugin, the phonopy code is set up automatically via this command. See below for more details. + +How to define a post-install command in your plugin +--------------------------------------------------------------------- +If you need to run a post-install command, you can define it in the CLI of your package. The command should be designed to be run as ``package-name post-install-command``. +To define the CLI, you can use the ``__main__.py`` file in your source folder and the ``pyproject.toml`` file. You can refer to the `aiidalab-qe-vibroscopy `_ plugin for an example of how to do this. +In that plugin, the automatic setup of the phonopy code is implemented. It assumes that the ``phonopy`` binary is already present on the machine, as the plugin installs it as a dependency. +The post-install command will be triggered after the installation of the plugin (only) from the plugin list page of the Quantum ESPRESSO app. diff --git a/docs/source/howto/index.rst b/docs/source/howto/index.rst index 4e6138b81..08361e63e 100644 --- a/docs/source/howto/index.rst +++ b/docs/source/howto/index.rst @@ -10,3 +10,5 @@ How-to guides setup_computer_code import_structure upgrade_uninstall + xas + xps diff --git a/docs/source/howto/xas.rst b/docs/source/howto/xas.rst new file mode 100644 index 000000000..e6273d333 --- /dev/null +++ b/docs/source/howto/xas.rst @@ -0,0 +1,197 @@ +============================== +How to calculate XANES spectra +============================== + +Overview +-------- + +Using the XSpectra module of Quantum ESPRESSO, it is possible to calculate X-ray absorption near-edge structure (XANES) spectra for many types of systems.
+Here we will compute XANES spectra for lithium carbonate (Li\ :sub:`2`\ CO\ :sub:`3`). + +Due to the number of calculation steps required, we will need to set up our environment to submit to a remote machine capable of handling the calculation. +Please refer to the relevant :doc:`How-To ` section for this procedure. + +.. admonition:: Goal + + To submit an XANES calculation with XSpectra and post-process the results + +.. note:: + The XAS plugin feature is available in the official AiiDAlab-QE app as of pre-release `v24.04.0a1` onwards. + See the relevant :doc:`How-To ` guide on upgrading and remember to tick `include prereleases` when searching for the latest version in the App Manager. + +Start +----- + +To start, go ahead and :doc:`launch ` the app, then follow the steps below. + +Step 1: Select a structure +************************** + +Select `Lithium Carbonate` from the `From Examples` tab and click `Confirm`. + +.. image:: ../_static/images/XAS_Plugin_Structure_Panel-Li2CO3.png + +Step 2: Configure the workflow +****************************** + +Select `Full geometry` to relax the structure and set `ElectronicType` to `Insulator`, then select `X-ray absorption spectroscopy (XAS)` as the properties of interest. + +For the protocol, select `fast` to quickly produce results, or, if you have enough resources, you can select the `moderate` protocol for increased accuracy. + +.. tip:: + For this example of Li\ :sub:`2`\ CO\ :sub:`3`, changing the **K-points distance** to 0.25 from 0.15 in the `advanced settings` tab while using the `moderate` protocol will speed up calculations without compromising accuracy. + +.. note:: + At present, the pseudopotential set available for XAS calculations only covers the PBE functional. + To run the workflow, select the `Advanced settings` tab, navigate to `Accuracy and precision`, tick the `Override` box on the right-hand side, and in the dropdown box under `Exchange-correlation functional` select `PBE`. + + .. image:: ../_static/images/XAS_Plugin-Set_Adv_Options-Alt-Annotated-Cropped.png + :scale: 55 % + :align: center + +Open the `XAS Settings` tab. +Selection of elements for XANES calculations is found below the explanatory text for the core-hole treatment. +You may wish to test different treatments for each element at a later date to see how this changes the spectra; however, for this example we will use the default values. + + .. image:: ../_static/images/XAS_Plugin_Setting_Panel-Annotated-Cropped.png + :scale: 75 % + :align: center + +Tick the boxes for O and C to select these for calculation, then click `Confirm` to proceed. + +Step 3: Choose computational resources +*************************************** + +As mentioned in the overview, our calculation will require more computational resources than the basic tutorial. +Please make sure to read the relevant :doc:`How-To ` section to configure the environment with a remote machine. + +Since the workflow uses `xspectra.x` to calculate the XANES spectrum, a code for this will also be required. +When you're done, click `Submit` to submit the calculation. + +.. tip:: + `xspectra.x` does not require additional considerations for installation or setup compared to `pw.x`, so re-using the configuration for the `pw.x` code and changing the executable & plugin entry point will be sufficient. + +.. note:: + As the XSpectra module of Quantum ESPRESSO is not currently able to exploit GPU acceleration, it is strongly recommended to configure this calculation for a non-GPU system if possible.
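If you prefer to create the `xspectra.x` code from a terminal rather than through the app's resource setup widget, AiiDA's CLI can do this directly. Below is a minimal sketch, assuming a computer labelled ``my-cluster`` is already configured; the code label and executable path are illustrative and should be adapted to your machine:

.. code-block:: console

    verdi code create core.code.installed \
        --label xspectra \
        --computer my-cluster \
        --default-calc-job-plugin quantumespresso.xspectra \
        --filepath-executable /path/to/xspectra.x

The ``quantumespresso.xspectra`` entry point is what distinguishes this code from the `pw.x` one; the remaining settings can mirror your existing `pw.x` configuration, as noted in the tip above.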
+ +Step 4: Check the status +************************ + +While the calculation is running, you can monitor its status as shown in the :ref:`basic tutorial `. +You can view the results once the calculation is finished. + +Step 5: Spectrum view and post-processing +***************************************** + +Once the calculation is finished, you can view the calculated spectra in the `XAS` tab of the results panel. +You can change which element to view XANES spectra for using the dropdown box in the top left. +Select carbon from the dropdown box. + + .. figure:: ../_static/images/XAS_Plugin_Result_Panel-Carbon-Annotated-Cropped.png + :scale: 65 % + :align: center + + XAS result panel for the carbon K-edge of Li\ :sub:`2`\ CO\ :sub:`3`. + +.. note:: + You should notice that "C K-edge" and "Site 4" are listed in the legend to the right of the plot - this is because all carbon atoms in the structure are symmetrically equivalent and thus will produce the same spectrum. + The workflow has accounted for this and only calculates the spectrum of the first carbon atom (site number 4 in the structure). + +Immediately below the element selection box are the broadening parameters. +The XANES spectrum returned by the workflow will initially have a Lorentzian broadening of 0.1 eV. +As broadening parameters cannot be calculated from first principles, we will tune these parameters by hand. +We will first compare to an experimentally obtained C K-edge spectrum of Li\ :sub:`2`\ CO\ :sub:`3`. + +Try changing the first slider (:math:`\Gamma_{hole}`). +This will initially apply a constant Lorentzian broadening for the entire spectrum. +Comparing to the experimental reference for carbon, we can see that it is difficult to effectively recreate the experimental spectrum with a constant Lorentzian broadening scheme. +Setting this to 0 eV will plot the spectrum with no post-processing. + +Navigate to the upper center of the XAS panel and tick the box next to `use variable energy broadening`, which will change the behaviour of the broadening tools to use an arctangent-like function commonly used for broadening XANES spectra (see `Calandra & Bunau (2013)`_\ [1]_ for further discussion). +Set the three sliders in the following configuration: + +* :math:`\Gamma_{hole} = 0.3` +* :math:`\Gamma_{max} = 5.0` +* :math:`E_{center} = 15` + +The resulting spectrum should now more closely resemble the features seen in the experimental example: + +.. figure:: ../_static/images/Li2CO3_Example-C_K-edge-XCH_Only-Cropped.png + :scale: 75 % + :align: center + + Carbon K-edge XRS (low-q)\ [2]_ of Li\ :sub:`2`\ CO\ :sub:`3` compared to the XANES dipole computed with the XCH approximation. + Note that computed and experimental spectra are aligned according to the first peak of the signal in this case. + +.. tip:: + For advice with parameter tuning: + + * :math:`\Gamma_{hole}` sets the initial Lorentzian broadening value up to the Fermi level (:math:`E_{F}`, where :math:`E_{F} = 0` eV on the relative energy scale used here). The natural linewidth of the core-hole (if known) typically provides a good reference value (`reference for atomic numbers 10-110`_). + * :math:`\Gamma_{max}` sets the "sharpness" of the s-curve of the function - lower values give a smoother change at the inflexion point, while higher values cause the broadening to increase more quickly at the inflexion point. + * :math:`E_{center}` sets the energy position of the inflexion point of the function.
+ + The variable energy function (:math:`\Gamma(\Omega)`) and its parameters can be visualised in the following plot (from Fig. 1 of `Calandra & Bunau (2013)`_\ [1]_): + + .. image:: ../_static/images/Calandra_Bunau-PRB-205105-2013-gamma_func_plot.png + :scale: 33 % + :align: center + + +Next, select the oxygen K-edge spectrum using the dropdown box in the upper left. +With the broadening scheme used for carbon, the spectrum should already resemble the experimental spectrum quite well, though you may try to tune the parameters further if desired - particularly increasing the initial broadening (:math:`\Gamma_{hole}`): + +.. figure:: ../_static/images/Li2CO3_Example-O_K-edge-FCH_Only-Cropped.png + :scale: 75 % + :align: center + + O K-edge total electron yield (TEY)\ [3]_ XAS spectrum compared to the XANES dipole computed with the FCH approximation. + Here, the broadening scheme used for carbon is modified such that :math:`\Gamma_{hole} = 0.8` eV. + Note that computed and experimental spectra are aligned according to the first peak of the signal in this case. + +In the plot window, you should be able to see three different plots: one for the full O K-edge and one for each of the two symmetrically-inequivalent oxygen atoms. +The component spectra in each case are first normalised, then the intensities are scaled according to the site multiplicity. + +.. image:: ../_static/images/XAS_Plugin_Result_Panel-Oxygen.png + +Click on a spectrum in the legend to show/hide it in the viewer. +Click and drag a box over the plot area to zoom in to the selected region. +Double-click to zoom out to the full spectrum. + +Finally, click on the "Download CSV" button at the upper left of the plot area to download a CSV file of the XAS plots for the selected element, in order to export the spectra for further analysis. + +.. note:: + The CSV file will contain all component spectra for the selected element. + Any broadening applied to the spectrum *via* the available tools will be applied to the data in the CSV file. + If multiple inequivalent absorbing atoms are present, the CSV file will contain one column for the total and two for each component: + + * The normalised and weighted spectrum (weighted by the ratio of site multiplicity to total multiplicity). + * The normalised, un-weighted spectrum. + +Additional Note on Charged Systems +---------------------------------- +Computing XANES spectra for systems where a charge has been applied (in this case using the `Total charge` advanced setting) is possible using the tools +available in the QE App; however, such workflows should always be tested by the user against experimental data if possible. + +When running XAS workflows for systems where a total charge has been applied, it is suggested to use the following settings for the total charge to ensure the corresponding +core-hole treatment is applied correctly: + +* "xch_fixed" or "xch_smear": Set the total charge as required for the system's charged ground-state. +* "full": **Increase** the total charge by 1 *relative* to the system's charged ground-state (e.g. set total charge = 2 in the advanced settings tab if the charge is normally 1). + +Note that for neutral systems (total charge = 0), the QE App will handle these settings automatically. + +Summary +------- + +Here, you learned how to submit an XANES calculation on a remote machine using the Quantum ESPRESSO app and how to effectively use the post-processing tools. + +.. rubric:: References + +.. [1] O\. Bunau and M. Calandra, *Phys. Rev.
B*, **87**, 205105 (2013) https://dx.doi.org/10.1103/PhysRevB.87.205105 +.. [2] E\. de Clermont Gallerande *et al.*, *Phys. Rev. B*, **98**, 214104 (2018) https://dx.doi.org/10.1103/PhysRevB.98.214104 +.. [3] R\. Qiao *et al.*, *PLoS ONE*, **7**, e49182 (2012) https://dx.doi.org/10.1371/journal.pone.0049182 + +.. _reference for atomic numbers 10-110: https://dx.doi.org/10.1063/1.555595 +.. _inelastic mean free path: https://dx.doi.org/10.1002/sia.740010103 +.. _Calandra & Bunau (2013): https://dx.doi.org/10.1103/PhysRevB.87.205105 +.. _PEP 440 version specifier: https://www.python.org/dev/peps/pep-0440/#version-specifiers diff --git a/docs/source/howto/xps.rst b/docs/source/howto/xps.rst new file mode 100644 index 000000000..f8ac2ddda --- /dev/null +++ b/docs/source/howto/xps.rst @@ -0,0 +1,161 @@ +============================ +How to calculate XPS spectra +============================ + +Overview +======== +This tutorial will guide you through the process of setting up and running an XPS calculation for the phenylacetylene molecule. + + +Steps +===== + +To start, go ahead and :doc:`launch ` the app, then follow the steps below. + + +Step 1: Select a structure +-------------------------------- +For this tutorial task, please use the `From Examples` tab and select the phenylacetylene molecule structure. + +Click the `Confirm` button to proceed. + +.. figure:: /_static/images/xps_step_1.png + :align: center + + +Step 2: Configure the workflow +-------------------------------- + +In the **Basic Settings** tab, set the following parameters: + +- In the **Structure optimization** section, select ``Structure as is``. +- Set **Electronic Type** to ``Insulator``. +- In the **properties** section, select ``X-ray photoelectron spectroscopy (XPS)``. + + +Then go to the **Advanced settings** tab, navigate to `Accuracy and precision`, tick the `Override` box on the right-hand side, and in the dropdown box under `Exchange-correlation functional` select `PBE`. + +.. image:: ../_static/images/XAS_Plugin-Set_Adv_Options-Alt-Annotated-Cropped.png + :align: center + + +.. note:: + At present, core-hole pseudopotentials for Si and O are only available for the PBE functional. + +Then go to the **XPS setting** tab and, in the **Select core-level** section, select ``C_1s`` by ticking the appropriate box. + +.. image:: ../_static/images/xps_step_2_setting_tab.png + :align: center + + +Click the **Confirm** button to proceed. + + +Step 3: Choose computational resources +--------------------------------------- +We need to use a `pw` code on a high-performance computer to run the XPS calculation for this system. +Please read the relevant :doc:`How-To ` section to set up the code on a remote machine. + +.. image:: ../_static/images/xps_step_3.png + :align: center + + +Then, click the **Submit** button. + + + +Step 4: Check the status and results +----------------------------------------- +The job may take 5-10 minutes to finish if it runs immediately without waiting in the queue. + +While the calculation is running, you can monitor its status as shown in the :ref:`basic tutorial `. +When the job is finished, you can view the resulting spectra in the `XPS` tab. + +.. tip:: + + If the `XPS` tab is not shown when the job is finished, click the ``QeAppWorkChain`` item on top of the node tree to refresh the step. + +Here is the result of the XPS calculation. +You can click the **Binding energy** button to view the calculated binding energies.
+You can change which element to view XPS spectra for using the dropdown box in the top left. + +.. figure:: /_static/images/xps_step_4_xps_tab.png + :align: center + +You can upload an experimental XPS spectrum and compare it to the calculated one; there is a button at the bottom left of the XPS tab to upload the experimental data. +Here is an example of the comparison between the calculated and experimental XPS spectra [1] for the C_1s core level of phenylacetylene. + +.. figure:: /_static/images/xps_step_4_pa_exp.png + :align: center + +The calculated spectrum agrees well with the experimental data, underscoring the reliability of DFT calculations. + + +.. tip:: + + One can also read the exact binding energies from the output of the calculation by clicking the `outputs` tab on the node tree of the WorkChain, as shown below. + + .. figure:: /_static/images/xps_step_4_output.png + :align: center + + + The DFT-calculated binding energies do not include the spin-orbit splitting of the core-level state, but we can add it using its experimental value. + Taking an `f` orbital as an example, we subtract :math:`3/7` of the experimental spin-orbit splitting from the DFT-calculated binding energy to obtain the position of the :math:`4f_{7/2}` peak, or add :math:`4/7` of it to obtain the :math:`4f_{5/2}` peak. For example, with a DFT-calculated binding energy of 100.0 eV and an experimental splitting of 3.5 eV, the :math:`4f_{7/2}` peak lies at :math:`100.0 - 1.5 = 98.5` eV and the :math:`4f_{5/2}` peak at :math:`100.0 + 2.0 = 102.0` eV. Here is a table of the fractions for different orbitals. + + +----------------+-------------------+-------------------+ + | Orbital | Subtracting | Adding | + +================+===================+===================+ + | 1s | 0 | 0 | + +----------------+-------------------+-------------------+ + | 2p | :math:`1/3` | :math:`2/3` | + +----------------+-------------------+-------------------+ + | 3d | :math:`2/5` | :math:`3/5` | + +----------------+-------------------+-------------------+ + | 4f | :math:`3/7` | :math:`4/7` | + +----------------+-------------------+-------------------+ + + + +Congratulations, you have finished this tutorial! + + +Another example +==================== +ETFA is commonly used as an example for XPS measurements and calculations due to the extreme chemical shifts of its four different carbon atoms. [2] + +.. tip:: + + One can select the ETFA molecule from the `From Examples` tab and follow the same steps as above to run the XPS calculation for this molecule. + +Here is the result of the XPS calculation for the ETFA molecule. + +.. figure:: /_static/images/xps_etfa_dft.png + :align: center + +Here are the chemical shifts from experiment. [2] + +.. figure:: /_static/images/xps_etfa_exp.jpg + :align: center + + +The calculated relative shifts align well with the trends observed in experimental data, underscoring the reliability of DFT calculations. +Although there are minor discrepancies in the absolute shift values, this is a recognized limitation stemming from the approximations in the exchange-correlation functional within DFT frameworks. [3] + +Questions +========= + +If you have any questions, please do not hesitate to ask on the AiiDA discourse forum: https://aiida.discourse.group/. + + + +References +========== + +[1] V. Carravetta *et al.*, *Chem. Phys.* 264, 175 (2001) https://doi.org/10.1016/S0301-0104(00)00396-7 + +[2] O. Travnikova *et al.*, *J. Electron Spectrosc. Relat. Phenom.* 185, 191 (2012) https://doi.org/10.1016/j.elspec.2012.05.009 + +[3] B.P. Klein *et al.*, *J. Phys. Condens.
Matter* 33, 154005 (2021) https://doi.org/10.1088/1361-648X/abdf00 diff --git a/docs/source/index.rst b/docs/source/index.rst index c7ffcf5ec..f7354df36 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -11,7 +11,7 @@ Welcome Page :child-align: justify :class: sd-fs-5 - .. rubric:: The Quantum ESPRESSO App - Version 23.10.0a0 + .. rubric:: The Quantum ESPRESSO App - Version |release| The Quantum ESPRESSO (QE) app is an `AiiDAlab`_ Jupyter-based graphical user interface allowing users to conduct and streamline many of `Quantum ESPRESSO`_'s calculations by leveraging `AiiDA`_'s powerful workflows implemented in the AiiDA Quantum ESPRESSO plugin. @@ -20,7 +20,7 @@ Welcome Page .. image:: _static/images/icon.svg :width: 200px - :class: sd-m-auto + :class: sd-m-auto dark-light ---- @@ -95,6 +95,23 @@ Welcome Page To the development guide + .. grid-item-card:: Blogs + :text-align: center + :shadow: md + + Read about the latest releases and updates of the app + + ++++ + + .. button-ref:: blogs/index + :ref-type: doc + :click-parent: + :expand: + :color: primary + :outline: + + To the blogs + .. grid-item-card:: Report an Issue + :text-align: center + :shadow: md @@ -120,6 +137,7 @@ Welcome Page tutorials/index howto/index development/index + blogs/index report_issue ---- @@ -140,3 +158,22 @@ Acknowledgements ================ We acknowledge the `AiiDAlab`_ team for the development of the Quantum ESPRESSO app. + +We also acknowledge support from: + +- the European Union's Horizon 2020 research and innovation programme (Grant No. 957189, `project BIG-MAP `_); +- the `MARVEL National Centre for Competency in Research `_ funded by the `Swiss National Science Foundation `_; +- the MARKETPLACE project funded by `Horizon 2020 `_ under the H2020-NMBP-25-2017 call (Grant No. 760173); +- the `MaX European Centre of Excellence `_ funded by the Horizon 2020 EINFRA-5 program (Grant No. 676598). + +.. raw:: html + +
    + MARVEL + MaX + MarketPlace +
    +
    + BIG-MAP + EU +
diff --git a/docs/source/installation/access_aiidalab/container.rst b/docs/source/installation/access_aiidalab/container.rst new file mode 100644 index 000000000..af4a94855 --- /dev/null +++ b/docs/source/installation/access_aiidalab/container.rst @@ -0,0 +1,149 @@ +===================== +Local Docker Instance +===================== + +.. note:: + + If you used another method to access AiiDAlab, you may proceed to :doc:`install the app `. + +AiiDAlab is available as a Docker container - a self-contained, pre-configured environment including all the necessary software to access the AiiDAlab platform. +Conveniently, we provide a blueprint (image) for such a container with the Quantum ESPRESSO app pre-installed and ready for use. To run the container, you first need to `install Docker `_ on your local machine. + +.. important:: + + The Docker installation link above walks you through installing Docker Desktop - a convenient graphical user interface to Docker. However, if you have instead chosen to install the `docker engine `_ directly for use via a terminal, then **(and only if)** you are on a Linux system, you will need `root` privileges to perform the `post-installation steps for Docker Engine `_. + +Once Docker is installed, you can launch the container in one of several ways depending on your operating system. +This is discussed in the following sections. + +Docker Desktop +************** + +If you use Windows, you may choose to use the Docker Desktop interface to launch the container. +Once Docker Desktop is installed (see above link), follow these instructions to spin up an AiiDAlab container. + +#. Open the Docker Desktop app +#. On the left sidebar, click on *Images* +#. In the search bar at the top of the app, type ``aiidalab/qe`` +#. Select ``latest`` from the *tag* dropdown menu +#. Click *Pull* to download the image + + * Once downloaded, the image will appear as a new line in the list of images + * Exit the search menu when done + +#. At the far right column of the new image line, under actions, click ▢️ to start a container instance +#. In the pop-up window, expand *optional settings* +#. You may choose to name the container for easy reference (randomly generated otherwise) +#. Choose a local port from which to communicate with the container's 8888 port +#. Set up the following local volumes: **see notes below** + + * ``<local-docker-volumes-dir>\aiidalab_qe_home`` --> ``/home/jovyan`` + * ``<local-docker-volumes-dir>\aiidalab_qe_conda`` --> ``/home/jovyan/.conda`` + + .. note:: + + ``local-docker-volumes-dir`` can be any local directory in which to store Docker volumes, for example ``C:\Users\<username>\Docker\`` + +#. Click *Run* to start the container +#. On the left sidebar, click on *Containers* +#. Click on the name of your newly-created container +#. Wait for the container build process to finish + + * The log will show a line ``To access the notebook, open this file in a browser:`` + +#. Click on the ``<port>:8888`` link at the top of the app to open AiiDAlab in the browser +#. Copy and paste the container's token to the browser and submit to open AiiDAlab + + * The token can be found at the bottom of the log in a line similar to ``...?token=<token>`` + + .. note:: + + Subsequent connections to the port in the browser will not prompt for a token for some time. If and when it does, you may again retrieve the token from the log. + +.. 
important:: + + To avoid losing your work when the container shuts down (manually, or when the machine is turned off), it is important to associate the container with a volume - a local directory - with which the container's data is mirrored. When set up, the container will restart from this mirrored volume. + +Note that Docker Desktop is also available for macOS and Linux. +However, you may prefer to use the command line interface (CLI). +If so, proceed to the following sections for instructions. + +AiiDAlab launch +*************** + +.. important:: + + The following steps require a local installation of Docker. You can verify your Docker installation by running ``docker run hello-world`` in the terminal. + +`AiiDAlab launch`_ is a thin Docker wrapper which takes care of all the prerequisites to run the AiiDAlab Docker image. +It helps to manage multiple AiiDAlab profiles, each with its own home directory for persistent storage, and allows you to easily switch between them. +To use AiiDAlab launch, you will have to + +#. Install AiiDAlab launch with `pipx `_ (**recommended**): + + .. code-block:: console + + pipx install aiidalab-launch + + or directly with pip + + .. code-block:: console + + pip install aiidalab-launch + +#. Set up a new `QE` profile with + + .. code-block:: console + + aiidalab-launch profile add --image aiidalab/qe:latest QE + + At the prompt, enter `n` to skip editing the profile settings. + +#. Start AiiDAlab with + + .. code-block:: console + + aiidalab-launch start -p QE + +#. Follow the URL on the screen to open AiiDAlab in the browser + +.. tip:: + + For more detailed help, run + + .. code-block:: console + + aiidalab-launch --help + +Profile Management +^^^^^^^^^^^^^^^^^^ + +As shown above, you can manage multiple profiles in AiiDAlab launch, e.g., with different home directories or ports. For more information, run + +.. code-block:: console + + aiidalab-launch profiles --help + +You can inspect the status of all configured AiiDAlab profiles with + +.. code-block:: console + + aiidalab-launch status + +.. _`AiiDAlab launch`: https://github.com/aiidalab/aiidalab-launch + +Using docker CLI directly +************************* + +It is not necessary to use AiiDAlab launch to run the AiiDAlab container. +You can also use the docker CLI directly by running + +.. code-block:: console + + docker run -p 8888:8888 aiidalab/qe:latest + +Follow the URL on the screen to open AiiDAlab in the browser. + +.. important:: + + If you use the docker CLI directly, the data in the home directory of the container will be lost when the container is deleted. You can use the ``-v`` option to mount a local directory to the container to store the data persistently. For more information, run ``docker run --help``. diff --git a/docs/source/installation/aiidalab/index.rst b/docs/source/installation/access_aiidalab/index.rst similarity index 76% rename from docs/source/installation/aiidalab/index.rst rename to docs/source/installation/access_aiidalab/index.rst index 9bd26240f..9f962cdd6 100644 --- a/docs/source/installation/aiidalab/index.rst +++ b/docs/source/installation/access_aiidalab/index.rst @@ -9,71 +9,55 @@ Access AiiDAlab .. grid:: 1 1 1 2 :gutter: 3 - .. grid-item-card:: Local Docker Instance + .. grid-item-card:: Materials Cloud AiiDAlab Server :text-align: center :shadow: md - Install Docker locally and run an instance of an AiiDAlab image *pre-configured** for the Quantum ESPRESSO app. No prior knowledge of Docker necessary!
+ For researchers affiliated with Materials Cloud partners, log into the open AiiDAlab server hosted on the Materials Cloud. ++++ - .. button-ref:: docker - :ref-type: doc + .. button-link:: https://aiidalab.materialscloud.org/hub/login :click-parent: :expand: :color: primary :outline: - To the guide + Launch the server - .. grid-item-card:: Virtual Machine Image + .. grid-item-card:: Local Docker Instance :text-align: center :shadow: md - Download a virtual machine image for AiiDAlab based on Quantum Mobile, *pre-configured** with everything you need to run the Quantum ESPRESSO app. + Install Docker locally and run an instance of an AiiDAlab image *pre-configured** for the Quantum ESPRESSO app. No prior knowledge of Docker necessary! ++++ - .. button-ref:: vm + .. button-ref:: container :ref-type: doc :click-parent: :expand: :color: primary :outline: - To the download page - - .. grid-item-card:: Materials Cloud AiiDAlab Server - :text-align: center - :shadow: md - - For researchers affiliated with Materials Cloud partners, log into the open AiiDAlab server hosted on the Materials Cloud. - - ++++ - - .. button-link:: https://aiidalab.materialscloud.org/hub/login - :click-parent: - :expand: - :color: primary - :outline: - - Launch the server + To the guide - .. grid-item-card:: Materials MarketPlace AiiDAlab Server + .. grid-item-card:: Virtual Machine Image :text-align: center :shadow: md - For members of the Materials Modeling MarketPlace, log into the open AiiDAlab server hosted on the Materials MarketPlace. + Download a virtual machine image for AiiDAlab based on Quantum Mobile, *pre-configured** with everything you need to run the Quantum ESPRESSO app. ++++ - .. button-link:: https://aiidalab.materials-marketplace.eu/hub/login + .. button-ref:: vm + :ref-type: doc :click-parent: :expand: :color: primary :outline: - Launch the server + To the download page .. div:: @@ -83,6 +67,5 @@ Access AiiDAlab :maxdepth: 1 :hidden: - docker - launch + container vm diff --git a/docs/source/installation/access_aiidalab/vm.rst b/docs/source/installation/access_aiidalab/vm.rst new file mode 100644 index 000000000..c79217185 --- /dev/null +++ b/docs/source/installation/access_aiidalab/vm.rst @@ -0,0 +1,57 @@ +===================== +Virtual Machine Image +===================== + +.. note:: + + If you used another method to access AiiDAlab, you may proceed to :doc:`install the app `. + +The AiiDAlab Quantum ESPRESSO virtual machine image is based on the `Quantum Mobile `_ image and is available for the following architectures: + ++ Intel x86_64 ++ Mac computer with Apple Silicon + +See the following sections for instructions on how to download and run the virtual machine image on your computer. + +Intel x86_64 +------------ + +Get the AiiDAlab Quantum ESPRESSO virtual machine running on your computer in three simple steps: + +#. Download the virtual machine image (4.09 GB) + + + URL: https://bit.ly/48qUay9 (Google drive: https://bit.ly/3Nyf4Dt) + + Filename: `quantum_mobile_23.10.0-qeapp.ova` + + MD5 hash: `5c80d9ab2458f2edac30e97dc2fe36e7` + +#. Install VirtualBox 6.1.6 or later (see https://www.virtualbox.org) +#. Import the virtual machine image into VirtualBox (11.2 GB): File => **Import Appliance** + +Login credentials: + ++ username: `max` ++ password: `moritz` + +The default configuration of 4 cores and 4096 MB RAM can be adjusted in the VM settings.
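Optionally, you can verify the integrity of the downloaded image by comparing it against the MD5 hash listed above. A minimal check on Linux, assuming the file is in the current directory (on macOS, use ``md5`` instead of ``md5sum``):

.. code-block:: console

    md5sum quantum_mobile_23.10.0-qeapp.ova
    # expected: 5c80d9ab2458f2edac30e97dc2fe36e7  quantum_mobile_23.10.0-qeapp.ova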
+ +Mac computer with Apple Silicon +------------------------------- + +If you have an Apple silicon Mac computer with M1/M2 chips, you can use `UTM `_ to run the AiiDAlab Quantum ESPRESSO virtual machine. The steps are as follows: + +#. Download the compressed virtual machine image (5.9 GB) + + + URL: https://bit.ly/477mMeR (Google Drive: https://bit.ly/486rKtP) + + Filename: `quantum_mobile_23.10.0-qeapp.utm.zip` + + MD5 hash: `44ea9189d788737459c31bf330366926` + +#. Decompress the zip file. +#. Install UTM (see https://mac.getutm.app/) +#. Import the image into UTM by clicking on the **Create a New Virtual Machine** button and selecting the `quantum_mobile_23.10.0-qeapp.utm` file. + +Login credentials: + ++ username: `max` ++ password: `moritz` + +The default configuration of 4 cores and 4096 MB RAM can be adjusted in the VM settings. diff --git a/docs/source/installation/aiidalab/docker.rst b/docs/source/installation/aiidalab/docker.rst deleted file mode 100644 index 966e8421f..000000000 --- a/docs/source/installation/aiidalab/docker.rst +++ /dev/null @@ -1,35 +0,0 @@ -============== -Install Docker -============== - -.. note:: - - If you used another method to access AiiDAlab, you may proceed to :doc:`install the app `. - -AiiDAlab is available as a Docker container - a self-contained, pre-configured environment including all the necessary software to access the AiiDAlab platform. To run the container, you first need to install Docker on your local machine. If you have yet to do so, you may click `here `_ to follow the official Docker installation guide. - -.. important:: - - On Linux, you need `root` privileges to perform the `post-installation steps for Docker Engine `_. - -Once Docker is installed, you can launch the container in one of several ways depending on your operating system. This is discussed in the following sections. - -Linux -***** - -You have two options to launch the container: - -#. Execute the following from terminal: - - .. code-block:: console - - docker run -p 8888:8888 aiidalab/qe:edge - -#. Use the :doc:`aiidalab-launch ` (**recommended**) - -Windows/Mac -*********** - -.. note:: - - Instructions for using the `Docker Desktop `_ app coming soon diff --git a/docs/source/installation/aiidalab/launch.rst b/docs/source/installation/aiidalab/launch.rst deleted file mode 100644 index 95bcebbd0..000000000 --- a/docs/source/installation/aiidalab/launch.rst +++ /dev/null @@ -1,66 +0,0 @@ -=============== -AiiDAlab launch -=============== - -.. note:: - - If you ran the container directly using the :code:`docker` command, feel free to skip this page. - -.. important:: - - The following steps require a local installation of Docker. If you have yet to do so, you can follow the instructions :doc:`here `. - -`AiiDAlab launch`_ is a thin Docker wrapper which takes care of all the prerequisites to run the AiiDAlab Docker image. To use AiiDAlab launch, you will have to - -#. Install AiiDAlab launch with `pipx `_ (**recommended**): - - .. code-block:: console - - pipx install aiidalab-launch - - or directly with pip - - .. code-block:: console - - pip install aiidalab-launch - -#. Set up a new `QE` profile with - - .. code-block:: console - - aiidalab-launch profile add --image aiidalab/qe:edge QE - - At the prompt, enter `n` to skip editing the profile settings. - -#. Start AiiDAlab with - - .. code-block:: console - - aiidalab-launch start -p QE - -#. Follow the URL on the screen to open AiiDAlab in the browser - -.. tip:: - - For more detailed help, run - - .. 
code-block:: console - - aiidalab-launch --help - -Profile Management -^^^^^^^^^^^^^^^^^^ - -As shown above, you can manage multiple profiles in AiiDAlab launch, e.g., with different home directories or ports. For more information, run - -.. code-block:: console - - aiidalab-launch profiles --help - -You can inspect the status of all configured AiiDAlab profiles with - -.. code-block:: console - - aiidalab-launch status - -.. _`AiiDAlab launch`: https://github.com/aiidalab/aiidalab-launch diff --git a/docs/source/installation/aiidalab/vm.rst b/docs/source/installation/aiidalab/vm.rst deleted file mode 100644 index a3a543b38..000000000 --- a/docs/source/installation/aiidalab/vm.rst +++ /dev/null @@ -1,12 +0,0 @@ -===================== -Virtual Machine Image -===================== - -.. note:: - - If you used another method to access AiiDAlab, you may proceed to :doc:`install the app `. - -The AiiDAlab virtual image based on `Quantum Mobile `_ is available for the following architectures: - -#. M1/AMD64 - link coming soon -#. All other - link coming soon diff --git a/docs/source/installation/index.rst b/docs/source/installation/index.rst index 3df7803ce..6f45fed97 100644 --- a/docs/source/installation/index.rst +++ b/docs/source/installation/index.rst @@ -7,6 +7,6 @@ The following links will guide you through the necessary steps to access the Aii .. toctree:: :maxdepth: 1 - aiidalab/index + access_aiidalab/index install launch diff --git a/docs/source/tutorials/index.rst b/docs/source/tutorials/index.rst index 28d73c503..0c5e267bd 100644 --- a/docs/source/tutorials/index.rst +++ b/docs/source/tutorials/index.rst @@ -5,7 +5,7 @@ Tutorials .. important:: - Before you get started, make sure you've :doc:`accessed ` AiiDAlab and :doc:`installed ` the Quantum ESPRESSO app. + Before you get started, make sure you've :doc:`accessed ` AiiDAlab and :doc:`installed ` the Quantum ESPRESSO app. .. 
grid:: 1 1 1 2 :gutter: 3 diff --git a/job_list.ipynb b/job_list.ipynb new file mode 100644 index 000000000..cedc6d0c8 --- /dev/null +++ b/job_list.ipynb @@ -0,0 +1,66 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# AiiDAlab QuantumESPRESSO App" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "%%capture\n", + "from aiida import load_profile\n", + "\n", + "load_profile()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from aiidalab_qe.app.utils.search_jobs import QueryInterface\n", + "\n", + "job_history = QueryInterface()\n", + "job_history.setup_table()\n", + "job_history.filters_layout" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "job_history.table" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.13" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/miscellaneous/logos/EU_flag.png b/miscellaneous/logos/EU_flag.png new file mode 100644 index 000000000..b18a700b1 Binary files /dev/null and b/miscellaneous/logos/EU_flag.png differ diff --git a/miscellaneous/logos/bigmap_logo.png b/miscellaneous/logos/bigmap_logo.png new file mode 100644 index 000000000..91f555016 Binary files /dev/null and b/miscellaneous/logos/bigmap_logo.png differ diff --git a/plugin_list.ipynb b/plugin_list.ipynb new file mode 100644 index 000000000..05a5c8108 --- /dev/null +++ b/plugin_list.ipynb @@ -0,0 +1,314 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## AiiDAlab Quantum ESPRESSO Plugin manager\n", + "\n", + "This page lets you manage the plugins of the AiiDAlab Quantum ESPRESSO app. You can find below all plugins available in the official [AiiDAlab Quantum ESPRESSO plugin registry](https://github.com/aiidalab/aiidalab-qe/blob/main/plugins.yaml) (click [here](https://aiidalab-qe.readthedocs.io/development/plugin_registry.html) to learn how to register a new plugin, if you are developing one). 
You can install and uninstall plugins from this page.\n", + "\n", + "\n", + "### Available plugins\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from pathlib import Path\n", + "\n", + "import yaml\n", + "\n", + "# Get the current working directory\n", + "cwd = Path.cwd()\n", + "# Define a relative path\n", + "relative_path = \"plugins.yaml\"\n", + "# Resolve the relative path to an absolute path\n", + "yaml_file = cwd / relative_path\n", + "\n", + "# Load the YAML content\n", + "with yaml_file.open(\"r\") as file:\n", + " data = yaml.safe_load(file)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import subprocess\n", + "import sys\n", + "from threading import Thread\n", + "\n", + "import ipywidgets as ipw\n", + "from IPython.display import display\n", + "\n", + "\n", + "def is_package_installed(package_name):\n", + " import importlib\n", + "\n", + " package_name = package_name.replace(\"-\", \"_\")\n", + " try:\n", + " importlib.import_module(package_name)\n", + " except ImportError:\n", + " return False\n", + " else:\n", + " return True\n", + "\n", + "\n", + "def stream_output(process, output_widget):\n", + " \"\"\"Reads output from the process and forwards it to the output widget.\"\"\"\n", + " while True:\n", + " output = process.stdout.readline()\n", + " if process.poll() is not None and output == \"\":\n", + " break\n", + " if output:\n", + " output_widget.value += f\"\"\"
    {output}
    \"\"\"\n", + "\n", + "\n", + "def execute_command_with_output(\n", + " command, output_widget, install_btn, remove_btn, action=\"install\"\n", + "):\n", + " \"\"\"Execute a command and stream its output to the given output widget.\"\"\"\n", + " output_widget.value = \"\" # Clear the widget\n", + " process = subprocess.Popen(\n", + " command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True, bufsize=1\n", + " )\n", + " # Create a thread to read the output stream and write it to the output widget\n", + " thread = Thread(target=stream_output, args=(process, output_widget))\n", + " thread.start()\n", + " thread.join() # Wait for the thread to finish\n", + "\n", + " if process.returncode == 0 and action == \"install\":\n", + " install_btn.disabled = True\n", + " remove_btn.disabled = False\n", + " return True\n", + " elif process.returncode == 0 and action == \"remove\":\n", + " install_btn.disabled = False\n", + " remove_btn.disabled = True\n", + " return True\n", + " else:\n", + " output_widget.value += \"\"\"
    Command failed.
    \"\"\"\n", + " return False\n", + "\n", + "\n", + "def install_package(\n", + " package_name,\n", + " pip,\n", + " github,\n", + " post_install,\n", + " output_container,\n", + " message_container,\n", + " install_btn,\n", + " remove_btn,\n", + " accordion,\n", + " index,\n", + "):\n", + " if pip:\n", + " command = [\"pip\", \"install\", pip, \"--user\"]\n", + " else:\n", + " command = [\"pip\", \"install\", \"git+\" + github, \"--user\"]\n", + " message_container.value = (\n", + " f\"\"\"
    Installing {package_name}...
    \"\"\"\n", + " )\n", + " result = execute_command_with_output(\n", + " command, output_container, install_btn, remove_btn\n", + " )\n", + " # Execute post install if defined in the plugin.yaml:\n", + " if post_install:\n", + " message_container.value += (\n", + " \"\"\"
    Post installation step...
    \"\"\"\n", + " )\n", + " command = [sys.executable, \"-m\", package_name.replace(\"-\", \"_\"), post_install]\n", + " # Execute the command\n", + " result = subprocess.run(command, capture_output=True, text=True, check=False)\n", + " # if the package was installed successfully\n", + " if result:\n", + " message_container.value += \"\"\"
    Initiating test to load the plugin...
    \"\"\"\n", + " # Test plugin functionality\n", + " command = [sys.executable, \"-m\", \"aiidalab_qe\", \"test-plugin\", package_name]\n", + " # Execute the command\n", + " result = subprocess.run(command, capture_output=True, text=True, check=False)\n", + " if result.returncode == 0:\n", + " # restart daemon\n", + " message_container.value = (\n", + " \"\"\"
    Plugin load test passed.
    \"\"\"\n", + " )\n", + " message_container.value += (\n", + " \"\"\"
    Plugin installed successfully.
    \"\"\"\n", + " )\n", + " accordion.set_title(index, f\"{accordion.get_title(index)[:-2]} βœ…\")\n", + " command = [\"verdi\", \"daemon\", \"restart\"]\n", + " subprocess.run(command, capture_output=True, shell=False, check=False)\n", + " else:\n", + " # uninstall the package\n", + " message_container.value = f\"\"\"
    The plugin '{package_name}' was installed successfully, but the plugin functionality test failed: {result.stderr}.
    \"\"\"\n", + " message_container.value += \"\"\"
    This may be due to compatibility issues with the current AiiDAlab QEApp version. Please contact the plugin author for further assistance.
    \"\"\"\n", + " message_container.value += \"\"\"
    To prevent potential issues, the plugin will now be uninstalled.
    \"\"\"\n", + " remove_package(\n", + " package_name,\n", + " output_container,\n", + " message_container,\n", + " install_btn,\n", + " remove_btn,\n", + " accordion,\n", + " index,\n", + " )\n", + "\n", + "\n", + "def remove_package(\n", + " package_name,\n", + " output_container,\n", + " message_container,\n", + " install_btn,\n", + " remove_btn,\n", + " accordion,\n", + " index,\n", + "):\n", + " message_container.value += (\n", + " f\"\"\"
    Removing {package_name}...
    \"\"\"\n", + " )\n", + " package_name = package_name.replace(\"-\", \"_\")\n", + " command = [\"pip\", \"uninstall\", \"-y\", package_name]\n", + " result = execute_command_with_output(\n", + " command, output_container, install_btn, remove_btn, action=\"remove\"\n", + " )\n", + " if result:\n", + " message_container.value += f\"\"\"
    {package_name} removed successfully.
    \"\"\"\n", + " accordion.set_title(index, f\"{accordion.get_title(index)[:-2]} ☐\")\n", + " command = [\"verdi\", \"daemon\", \"restart\"]\n", + " subprocess.run(command, capture_output=True, shell=False, check=False)\n", + "\n", + "\n", + "def run_remove_button(\n", + " package_name,\n", + " output_container,\n", + " message_container,\n", + " install_btn,\n", + " remove_btn,\n", + " accordion,\n", + " index,\n", + "):\n", + " message_container.value = \"\"\n", + " remove_package(\n", + " package_name,\n", + " output_container,\n", + " message_container,\n", + " install_btn,\n", + " remove_btn,\n", + " accordion,\n", + " index,\n", + " )\n", + "\n", + "\n", + "accordion = ipw.Accordion()\n", + "\n", + "for i, (plugin_name, plugin_data) in enumerate(data.items()):\n", + " installed = is_package_installed(plugin_name)\n", + "\n", + " # Output container with customized styling\n", + " output_container = ipw.HTML(\n", + " value=\"\"\"\n", + "
    \n", + "
    \n", + " \"\"\",\n", + " layout=ipw.Layout(\n", + " max_height=\"250px\", overflow=\"auto\", border=\"2px solid #CCCCCC\"\n", + " ),\n", + " )\n", + " # Output container with customized styling\n", + " message_container = ipw.HTML(\n", + " value=\"\"\"\n", + "
    \n", + "
    \n", + " \"\"\",\n", + " layout=ipw.Layout(\n", + " max_height=\"250px\", overflow=\"auto\", border=\"2px solid #CCCCCC\"\n", + " ),\n", + " )\n", + "\n", + " details = (\n", + " f\"Author: {plugin_data.get('author', 'N/A')}
    \"\n", + " f\"Description: {plugin_data.get('description', 'No description available')}
    \"\n", + " )\n", + " if \"documentation\" in plugin_data:\n", + " details += f\"Documentation: Visit
    \"\n", + " if \"github\" in plugin_data:\n", + " details += (\n", + " f\"Github: Visit\"\n", + " )\n", + "\n", + " install_btn = ipw.Button(\n", + " description=\"Install\", button_style=\"success\", disabled=installed\n", + " )\n", + " remove_btn = ipw.Button(\n", + " description=\"Remove\", button_style=\"danger\", disabled=not installed\n", + " )\n", + "\n", + " install_btn.on_click(\n", + " lambda _btn,\n", + " pn=plugin_name,\n", + " pip=plugin_data.get(\"pip\", None), # noqa: B008\n", + " github=plugin_data.get(\"github\", \"\"), # noqa: B008\n", + " post=plugin_data.get(\"post_install\", None), # noqa: B008\n", + " oc=output_container,\n", + " mc=message_container,\n", + " ib=install_btn,\n", + " rb=remove_btn,\n", + " ac=accordion,\n", + " index=i: install_package(pn, pip, github, post, oc, mc, ib, rb, ac, index)\n", + " )\n", + " remove_btn.on_click(\n", + " lambda _btn,\n", + " pn=plugin_name,\n", + " oc=output_container,\n", + " mc=message_container,\n", + " ib=install_btn,\n", + " rb=remove_btn,\n", + " ac=accordion,\n", + " index=i: run_remove_button(pn, oc, mc, ib, rb, ac, index)\n", + " )\n", + "\n", + " box = ipw.VBox(\n", + " [\n", + " ipw.HTML(details),\n", + " ipw.HBox([install_btn, remove_btn]),\n", + " message_container,\n", + " output_container,\n", + " ]\n", + " )\n", + "\n", + " title_with_icon = f\"{plugin_data.get('title')} {'βœ…' if installed else '☐'}\"\n", + " accordion.set_title(i, title_with_icon)\n", + " accordion.children = [*accordion.children, box]\n", + "\n", + "display(accordion)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "base", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.13" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/plugins.yaml b/plugins.yaml new file mode 100644 index 000000000..be0f7dd68 --- /dev/null +++ b/plugins.yaml @@ -0,0 +1,21 @@ +--- +aiida-bader: + title: Bader charge analysis (aiida-bader) + description: Perform Bader charge analysis of the electronic charge density + author: Xing Wang + github: https://github.com/superstar54/aiida-bader + documentation: https://aiida-bader.readthedocs.io/ + pip: aiida-bader + +aiidalab-qe-vibroscopy: + title: Phonons and IR/Raman (aiidalab-qe-vibroscopy) + description: Plugin to compute phonons, IR/Raman spectra, Inelastic Neutron Scattering of materials via finite displacement and finite field approach. + author: Miki Bonacci, Andres Ortega Guerrero, Lorenzo Bastonero and Nicola Marzari + pip: aiidalab-qe-vibroscopy~=1.1.3 + post_install: setup-phonopy + +aiidalab-qe-muon: + title: Muon spectroscopy (aiidalab-qe-muon) + description: Compute properties to assist Β΅SR experiments, such as muon stopping sites and related properties (local fields and polarization signals). + author: Miki Bonacci, Ifeanyi J. Onuorah, Pietro Bonfa', Giovanni Pizzi and Roberto de Renzi + github: https://github.com/mikibonacci/aiidalab-qe-muon diff --git a/post_install b/post_install index 5a251b0e8..d49042185 100755 --- a/post_install +++ b/post_install @@ -1,10 +1,15 @@ #!/bin/bash +# After installing or updating a plugin package, one needs to restart the daemon with the --reset flag for changes to take effect. +# For the moment aiidalab only restart the daemon without the --reset flag, so we need to do it manually. 
+verdi daemon restart --reset + # This script is executed after the app installation completes # triggered by the post install hook implemented in https://github.com/aiidalab/aiidalab/pull/295 +# TODO: this script is not yet properly tested. The integration tests are run from the qeapp docker image, while this script is executed when the app is installed by `aiidalab install`. Therefore, we need to bring back the original CI test (which was removed in https://github.com/aiidalab/aiidalab-qe/pull/449/files#diff-83993fe9ad162677c85ae244400df2469b6d666c2a28d7b8d179785e87beb7f3). echo "Starting installation of QE..." python -m aiidalab_qe install-qe & disown echo "Starting installation of pseudo-potentials..." -python -m aiidalab_qe install-sssp & disown +python -m aiidalab_qe install-pseudos & disown diff --git a/pyproject.toml b/pyproject.toml index 1eb0ca77d..616c5ff9b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,3 +4,50 @@ requires = [ "wheel" ] build-backend = "setuptools.build_meta" + +[tool.pytest.ini_options] +addopts = '--strict-config --strict-markers --durations=30 --durations-min=1 -ra' +filterwarnings = [ + # This is needed since SQLAlchemy 2.0, see + # https://github.com/aiidalab/aiidalab-widgets-base/issues/605 + 'ignore:Object of type.*not in session,.*operation along.*will not proceed:', + 'ignore:Creating AiiDA configuration:UserWarning:', + 'ignore:metadata.*traitlets.traitlets.Unicode object:DeprecationWarning:', + # For some reason we get this error, see + # https://github.com/aiidalab/aiidalab-widgets-base/issues/551 + 'ignore:Exception ignored in:pytest.PytestUnraisableExceptionWarning:_pytest', + # This popped up in spglib 2.5. Since we still try to support spglib v1, + "ignore:dict interface.*is deprecated.Use attribute interface:DeprecationWarning:", +] + +[tool.ruff] +line-length = 88 +show-fixes = true +output-format = "full" +target-version = "py39" + +[tool.ruff.lint] +ignore = ["E501", "E402", "TRY003", "RUF012", "N806"] +select = [ + "A", # flake8-builtins + "ARG", # flake8-unused-arguments + "B", # flake8-bugbear + "C4", # flake8-comprehensions + "E", # pycodestyle + "F", # pyflakes + "I", # isort + "N", # pep8-naming + "PLE", # pylint error rules + "PLW", # pylint warning rules + "PLC", # pylint convention rules + "RUF", # ruff-specific rules + "TRY", # Tryceratops + "UP" # pyupgrade +] + +[tool.ruff.lint.isort] +known-first-party = ["aiida", "aiidalab_widgets_base", "aiida_quantumespresso"] + +[tool.ruff.lint.per-file-ignores] +"tests/*" = ["ARG001"] +"tests_integration/*" = ["ARG001"] diff --git a/qe.ipynb b/qe.ipynb index cca80a21d..0ec99c175 100644 --- a/qe.ipynb +++ b/qe.ipynb @@ -13,6 +13,28 @@ "document.title='AiiDAlab QE app'" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from aiida import load_profile\n", + "\n", + "load_profile();" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from aiidalab_widgets_base.utils.loaders import load_css\n", + "\n", + "load_css(css_path=\"src/aiidalab_qe/app/static/styles\")" + ] + }, { "cell_type": "code", "execution_count": null, @@ -34,9 +56,15 @@ "metadata": {}, "outputs": [], "source": [ - "from aiida import load_profile\n", + "from IPython.display import display\n", "\n", - "load_profile();" + "from aiidalab_qe.app.wrapper import AppWrapperContoller, AppWrapperModel, AppWrapperView\n", + "\n", + "model = AppWrapperModel()\n", + "view = AppWrapperView()\n", + "controller = 
AppWrapperContoller(model, view)\n", + "\n", + "display(view)" ] }, { @@ -45,39 +73,39 @@ "metadata": {}, "outputs": [], "source": [ - "import ipywidgets as ipw\n", + "import urllib.parse as urlparse\n", + "\n", + "from aiidalab_qe.app.main import App\n", "from aiidalab_widgets_base.bug_report import (\n", " install_create_github_issue_exception_handler,\n", ")\n", - "from importlib_resources import files\n", - "from IPython.display import display\n", - "from jinja2 import Environment\n", - "\n", - "from aiidalab_qe.app import App, static\n", - "from aiidalab_qe.version import __version__\n", - "\n", - "env = Environment()\n", - "\n", - "template = files(static).joinpath(\"welcome.jinja\").read_text()\n", - "style = files(static).joinpath(\"style.css\").read_text()\n", - "welcome_message = ipw.HTML(env.from_string(template).render(style=style))\n", - "footer = ipw.HTML(\n", - " f'

    Copyright (c) 2023 AiiDAlab team (EPFL) Version: {__version__}

    '\n", - ")\n", "\n", - "app_with_work_chain_selector = App(qe_auto_setup=True)\n", - "\n", - "output = ipw.Output()\n", "install_create_github_issue_exception_handler(\n", - " output,\n", + " view.output,\n", " url=\"https://github.com/aiidalab/aiidalab-qe/issues/new\",\n", " labels=(\"bug\", \"automated-report\"),\n", ")\n", "\n", - "with output:\n", - " display(welcome_message, app_with_work_chain_selector, footer)\n", + "url = urlparse.urlsplit(jupyter_notebook_url) # noqa F821\n", + "query = urlparse.parse_qs(url.query)\n", + "\n", + "app = App(qe_auto_setup=True)\n", + "# if a pk is provided in the query string, set it as the process of the app\n", + "if \"pk\" in query:\n", + " pk = query[\"pk\"][0]\n", + " app.process = pk\n", "\n", - "display(output)" + "view.main.children = [app]\n", + "view.app = app" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "controller.enable_toggles()" ] } ], @@ -97,7 +125,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.10" + "version": "3.9.13" }, "vscode": { "interpreter": { diff --git a/requirements-docker.txt b/requirements-docker.txt new file mode 100644 index 000000000..4cfdbb2c9 --- /dev/null +++ b/requirements-docker.txt @@ -0,0 +1,6 @@ +docker +requests +pytest~=8.2.0 +pytest-docker~=3.0 +pytest-selenium~=4.1 +selenium==4.20.0 diff --git a/setup.cfg b/setup.cfg index ef2da3503..e3d9a86ed 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = aiidalab_qe -version = 23.11.0rc0 +version = 24.10.0a3 description = Package for the AiiDAlab QE app long_description = file: README.md long_description_content_type = text/markdown @@ -26,14 +26,13 @@ packages = find: install_requires = aiida-core~=2.2,<3 Jinja2~=3.0 - aiida-quantumespresso~=4.3.0 - aiidalab-widgets-base==2.1.0rc0 + aiida-quantumespresso~=4.6 + aiidalab-widgets-base[optimade]==2.3.0a2 aiida-pseudo~=1.4 filelock~=3.8 importlib-resources~=5.2 - widget-bandsplot~=0.5.1 - pydantic~=1.10,>=1.10.8 -python_requires = >=3.8 + aiida-wannier90-workflows==2.3.0 +python_requires = >=3.9 [options.packages.find] where = src @@ -41,22 +40,29 @@ where = src [options.extras_require] dev = bumpver~=2023.1124 - pre-commit~=2.20 - pytest~=6.2 + pre-commit~=3.2 + pytest~=7.4 pytest-regressions~=2.2 pgtest==1.3.1 - pytest-cov~=4.0 + pytest-cov~=5.0 [options.package_data] aiidalab_qe.app.parameters = qeapp.yaml -aiidalab_qe.app.static = * +aiidalab_qe.app.static.styles = *.css +aiidalab_qe.app.static.templates = *.jinja aiidalab_qe.app.structure.examples = * +aiidalab_qe.plugins.xas = pseudo_toc.yaml [options.entry_points] aiidalab_qe.properties = bands = aiidalab_qe.plugins.bands:bands pdos = aiidalab_qe.plugins.pdos:pdos + xps = aiidalab_qe.plugins.xps:xps electronic_structure = aiidalab_qe.plugins.electronic_structure:electronic_structure + xas = aiidalab_qe.plugins.xas:xas + +aiida.workflows = + aiidalab_qe.bands_workchain = aiidalab_qe.plugins.bands.bands_workchain:BandsWorkChain [aiidalab] title = Quantum ESPRESSO @@ -64,19 +70,14 @@ description = Perform Quantum ESPRESSO calculations categories = quantum -[flake8] -ignore = - E501 - W503 - E203 - [bumpver] -current_version = "v23.11.0rc0" +current_version = "v24.10.0a3" version_pattern = "v0Y.0M.PATCH[PYTAGNUM]" commit_message = "Bump version {old_version} -> {new_version}" commit = True tag = True push = True +pre_commit_hook = ./bumpver_pre_commit.sh [bumpver:file_patterns] src/aiidalab_qe/version.py = @@ -84,3 
+85,5 @@ src/aiidalab_qe/version.py = setup.cfg = current_version = "{version}" version = {pep440_version} +docs/source/conf.py = + version = "{version}" diff --git a/src/aiidalab_qe/__init__.py b/src/aiidalab_qe/__init__.py index 7ed200493..1331bdcde 100644 --- a/src/aiidalab_qe/__init__.py +++ b/src/aiidalab_qe/__init__.py @@ -1,6 +1,7 @@ """Package for the AiiDAlab QE app version. Plugin (workflow) version will be read from the plugin's __init__.py file. """ + from aiidalab_qe.version import __version__ __all__ = [ diff --git a/src/aiidalab_qe/__main__.py b/src/aiidalab_qe/__main__.py index 703016968..2a81f56d0 100644 --- a/src/aiidalab_qe/__main__.py +++ b/src/aiidalab_qe/__main__.py @@ -1,13 +1,9 @@ -"""For running the app from the command line used for post_install script. -""" +"""For running the app from the command line, used by the post_install script.""" +import sys from pathlib import Path import click -from aiida import load_profile - -from aiidalab_qe.common.setup_codes import codes_are_setup -from aiidalab_qe.common.setup_codes import install as install_qe_codes # The default profile name of AiiDAlab container. _DEFAULT_PROFILE = "default" @@ -20,16 +16,20 @@ def cli(): @cli.command() @click.option("-f", "--force", is_flag=True) +@click.option("--computer") @click.option("-p", "--profile", default=_DEFAULT_PROFILE) -def install_qe(force, profile): +def install_qe(force, profile, computer): + from aiida import load_profile + from aiidalab_qe.setup.codes import codes_are_setup, install_and_setup + load_profile(profile) try: - for msg in install_qe_codes(force=force): + for msg in install_and_setup(computer=computer, force=force): click.echo(msg) - assert codes_are_setup() + assert codes_are_setup(computer=computer) click.secho("Codes are setup!", fg="green") except Exception as error: - raise click.ClickException(f"Failed to set up QE failed: {error}") + raise click.ClickException(f"Failed to set up QE: {error}") from error @cli.command() @@ -45,7 +45,8 @@ def install_pseudos(profile, source): """Install pseudopotentials from a local folder if source is specified, otherwise download from remote repositories. 
""" - from aiidalab_qe.common.setup_pseudos import install + from aiida import load_profile + from aiidalab_qe.setup.pseudos import install load_profile(profile) @@ -54,7 +55,9 @@ def install_pseudos(profile, source): click.echo(msg) click.secho("Pseudopotentials are installed!", fg="green") except Exception as error: - raise click.ClickException(f"Failed to set up pseudo potentials: {error}") + raise click.ClickException( + f"Failed to set up pseudo potentials: {error}" + ) from error @cli.command() @@ -66,7 +69,7 @@ def install_pseudos(profile, source): type=click.Path(exists=True, path_type=Path, resolve_path=True), ) def download_pseudos(dest): - from aiidalab_qe.common.setup_pseudos import EXPECTED_PSEUDOS, _install_pseudos + from aiidalab_qe.setup.pseudos import EXPECTED_PSEUDOS, _install_pseudos try: for progress in _install_pseudos( @@ -76,7 +79,33 @@ def download_pseudos(dest): click.secho("Pseudopotentials are downloaded!", fg="green") except Exception as error: - raise click.ClickException(f"Failed to download pseudo potentials: {error}") + raise click.ClickException( + f"Failed to download pseudo potentials: {error}" + ) from error + + +@cli.command() +@click.argument( + "plugin_name", + default="aiidalab_qe", +) +@click.option("-p", "--profile", default=_DEFAULT_PROFILE) +def test_plugin(plugin_name, profile): + from aiida import load_profile + from aiidalab_qe.app.utils import test_plugin_functionality + + load_profile(profile) + + try: + success, message = test_plugin_functionality(plugin_name) + if success: + click.secho("Plugin is loaded successfully!", fg="green") + else: + click.secho(f"Failed to load plugin: {message}", fg="red", err=True) + sys.exit(1) # Exit with status 1 to indicate failure + except Exception as error: + click.secho(f"Failed to load plugin: {error}", fg="red", err=True) + sys.exit(1) # Exit with status 1 to indicate failure if __name__ == "__main__": diff --git a/src/aiidalab_qe/app/__init__.py b/src/aiidalab_qe/app/__init__.py index ea2de96ed..8138fc07a 100644 --- a/src/aiidalab_qe/app/__init__.py +++ b/src/aiidalab_qe/app/__init__.py @@ -1,7 +1 @@ """Package for the AiiDAlab QE app.""" - -from .main import App - -__all__ = [ - "App", -] diff --git a/src/aiidalab_qe/app/configuration/__init__.py b/src/aiidalab_qe/app/configuration/__init__.py index 67c8981a8..39f5f07ae 100644 --- a/src/aiidalab_qe/app/configuration/__init__.py +++ b/src/aiidalab_qe/app/configuration/__init__.py @@ -1,16 +1,16 @@ -# -*- coding: utf-8 -*- """Widgets for the submission of bands work chains. 
Authors: AiiDAlab team """ + from __future__ import annotations import ipywidgets as ipw import traitlets as tl -from aiida import orm -from aiidalab_widgets_base import WizardAppWidgetStep +from aiida import orm from aiidalab_qe.app.utils import get_entry_items +from aiidalab_widgets_base import WizardAppWidgetStep from .advanced import AdvancedSettings from .workflow import WorkChainSettings @@ -45,6 +45,11 @@ def __init__(self, **kwargs): (self.advanced_settings, "input_structure"), ) # + ipw.dlink( + (self, "input_structure"), + (self.workchain_settings, "input_structure"), + ) + # self.built_in_settings = [ self.workchain_settings, self.advanced_settings, @@ -110,7 +115,7 @@ def __init__(self, **kwargs): ) @tl.observe("previous_step_state") - def _observe_previous_step_state(self, change): + def _observe_previous_step_state(self, _change): self._update_state() def get_configuration_parameters(self): diff --git a/src/aiidalab_qe/app/configuration/advanced.py b/src/aiidalab_qe/app/configuration/advanced.py index ecdee89d4..5df0e76cd 100644 --- a/src/aiidalab_qe/app/configuration/advanced.py +++ b/src/aiidalab_qe/app/configuration/advanced.py @@ -1,22 +1,25 @@ -# -*- coding: utf-8 -*- """Widgets for the submission of bands work chains. Authors: AiiDAlab team """ + import os import ipywidgets as ipw +import numpy as np import traitlets as tl +from IPython.display import clear_output, display + from aiida import orm from aiida_quantumespresso.calculations.functions.create_kpoints_from_distance import ( create_kpoints_from_distance, ) +from aiida_quantumespresso.data.hubbard_structure import HubbardStructureData from aiida_quantumespresso.workflows.pw.base import PwBaseWorkChain -from IPython.display import clear_output, display - from aiidalab_qe.app.parameters import DEFAULT_PARAMETERS from aiidalab_qe.common.panel import Panel -from aiidalab_qe.common.setup_pseudos import PseudoFamily +from aiidalab_qe.common.widgets import HubbardWidget +from aiidalab_qe.setup.pseudos import PseudoFamily from .pseudos import PseudoFamilySelector, PseudoSetter @@ -38,6 +41,12 @@ class AdvancedSettings(Panel): Tick the box to override the default, smaller is more accurate and costly.
""" ) + dftd3_version = { + "dft-d3": 3, + "dft-d3bj": 4, + "dft-d3m": 5, + "dft-d3mbj": 6, + } # protocol interface protocol = tl.Unicode(allow_none=True) input_structure = tl.Instance(orm.StructureData, allow_none=True) @@ -98,13 +107,16 @@ def __init__(self, default_protocol=None, **kwargs): style={"description_width": "initial"}, ) self.mesh_grid = ipw.HTML() + self.create_kpoints_distance_link() + self.kpoints_distance.observe(self._callback_value_set, "value") + + # Hubbard setting widget + self.hubbard_widget = HubbardWidget() ipw.dlink( (self.override, "value"), - (self.kpoints_distance, "disabled"), + (self.hubbard_widget.activate_hubbard, "disabled"), lambda override: not override, ) - self.kpoints_distance.observe(self._callback_value_set, "value") - # Total change setting widget self.total_charge = ipw.BoundedFloatText( min=-3, @@ -121,12 +133,98 @@ def __init__(self, default_protocol=None, **kwargs): ) self.total_charge.observe(self._callback_value_set, "value") + # Van der Waals setting widget + self.van_der_waals = ipw.Dropdown( + options=[ + ("None", "none"), + ("Grimme-D3", "dft-d3"), + ("Grimme-D3BJ", "dft-d3bj"), + ("Grimme-D3M", "dft-d3m"), + ("Grimme-D3MBJ", "dft-d3mbj"), + ("Tkatchenko-Scheffler", "ts-vdw"), + ], + description="Van der Waals correction:", + value="none", + disabled=False, + style={"description_width": "initial"}, + ) + + ipw.dlink( + (self.override, "value"), + (self.van_der_waals, "disabled"), + lambda override: not override, + ) + self.magnetization = MagnetizationSettings() ipw.dlink( (self.override, "value"), (self.magnetization, "disabled"), lambda override: not override, ) + + # Convergence Threshold settings + self.scf_conv_thr = ipw.BoundedFloatText( + min=1e-15, + max=1.0, + step=1e-10, + description="SCF conv.:", + disabled=False, + style={"description_width": "initial"}, + ) + self.scf_conv_thr.observe(self._callback_value_set, "value") + ipw.dlink( + (self.override, "value"), + (self.scf_conv_thr, "disabled"), + lambda override: not override, + ) + self.forc_conv_thr = ipw.BoundedFloatText( + min=1e-15, + max=1.0, + step=0.0001, + description="Force conv.:", + disabled=False, + style={"description_width": "initial"}, + ) + self.forc_conv_thr.observe(self._callback_value_set, "value") + ipw.dlink( + (self.override, "value"), + (self.forc_conv_thr, "disabled"), + lambda override: not override, + ) + self.etot_conv_thr = ipw.BoundedFloatText( + min=1e-15, + max=1.0, + step=0.00001, + description="Energy conv.:", + disabled=False, + style={"description_width": "initial"}, + ) + self.etot_conv_thr.observe(self._callback_value_set, "value") + ipw.dlink( + (self.override, "value"), + (self.etot_conv_thr, "disabled"), + lambda override: not override, + ) + + # Max electron SCF steps widget + self._create_electron_maxstep_widgets() + + # Spin-Orbit calculation + self.spin_orbit = ipw.ToggleButtons( + options=[ + ("Off", "wo_soc"), + ("On", "soc"), + ], + description="Spin-Orbit:", + value="wo_soc", + style={"description_width": "initial"}, + ) + ipw.dlink( + (self.override, "value"), + (self.spin_orbit, "disabled"), + lambda override: not override, + ) + self.pseudo_family_selector = PseudoFamilySelector() self.pseudo_setter = PseudoSetter() ipw.dlink( @@ -134,6 +232,12 @@ def __init__(self, default_protocol=None, **kwargs): (self.pseudo_setter, "pseudo_family"), ) self.kpoints_distance.observe(self._display_mesh, "value") + + # Link with PseudoWidget + ipw.dlink( + (self.spin_orbit, "value"), + (self.pseudo_family_selector, "spin_orbit"), + ) 
self.children = [ self.title, ipw.HBox( @@ -146,13 +250,26 @@ def __init__(self, default_protocol=None, **kwargs): ), # total charge setting widget self.total_charge, + # van der waals setting widget + self.van_der_waals, # magnetization setting widget self.magnetization, + # convergence threshold setting widget + ipw.HTML("Convergence Thresholds:"), + ipw.HBox( + [self.forc_conv_thr, self.etot_conv_thr, self.scf_conv_thr], + layout=ipw.Layout(height="50px", justify_content="flex-start"), + ), + # Max electron SCF steps widget + self.electron_maxstep, # smearing setting widget self.smearing, # Kpoints setting widget self.kpoints_description, ipw.HBox([self.kpoints_distance, self.mesh_grid]), + self.hubbard_widget, + # Spin-Orbit calculation + self.spin_orbit, self.pseudo_family_selector, self.pseudo_setter, ] @@ -164,6 +281,51 @@ def __init__(self, default_protocol=None, **kwargs): # Default settings to trigger the callback self.reset() + def create_kpoints_distance_link(self): + """Create the dlink for override and kpoints_distance.""" + self.kpoints_distance_link = ipw.dlink( + (self.override, "value"), + (self.kpoints_distance, "disabled"), + lambda override: not override, + ) + + def remove_kpoints_distance_link(self): + """Remove the kpoints_distance_link.""" + if hasattr(self, "kpoints_distance_link"): + self.kpoints_distance_link.unlink() + del self.kpoints_distance_link + + def _create_electron_maxstep_widgets(self): + self.electron_maxstep = ipw.BoundedIntText( + min=20, + max=1000, + step=1, + value=80, + description="Max. electron steps:", + style={"description_width": "initial"}, + ) + ipw.dlink( + (self.override, "value"), + (self.electron_maxstep, "disabled"), + lambda override: not override, + ) + self.electron_maxstep.observe(self._callback_value_set, "value") + + def set_value_and_step(self, attribute, value): + """ + Sets the value and adjusts the step based on the order of magnitude of the value. + This is used for the threshold values (etot_conv_thr, scf_conv_thr, forc_conv_thr). + Parameters: + attribute: The attribute whose value is to be set (e.g., self.etot_conv_thr). + value: The numerical value to set. 
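+        Example: for value=4.8e-5, np.floor(np.log10(4.8e-5)) = -5, so the step becomes 10**(-6) = 1e-6.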
+ """ + attribute.value = value + if value != 0: + order_of_magnitude = np.floor(np.log10(abs(value))) + attribute.step = 10 ** (order_of_magnitude - 1) + else: + attribute.step = 0.1 # Default step if value is zero + def _override_changed(self, change): """Callback function to set the override value""" if change["new"] is False: @@ -175,10 +337,32 @@ def _update_input_structure(self, change): if self.input_structure is not None: self.magnetization._update_widget(change) self.pseudo_setter.structure = change["new"] + self._update_settings_from_protocol(self.protocol) self._display_mesh() + self.hubbard_widget.update_widgets(change["new"]) + if isinstance(self.input_structure, HubbardStructureData): + self.override.value = True + if self.input_structure.pbc == (False, False, False): + self.kpoints_distance.value = 100.0 + self.kpoints_distance.disabled = True + if hasattr(self, "kpoints_distance_link"): + self.remove_kpoints_distance_link() + else: + # self.kpoints_distance.disabled = False + if not hasattr(self, "kpoints_distance_link"): + self.create_kpoints_distance_link() else: self.magnetization.input_structure = None self.pseudo_setter.structure = None + self.hubbard_widget.update_widgets(None) + self.kpoints_distance.disabled = False + if not hasattr(self, "kpoints_distance_link"): + self.create_kpoints_distance_link() + + @tl.observe("electronic_type") + def _electronic_type_changed(self, change): + """Input electronic_type changed, update the widget values.""" + self.magnetization.electronic_type = change["new"] @tl.observe("protocol") def _protocol_changed(self, _): @@ -194,7 +378,31 @@ def _update_settings_from_protocol(self, protocol): parameters = PwBaseWorkChain.get_protocol_inputs(protocol) - self.kpoints_distance.value = parameters["kpoints_distance"] + if self.input_structure: + if self.input_structure.pbc == (False, False, False): + self.kpoints_distance.value = 100.0 + self.kpoints_distance.disabled = True + else: + self.kpoints_distance.value = parameters["kpoints_distance"] + else: + self.kpoints_distance.value = parameters["kpoints_distance"] + + num_atoms = len(self.input_structure.sites) if self.input_structure else 1 + + etot_value = num_atoms * parameters["meta_parameters"]["etot_conv_thr_per_atom"] + self.set_value_and_step(self.etot_conv_thr, etot_value) + + # Set SCF conversion threshold + scf_value = num_atoms * parameters["meta_parameters"]["conv_thr_per_atom"] + self.set_value_and_step(self.scf_conv_thr, scf_value) + + # Set force conversion threshold + forc_value = parameters["pw"]["parameters"]["CONTROL"]["forc_conv_thr"] + self.set_value_and_step(self.forc_conv_thr, forc_value) + + # The pseudo_family read from the protocol (aiida-quantumespresso plugin settings) + # we override it with the value from the pseudo_family_selector widget + parameters["pseudo_family"] = self.pseudo_family_selector.value def _callback_value_set(self, _=None): """Callback function to set the parameters""" @@ -223,9 +431,25 @@ def get_panel_value(self): "pw": { "parameters": { "SYSTEM": {}, - }, + "CONTROL": {}, + "ELECTRONS": {}, + } }, + "clean_workdir": self.clean_workdir.value, + "pseudo_family": self.pseudo_family_selector.value, + "kpoints_distance": self.value.get("kpoints_distance"), } + + # Set total charge + parameters["pw"]["parameters"]["SYSTEM"]["tot_charge"] = self.total_charge.value + + if self.hubbard_widget.activate_hubbard.value: + parameters["hubbard_parameters"] = self.hubbard_widget.hubbard_dict + if self.hubbard_widget.eigenvalues_label.value: + 
parameters["pw"]["parameters"]["SYSTEM"].update( + self.hubbard_widget.eigenvalues_dict + ) + # add clean_workdir to the parameters parameters["clean_workdir"] = self.clean_workdir.value @@ -233,32 +457,90 @@ def get_panel_value(self): parameters["pseudo_family"] = self.pseudo_family_selector.value if self.pseudo_setter.pseudos: parameters["pw"]["pseudos"] = self.pseudo_setter.pseudos - parameters["pw"]["parameters"]["SYSTEM"][ - "ecutwfc" - ] = self.pseudo_setter.ecutwfc - parameters["pw"]["parameters"]["SYSTEM"][ - "ecutrho" - ] = self.pseudo_setter.ecutrho - # if override is not ticked, use the default value - parameters["pw"]["parameters"]["SYSTEM"]["tot_charge"] = self.total_charge.value + parameters["pw"]["parameters"]["SYSTEM"]["ecutwfc"] = ( + self.pseudo_setter.ecutwfc + ) + parameters["pw"]["parameters"]["SYSTEM"]["ecutrho"] = ( + self.pseudo_setter.ecutrho + ) + + if self.van_der_waals.value in ["none", "ts-vdw"]: + parameters["pw"]["parameters"]["SYSTEM"]["vdw_corr"] = ( + self.van_der_waals.value + ) + else: + parameters["pw"]["parameters"]["SYSTEM"]["vdw_corr"] = "dft-d3" + parameters["pw"]["parameters"]["SYSTEM"]["dftd3_version"] = ( + self.dftd3_version[self.van_der_waals.value] + ) + # there are two choose, use link or parent if self.spin_type == "collinear": - parameters[ - "initial_magnetic_moments" - ] = self.magnetization.get_magnetization() + parameters["initial_magnetic_moments"] = ( + self.magnetization.get_magnetization() + ) parameters["kpoints_distance"] = self.value.get("kpoints_distance") if self.electronic_type == "metal": # smearing type setting - parameters["pw"]["parameters"]["SYSTEM"][ - "smearing" - ] = self.smearing.smearing_value + parameters["pw"]["parameters"]["SYSTEM"]["smearing"] = ( + self.smearing.smearing_value + ) # smearing degauss setting - parameters["pw"]["parameters"]["SYSTEM"][ - "degauss" - ] = self.smearing.degauss_value + parameters["pw"]["parameters"]["SYSTEM"]["degauss"] = ( + self.smearing.degauss_value + ) + + # Set tot_magnetization for collinear simulations. + if self.spin_type == "collinear": + # Conditions for metallic systems. Select the magnetization type and set the value if override is True + if self.electronic_type == "metal" and self.override.value is True: + self.set_metallic_magnetization(parameters) + # Conditions for insulator systems. Default value is 0.0 + elif self.electronic_type == "insulator": + self.set_insulator_magnetization(parameters) + + # convergence threshold setting + parameters["pw"]["parameters"]["CONTROL"]["forc_conv_thr"] = ( + self.forc_conv_thr.value + ) + parameters["pw"]["parameters"]["ELECTRONS"]["conv_thr"] = ( + self.scf_conv_thr.value + ) + parameters["pw"]["parameters"]["CONTROL"]["etot_conv_thr"] = ( + self.etot_conv_thr.value + ) + + # Max electron SCF steps + parameters["pw"]["parameters"]["ELECTRONS"]["electron_maxstep"] = ( + self.electron_maxstep.value + ) + + # Spin-Orbit calculation + if self.spin_orbit.value == "soc": + parameters["pw"]["parameters"]["SYSTEM"]["lspinorb"] = True + parameters["pw"]["parameters"]["SYSTEM"]["noncolin"] = True + parameters["pw"]["parameters"]["SYSTEM"]["nspin"] = 4 return parameters + def set_insulator_magnetization(self, parameters): + """Set the parameters for collinear insulator calculation. 
Total magnetization.""" + parameters["pw"]["parameters"]["SYSTEM"]["tot_magnetization"] = ( + self.magnetization.tot_magnetization.value + ) + + def set_metallic_magnetization(self, parameters): + """Set the parameters for magnetization calculation in metals""" + magnetization_type = self.magnetization.magnetization_type.value + if magnetization_type == "tot_magnetization": + parameters["pw"]["parameters"]["SYSTEM"]["tot_magnetization"] = ( + self.magnetization.tot_magnetization.value + ) + else: + parameters["initial_magnetic_moments"] = ( + self.magnetization.get_magnetization() + ) + def set_panel_value(self, parameters): """Set the panel value from the given parameters.""" @@ -286,11 +568,72 @@ def set_panel_value(self, parameters): self.total_charge.value = parameters["pw"]["parameters"]["SYSTEM"].get( "tot_charge", 0 ) + if "lspinorb" in system: + self.spin_orbit.value = "soc" + else: + self.spin_orbit.value = "wo_soc" + # van der waals correction + self.van_der_waals.value = self.dftd3_version.get( + system.get("dftd3_version"), + parameters["pw"]["parameters"]["SYSTEM"].get("vdw_corr", "none"), + ) + + # convergence threshold setting + self.forc_conv_thr.value = ( + parameters.get("pw", {}) + .get("parameters", {}) + .get("CONTROL", {}) + .get("forc_conv_thr", 0.0) + ) + self.etot_conv_thr.value = ( + parameters.get("pw", {}) + .get("parameters", {}) + .get("CONTROL", {}) + .get("etot_conv_thr", 0.0) + ) + self.scf_conv_thr.value = ( + parameters.get("pw", {}) + .get("parameters", {}) + .get("ELECTRONS", {}) + .get("conv_thr", 0.0) + ) + + # Max electron SCF steps + self.electron_maxstep.value = ( + parameters.get("pw", {}) + .get("parameters", {}) + .get("ELECTRONS", {}) + .get("electron_maxstep", 80) + ) + + # Logic to set the magnetization if parameters.get("initial_magnetic_moments"): self.magnetization._set_magnetization_values( parameters.get("initial_magnetic_moments") ) + if "tot_magnetization" in parameters["pw"]["parameters"]["SYSTEM"]: + self.magnetization.magnetization_type.value = "tot_magnetization" + self.magnetization._set_tot_magnetization( + parameters["pw"]["parameters"]["SYSTEM"]["tot_magnetization"] + ) + + if parameters.get("hubbard_parameters"): + self.hubbard_widget.activate_hubbard.value = True + self.hubbard_widget.set_hubbard_widget( + parameters["hubbard_parameters"]["hubbard_u"] + ) + starting_ns_eigenvalue = ( + parameters.get("pw", {}) + .get("parameters", {}) + .get("SYSTEM", {}) + .get("starting_ns_eigenvalue") + ) + + if starting_ns_eigenvalue is not None: + self.hubbard_widget.eigenvalues_label.value = True + self.hubbard_widget.set_eigenvalues_widget(starting_ns_eigenvalue) + def reset(self): """Reset the widget and the traitlets""" @@ -299,21 +642,31 @@ def reset(self): self._update_settings_from_protocol(self.protocol) # reset the pseudo family - pseudo_family_dict = DEFAULT_PARAMETERS["advanced"]["pseudo_family"] - pseudo_family = PseudoFamily(**pseudo_family_dict) - - self.pseudo_family_selector.load_from_pseudo_family(pseudo_family) + self.pseudo_family_selector.reset() # reset total charge self.total_charge.value = DEFAULT_PARAMETERS["advanced"]["tot_charge"] + # reset the van der waals correction + self.van_der_waals.value = DEFAULT_PARAMETERS["advanced"]["vdw_corr"] + # reset the override checkbox self.override.value = False self.smearing.reset() # reset the pseudo setter - self.pseudo_setter._reset() + if self.input_structure is None: + self.pseudo_setter.structure = None + self.pseudo_setter._reset() + else: + 
self.pseudo_setter._reset() + if self.input_structure.pbc == (False, False, False): + self.kpoints_distance.value = 100.0 + self.kpoints_distance.disabled = True + # reset the magnetization self.magnetization.reset() + # reset the hubbard widget + self.hubbard_widget.reset() # reset mesh grid if self.input_structure is None: self.mesh_grid.value = " " @@ -327,7 +680,7 @@ def _display_mesh(self, _=None): mesh = create_kpoints_from_distance.process_class._func( self.input_structure, orm.Float(self.kpoints_distance.value), - orm.Bool(True), + orm.Bool(False), ) self.mesh_grid.value = "Mesh " + str(mesh.get_kpoints_mesh()[0]) else: @@ -335,10 +688,13 @@ class MagnetizationSettings(ipw.VBox): - """Widget to set the initial magnetic moments for each kind names defined in the StructureData (StructureDtaa.get_kind_names()) + """Widget to set the type of magnetization used in the calculation: + 1) Tot_magnetization: Total majority spin charge - minority spin charge. + 2) Starting magnetization: Starting spin polarization on atomic type 'i' in a spin polarized (LSDA or noncollinear/spin-orbit) calculation. + + For starting magnetization you can set each kind name defined in the StructureData (StructureData.get_kind_names()) Usually these are the names of the elements in the StructureData (For example 'C' , 'N' , 'Fe' . However the StructureData can have defined kinds like 'Fe1' and 'Fe2') - The widget generate a dictionary that can be used to set initial_magnetic_moments in the builder of PwBaseWorkChain Attributes: @@ -346,30 +702,45 @@ class MagnetizationSettings(ipw.VBox): """ input_structure = tl.Instance(orm.StructureData, allow_none=True) - + electronic_type = tl.Unicode() disabled = tl.Bool() + _DEFAULT_TOT_MAGNETIZATION = 0.0 + _DEFAULT_DESCRIPTION = "Magnetization: Input structure not confirmed" def __init__(self, **kwargs): self.input_structure = orm.StructureData() self.input_structure_labels = [] - self.description = ipw.HTML( - "Define magnetization: Input structure not confirmed" + self.tot_magnetization = ipw.BoundedIntText( + min=0, + max=100, + step=1, + value=self._DEFAULT_TOT_MAGNETIZATION, + disabled=True, + description="Total magnetization:", + style={"description_width": "initial"}, + ) + self.magnetization_type = ipw.ToggleButtons( + options=[ + ("Starting Magnetization", "starting_magnetization"), + ("Tot. 
Magnetization", "tot_magnetization"), + ], + value="starting_magnetization", + style={"description_width": "initial"}, ) + self.description = ipw.HTML(self._DEFAULT_DESCRIPTION) self.kinds = self.create_kinds_widget() self.kinds_widget_out = ipw.Output() + self.magnetization_out = ipw.Output() + self.magnetization_type.observe(self._render, "value") super().__init__( children=[ - ipw.HBox( - [ - self.description, - self.kinds_widget_out, - ], - ), + self.description, + self.magnetization_out, + self.kinds_widget_out, ], layout=ipw.Layout(justify_content="space-between"), **kwargs, ) - self.display_kinds() @tl.observe("disabled") def _disabled_changed(self, _): @@ -377,19 +748,19 @@ def _disabled_changed(self, _): if hasattr(self.kinds, "children") and self.kinds.children: for i in range(len(self.kinds.children)): self.kinds.children[i].disabled = self.disabled + self.tot_magnetization.disabled = self.disabled + self.magnetization_type.disabled = self.disabled def reset(self): self.disabled = True + self.tot_magnetization.value = self._DEFAULT_TOT_MAGNETIZATION + # if self.input_structure is None: - self.description.value = ( - "Define magnetization: Input structure not confirmed" - ) + self.description.value = self._DEFAULT_DESCRIPTION self.kinds = None - with self.kinds_widget_out: - clear_output() - else: - self.update_kinds_widget() + self.description.value = "Magnetization" + self.kinds = self.create_kinds_widget() def create_kinds_widget(self): if self.input_structure_labels: @@ -410,11 +781,30 @@ def create_kinds_widget(self): return kinds_widget + @tl.observe("electronic_type") + def _electronic_type_changed(self, change): + with self.magnetization_out: + clear_output() + if change["new"] == "metal": + display(self.magnetization_type) + self._render({"new": self.magnetization_type.value}) + else: + display(self.tot_magnetization) + with self.kinds_widget_out: + clear_output() + def update_kinds_widget(self): self.input_structure_labels = self.input_structure.get_kind_names() self.kinds = self.create_kinds_widget() - self.description.value = "Define magnetization: " - self.display_kinds() + self.description.value = "Magnetization" + + def _render(self, value): + if value["new"] == "tot_magnetization": + with self.kinds_widget_out: + clear_output() + display(self.tot_magnetization) + else: + self.display_kinds() def display_kinds(self): if "PYTEST_CURRENT_TEST" not in os.environ and self.kinds: @@ -425,6 +815,7 @@ def display_kinds(self): def _update_widget(self, change): self.input_structure = change["new"] self.update_kinds_widget() + self.display_kinds() def get_magnetization(self): """Method to generate the dictionary with the initial magnetic moments""" @@ -445,6 +836,10 @@ def _set_magnetization_values(self, magnetic_moments): else: self.kinds.children[i].value = magnetic_moments + def _set_tot_magnetization(self, tot_magnetization): + """Set the total magnetization""" + self.tot_magnetization.value = tot_magnetization + class SmearingSettings(ipw.VBox): # accept protocol as input and set the values diff --git a/src/aiidalab_qe/app/configuration/pseudos.py b/src/aiidalab_qe/app/configuration/pseudos.py index 012c41956..1d969779f 100644 --- a/src/aiidalab_qe/app/configuration/pseudos.py +++ b/src/aiidalab_qe/app/configuration/pseudos.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- from __future__ import annotations import io @@ -6,18 +5,18 @@ import ipywidgets as ipw import traitlets as tl + from aiida import orm from aiida.common import exceptions from aiida.plugins import 
DataFactory, GroupFactory from aiida_quantumespresso.workflows.pw.base import PwBaseWorkChain -from aiidalab_widgets_base.utils import StatusHTML - from aiidalab_qe.app.parameters import DEFAULT_PARAMETERS -from aiidalab_qe.common.setup_pseudos import ( +from aiidalab_qe.setup.pseudos import ( PSEUDODOJO_VERSION, SSSP_VERSION, PseudoFamily, ) +from aiidalab_widgets_base.utils import StatusHTML UpfData = DataFactory("pseudo.upf") SsspFamily = GroupFactory("pseudo.family.sssp") @@ -30,6 +29,20 @@ class PseudoFamilySelector(ipw.VBox): """

Accuracy and precision

""" ) + PSEUDO_HELP_SOC = """
+ Spin-orbit coupling (SOC) calculations are supported exclusively with PseudoDojo pseudopotentials. + PseudoDojo offers these pseudopotentials in two versions: standard and stringent. + Here, we utilize the FR (fully relativistic) type from PseudoDojo. + Please ensure you choose appropriate cutoff values for your calculations. +
""" + + PSEUDO_HELP_WO_SOC = """
+ If you are unsure, select 'SSSP efficiency', which for + most calculations will produce sufficiently accurate results at + comparatively small computational costs. If your calculations require + higher accuracy, select 'SSSP accuracy' or 'PseudoDojo stringent', which will be computationally + more expensive. SSSP is the standard solid-state pseudopotential library. + The PseudoDojo pseudopotentials used here are of the SR (scalar relativistic) type.
""" description = ipw.HTML( """
@@ -45,15 +58,7 @@ class PseudoFamilySelector(ipw.VBox): Pseudopotential family
""" ) - pseudo_family_help = ipw.HTML( - """
- If you are unsure, select 'SSSP efficiency', which for - most calculations will produce sufficiently accurate results at - comparatively small computational costs. If your calculations require a - higher accuracy, select 'SSSP accuracy' or 'PseudoDojo stringent', which will be computationally - more expensive. SSSP is the standard solid-state pseudopotentials. - The PseudoDojo used here has the SR relativistic type.
""" - ) + pseudo_family_help = ipw.HTML(PSEUDO_HELP_WO_SOC) dft_functional_prompt = ipw.HTML( """ @@ -68,6 +73,7 @@ class PseudoFamilySelector(ipw.VBox): ) protocol = tl.Unicode(allow_none=True) disabled = tl.Bool() + spin_orbit = tl.Unicode() # output pseudo family widget which is the string of the pseudo family (of the AiiDA group). value = tl.Unicode(allow_none=True) @@ -78,7 +84,6 @@ def __init__(self, **kwargs): self.override = ipw.Checkbox( description="", indent=False, - value=False, layout=ipw.Layout(max_width="10%"), ) self.set_pseudo_family_box = ipw.HBox( @@ -122,6 +127,7 @@ def __init__(self, **kwargs): self.pseudo_family_help, ], layout=ipw.Layout(max_width="60%"), + **kwargs, ) ipw.dlink((self.show_ui, "value"), (self.library_selection, "disabled")) ipw.dlink((self.show_ui, "value"), (self.dft_functional, "disabled")) @@ -140,6 +146,7 @@ def __init__(self, **kwargs): # this will trigger the callback to set the value of widgets to the default self._default_protocol = DEFAULT_PARAMETERS["workchain"]["protocol"] self.protocol = self._default_protocol + self.override.value = False def set_value(self, _=None): """The callback when the selection of pseudo family or dft functional is changed. @@ -150,9 +157,14 @@ def set_value(self, _=None): functional = self.dft_functional.value # XXX (jusong.yu): a validator is needed to check the family string is consistent with the list of pseudo families defined in the setup_pseudos.py if library == "PseudoDojo": - pseudo_family_string = ( - f"PseudoDojo/{PSEUDODOJO_VERSION}/{functional}/SR/{accuracy}/upf" - ) + if self.spin_orbit == "soc": + pseudo_family_string = ( + f"PseudoDojo/{PSEUDODOJO_VERSION}/{functional}/FR/{accuracy}/upf" + ) + else: + pseudo_family_string = ( + f"PseudoDojo/{PSEUDODOJO_VERSION}/{functional}/SR/{accuracy}/upf" + ) elif library == "SSSP": pseudo_family_string = f"SSSP/{SSSP_VERSION}/{functional}/{accuracy}" else: @@ -190,6 +202,24 @@ def reset(self): # stay the same while xc selection is changed. 
self._update_settings_from_protocol(self.protocol) + @tl.observe("spin_orbit") + def _update_library_selection(self, _): + """Update the library selection according to the spin orbit value.""" + if self.spin_orbit == "soc": + self.library_selection.options = [ + "PseudoDojo standard", + "PseudoDojo stringent", + ] + self.pseudo_family_help.value = self.PSEUDO_HELP_SOC + else: + self.library_selection.options = [ + "SSSP efficiency", + "SSSP precision", + "PseudoDojo standard", + "PseudoDojo stringent", + ] + self.pseudo_family_help.value = self.PSEUDO_HELP_WO_SOC + @tl.observe("protocol") def _protocol_changed(self, _): """Input protocol changed, update the value of widgets.""" @@ -197,9 +227,18 @@ def _protocol_changed(self, _): def _update_settings_from_protocol(self, protocol): """Update the widget values from the given protocol, and trigger the callback.""" - pseudo_family_string = PwBaseWorkChain.get_protocol_inputs(protocol)[ - "pseudo_family" - ] + # FIXME: this rely on the aiida-quantumespresso, which is not ideal + + if self.spin_orbit == "soc": + if protocol in ["fast", "moderate"]: + pseudo_family_string = "PseudoDojo/0.4/PBE/FR/standard/upf" + else: + pseudo_family_string = "PseudoDojo/0.4/PBE/FR/stringent/upf" + else: + pseudo_family_string = PwBaseWorkChain.get_protocol_inputs(protocol)[ + "pseudo_family" + ] + pseudo_family = PseudoFamily.from_string(pseudo_family_string) self.load_from_pseudo_family(pseudo_family) @@ -299,7 +338,7 @@ def _reset_traitlets(self): """Reset the traitlets to the initial state""" self.ecutwfc = 0 self.ecutrho = 0 - self.pseudos = dict() + self.pseudos = {} def _reset(self): """Reset the pseudo setting widgets according to the structure @@ -338,7 +377,7 @@ def _reset(self): pseudo_family = self._get_pseudos_family(self.pseudo_family) except exceptions.NotExistent as exception: self._status_message.message = ( - f"""
ERROR: {str(exception)}
""" + f"""
ERROR: {exception!s}
""" ) return diff --git a/src/aiidalab_qe/app/configuration/workflow.py b/src/aiidalab_qe/app/configuration/workflow.py index 444213630..cba0df521 100644 --- a/src/aiidalab_qe/app/configuration/workflow.py +++ b/src/aiidalab_qe/app/configuration/workflow.py @@ -1,11 +1,13 @@ -# -*- coding: utf-8 -*- """Widgets for the submission of bands work chains. Authors: AiiDAlab team """ + import ipywidgets as ipw -from aiida_quantumespresso.common.types import RelaxType +import traitlets as tl +from aiida import orm +from aiida_quantumespresso.common.types import RelaxType from aiidalab_qe.app.parameters import DEFAULT_PARAMETERS from aiidalab_qe.app.utils import get_entry_items from aiidalab_qe.common.panel import Panel @@ -38,14 +40,6 @@ class WorkChainSettings(Panel): """

Properties

""" ) - properties_help = ipw.HTML( - """
""" - ) - protocol_title = ipw.HTML( """

Protocol

""" @@ -57,6 +51,8 @@ class WorkChainSettings(Panel): with less precision and the "precise" protocol to aim at best accuracy (at the price of longer/costlier calculations).
""" ) + input_structure = tl.Instance(orm.StructureData, allow_none=True) + def __init__(self, **kwargs): # RelaxType: degrees of freedom in geometry optimization self.relax_type = ipw.ToggleButtons( @@ -88,15 +84,34 @@ def __init__(self, **kwargs): value="moderate", ) self.properties = {} + self.reminder_info = {} self.property_children = [ self.properties_title, ipw.HTML("Select which properties to calculate:"), ] entries = get_entry_items("aiidalab_qe.properties", "outline") + setting_entries = get_entry_items("aiidalab_qe.properties", "setting") for name, entry_point in entries.items(): self.properties[name] = entry_point() - self.property_children.append(self.properties[name]) - self.property_children.append(self.properties_help) + self.reminder_info[name] = ipw.HTML() + self.property_children.append( + ipw.HBox([self.properties[name], self.reminder_info[name]]) + ) + + # observer change to update the reminder text + def update_reminder_info(change, name=name): + if change["new"]: + self.reminder_info[ + name + ].value = ( + f"""Customize {name} settings in the panel above if needed.""" + ) + else: + self.reminder_info[name].value = "" + + if name in setting_entries: + self.properties[name].run.observe(update_reminder_info, "value") + self.children = [ self.structure_title, self.structure_help, @@ -130,6 +145,37 @@ def __init__(self, **kwargs): **kwargs, ) + @tl.observe("input_structure") + def _on_input_structure_change(self, change): + """Update the relax type options based on the input structure.""" + structure = change["new"] + if structure is None or structure.pbc != (False, False, False): + self.relax_type.options = [ + ("Structure as is", "none"), + ("Atomic positions", "positions"), + ("Full geometry", "positions_cell"), + ] + # Ensure the value is in the options + if self.relax_type.value not in [ + option[1] for option in self.relax_type.options + ]: + self.relax_type.value = "positions_cell" + + self.properties["bands"].run.disabled = False + elif structure.pbc == (False, False, False): + self.relax_type.options = [ + ("Structure as is", "none"), + ("Atomic positions", "positions"), + ] + # Ensure the value is in the options + if self.relax_type.value not in [ + option[1] for option in self.relax_type.options + ]: + self.relax_type.value = "positions" + + self.properties["bands"].run.value = False + self.properties["bands"].run.disabled = True + def get_panel_value(self): # Work chain settings relax_type = self.relax_type.value @@ -181,6 +227,7 @@ def set_panel_value(self, parameters): def reset(self): """Reset the panel to the default value.""" + self.input_structure = None for key in ["relax_type", "spin_type", "electronic_type"]: getattr(self, key).value = DEFAULT_PARAMETERS["workchain"][key] self.workchain_protocol.value = DEFAULT_PARAMETERS["workchain"]["protocol"] diff --git a/src/aiidalab_qe/app/main.py b/src/aiidalab_qe/app/main.py index 71b12a3b0..471cba3f0 100644 --- a/src/aiidalab_qe/app/main.py +++ b/src/aiidalab_qe/app/main.py @@ -1,23 +1,26 @@ -# -*- coding: utf-8 -*- """The main widget that shows the application in the Jupyter notebook. 
Authors: AiiDAlab team """ import ipywidgets as ipw -from aiida.orm import load_node -from aiidalab_widgets_base import WizardAppWidget, WizardAppWidgetStep +import traitlets as tl +from IPython.display import Javascript, display +from aiida.orm import load_node from aiidalab_qe.app.configuration import ConfigureQeAppWorkChainStep from aiidalab_qe.app.result import ViewQeAppWorkChainStatusAndResultsStep from aiidalab_qe.app.structure import StructureSelectionStep from aiidalab_qe.app.submission import SubmitQeAppWorkChainStep -from aiidalab_qe.common import QeAppWorkChainSelector +from aiidalab_widgets_base import WizardAppWidget, WizardAppWidgetStep class App(ipw.VBox): """The main widget that combines all the application steps together.""" + # The PK or UUID of the work chain node. + process = tl.Union([tl.Unicode(), tl.Int()], allow_none=True) + def __init__(self, qe_auto_setup=True): # Create the application steps self.structure_step = StructureSelectionStep(auto_advance=True) @@ -65,23 +68,27 @@ def __init__(self, qe_auto_setup=True): ("Status & Results", self.results_step), ] ) + # hide the header + self._wizard_app_widget.children[0].layout.display = "none" self._wizard_app_widget.observe(self._observe_selected_index, "selected_index") - # Add process selection header - self.work_chain_selector = QeAppWorkChainSelector( - layout=ipw.Layout(width="auto") + # Add a button to start a new calculation + self.new_work_chains_button = ipw.Button( + description="Start New Calculation", + tooltip="Open a new page to start a separate calculation", + button_style="success", + icon="plus-circle", + layout=ipw.Layout(width="30%"), ) - self.work_chain_selector.observe(self._observe_process_selection, "value") - ipw.dlink( - (self.submit_step, "process"), - (self.work_chain_selector, "value"), - transform=lambda node: None if node is None else node.pk, - ) + def on_button_click(_): + display(Javascript("window.open('./qe.ipynb', '_blank')")) + + self.new_work_chains_button.on_click(on_button_click) super().__init__( children=[ - self.work_chain_selector, + self.new_work_chains_button, self._wizard_app_widget, ] ) @@ -120,7 +127,8 @@ def _observe_selected_index(self, change): ) self.submit_step.external_submission_blockers = blockers - def _observe_process_selection(self, change): + @tl.observe("process") + def _observe_process(self, change): from aiida.orm.utils.serialize import deserialize_unsafe if change["old"] == change["new"]: @@ -137,15 +145,16 @@ def _observe_process_selection(self, change): self.structure_step.manager.viewer.structure = ( process.inputs.structure.get_ase() ) - self.structure_step.confirmed_structure = process.inputs.structure - self.configure_step.state = WizardAppWidgetStep.State.SUCCESS + self.structure_step.structure = process.inputs.structure + self.structure_step.confirm() self.submit_step.process = process + # set ui_parameters # print out error message if yaml format ui_parameters is not reachable ui_parameters = process.base.extras.get("ui_parameters", {}) if ui_parameters and isinstance(ui_parameters, str): ui_parameters = deserialize_unsafe(ui_parameters) self.configure_step.set_configuration_parameters(ui_parameters) - self.configure_step.state = self.configure_step.State.SUCCESS + self.configure_step.confirm() self.submit_step.set_submission_parameters(ui_parameters) self.submit_step.state = self.submit_step.State.SUCCESS diff --git a/src/aiidalab_qe/app/parameters/qeapp.yaml b/src/aiidalab_qe/app/parameters/qeapp.yaml index 613f25dc9..1fba8c032 100644 --- 
a/src/aiidalab_qe/app/parameters/qeapp.yaml +++ b/src/aiidalab_qe/app/parameters/qeapp.yaml @@ -17,13 +17,25 @@ workchain: advanced: pseudo_family: library: SSSP - version: 1.2 + version: 1.3 functional: PBEsol accuracy: efficiency tot_charge: 0 + vdw_corr: none -## Codes +## Computational resources codes: - dos: dos-7.2@localhost - projwfc: projwfc-7.2@localhost - pw: pw-7.2@localhost + dos: + code: dos-7.2@localhost + projwfc: + code: projwfc-7.2@localhost + projwfc_bands: + code: projwfc-7.2@localhost + pw: + code: pw-7.2@localhost + pp: + code: pp-7.2@localhost + xspectra: + code: xspectra-7.2@localhost + hp: + code: hp-7.2@localhost diff --git a/src/aiidalab_qe/app/result/__init__.py b/src/aiidalab_qe/app/result/__init__.py index 020574f33..f9a56019d 100644 --- a/src/aiidalab_qe/app/result/__init__.py +++ b/src/aiidalab_qe/app/result/__init__.py @@ -1,5 +1,6 @@ import ipywidgets as ipw import traitlets as tl + from aiida import orm from aiida.engine import ProcessState from aiida.engine.processes import control @@ -12,6 +13,10 @@ # trigger registration of the viewer widget: from .workchain_viewer import WorkChainViewer # noqa: F401 +PROCESS_COMPLETED = "
<h4>Workflow completed successfully!</h4>"
+PROCESS_EXCEPTED = "<h4>Workflow is excepted!</h4>"
+PROCESS_RUNNING = "<h4>Workflow is running!</h4>"
+


 class ViewQeAppWorkChainStatusAndResultsStep(ipw.VBox, WizardAppWidgetStep):
     process = tl.Unicode(allow_none=True)

@@ -41,12 +46,46 @@ def __init__(self, **kwargs):
             description="Kill workchain",
             tooltip="Kill the below workchain.",
             button_style="danger",
-            icon="window-close",
-            layout=ipw.Layout(width="120px", height="40px"),
+            icon="stop",
+            layout=ipw.Layout(width="120px", display="none", margin="0px 20px 0px 0px"),
         )
         self.kill_button.on_click(self._on_click_kill_button)
-        super().__init__([self.kill_button, self.process_status], **kwargs)
+        self.clean_scratch_button = ipw.Button(
+            description="Clean remote data",
+            tooltip="Clean the remote folders of the workchain.",
+            button_style="danger",
+            icon="trash",
+            layout=ipw.Layout(width="150px", display="none", margin="0px 20px 0px 0px"),
+        )
+        self.clean_scratch_button.on_click(self._on_click_clean_scratch_button)
+        self.update_result_button = ipw.Button(
+            description="Update results tabs",
+            tooltip="Trigger the update of the results tabs.",
+            button_style="success",
+            icon="refresh",
+            layout=ipw.Layout(
+                width="150px", display="block", margin="0px 20px 0px 0px"
+            ),
+        )
+        self.update_result_button.on_click(self._on_click_update_result_button)
+
+        self.process_info = ipw.HTML()
+
+        super().__init__(
+            [
+                self.process_info,
+                ipw.HBox(
+                    children=[
+                        self.kill_button,
+                        self.update_result_button,
+                        self.clean_scratch_button,
+                    ]
+                ),
+                self.process_status,
+            ],
+            **kwargs,
+        )

         self._update_kill_button_layout()

@@ -70,21 +109,33 @@ def _update_state(self):
             ProcessState.WAITING,
         ):
             self.state = self.State.ACTIVE
+            self.process_info.value = PROCESS_RUNNING
         elif (
             process_state in (ProcessState.EXCEPTED, ProcessState.KILLED)
             or process.is_failed
         ):
             self.state = self.State.FAIL
+            self.process_info.value = PROCESS_EXCEPTED
         elif process.is_finished_ok:
             self.state = self.State.SUCCESS
+            self.process_info.value = PROCESS_COMPLETED
+        # Trigger the update of the kill and clean buttons.
+        if self.state in [self.State.SUCCESS, self.State.FAIL]:
+            self._update_kill_button_layout()
+            self._update_clean_scratch_button_layout()

     def _update_kill_button_layout(self):
         """Update the layout of the kill button."""
         # If no process is selected, hide the button.
-        if self.process is None:
+        if self.process is None or self.process == "":
             self.kill_button.layout.display = "none"
         else:
-            self.kill_button.layout.display = "block"
+            process = orm.load_node(self.process)
+            # If the process is terminated, hide the button.
+            if process.is_terminated:
+                self.kill_button.layout.display = "none"
+            else:
+                self.kill_button.layout.display = "block"

         # If the step is not active, there is no point in clicking the button, so disable it.
         # Only enable it if the process is on (RUNNING, CREATED, WAITING).
@@ -93,6 +144,32 @@ def _update_kill_button_layout(self):
         else:
             self.kill_button.disabled = True

+    def _update_clean_scratch_button_layout(self):
+        """Update the layout of the clean scratch button."""
+        # The button is hidden by default; when a new process is loaded, hide it again.
+        if not self.process:
+            self.clean_scratch_button.layout.display = "none"
+        else:
+            process = orm.load_node(self.process)
+            # If the process is terminated, show the button.
+            if process.is_terminated:
+                self.clean_scratch_button.layout.display = "block"
+            else:
+                self.clean_scratch_button.layout.display = "none"
+
+            # If the scratch is already empty, deactivate the button.
+            # Note: this may be slow if the process has many descendants.
+ cleaned_bool = [] + for called_descendant in process.called_descendants: + if isinstance(called_descendant, orm.CalcJobNode): + try: + cleaned_bool.append( + called_descendant.outputs.remote_folder.is_empty + ) + except Exception: + pass + self.clean_scratch_button.disabled = all(cleaned_bool) + def _on_click_kill_button(self, _=None): """callback for the kill button. First kill the process, then update the kill button layout. @@ -103,6 +180,28 @@ def _on_click_kill_button(self, _=None): # update the kill button layout self._update_kill_button_layout() + def _on_click_clean_scratch_button(self, _=None): + """callback for the clean scratch button. + First clean the remote folders, then update the clean button layout. + """ + process = orm.load_node(self.process) + + for called_descendant in process.called_descendants: + if isinstance(called_descendant, orm.CalcJobNode): + try: + called_descendant.outputs.remote_folder._clean() + except Exception: + pass + + # update the kill button layout + self._update_clean_scratch_button_layout() + + def _on_click_update_result_button(self, _=None): + """Trigger the update of the results tabs.""" + # change the node to trigger the update of the view. + self.node_view.node = None + self.node_view.node = orm.load_node(self.process) + @tl.observe("process") def _observe_process(self, _): """Callback for when the process is changed.""" @@ -110,6 +209,7 @@ def _observe_process(self, _): # as the self.state is updated in the _update_state method. self._update_state() self._update_kill_button_layout() + self._update_clean_scratch_button_layout() def _update_node_view(self, change): """Callback for when the a new node is selected.""" diff --git a/src/aiidalab_qe/app/result/summary_viewer.py b/src/aiidalab_qe/app/result/summary_viewer.py index 45880cbc8..c0a64af02 100644 --- a/src/aiidalab_qe/app/result/summary_viewer.py +++ b/src/aiidalab_qe/app/result/summary_viewer.py @@ -1,4 +1,5 @@ import ipywidgets as ipw + from aiida_quantumespresso.workflows.pw.bands import PwBandsWorkChain FUNCTIONAL_LINK_MAP = { @@ -22,6 +23,16 @@ (True, True, True): "xyz", (True, True, False): "xy", (True, False, False): "x", + (False, False, False): "molecule", +} + +VDW_CORRECTION_VERSION = { + 3: "Grimme-D3", + 4: "Grimme-D3BJ", + 5: "Grimme-D3M", + 6: "Grimme-D3MBJ", + "ts-vdw": "Tkatchenko-Scheffler", + "none": "None", } @@ -46,7 +57,9 @@ def generate_report_parameters(qeapp_wc): if "workchain" not in ui_parameters: return {} report = { - "relaxed": ui_parameters["workchain"]["relax_type"], + "relaxed": None + if ui_parameters["workchain"]["relax_type"] == "none" + else ui_parameters["workchain"]["relax_type"], "relax_method": ui_parameters["workchain"]["relax_type"], "electronic_type": ui_parameters["workchain"]["electronic_type"], "material_magnetic": ui_parameters["workchain"]["spin_type"], @@ -94,7 +107,9 @@ def generate_report_parameters(qeapp_wc): energy_cutoff_wfc = pw_parameters["SYSTEM"]["ecutwfc"] energy_cutoff_rho = pw_parameters["SYSTEM"]["ecutrho"] occupation = pw_parameters["SYSTEM"]["occupations"] - scf_kpoints_distance = qeapp_wc.inputs.relax.base.kpoints_distance.value + scf_kpoints_distance = ( + qeapp_wc.inputs.relax.base.kpoints_distance.base.attributes.get("value") + ) report.update( { "energy_cutoff_wfc": energy_cutoff_wfc, @@ -107,9 +122,26 @@ def generate_report_parameters(qeapp_wc): report["degauss"] = pw_parameters["SYSTEM"]["degauss"] report["smearing"] = pw_parameters["SYSTEM"]["smearing"] report["tot_charge"] = 
pw_parameters["SYSTEM"].get("tot_charge", 0.0) + report["vdw_corr"] = VDW_CORRECTION_VERSION.get( + pw_parameters["SYSTEM"].get("dftd3_version"), + pw_parameters["SYSTEM"].get("vdw_corr", "none"), + ) report["periodicity"] = PERIODICITY_MAPPING.get( qeapp_wc.inputs.structure.pbc, "xyz" ) + + # Spin-Oribit coupling + report["spin_orbit"] = pw_parameters["SYSTEM"].get("lspinorb", False) + + # DFT+U + hubbard_dict = ui_parameters["advanced"].pop("hubbard_parameters", None) + if hubbard_dict: + hubbard_parameters = hubbard_dict["hubbard_u"] + report["hubbard_u"] = hubbard_parameters + report["tot_magnetization"] = pw_parameters["SYSTEM"].get( + "tot_magnetization", False + ) + # hard code bands and pdos if "bands" in qeapp_wc.inputs: report["bands_kpoints_distance"] = PwBandsWorkChain.get_protocol_inputs( @@ -117,9 +149,9 @@ def generate_report_parameters(qeapp_wc): )["bands_kpoints_distance"] if "pdos" in qeapp_wc.inputs: - report[ - "nscf_kpoints_distance" - ] = qeapp_wc.inputs.pdos.nscf.kpoints_distance.value + report["nscf_kpoints_distance"] = ( + qeapp_wc.inputs.pdos.nscf.kpoints_distance.base.attributes.get("value") + ) return report @@ -127,11 +159,11 @@ def _generate_report_html(report): """Read from the bulider parameters and generate a html for reporting the inputs for the `QeAppWorkChain`. """ - from importlib import resources + from importlib.resources import files from jinja2 import Environment - from aiidalab_qe.app import static + from aiidalab_qe.app.static import styles, templates def _fmt_yes_no(truthy): return "Yes" if truthy else "No" @@ -142,8 +174,8 @@ def _fmt_yes_no(truthy): "fmt_yes_no": _fmt_yes_no, } ) - template = resources.read_text(static, "workflow_summary.jinja") - style = resources.read_text(static, "style.css") + template = files(templates).joinpath("workflow_summary.jinja").read_text() + style = files(styles).joinpath("style.css").read_text() report = {key: value for key, value in report.items() if value is not None} return env.from_string(template).render(style=style, **report) diff --git a/src/aiidalab_qe/app/result/utils/__init__.py b/src/aiidalab_qe/app/result/utils/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/aiidalab_qe/app/result/utils/download_data.py b/src/aiidalab_qe/app/result/utils/download_data.py new file mode 100644 index 000000000..ae276c437 --- /dev/null +++ b/src/aiidalab_qe/app/result/utils/download_data.py @@ -0,0 +1,183 @@ +import base64 +import pathlib +import tempfile +from threading import Thread + +import ipywidgets as ipw + + +class DownloadDataWidget(ipw.VBox): + def __init__(self, workchain_node): + # + self.download_archive_button = ipw.Button( + description="Download AiiDA archive.aiida data", + icon="download", + button_style="primary", + disabled=False, + tooltip="Download the AiiDA archive of the simulation, ready to be shared or imported into another AiiDA profile", + layout=ipw.Layout(width="auto"), + ) + self.download_archive_button.on_click(self._download_data_thread) + + self.download_raw_button = ipw.Button( + description="Download AiiDA raw data (zip format)", + icon="download", + button_style="primary", + disabled=False, + tooltip="Download the raw data of the simulation, organized in intuitive directory paths.", + layout=ipw.Layout(width="auto"), + ) + try: + # check that we can import the ProcessDumper (not implemented in old AiiDA versions) + # pre-commit: allow any unused imports in the next line + from aiida.tools.dumping.processes import ProcessDumper # noqa: F401 + + 
self.download_raw_button.on_click(self._download_data_thread) + dumper_is_available = True + except Exception: + dumper_is_available = False + + self.download_raw_button.disabled = not dumper_is_available + + self.node = workchain_node + + super().__init__( + children=[ + ipw.HTML( + "
<h3>Download the data</h3>
" + "It is possible to download raw data (i.e. input and output files) and/or the AiiDA archive (ready to be shared or imported into another AiiDA profile)" + ), + ipw.HBox( + children=[self.download_raw_button], + layout=ipw.Layout(width="700px"), # Set the desired width here + ), + ipw.HBox( + children=[self.download_archive_button], + layout=ipw.Layout(width="700px"), # Set the desired width here + ), + ], + ) + + if not dumper_is_available: + self.children[1].children += ( + ipw.HTML( + "
<b>The raw data download is not available because the AiiDA version is too old.</b>"
+                ),
+            )
+
+    def _download_data_thread(self, button_instance):
+        thread = Thread(target=lambda: self._download_data(button_instance))
+        thread.start()
+
+    def _download_data(self, button_instance):
+        """
+        Handle the download process when a download button is clicked.
+        A temporary "Downloading data..." message is shown next to the button,
+        the appropriate bitstream (archive or raw data, depending on the button's
+        description) is generated from the node, the download is initiated with
+        a filename based on the node's primary key, and the message is then removed.
+
+        Args:
+            button_instance (ipywidgets.Button): The button instance that was clicked.
+        """
+
+        if "archive" in button_instance.description:
+            what = "archive"
+            filename = f"export_qeapp_calculation_pk_{self.node.pk}.aiida"
+            box = self.children[2]
+        else:
+            what = "raw"
+            filename = f"export_{self.node.pk}_raw.zip"
+            box = self.children[1]
+
+        box.children += (ipw.HTML("Downloading data..."),)
+
+        data = self.produce_bitestream(self.node, what=what)
+        self._download(payload=data, filename=filename)
+        del data
+        box.children = box.children[:1]
+
+    @staticmethod
+    def _download(payload, filename):
+        from IPython.display import Javascript, display
+
+        javas = Javascript(
+            f"""
+            var link = document.createElement('a');
+            link.href = 'data:application;base64,{payload}';
+            link.download = '{filename}';
+            document.body.appendChild(link);
+            link.click();
+            document.body.removeChild(link);
+            """
+        )
+        display(javas)
+
+    @staticmethod
+    def produce_bitestream(node, what="archive"):
+        """
+        Produce a base64-encoded bitstream of the specified node data.
+
+        Parameters:
+            node (orm.Node): The AiiDA node to be processed.
+            what (str): The type of data to produce. Options are "archive" or "raw".
+                Defaults to "archive".
+
+        Returns:
+            str: A base64-encoded string representing the requested data.
+
+        Raises:
+            KeyError: If the 'what' parameter is not "archive" or "raw".
+
+        The function supports two modes:
+        1. "archive": Creates an AiiDA archive of the node.
+        2. "raw": Dumps the raw data of the process node into a zip file.
+
+        NB: The function uses a temporary directory to store the data before converting it to a base64 string.
+        Moreover, the node has to be reloaded because otherwise SQLAlchemy will complain about the DB request
+        not being in the same thread (the notebook session) as the original node.
+ """ + from aiida import orm + + reloaded_node = orm.load_node(node.pk) + with tempfile.TemporaryDirectory() as dirpath: + if what == "archive": + from aiida.tools.archive.create import create_archive + + path = pathlib.Path(dirpath) / "archive.aiida" + create_archive( + entities=[reloaded_node], + filename=path, + call_calc_backward=False, + call_work_backward=False, + create_backward=False, + ) + with open(path, "rb") as f: + zip_data = f.read() + + # Convert the ZIP data to base64 so it can be used as a payload in JavaScript + bitestream = base64.b64encode(zip_data).decode() + + elif what == "raw": + import shutil + + from aiida.tools.dumping.processes import ProcessDumper + + path = pathlib.Path(dirpath) / "raw_data" + output_zip_path = pathlib.Path(dirpath) / "raw_data.zip" + dumper = ProcessDumper() + dumper.dump(process_node=reloaded_node, output_path=path) + # writing files to a zipfile + shutil.make_archive(pathlib.Path(dirpath) / "raw_data", "zip", path) + + with open(output_zip_path, "rb") as f: + raw_data = f.read() + + # Convert the raw_data to base64 so it can be used as a payload in JavaScript + bitestream = base64.b64encode(raw_data).decode() + + else: + raise KeyError("You should ask for `archive` or `raw` only!") + + return bitestream diff --git a/src/aiidalab_qe/app/result/workchain_viewer.py b/src/aiidalab_qe/app/result/workchain_viewer.py index b90d76586..48cf9f734 100644 --- a/src/aiidalab_qe/app/result/workchain_viewer.py +++ b/src/aiidalab_qe/app/result/workchain_viewer.py @@ -1,37 +1,39 @@ import shutil import typing as t -from importlib import resources +from importlib.resources import files from pathlib import Path from tempfile import TemporaryDirectory import ipywidgets as ipw import traitlets as tl -from aiida import orm -from aiida.cmdline.utils.common import get_workchain_report -from aiida.common import LinkType -from aiida.orm.utils.serialize import deserialize_unsafe -from aiidalab_widgets_base import ProcessMonitor, register_viewer_widget -from aiidalab_widgets_base.viewers import StructureDataViewer from filelock import FileLock, Timeout from IPython.display import HTML, display from jinja2 import Environment -from aiidalab_qe.app import static +from aiida import orm +from aiida.cmdline.utils.common import get_workchain_report +from aiida.common import LinkType +from aiida.orm.utils.serialize import deserialize_unsafe +from aiidalab_qe.app.static import styles, templates from aiidalab_qe.app.utils import get_entry_items +from aiidalab_widgets_base import register_viewer_widget +from aiidalab_widgets_base.viewers import StructureDataViewer from .summary_viewer import SummaryView +from .utils.download_data import DownloadDataWidget @register_viewer_widget("process.workflow.workchain.WorkChainNode.") class WorkChainViewer(ipw.VBox): _results_shown = tl.Set() + process_uuid = tl.Unicode(allow_none=True) def __init__(self, node, **kwargs): if node.process_label != "QeAppWorkChain": super().__init__() return - self.node = node + self.process_uuid = node.uuid # In the new version of the plugin, the ui_parameters are stored as a yaml string # which is then converted to a dictionary ui_parameters = node.base.extras.get("ui_parameters", {}) @@ -41,12 +43,12 @@ def __init__(self, node, **kwargs): self.title = ipw.HTML( f"""
-            <h4>QE App Workflow (pk: {self.node.pk}) &mdash;
-            {self.node.inputs.structure.get_formula()}</h4>
+            <h4>QE App Workflow (pk: {node.pk}) &mdash;
+            {node.inputs.structure.get_formula()}</h4>
""" ) - self.workflows_summary = SummaryView(self.node) + self.workflows_summary = SummaryView(node) self.summary_tab = ipw.VBox(children=[self.workflows_summary]) # Only the summary tab is shown by default @@ -59,7 +61,7 @@ def __init__(self, node, **kwargs): self.results = {} entries = get_entry_items("aiidalab_qe.properties", "result") for identifier, entry_point in entries.items(): - result = entry_point(self.node) + result = entry_point(node) self.results[identifier] = result self.results[identifier].identifier = identifier @@ -89,23 +91,31 @@ def toggle_camera(): children=[self.title, self.result_tabs], **kwargs, ) - self._process_monitor = ProcessMonitor( - process=self.node, - callbacks=[ - self._update_view, - ], - ) + # self.process_monitor = ProcessMonitor( + # timeout=1.0, + # on_sealed=[ + # self._update_view, + # ], + # ) + # ipw.dlink((self, "process_uuid"), (self.process_monitor, "value")) + + @property + def node(self): + """Load the workchain node using the process_uuid. + Because the workchain node is used in another thread inside the process monitor, + we need to load the node from the database, instead of passing the node object. + Otherwise, we will get a "Instance is not persistent" error. + """ + return orm.load_node(self.process_uuid) def _update_view(self): with self.hold_trait_notifications(): - if self.node.is_finished: + node = self.node + if node.is_finished: self._show_workflow_output() # if the structure is present in the workchain, # the structure tab will be added. - if ( - "structure" not in self._results_shown - and "structure" in self.node.outputs - ): + if "structure" not in self._results_shown and "structure" in node.outputs: self._show_structure() self.result_tabs.children += (self.structure_tab,) # index of the last tab @@ -119,7 +129,7 @@ def _update_view(self): if result.identifier not in self._results_shown: # check if the all required results are in the outputs results_ready = [ - label in self.node.outputs for label in result.workchain_labels + label in node.outputs for label in result.workchain_labels ] if all(results_ready): result._update_view() @@ -147,7 +157,9 @@ class WorkChainOutputs(ipw.VBox): _busy = tl.Bool(read_only=True) def __init__(self, node, export_dir=None, **kwargs): - self.export_dir = Path.cwd().joinpath("exports") + if export_dir is None: + export_dir = Path.cwd().joinpath("exports") + self.export_dir = export_dir if node.process_label != "QeAppWorkChain": raise KeyError(str(node.node_type)) @@ -165,7 +177,7 @@ def __init__(self, node, export_dir=None, **kwargs): icon="download", ) self._download_archive_button.on_click(self._download_archive) - self._download_button_container = ipw.Box([self._download_archive_button]) + self._download_button_widget = DownloadDataWidget(workchain_node=self.node) if node.exit_status != 0: title = ipw.HTML( @@ -173,8 +185,8 @@ def __init__(self, node, export_dir=None, **kwargs): ) final_calcjob = self._get_final_calcjob(node) env = Environment() - template = resources.read_text(static, "workflow_failure.jinja") - style = resources.read_text(static, "style.css") + template = files(templates).joinpath("workflow_failure.jinja").read_text() + style = files(styles).joinpath("style.css").read_text() output = ipw.HTML( env.from_string(template).render( style=style, @@ -188,8 +200,8 @@ def __init__(self, node, export_dir=None, **kwargs): super().__init__( children=[ - ipw.HBox( - children=[title, self._download_button_container], + ipw.VBox( + children=[self._download_button_widget, title], 
layout=ipw.Layout(justify_content="space-between", margin="10px"), ), output, @@ -233,7 +245,7 @@ def _download_archive(self, _): finally: self.set_trait("_busy", False) - id = f"dl_{self.node.uuid}" + link_id = f"dl_{self.node.uuid}" display( HTML( @@ -241,7 +253,7 @@ def _download_archive(self, _): diff --git a/src/aiidalab_qe/app/static/styles/README.md b/src/aiidalab_qe/app/static/styles/README.md new file mode 100644 index 000000000..2dd79ba7c --- /dev/null +++ b/src/aiidalab_qe/app/static/styles/README.md @@ -0,0 +1,13 @@ +# Stylesheets for the Quantum ESPRESSO app + +This folder contains `css` stylesheets. These can be loaded from the styles folder using + +```python +from aiidalab_widgets_base.utils.loaders import load_css + +load_css(css_path="src/aiidalab_qe/app/static/styles") # load all stylesheets in the styles folder + +# or + +load_css(css_path="src/aiidalab_qe/app/static/styles/.css") # load a single stylesheet +``` diff --git a/src/aiidalab_qe/app/static/styles/__init__.py b/src/aiidalab_qe/app/static/styles/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/aiidalab_qe/app/static/styles/custom.css b/src/aiidalab_qe/app/static/styles/custom.css new file mode 100644 index 000000000..0cf6e78a3 --- /dev/null +++ b/src/aiidalab_qe/app/static/styles/custom.css @@ -0,0 +1,48 @@ +/* + Override Jupyter width limitation to + make apps take full notebook width +*/ +.output_subarea { + max-width: none !important; +} +/* end override */ + +.app-header { + margin-bottom: 1em; +} + +.logo { + text-align: center; +} + +#subtitle { + text-align: center; + font-style: italic; +} + +.info-toggles { + margin: 0 auto; +} +.info-toggles button { + width: 100px; + margin: 1em 0.5em; +} +.info-toggles button:focus { + outline: none !important; +} + +.guide ol { + list-style: none; +} +.guide p:not(:last-of-type) { + margin-bottom: 0.5em; +} + +#loading { + text-align: center; + font-size: large; +} + +footer { + text-align: right; +} diff --git a/src/aiidalab_qe/app/static/styles/infobox.css b/src/aiidalab_qe/app/static/styles/infobox.css new file mode 100644 index 000000000..a25861e42 --- /dev/null +++ b/src/aiidalab_qe/app/static/styles/infobox.css @@ -0,0 +1,15 @@ +.info-box { + display: none; + margin: 2px; + padding: 1em; + border: 3px solid orangered; + background-color: #ffedcc; + border-radius: 1em; + -webkit-border-radius: 1em; + -moz-border-radius: 1em; + -ms-border-radius: 1em; + -o-border-radius: 1em; +} +.info-box p { + line-height: 24px; +} diff --git a/src/aiidalab_qe/app/static/style.css b/src/aiidalab_qe/app/static/styles/style.css similarity index 100% rename from src/aiidalab_qe/app/static/style.css rename to src/aiidalab_qe/app/static/styles/style.css diff --git a/src/aiidalab_qe/app/static/templates/__init__.py b/src/aiidalab_qe/app/static/templates/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/aiidalab_qe/app/static/templates/about.jinja b/src/aiidalab_qe/app/static/templates/about.jinja new file mode 100644 index 000000000..0b617a231 --- /dev/null +++ b/src/aiidalab_qe/app/static/templates/about.jinja @@ -0,0 +1,10 @@ +
+  <p>
+    The Quantum ESPRESSO app (or QE app for short) is a graphical front end for
+    calculating materials properties using Quantum ESPRESSO (QE). Each property
+    is calculated by workflows powered by the AiiDA engine, and maintained in
+    the aiida-quantumespresso plugin for AiiDA, along with many other plugins
+    developed by the community.
+  </p>
+</div>
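The summary viewer earlier in this diff loads these package-data templates with `importlib.resources.files` plus a `jinja2.Environment`; the sketch below shows the same loading pattern applied to the `about.jinja` template added above. It is an illustration of the pattern under the assumption that the `aiidalab_qe` package is importable, not code from this PR.

```python
# Minimal sketch of the template-loading pattern this diff migrates to
# (importlib.resources.files instead of the deprecated resources.read_text).
from importlib.resources import files

from jinja2 import Environment

from aiidalab_qe.app.static import templates

# "about.jinja" is the template added above; it takes no render variables.
template = files(templates).joinpath("about.jinja").read_text()
html = Environment().from_string(template).render()
print(html)
```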
diff --git a/src/aiidalab_qe/app/static/templates/guide.jinja b/src/aiidalab_qe/app/static/templates/guide.jinja new file mode 100644 index 000000000..14b10f914 --- /dev/null +++ b/src/aiidalab_qe/app/static/templates/guide.jinja @@ -0,0 +1,42 @@ +
+  <p>
+    The QE app allows you to calculate properties in a simple 4-step process:
+  </p>
+  <ol>
+    <li>πŸ” Step 1: Select the structure you want to run.</li>
+    <li>βš™οΈ Step 2: Select the properties you are interested in.</li>
+    <li>πŸ’» Step 3: Choose the computational resources you want to run on.</li>
+    <li>πŸš€ Step 4: Submit your workflow.</li>
+  </ol>
+  <p>
+    New users can go straight to the first step and select their structure.
+  </p>
+  <p>
+    Completed workflows can be selected at the top of the app.
+  </p>
+  <p>
+    You can also check out the basic tutorial to get started with the
+    Quantum ESPRESSO app, or try out the advanced tutorial to learn
+    additional features offered by the app.
+  </p>
+  <p>
+    For a more in-depth dive into the app's features, please refer to the
+    how-to guides.
+  </p>
+</div>
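The guide's statement that completed workflows "can be selected at the top of the app" is backed by the new `process` traitlet on `App` in `main.py` above (a `tl.Union` of `Unicode` and `Int`). Below is a hedged sketch of driving it from a notebook cell; the pk `123` is a placeholder, and a configured AiiDA profile is assumed.

```python
# Sketch: re-open a previously submitted QeAppWorkChain in the wizard by
# setting the new `process` traitlet (accepts a pk as int or a UUID as str).
from aiida import load_profile

from aiidalab_qe.app.main import App

load_profile()

app = App(qe_auto_setup=False)
app.process = 123  # placeholder pk; triggers _observe_process to restore all steps
app  # displaying the widget is only meaningful inside a Jupyter notebook
```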
diff --git a/src/aiidalab_qe/app/static/workflow_failure.jinja b/src/aiidalab_qe/app/static/templates/workflow_failure.jinja similarity index 92% rename from src/aiidalab_qe/app/static/workflow_failure.jinja rename to src/aiidalab_qe/app/static/templates/workflow_failure.jinja index fe4ea7e00..b7adb3a8a 100644 --- a/src/aiidalab_qe/app/static/workflow_failure.jinja +++ b/src/aiidalab_qe/app/static/templates/workflow_failure.jinja @@ -1,9 +1,3 @@ - - - -
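For context on what fills this failure template: `workchain_viewer.py` above imports `get_workchain_report` from `aiida.cmdline.utils.common` and passes the report into the failure view. A small sketch of fetching that report directly, assuming a configured AiiDA profile; the pk `123` is a placeholder for a failed `QeAppWorkChain`.

```python
# Sketch: obtain the engine report that the failure template displays.
from aiida import load_profile, orm
from aiida.cmdline.utils.common import get_workchain_report

load_profile()

node = orm.load_node(123)  # placeholder pk of a failed QeAppWorkChain
print(get_workchain_report(node, levelname="REPORT"))
```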
diff --git a/src/aiidalab_qe/app/static/workflow_summary.jinja b/src/aiidalab_qe/app/static/templates/workflow_summary.jinja similarity index 82% rename from src/aiidalab_qe/app/static/workflow_summary.jinja rename to src/aiidalab_qe/app/static/templates/workflow_summary.jinja index 5ff5d32d8..01743dca4 100644 --- a/src/aiidalab_qe/app/static/workflow_summary.jinja +++ b/src/aiidalab_qe/app/static/templates/workflow_summary.jinja @@ -1,9 +1,3 @@ - - - -
@@ -100,10 +94,35 @@
       <tr>
         <td>Total Charge</td>
         <td>{{ tot_charge }}</td>
       </tr>
+      <tr>
+        <td>Van der Waals Correction</td>
+        <td>{{ vdw_corr }}</td>
+      </tr>
+      {% if tot_magnetization %}
+      <tr>
+        <td>Total magnetization</td>
+        <td>{{ tot_magnetization }}</td>
+      </tr>
+      {% else %}
       <tr>
         <td>Initial Magnetic Moments</td>
         <td>{{ initial_magnetic_moments }}</td>
       </tr>
+      {% endif %}
+      {% if hubbard_u %}
+      <tr>
+        <td>DFT+U</td>
+        <td>{{ hubbard_u }}</td>
+      </tr>
+      {% endif %}
+      {% if spin_orbit %}
+      <tr>
+        <td>Spin-Orbit</td>
+        <td>{{ spin_orbit }}</td>
+      </tr>
+      {% endif %}
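The new `vdw_corr` row is filled by `generate_report_parameters` in `summary_viewer.py` above: the integer `dftd3_version` takes precedence, and the plain `vdw_corr` string is the fallback. A self-contained worked example of that lookup follows; the `system` dict is illustrative, not output of a real calculation.

```python
# Worked example of the vdw_corr lookup added in summary_viewer.py above.
VDW_CORRECTION_VERSION = {
    3: "Grimme-D3",
    4: "Grimme-D3BJ",
    5: "Grimme-D3M",
    6: "Grimme-D3MBJ",
    "ts-vdw": "Tkatchenko-Scheffler",
    "none": "None",
}

system = {"vdw_corr": "dft-d3", "dftd3_version": 4}  # illustrative SYSTEM namelist
label = VDW_CORRECTION_VERSION.get(
    system.get("dftd3_version"),  # preferred: the specific Grimme-D3 variant
    system.get("vdw_corr", "none"),  # fallback: the raw vdw_corr string
)
assert label == "Grimme-D3BJ"
```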
diff --git a/src/aiidalab_qe/app/static/welcome.jinja b/src/aiidalab_qe/app/static/welcome.jinja
deleted file mode 100644
index cdf8830a3..000000000
--- a/src/aiidalab_qe/app/static/welcome.jinja
+++ /dev/null
@@ -1,28 +0,0 @@
-<div id="welcome">
-  <h3>Welcome to the AiiDAlab Quantum ESPRESSO app! πŸ‘‹</h3>
-  <p>
-    The Quantum ESPRESSO app (or QE app for short) is a graphical front end for calculating materials properties using Quantum ESPRESSO (QE).
-    Each property is calculated by workflows powered by the AiiDA engine, and maintained in the Quantum ESPRESSO plugin for AiiDA.
-  </p>
-  <p>The QE app allows you to calculate properties in a simple 4-step process:</p>
-  <ol>
-    <li>πŸ” Step 1: Select the structure you want to run.</li>
-    <li>βš™οΈ Step 2: Select the properties you are interested in.</li>
-    <li>πŸ’» Step 3: Choose the computational resources you want to run on.</li>
-    <li>πŸš€ Step 4: Submit your workflow.</li>
-  </ol>
-  <p>
-    New users can go straight to the first step and select their structure.
-    Once you've already run some calculations, you can select the corresponding workflow using the dropdown below.
-  </p>
-  <p>Happy computing! πŸŽ‰</p>
-</div>
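The structure step in the next file broadens the database browser to three node types via `query_types`. Roughly, this is the query it enables; the sketch below is written under the assumption of a configured AiiDA profile and is not code from this PR.

```python
# Sketch: the node types browsable in the "AiiDA database" importer below.
from aiida import load_profile, orm
from aiida_quantumespresso.data.hubbard_structure import HubbardStructureData

load_profile()

qb = orm.QueryBuilder()
qb.append((orm.StructureData, orm.CifData, HubbardStructureData))
print(f"{qb.count()} structure-like nodes available in the browser")
```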
diff --git a/src/aiidalab_qe/app/structure/__init__.py b/src/aiidalab_qe/app/structure/__init__.py
index a1b915454..cb5940ccf 100644
--- a/src/aiidalab_qe/app/structure/__init__.py
+++ b/src/aiidalab_qe/app/structure/__init__.py
@@ -5,9 +5,13 @@
 import pathlib

-import aiida
 import ipywidgets as ipw
-import traitlets
+import traitlets as tl
+
+import aiida
+from aiida_quantumespresso.data.hubbard_structure import HubbardStructureData
+from aiidalab_qe.app.utils import get_entry_items
+from aiidalab_qe.common import AddingTagsEditor
 from aiidalab_widgets_base import (
     BasicCellEditor,
     BasicStructureEditor,
@@ -19,23 +23,22 @@
     WizardAppWidgetStep,
 )

-from aiidalab_qe.app.utils import get_entry_items
-from aiidalab_qe.common import AddingTagsEditor
-
 # The Examples list of (name, file) tuples currently passed to
 # StructureExamplesWidget.
 file_path = pathlib.Path(__file__).parent
 Examples = [
-    ("Silicon (diamond)", file_path / "examples" / "Si.xyz"),
-    ("Silicon oxide", file_path / "examples" / "SiO2.xyz"),
-    ("Diamond", file_path / "examples" / "diamond.cif"),
-    ("Gallium arsenide", file_path / "examples" / "GaAs.xyz"),
-    ("Gold (fcc)", file_path / "examples" / "Au.cif"),
-    ("Cobalt (hcp)", file_path / "examples" / "Co.cif"),
+    ("Bulk silicon (primitive cell)", file_path / "examples" / "Si.cif"),
+    ("Silicon oxide (alpha quartz)", file_path / "examples" / "SiO2.cif"),
+    ("Diamond (primitive cell)", file_path / "examples" / "Diamond.cif"),
+    ("Gallium arsenide (primitive cell)", file_path / "examples" / "GaAs.cif"),
+    ("Gold (conventional cell)", file_path / "examples" / "Au.cif"),
+    ("Cobalt (primitive cell)", file_path / "examples" / "Co.cif"),
+    ("Lithium carbonate", file_path / "examples" / "Li2CO3.cif"),
+    ("Phenylacetylene molecule", file_path / "examples" / "Phenylacetylene.xyz"),
+    ("ETFA molecule", file_path / "examples" / "ETFA.xyz"),
+    ("LiCoO2", file_path / "examples" / "LiCoO2.cif"),
 ]

-OptimadeQueryWidget.title = "OPTIMADE"  # monkeypatch
-

 class StructureSelectionStep(ipw.VBox, WizardAppWidgetStep):
     """Integrated widget for the selection and editing of a structure.
@@ -44,14 +47,21 @@ class StructureSelectionStep(ipw.VBox, WizardAppWidgetStep):
     structure importers and the structure editors can be extended by plugins.
     """

-    structure = traitlets.Instance(aiida.orm.StructureData, allow_none=True)
-    confirmed_structure = traitlets.Instance(aiida.orm.StructureData, allow_none=True)
+    structure = tl.Instance(aiida.orm.StructureData, allow_none=True)
+    confirmed_structure = tl.Instance(aiida.orm.StructureData, allow_none=True)

     def __init__(self, description=None, **kwargs):
         importers = [
             StructureUploadWidget(title="Upload file"),
             OptimadeQueryWidget(embedded=False),
-            StructureBrowserWidget(title="AiiDA database"),
+            StructureBrowserWidget(
+                title="AiiDA database",
+                query_types=(
+                    aiida.orm.StructureData,
+                    aiida.orm.CifData,
+                    HubbardStructureData,
+                ),
+            ),
             StructureExamplesWidget(title="From Examples", examples=Examples),
         ]
         # add plugin specific structure importers
@@ -78,9 +88,7 @@ def __init__(self, description=None, **kwargs):
         description = ipw.HTML(
             """

         <p>Select a structure from one of the following sources and then click
-        "Confirm" to go to the next step.</p>
-        <p>Currently only three-dimensional structures are
-        supported.</p>
+        "Confirm" to go to the next step.</p>

""" ) self.description = description @@ -115,10 +123,10 @@ def __init__(self, description=None, **kwargs): self.message_area, self.confirm_button, ], - **kwargs + **kwargs, ) - @traitlets.default("state") + @tl.default("state") def _default_state(self): return self.State.INIT @@ -134,7 +142,7 @@ def _update_state(self): else: self.state = self.State.SUCCESS - @traitlets.observe("structure") + @tl.observe("structure") def _observe_structure(self, change): structure = change["new"] with self.hold_trait_notifications(): @@ -145,12 +153,12 @@ def _observe_structure(self, change): self.structure_name_text.value = str(self.structure.get_formula()) self._update_state() - @traitlets.observe("confirmed_structure") + @tl.observe("confirmed_structure") def _observe_confirmed_structure(self, _): with self.hold_trait_notifications(): self._update_state() - @traitlets.observe("state") + @tl.observe("state") def _observe_state(self, change): with self.hold_trait_notifications(): state = change["new"] diff --git a/src/aiidalab_qe/app/structure/examples/Au.cif b/src/aiidalab_qe/app/structure/examples/Au.cif index 3744f75b0..cf9007556 100644 --- a/src/aiidalab_qe/app/structure/examples/Au.cif +++ b/src/aiidalab_qe/app/structure/examples/Au.cif @@ -1,36 +1,29 @@ -#====================================================================== +data_image0 +_chemical_formula_structural Au4 +_chemical_formula_sum "Au4" +_cell_length_a 4.078 +_cell_length_b 4.078 +_cell_length_c 4.078 +_cell_angle_alpha 90 +_cell_angle_beta 90 +_cell_angle_gamma 90 -# CRYSTAL DATA - -#---------------------------------------------------------------------- - -data_VESTA_phase_1 - - -_pd_phase_name 'Au ' -_cell_length_a 4.17410 -_cell_length_b 4.17410 -_cell_length_c 4.17410 -_cell_angle_alpha 90 -_cell_angle_beta 90 -_cell_angle_gamma 90 -_symmetry_space_group_name_H-M 'P 1' -_symmetry_Int_Tables_number 1 +_space_group_name_H-M_alt "P 1" +_space_group_IT_number 1 loop_ -_symmetry_equiv_pos_as_xyz - 'x, y, z' + _space_group_symop_operation_xyz + 'x, y, z' loop_ - _atom_site_label - _atom_site_occupancy - _atom_site_fract_x - _atom_site_fract_y - _atom_site_fract_z - _atom_site_thermal_displace_type - _atom_site_B_iso_or_equiv - _atom_site_type_symbol - Au1 1.0 0 0 0 Biso 1.000 Au - Au2 1.0 0 0.50000 0.50000 Biso 1.000 Au - Au3 1.0 0.50000 0 0.50000 Biso 1.000 Au - Au4 1.0 0.50000 0.50000 0 Biso 1.000 Au + _atom_site_type_symbol + _atom_site_label + _atom_site_symmetry_multiplicity + _atom_site_fract_x + _atom_site_fract_y + _atom_site_fract_z + _atom_site_occupancy + Au Au1 1.0 0.00000 0.00000 0.00000 1.0000 + Au Au2 1.0 0.00000 0.50000 0.50000 1.0000 + Au Au3 1.0 0.50000 0.00000 0.50000 1.0000 + Au Au4 1.0 0.50000 0.50000 0.00000 1.0000 diff --git a/src/aiidalab_qe/app/structure/examples/Co.cif b/src/aiidalab_qe/app/structure/examples/Co.cif index 40bc71a66..a27239aa9 100644 --- a/src/aiidalab_qe/app/structure/examples/Co.cif +++ b/src/aiidalab_qe/app/structure/examples/Co.cif @@ -1,34 +1,27 @@ -#====================================================================== +data_image0 +_chemical_formula_structural Co2 +_chemical_formula_sum "Co2" +_cell_length_a 2.507 +_cell_length_b 2.507 +_cell_length_c 4.069 +_cell_angle_alpha 90 +_cell_angle_beta 90 +_cell_angle_gamma 120 -# CRYSTAL DATA - -#---------------------------------------------------------------------- - -data_VESTA_phase_1 - - -_pd_phase_name 'Co ' -_cell_length_a 2.49680 -_cell_length_b 2.49680 -_cell_length_c 4.03081 -_cell_angle_alpha 90 -_cell_angle_beta 90 -_cell_angle_gamma 
60.00000 -_symmetry_space_group_name_H-M 'P 1' -_symmetry_Int_Tables_number 1 +_space_group_name_H-M_alt "P 1" +_space_group_IT_number 1 loop_ -_symmetry_equiv_pos_as_xyz - 'x, y, z' + _space_group_symop_operation_xyz + 'x, y, z' loop_ - _atom_site_label - _atom_site_occupancy - _atom_site_fract_x - _atom_site_fract_y - _atom_site_fract_z - _atom_site_thermal_displace_type - _atom_site_B_iso_or_equiv - _atom_site_type_symbol - Co1 1.0 0 0 0 Biso 1.000 Co - Co2 1.0 0.33333 0.33333 0.50000 Biso 1.000 Co + _atom_site_type_symbol + _atom_site_label + _atom_site_symmetry_multiplicity + _atom_site_fract_x + _atom_site_fract_y + _atom_site_fract_z + _atom_site_occupancy + Co Co1 1.0 0.00000 0.00000 0.00000 1.0000 + Co Co2 1.0 0.33333 0.66667 0.50000 1.0000 diff --git a/src/aiidalab_qe/app/structure/examples/Diamond.cif b/src/aiidalab_qe/app/structure/examples/Diamond.cif new file mode 100644 index 000000000..c21e6ac3e --- /dev/null +++ b/src/aiidalab_qe/app/structure/examples/Diamond.cif @@ -0,0 +1,27 @@ +data_image0 +_chemical_formula_structural C2 +_chemical_formula_sum "C2" +_cell_length_a 2.52225 +_cell_length_b 2.52225 +_cell_length_c 2.52225 +_cell_angle_alpha 60 +_cell_angle_beta 60 +_cell_angle_gamma 60 + +_space_group_name_H-M_alt "P 1" +_space_group_IT_number 1 + +loop_ + _space_group_symop_operation_xyz + 'x, y, z' + +loop_ + _atom_site_type_symbol + _atom_site_label + _atom_site_symmetry_multiplicity + _atom_site_fract_x + _atom_site_fract_y + _atom_site_fract_z + _atom_site_occupancy + C C1 1.0 0.00000 0.00000 0.00000 1.0000 + C C2 1.0 0.25000 0.25000 0.25000 1.0000 diff --git a/src/aiidalab_qe/app/structure/examples/ETFA.xyz b/src/aiidalab_qe/app/structure/examples/ETFA.xyz new file mode 100644 index 000000000..798a6382f --- /dev/null +++ b/src/aiidalab_qe/app/structure/examples/ETFA.xyz @@ -0,0 +1,16 @@ +14 +Lattice="12.30241974 0.0 0.0 0.0 14.640175719999998 0.0 0.0 0.0 15.46505195" Properties=species:S:1:pos:R:3 pbc="T T T" +C 6.21899370 8.39832269 5.55679337 +C 6.34421988 8.42035817 7.11190719 +C 6.34626493 7.04152238 9.04380147 +C 6.24209921 5.56976459 9.37839976 +F 6.36233512 9.64017572 5.05341708 +F 7.17124959 7.60031062 5.00000000 +F 5.00000000 7.92279567 5.17742669 +O 6.50098824 9.43606057 7.75095904 +O 6.24663056 7.16890953 7.58195838 +H 7.30241974 7.48055705 9.35298092 +H 5.53775445 7.63550949 9.48679926 +H 5.28506039 5.15498764 9.04352071 +H 7.05371060 5.00000000 8.91281931 +H 6.31087194 5.44012134 10.46505195 diff --git a/src/aiidalab_qe/app/structure/examples/GaAs.cif b/src/aiidalab_qe/app/structure/examples/GaAs.cif new file mode 100644 index 000000000..827bba710 --- /dev/null +++ b/src/aiidalab_qe/app/structure/examples/GaAs.cif @@ -0,0 +1,29 @@ + +########################################################################## +# Crystallographic Information Format file +# Produced by PyCifRW module +# From Materials cloud: https://mc3d.materialscloud.org/#/details/AsGa/mc3d-13434/pbe +# Modified using experimental lattice constants +########################################################################## + +data_0 + +loop_ + _atom_site_label + _atom_site_fract_x + _atom_site_fract_y + _atom_site_fract_z + _atom_site_type_symbol + Ga1 0.0 0.0 0.0 Ga + As1 0.750 0.750 0.750 As +_cell_angle_alpha 60.0 +_cell_angle_beta 60.0 +_cell_angle_gamma 60.0 +_cell_length_a 3.997 +_cell_length_b 3.997 +_cell_length_c 3.997 +loop_ + _symmetry_equiv_pos_as_xyz + 'x, y, z' +_symmetry_int_tables_number 1 +_symmetry_space_group_name_H-M 'P 1' diff --git 
a/src/aiidalab_qe/app/structure/examples/GaAs.xyz b/src/aiidalab_qe/app/structure/examples/GaAs.xyz deleted file mode 100644 index d6f21c056..000000000 --- a/src/aiidalab_qe/app/structure/examples/GaAs.xyz +++ /dev/null @@ -1,4 +0,0 @@ -2 -Lattice="4.30538 0.0 0.0 2.1526900000000007 3.7285684529454466 0.0 2.1526900000000007 1.2428561509818157 3.515328049594613" Properties=species:S:1:pos:R:3 spacegroup="P 1" unit_cell=conventional pbc="T T T" -Ga 0.00000000 0.00000000 0.00000000 -As 2.15269000 1.24285615 0.87883201 diff --git a/src/aiidalab_qe/app/structure/examples/Li2CO3.cif b/src/aiidalab_qe/app/structure/examples/Li2CO3.cif new file mode 100644 index 000000000..687ef2a01 --- /dev/null +++ b/src/aiidalab_qe/app/structure/examples/Li2CO3.cif @@ -0,0 +1,43 @@ +# generated using pymatgen +data_Li2CO3 +_symmetry_space_group_name_H-M C2/c +_cell_length_a 8.28221965 +_cell_length_b 4.96902479 +_cell_length_c 6.07631725 +_cell_angle_alpha 90.00000000 +_cell_angle_beta 113.74593345 +_cell_angle_gamma 90.00000000 +_symmetry_Int_Tables_number 15 +_chemical_formula_structural Li2CO3 +_chemical_formula_sum 'Li8 C4 O12' +_cell_volume 228.89737819 +_cell_formula_units_Z 4 +loop_ + _symmetry_equiv_pos_site_id + _symmetry_equiv_pos_as_xyz + 1 'x, y, z' + 2 '-x, -y, -z' + 3 '-x, y, -z+1/2' + 4 'x, -y, z+1/2' + 5 'x+1/2, y+1/2, z' + 6 '-x+1/2, -y+1/2, -z' + 7 '-x+1/2, y+1/2, -z+1/2' + 8 'x+1/2, -y+1/2, z+1/2' +loop_ + _atom_type_symbol + _atom_type_oxidation_number + Li+ 1.0 + C4+ 4.0 + O2- -2.0 +loop_ + _atom_site_type_symbol + _atom_site_label + _atom_site_symmetry_multiplicity + _atom_site_fract_x + _atom_site_fract_y + _atom_site_fract_z + _atom_site_occupancy + Li+ Li0 8 0.19639168 0.44656424 0.83739024 1 + C4+ C1 4 0.00000000 0.06619574 0.25000000 1 + O2- O2 8 0.14745362 0.06495296 0.81423924 1 + O2- O3 4 0.00000000 0.32319230 0.25000000 1 diff --git a/src/aiidalab_qe/app/structure/examples/LiCoO2.cif b/src/aiidalab_qe/app/structure/examples/LiCoO2.cif new file mode 100644 index 000000000..71b6ce33b --- /dev/null +++ b/src/aiidalab_qe/app/structure/examples/LiCoO2.cif @@ -0,0 +1,29 @@ +data_image0 +_chemical_formula_structural CoO2Li +_chemical_formula_sum "Co1 O2 Li1" +_cell_length_a 4.95865 +_cell_length_b 4.95865 +_cell_length_c 4.95865 +_cell_angle_alpha 32.9927 +_cell_angle_beta 32.9927 +_cell_angle_gamma 32.9927 + +_space_group_name_H-M_alt "P 1" +_space_group_IT_number 1 + +loop_ + _space_group_symop_operation_xyz + 'x, y, z' + +loop_ + _atom_site_type_symbol + _atom_site_label + _atom_site_symmetry_multiplicity + _atom_site_fract_x + _atom_site_fract_y + _atom_site_fract_z + _atom_site_occupancy + Co Co1 1.0 0.00000 0.00000 0.00000 1.0000 + O O1 1.0 0.26049 0.26049 0.26049 1.0000 + O O2 1.0 0.73945 0.73946 0.73945 1.0000 + Li Li1 1.0 0.50000 0.50000 0.50000 1.0000 diff --git a/src/aiidalab_qe/app/structure/examples/Phenylacetylene.xyz b/src/aiidalab_qe/app/structure/examples/Phenylacetylene.xyz new file mode 100644 index 000000000..e6393c580 --- /dev/null +++ b/src/aiidalab_qe/app/structure/examples/Phenylacetylene.xyz @@ -0,0 +1,16 @@ +14 +Lattice="17.572400000000002 0.0 0.0 0.0 14.3161 0.0 0.0 0.0 10.0002" Properties=species:S:1:pos:R:3 pbc="T T T" +C 8.87580000 7.15810000 5.00010000 +C 8.17830000 8.36610000 5.00010000 +C 8.17830000 5.95000000 5.00010000 +C 6.78350000 8.36630000 5.00010000 +C 6.78340000 5.95020000 5.00010000 +C 6.08610000 7.15830000 5.00010000 +C 10.30480000 7.15810000 5.00010000 +C 11.50750000 7.15840000 5.00010000 +H 8.70750000 9.31610000 5.00000000 +H 8.70750000 
5.00000000 5.00010000 +H 6.24030000 9.30680000 5.00010000 +H 6.24010000 5.00980000 5.00010000 +H 5.00000000 7.15830000 5.00020000 +H 12.57240000 7.15850000 5.00020000 diff --git a/src/aiidalab_qe/app/structure/examples/Si.cif b/src/aiidalab_qe/app/structure/examples/Si.cif new file mode 100644 index 000000000..1bb8d7eb6 --- /dev/null +++ b/src/aiidalab_qe/app/structure/examples/Si.cif @@ -0,0 +1,27 @@ +data_image0 +_chemical_formula_structural Si2 +_chemical_formula_sum "Si2" +_cell_length_a 3.8403 +_cell_length_b 3.8403 +_cell_length_c 3.8403 +_cell_angle_alpha 60 +_cell_angle_beta 60 +_cell_angle_gamma 60 + +_space_group_name_H-M_alt "P 1" +_space_group_IT_number 1 + +loop_ + _space_group_symop_operation_xyz + 'x, y, z' + +loop_ + _atom_site_type_symbol + _atom_site_label + _atom_site_symmetry_multiplicity + _atom_site_fract_x + _atom_site_fract_y + _atom_site_fract_z + _atom_site_occupancy + Si Si1 1.0 0.00000 0.00000 0.00000 1.0000 + Si Si2 1.0 0.25000 0.25000 0.25000 1.0000 diff --git a/src/aiidalab_qe/app/structure/examples/Si.xyz b/src/aiidalab_qe/app/structure/examples/Si.xyz deleted file mode 100644 index 031661728..000000000 --- a/src/aiidalab_qe/app/structure/examples/Si.xyz +++ /dev/null @@ -1,4 +0,0 @@ -2 -Lattice="3.81196 0.0 0.0 1.9059800000000005 3.3012541982101284 0.0 1.9059800000000005 1.100418066070043 3.112452306633254" Properties=species:S:1:pos:R:3 spacegroup="P 1" unit_cell=conventional pbc="T T T" -Si 0.00000000 0.00000000 0.00000000 -Si 1.90598000 1.10041807 0.77811308 diff --git a/src/aiidalab_qe/app/structure/examples/SiO2.cif b/src/aiidalab_qe/app/structure/examples/SiO2.cif new file mode 100644 index 000000000..39db723e5 --- /dev/null +++ b/src/aiidalab_qe/app/structure/examples/SiO2.cif @@ -0,0 +1,37 @@ + +########################################################################## +# Crystallographic Information Format file +# Produced by PyCifRW module +# +# From Materials cloud: https://mc3d.materialscloud.org/#/details/O6Si3/mc3d-39110/pbe +# Modified using experimental lattice constants +########################################################################## + +data_0 + +loop_ + _atom_site_label + _atom_site_fract_x + _atom_site_fract_y + _atom_site_fract_z + _atom_site_type_symbol + Si1 0.47712909575306034 0.4771290957385052 0.0 Si + Si2 0.5228709042418711 0.0 0.6666666666847871 Si + Si3 5.068439702542783e-12 0.5228709042614949 0.33333333333333337 Si + O1 0.25459298917216666 0.8398983384689859 0.46225430374647286 O + O2 0.1601016615162286 0.41469465066118705 0.12892097039501907 O + O3 0.5853053493200082 0.7454070108468681 0.7955876370979267 O + O4 0.8398983384782023 0.2545929891531319 0.5377456962535272 O + O5 0.41469465068648104 0.1601016615310142 0.871079029604981 O + O6 0.7454070108666793 0.585305349338813 0.20441236292019382 O +_cell_angle_alpha 90.0 +_cell_angle_beta 90.0 +_cell_angle_gamma 120.00000000057742 +_cell_length_a 4.993 +_cell_length_b 4.993 +_cell_length_c 5.428 +loop_ + _symmetry_equiv_pos_as_xyz + 'x, y, z' +_symmetry_int_tables_number 1 +_symmetry_space_group_name_H-M 'P 1' diff --git a/src/aiidalab_qe/app/structure/examples/SiO2.xyz b/src/aiidalab_qe/app/structure/examples/SiO2.xyz deleted file mode 100644 index 5838ae55c..000000000 --- a/src/aiidalab_qe/app/structure/examples/SiO2.xyz +++ /dev/null @@ -1,8 +0,0 @@ -6 -Lattice="4.1801 0.0 0.0 0.0 4.1801 0.0 0.0 0.0 2.6678" Properties=species:S:1:pos:R:3:tags:I:1 spacegroup="P 42/m n m" unit_cell=conventional pbc="T T T" -Si 0.00000000 0.00000000 0.00000000 0 -Si 2.09005000 
2.09005000 1.33390000 0 -O 1.28203667 1.28203667 0.00000000 1 -O 2.89806333 2.89806333 0.00000000 1 -O 3.37208667 0.80801333 1.33390000 1 -O 0.80801333 3.37208667 1.33390000 1 diff --git a/src/aiidalab_qe/app/structure/examples/diamond.cif b/src/aiidalab_qe/app/structure/examples/diamond.cif deleted file mode 100644 index 844368a0f..000000000 --- a/src/aiidalab_qe/app/structure/examples/diamond.cif +++ /dev/null @@ -1,33 +0,0 @@ -data_image0 -_cell_length_a 4.332 -_cell_length_b 5.098 -_cell_length_c 4.432 -_cell_angle_alpha 90 -_cell_angle_beta 90 -_cell_angle_gamma 90 - -loop_ - _atom_site_label - _atom_site_occupancy - _atom_site_fract_x - _atom_site_fract_y - _atom_site_fract_z - _atom_site_thermal_displace_type - _atom_site_B_iso_or_equiv - _atom_site_type_symbol - C1 1.0000 0.10045 0.00336 0.25487 Biso 1.000 C - C2 1.0000 0.60034 0.24661 0.25487 Biso 1.000 C - C3 1.0000 0.93283 0.24667 0.75481 Biso 1.000 C - C4 1.0000 0.43520 0.49797 0.30226 Biso 1.000 C - C5 1.0000 0.10042 1.00341 0.74519 Biso 1.000 C - C6 1.0000 0.59801 0.75203 0.80226 Biso 1.000 C - C7 1.0000 0.93509 0.75205 0.30228 Biso 1.000 C - C8 1.0000 0.43519 0.49797 0.69776 Biso 1.000 C - C9 1.0000 0.43298 0.00331 0.24525 Biso 1.000 C - C10 1.0000 0.93287 0.24666 0.24525 Biso 1.000 C - C11 1.0000 0.60031 0.24661 0.74519 Biso 1.000 C - C12 1.0000 0.09813 0.49798 0.19779 Biso 1.000 C - C13 1.0000 0.43295 1.00335 0.75480 Biso 1.000 C - C14 1.0000 0.93508 0.75206 0.69775 Biso 1.000 C - C15 1.0000 0.59802 0.75204 0.19776 Biso 1.000 C - C16 1.0000 0.09811 0.49799 0.80224 Biso 1.000 C diff --git a/src/aiidalab_qe/app/submission/__init__.py b/src/aiidalab_qe/app/submission/__init__.py index 372df860b..de0272b63 100644 --- a/src/aiidalab_qe/app/submission/__init__.py +++ b/src/aiidalab_qe/app/submission/__init__.py @@ -1,27 +1,28 @@ -# -*- coding: utf-8 -*- """Widgets for the submission of bands work chains. Authors: AiiDAlab team """ + from __future__ import annotations import os import ipywidgets as ipw import traitlets as tl + from aiida import orm from aiida.common import NotExistent from aiida.engine import ProcessBuilderNamespace, submit -from aiidalab_widgets_base import ComputationalResourcesWidget, WizardAppWidgetStep -from IPython.display import display - from aiidalab_qe.app.parameters import DEFAULT_PARAMETERS from aiidalab_qe.app.utils import get_entry_items from aiidalab_qe.common.setup_codes import QESetupWidget from aiidalab_qe.common.setup_pseudos import PseudosInstallWidget +from aiidalab_qe.common.widgets import ( + PwCodeResourceSetupWidget, + QEAppComputationalResourcesWidget, +) from aiidalab_qe.workflows import QeAppWorkChain - -from .resource import ParallelizationSettings, ResourceSelectionWidget +from aiidalab_widgets_base import WizardAppWidgetStep class SubmitQeAppWorkChainStep(ipw.VBox, WizardAppWidgetStep): @@ -38,6 +39,13 @@ class SubmitQeAppWorkChainStep(ipw.VBox, WizardAppWidgetStep): configure new ones on potentially more powerful machines by clicking on "Setup new code".""" ) + process_label_help = ipw.HTML( + """
+        <h4>Labeling Your Job</h4>
+        <p>Label your job and provide a brief description. These details help identify the job later and make the search process easier. While optional, adding a description is recommended for better clarity.</p>
""" + ) # This number provides a rough estimate for how many MPI tasks are needed # for a given structure. @@ -46,6 +54,7 @@ class SubmitQeAppWorkChainStep(ipw.VBox, WizardAppWidgetStep): # Warn the user if they are trying to run calculations for a large # structure on localhost. RUN_ON_LOCALHOST_NUM_SITES_WARN_THRESHOLD = 10 + RUN_ON_LOCALHOST_VOLUME_WARN_THRESHOLD = 1000 # \AA^3 # Put a limit on how many MPI tasks you want to run per k-pool by default MAX_MPI_PER_POOL = 20 @@ -58,20 +67,14 @@ class SubmitQeAppWorkChainStep(ipw.VBox, WizardAppWidgetStep): external_submission_blockers = tl.List(tl.Unicode()) def __init__(self, qe_auto_setup=True, **kwargs): - self.message_area = ipw.Output() self._submission_blocker_messages = ipw.HTML() + self._submission_warning_messages = ipw.HTML() - self.pw_code = ComputationalResourcesWidget( + self.pw_code = PwCodeResourceSetupWidget( description="pw.x:", default_calc_job_plugin="quantumespresso.pw" ) - self.resources_config = ResourceSelectionWidget() - self.parallelization = ParallelizationSettings() - - self.set_resource_defaults() - self.pw_code.observe(self._update_state, "value") - self.pw_code.observe(self._update_resources, "value") # add plugin's entry points self.codes = {"pw": self.pw_code} @@ -86,8 +89,13 @@ def __init__(self, qe_auto_setup=True, **kwargs): self.codes[name] = code code.observe(self._update_state, "value") self.code_children.append(self.codes[name]) - # set default codes - self.set_selected_codes(DEFAULT_PARAMETERS["codes"]) + # set process label and description + self.process_label = ipw.Text( + description="Label:", layout=ipw.Layout(width="auto", indent="0px") + ) + self.process_description = ipw.Textarea( + description="Description", layout=ipw.Layout(width="auto", indent="0px") + ) # self.submit_button = ipw.Button( description="Submit", @@ -123,22 +131,30 @@ def __init__(self, qe_auto_setup=True, **kwargs): super().__init__( children=[ *self.code_children, - self.resources_config, - self.parallelization, - self.message_area, self.sssp_installation_status, self.qe_setup_status, self._submission_blocker_messages, + self._submission_warning_messages, + self.process_label_help, + self.process_label, + self.process_description, self.submit_button, - ] + ], + **kwargs, ) + # set default codes + self.set_selected_codes(DEFAULT_PARAMETERS["codes"]) + + # observe these two for the resource checking: + self.pw_code.num_cpus.observe(self._check_resources, "value") + self.pw_code.num_nodes.observe(self._check_resources, "value") @tl.observe("internal_submission_blockers", "external_submission_blockers") def _observe_submission_blockers(self, _change): """Observe the submission blockers and update the message area.""" blockers = self.internal_submission_blockers + self.external_submission_blockers if any(blockers): - fmt_list = "\n".join((f"
<li>{item}</li>" for item in sorted(blockers)))
+            fmt_list = "\n".join(f"<li>{item}</li>" for item in sorted(blockers))
             self._submission_blocker_messages.value = f"""
                 <div class="alert alert-info">
                 The submission is blocked, due to the following reason(s):
                 <ul>{fmt_list}</ul></div>"""
@@ -165,6 +181,16 @@ def _identify_submission_blockers(self):
         if not self.sssp_installation_status.installed:
             yield "The SSSP library is not installed."

+        # check if the QEAppComputationalResourcesWidget is used
+        for name, code in self.codes.items():
+            # skip if the code is not displayed, convenient for the plugin developer
+            if code.layout.display == "none":
+                continue
+            if not isinstance(code, QEAppComputationalResourcesWidget):
+                yield (
+                    f"Error: the widget for code '{name}' must be a QEAppComputationalResourcesWidget from aiidalab_qe.common.widgets."
+                )
+
     def _update_state(self, _=None):
         # If the previous step has failed, this should fail as well.
         if self.previous_step_state is self.State.FAIL:
@@ -197,107 +223,106 @@ def _toggle_install_widgets(self, change):

     def _auto_select_code(self, change):
         if change["new"] and not change["old"]:
-            for name, code_widget in self.codes.items():
-                try:
-                    code_widget.refresh()
-                    code_widget.value = orm.load_code(
-                        DEFAULT_PARAMETERS["codes"][name]
-                    ).uuid
-                except NotExistent:
-                    pass
+            self.set_selected_codes(DEFAULT_PARAMETERS["codes"])

     _ALERT_MESSAGE = """
    <div class="alert alert-{alert_class} alert-dismissible">
    <a href="#" class="close" data-dismiss="alert" aria-label="close">&times;</a>
    <strong>{message}</strong>
    </div>
    """ def _show_alert_message(self, message, alert_class="info"): - with self.message_area: - display( - ipw.HTML( - self._ALERT_MESSAGE.format(alert_class=alert_class, message=message) - ) - ) - - def _update_resources(self, change): - if change["new"] and ( - change["old"] is None - or orm.load_code(change["new"]).computer.pk - != orm.load_code(change["old"]).computer.pk - ): - self.set_resource_defaults(orm.load_code(change["new"]).computer) - - def get_resources(self): - resources = { - "num_machines": self.resources_config.num_nodes.value, - "num_mpiprocs_per_machine": self.resources_config.num_cpus.value, - "npools": self.parallelization.npools.value, - } - return resources - - def set_resources(self, resources): - self.resources_config.num_nodes.value = resources["num_machines"] - self.resources_config.num_cpus.value = resources["num_mpiprocs_per_machine"] - self.parallelization.npools.value = resources["npools"] - - def set_resource_defaults(self, computer=None): - if computer is None or computer.hostname == "localhost": - self.resources_config.num_nodes.disabled = True - self.resources_config.num_nodes.value = 1 - self.resources_config.num_cpus.max = os.cpu_count() - self.resources_config.num_cpus.value = 1 - self.resources_config.num_cpus.description = "CPUs" - self.parallelization.npools.value = 1 - else: - default_mpiprocs = computer.get_default_mpiprocs_per_machine() - self.resources_config.num_nodes.disabled = False - self.resources_config.num_cpus.max = default_mpiprocs - self.resources_config.num_cpus.value = default_mpiprocs - self.resources_config.num_cpus.description = "CPUs/node" - self.parallelization.npools.value = self._get_default_parallelization() - - self._check_resources() - - def _get_default_parallelization(self): - """A _very_ rudimentary approach for obtaining a minimal npools setting.""" - num_mpiprocs = ( - self.resources_config.num_nodes.value * self.resources_config.num_cpus.value + self._submission_warning_messages.value = self._ALERT_MESSAGE.format( + alert_class=alert_class, message=message ) - for i in range(1, num_mpiprocs + 1): - if num_mpiprocs % i == 0 and num_mpiprocs // i < self.MAX_MPI_PER_POOL: - return i - - def _check_resources(self): + @tl.observe("input_structure") + def _check_resources(self, _change=None): """Check whether the currently selected resources will be sufficient and warn if not.""" - if not self.pw_code.value: - return # No code selected, nothing to do. + if not self.pw_code.value or not self.input_structure: + return # No code selected or no structure, so nothing to do. - num_cpus = self.resources_config.num_cpus.value + num_cpus = self.pw_code.num_cpus.value * self.pw_code.num_nodes.value on_localhost = ( orm.load_node(self.pw_code.value).computer.hostname == "localhost" ) - if self.pw_code.value and on_localhost and num_cpus > 1: - self._show_alert_message( - "The selected code would be executed on the local host, but " - "the number of CPUs is larger than one. Please review " - "the configuration and consider to select a code that runs " - "on a larger system if necessary.", - alert_class="warning", + num_sites = len(self.input_structure.sites) + volume = self.input_structure.get_cell_volume() + try: + localhost_cpus = len(os.sched_getaffinity(0)) + except ( + Exception + ): # fallback, in some OS os.sched_getaffinity(0) is not supported + localhost_cpus = os.cpu_count() # however, not so realiable in containers. 
+ + large_system = ( + num_sites > self.RUN_ON_LOCALHOST_NUM_SITES_WARN_THRESHOLD + or volume > self.RUN_ON_LOCALHOST_VOLUME_WARN_THRESHOLD + ) + + estimated_CPUs = self._estimate_min_cpus( + num_sites, volume + ) # estimated number of CPUs for a run of less than 12 hours. + + # List of possible suggestions for warnings: + suggestions = { + "more_resources": f"
  • Increase the resources (the total number of CPUs should be equal to or greater than {min(100, estimated_CPUs)}, if possible)
  • ", + "change_configuration": "
  • Review the configuration (e.g. choosing the fast protocol; this will affect precision)
  • ", + "go_remote": "
  • Select a code that runs on a larger machine
  • ", + "avoid_overloading": "
  • Reduce the number of CPUs to avoid overloading the local machine
  • ", + } + + alert_message = "" + if large_system and estimated_CPUs > num_cpus: + # This part is in common between Warnings 1 (2): (not) on localhost, big system and few cpus + warnings_1_2 = ( + f" Warning: The selected structure is large, with {num_sites} atoms " + f"and a volume of {int(volume)} Γ…3, " + "making it computationally demanding " + "to run at the localhost. Consider the following: " + if on_localhost + else "to run in a reasonable amount of time. Consider the following: " + ) + + # Warning 1: on localhost, big system and few cpus + if on_localhost: + alert_message += ( + warnings_1_2 + + "
      " + + suggestions["more_resources"] + + suggestions["change_configuration"] + + "
    " + ) + # Warning 2: not on localhost, big system and few cpus + else: + alert_message += ( + warnings_1_2 + + "
      " + + suggestions["go_remote"] + + suggestions["more_resources"] + + suggestions["change_configuration"] + + "
    " + ) + if on_localhost and num_cpus / localhost_cpus > 0.8: + # Warning-3: on localhost, more than half of the available cpus + alert_message += ( + " Warning: the selected pw.x code will run locally, but " + f"the number of requested CPUs ({num_cpus}) is larger than the 80% of the available resources ({localhost_cpus}). " + "Please be sure that your local " + "environment has enough free CPUs for the calculation. Consider the following: " + "
      " + + suggestions["avoid_overloading"] + + suggestions["go_remote"] + + "
    " ) - elif ( - self.input_structure - and on_localhost - and len(self.input_structure.sites) - > self.RUN_ON_LOCALHOST_NUM_SITES_WARN_THRESHOLD + + if not (on_localhost and num_cpus / localhost_cpus) > 0.8 and not ( + large_system and estimated_CPUs > num_cpus ): + self._submission_warning_messages.value = "" + else: self._show_alert_message( - "The selected code would be executed on the local host, but the " - "number of sites of the selected structure is relatively large. " - "Consider to select a code that runs on a larger system if " - "necessary.", + message=alert_message, alert_class="warning", ) @@ -310,6 +335,7 @@ def _observe_state(self, change): def _observe_input_structure(self, _): self._update_state() self.update_codes_display() + self._update_process_label() @tl.observe("process") def _observe_process(self, change): @@ -328,10 +354,14 @@ def get_selected_codes(self): return: A dict with the code names as keys and the code UUIDs as values. """ - codes = {key: code.value for key, code in self.codes.items()} + codes = { + key: code.parameters + for key, code in self.codes.items() + if code.layout.display != "none" + } return codes - def set_selected_codes(self, codes): + def set_selected_codes(self, code_data): """Set the inputs in the GUI based on a set of codes.""" # Codes @@ -344,7 +374,20 @@ def _get_code_uuid(code): with self.hold_trait_notifications(): for name, code in self.codes.items(): - code.value = _get_code_uuid(codes.get(name)) + if name not in code_data: + continue + # check if the code is installed and usable + # note: if code is imported from another user, it is not usable and thus will not be + # treated as an option in the ComputationalResourcesWidget. + code_options = [ + o[1] for o in code.code_selection.code_select_dropdown.options + ] + if _get_code_uuid(code_data.get(name)["code"]) in code_options: + # get code uuid from code label in case of using DEFAULT_PARAMETERS + code_data.get(name)["code"] = _get_code_uuid( + code_data.get(name)["code"] + ) + code.parameters = code_data.get(name) def update_codes_display(self): """Hide code if no related property is selected.""" @@ -368,85 +411,129 @@ def submit(self, _=None): with self.hold_trait_notifications(): process = submit(builder) - process.label = self._generate_label() + process.label = self.process_label.value + process.description = self.process_description.value # since AiiDA data node may exist in the ui_parameters, # we serialize it to yaml process.base.extras.set("ui_parameters", serialize(self.ui_parameters)) + # store the workchain name in extras, this will help to filter the workchain in the future + process.base.extras.set("workchain", self.ui_parameters["workchain"]) + process.base.extras.set("structure", self.input_structure.get_formula()) self.process = process self._update_state() - def _generate_label(self) -> dict: + def _update_process_label(self) -> dict: """Generate a label for the work chain based on the input parameters.""" - formula = self.input_structure.get_formula() - properties = [ - p for p in self.input_parameters["workchain"]["properties"] if p != "realx" - ] - relax_type = self.input_parameters["workchain"].get("relax_type") + if not self.input_structure: + return "" + structure_label = ( + self.input_structure.label + if len(self.input_structure.label) > 0 + else self.input_structure.get_formula() + ) + workchain_data = self.input_parameters.get("workchain", {"properties": []}) + properties = [p for p in workchain_data["properties"] if p != "relax"] + # relax_info + 
relax_type = workchain_data.get("relax_type", "none") + relax_info = "unrelaxed" if relax_type != "none": - relax_info = "structure is relaxed" - else: - relax_info = "structure is not relaxed" - if not properties: - properties_info = "" - else: - properties_info = f"properties on {', '.join(properties)}" - - label = "{} {} {}".format(formula, relax_info, properties_info) - return label + relax_info = ( + "relax: atoms+cell" if "cell" in relax_type else "relax: atoms only" + ) + # protocol_info + protocol_and_magnetic_info = f"{workchain_data['protocol']} protocol" + # magnetic_info + if workchain_data["spin_type"] != "none": + protocol_and_magnetic_info += ", magnetic" + # properties_info + properties_info = "" + if properties: + properties_info = f"β†’ {', '.join(properties)}" + + label = f"{structure_label} [{relax_info}, {protocol_and_magnetic_info}] {properties_info}".strip() + self.process_label.value = label def _create_builder(self) -> ProcessBuilderNamespace: """Create the builder for the `QeAppWorkChain` submit.""" from copy import deepcopy self.ui_parameters = deepcopy(self.input_parameters) - self.ui_parameters["resources"] = self.get_resources() # add codes and resource info into ui_parameters - self.ui_parameters.update(self.get_submission_parameters()) + submission_parameters = self.get_submission_parameters() + self.ui_parameters.update(submission_parameters) builder = QeAppWorkChain.get_builder_from_protocol( structure=self.input_structure, parameters=deepcopy(self.ui_parameters), ) - self._update_builder(builder, self.MAX_MPI_PER_POOL) + self._update_builder(builder, submission_parameters["codes"]) return builder - def _update_builder(self, buildy, max_mpi_per_pool): - resources = self.get_resources() - npools = resources.pop("npools", 1) - """Update the resources and parallelization of the ``QeAppWorkChain`` builder.""" - for k, v in buildy.items(): - if isinstance(v, (dict, ProcessBuilderNamespace)): - if k == "pw" and v["pseudos"]: - v["parallelization"] = orm.Dict(dict={"npool": npools}) - if k == "projwfc": - v["settings"] = orm.Dict(dict={"cmdline": ["-nk", str(npools)]}) - if k == "dos": - v["metadata"]["options"]["resources"] = { - "num_machines": 1, - "num_mpiprocs_per_machine": min( - max_mpi_per_pool, - resources["num_mpiprocs_per_machine"], - ), - } - # Continue to the next item to avoid overriding the resources in the - # recursive `update_builder` call. - continue - if k == "resources": - buildy["resources"] = resources - else: - self._update_builder(v, max_mpi_per_pool) + def _update_builder(self, builder, codes): + """Update the resources and parallelization of the ``relax`` builder.""" + # update resources + builder.relax.base.pw.metadata.options.resources = { + "num_machines": codes.get("pw")["nodes"], + "num_mpiprocs_per_machine": codes.get("pw")["ntasks_per_node"], + "num_cores_per_mpiproc": codes.get("pw")["cpus_per_task"], + } + builder.relax.base.pw.metadata.options["max_wallclock_seconds"] = codes.get( + "pw" + )["max_wallclock_seconds"] + builder.relax.base.pw.parallelization = orm.Dict( + dict=codes["pw"]["parallelization"] + ) + + def _estimate_min_cpus( + self, n, v, n0=9, v0=117, num_cpus0=4, t0=129.6, tmax=12 * 60 * 60, scf_cycles=5 + ): + """ + Estimate the minimum number of CPUs required to complete a task within a given time limit. + Parameters: + n (int): The number of atoms in the system. + v (float): The volume of the system. + n0 (int, optional): Reference number of atoms. Default is 9. + v0 (float, optional): Reference volume. 
Default is 117. + num_cpus0 (int, optional): Reference number of CPUs. Default is 4. + scf_cycles (int, optional): Reference number of SCF cycles in a relaxation. Default is 5. + + NB: Defaults (apart from scf_cycles) are taken from a calculation done for SiO2. This is just a dummy, + not well-tested estimate, and a placeholder for a more rigorous one. + """ + import numpy as np + + return int( + np.ceil( + scf_cycles * num_cpus0 * (n / n0) ** 3 * (v / v0) ** 1.5 * t0 / tmax + ) + ) def set_submission_parameters(self, parameters): - self.set_resources(parameters["resources"]) + # backward compatibility for v2023.11, + # which had a separate "resources" section for the pw code + if "resources" in parameters: + parameters["codes"] = { + key: {"code": value} for key, value in parameters["codes"].items() + } + parameters["codes"]["pw"]["nodes"] = parameters["resources"]["num_machines"] + parameters["codes"]["pw"]["cpus"] = parameters["resources"][ + "num_mpiprocs_per_machine" + ] + parameters["codes"]["pw"]["parallelization"] = { + "npool": parameters["resources"]["npools"] + } self.set_selected_codes(parameters["codes"]) + # label and description are not stored in the parameters, but in the process directly + if self.process: + self.process_label.value = self.process.label + self.process_description.value = self.process.description def get_submission_parameters(self): """Get the parameters for the submission step.""" return { "codes": self.get_selected_codes(), - "resources": self.get_resources(), } def reset(self): @@ -455,4 +542,3 @@ def reset(self): self.process = None self.input_structure = None self.set_selected_codes(DEFAULT_PARAMETERS["codes"]) - self.set_resource_defaults() diff --git a/src/aiidalab_qe/app/submission/resource.py b/src/aiidalab_qe/app/submission/resource.py deleted file mode 100644 index 3d3e5fb9b..000000000 --- a/src/aiidalab_qe/app/submission/resource.py +++ /dev/null @@ -1,85 +0,0 @@ -# -*- coding: utf-8 -*- -"""Widgets for the submission of bands work chains. - -Authors: AiiDAlab team -""" -import ipywidgets as ipw - - -class ResourceSelectionWidget(ipw.VBox): - """Widget for the selection of compute resources.""" - - title = ipw.HTML( - """
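A quick numerical check of the cost model in _estimate_min_cpus above, restated standalone (defaults copied from its signature): the estimate scales cubically with the number of atoms and as V**1.5 with the cell volume, normalised so that the SiO2 reference run (9 atoms, 117 Γ…Β³, ~130 s on 4 cores) fits well within the 12-hour limit.

import numpy as np

def estimate_min_cpus(n, v, n0=9, v0=117, num_cpus0=4, t0=129.6,
                      tmax=12 * 60 * 60, scf_cycles=5):
    return int(
        np.ceil(scf_cycles * num_cpus0 * (n / n0) ** 3 * (v / v0) ** 1.5 * t0 / tmax)
    )

print(estimate_min_cpus(9, 117))   # 1   -- the reference system is cheap
print(estimate_min_cpus(50, 650))  # 135 -- grows fast; the UI caps its suggestion at 100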
    -

    Resources

    -
    """ - ) - prompt = ipw.HTML( - """
    -

    - Specify the resources to use for the pw.x calculation. -

    """ - ) - - def __init__(self, **kwargs): - extra = { - "style": {"description_width": "150px"}, - "layout": {"min_width": "180px"}, - } - self.num_nodes = ipw.BoundedIntText( - value=1, step=1, min=1, max=1000, description="Nodes", **extra - ) - self.num_cpus = ipw.BoundedIntText( - value=1, step=1, min=1, description="CPUs", **extra - ) - - super().__init__( - children=[ - self.title, - ipw.HBox( - children=[self.prompt, self.num_nodes, self.num_cpus], - layout=ipw.Layout(justify_content="space-between"), - ), - ] - ) - - def reset(self): - self.num_nodes.value = 1 - self.num_cpus.value = 1 - - -class ParallelizationSettings(ipw.VBox): - """Widget for setting the parallelization settings.""" - - title = ipw.HTML( - """
    -

    Parallelization

    -
    """ - ) - prompt = ipw.HTML( - """
    -

    - Specify the number of k-points pools for the calculations. -

    """ - ) - - def __init__(self, **kwargs): - extra = { - "style": {"description_width": "150px"}, - "layout": {"min_width": "180px"}, - } - self.npools = ipw.BoundedIntText( - value=1, step=1, min=1, max=128, description="Number of k-pools", **extra - ) - super().__init__( - children=[ - self.title, - ipw.HBox( - children=[self.prompt, self.npools], - layout=ipw.Layout(justify_content="space-between"), - ), - ] - ) - - def reset(self): - self.npools.value = 1 diff --git a/src/aiidalab_qe/app/utils/__init__.py b/src/aiidalab_qe/app/utils/__init__.py index dccd39282..a62d8c148 100644 --- a/src/aiidalab_qe/app/utils/__init__.py +++ b/src/aiidalab_qe/app/utils/__init__.py @@ -1,10 +1,30 @@ +from importlib.metadata import distributions + + +def print_error(entry_point, e): + print(f"\033[91mFailed to load plugin entry point {entry_point.name}.\033[0m") + print( + "\033[93mThis may be due to compatibility issues with the current QEApp version.\033[0m" + ) + print("\033[93mPlease contact the plugin author for further assistance.\033[0m") + print( + "\033[93mThus, the plugin will not be available. However, you can still use the rest of the app.\033[0m" + ) + print(f"\033[91mError message: {e}\033[0m\n") + + # load entry points def get_entries(entry_point_name="aiidalab_qe.properties"): from importlib.metadata import entry_points entries = {} for entry_point in entry_points().get(entry_point_name, []): - entries[entry_point.name] = entry_point.load() + try: + # Attempt to load the entry point + loaded_entry_point = entry_point.load() + entries[entry_point.name] = loaded_entry_point + except Exception as e: + print_error(entry_point, e) return entries @@ -16,3 +36,48 @@ def get_entry_items(entry_point_name, item_name="outline"): for name, entry_point in entries.items() if entry_point.get(item_name, False) } + + +def get_entry_points_for_package( + package_name: str, group: str = "aiidalab_qe.properties" +): + """Find the entry points for the specified package""" + entry_points_list = [] + + dist = next( + (d for d in distributions() if d.metadata["Name"] == package_name), None + ) + if not dist: + raise ValueError(f"Package '{package_name}' not found.") + # Retrieve all entry points associated with this distribution + if dist.entry_points: + for ep in dist.entry_points: + if ep.group == group: + entry_points_list.append(ep) + else: + print(f"No entry points found for package '{package_name}'.") + return entry_points_list + + +def test_plugin_functionality(plugin_name): + """Test the functionality of the plugin. + 1) loading all entry points. 
+ 2) check if the plugin use correct QEAppComputationalResourcesWidget + """ + from aiidalab_qe.common.widgets import QEAppComputationalResourcesWidget + + try: + eps = get_entry_points_for_package(plugin_name) + # check if we can load all entry points + for ep in eps: + loaded_ep = ep.load() + # check if the code uses the correct widget + for name, code in loaded_ep.get("code", {}).items(): + if not isinstance(code, QEAppComputationalResourcesWidget): + return ( + False, + f"\nPlugin {plugin_name} code {name} must use QEAppComputationalResourcesWidget class", + ) + except Exception as e: + return False, f"Failed to get entry points for package {plugin_name}: {e}" + return True, "" diff --git a/src/aiidalab_qe/app/utils/search_jobs.py b/src/aiidalab_qe/app/utils/search_jobs.py new file mode 100644 index 000000000..c5e0e6f45 --- /dev/null +++ b/src/aiidalab_qe/app/utils/search_jobs.py @@ -0,0 +1,196 @@ +class QueryInterface: + def __init__(self): + pass + + def setup_table(self): + import ipywidgets as ipw + + self.df = self.load_data() + self.table = ipw.HTML() + self.setup_widgets() + + def load_data(self): + import pandas as pd + + from aiida.orm import QueryBuilder + from aiidalab_qe.workflows import QeAppWorkChain + + projections = [ + "id", + "extras.structure", + "ctime", + "attributes.process_state", + "label", + "extras.workchain.relax_type", + "extras.workchain.properties", + ] + headers = [ + "PK", + "Structure", + "ctime", + "State", + "Label", + "Relax_type", + "Properties", + ] + + qb = QueryBuilder() + qb.append(QeAppWorkChain, project=projections, tag="process") + qb.order_by({"process": {"ctime": "desc"}}) + results = qb.all() + + df = pd.DataFrame(results, columns=headers) + # Check if DataFrame is not empty + if not df.empty: + df["Creation time"] = df["ctime"].apply( + lambda x: x.strftime("%Y-%m-%d %H:%M:%S") + ) + df["Delete"] = df["PK"].apply( + lambda pk: f'Delete' + ) + df["Inspect"] = df["PK"].apply( + lambda pk: f'Inspect' + ) + else: + # Initialize empty columns for an empty DataFrame + df["Creation time"] = pd.Series(dtype="str") + df["Delete"] = pd.Series(dtype="str") + df["Inspect"] = pd.Series(dtype="str") + return df[ + [ + "PK", + "Creation time", + "Structure", + "State", + "Label", + "Relax_type", + "Delete", + "Inspect", + "Properties", + "ctime", + ] + ] + + def setup_widgets(self): + import ipywidgets as ipw + + self.css_style = """ + + """ + + unique_properties = set(self.df["Properties"].explode().dropna()) + unique_properties.discard(None) + property_checkboxes = [ + ipw.Checkbox( + value=False, + description=prop, + Layout=ipw.Layout(description_width="initial"), + indent=False, + ) + for prop in unique_properties + ] + self.properties_box = ipw.HBox( + children=property_checkboxes, description="Properties:" + ) + # Replace 'None' in 'Properties' with an empty list + self.df["Properties"] = self.df["Properties"].apply( + lambda x: [] if x is None else x + ) + self.job_state_dropdown = ipw.Dropdown( + options=["", "finished", "waiting", "except", "killed"], + value="", + description="Job State:", + ) + self.label_search_field = ipw.Text( + value="", + placeholder="Enter label to search", + description="Search Label:", + disabled=False, + style={"description_width": "initial"}, + ) + self.time_start = ipw.DatePicker(description="Start Time:") + self.time_end = ipw.DatePicker(description="End Time:") + self.time_box = ipw.HBox([self.time_start, self.time_end]) + # self.apply_filters_btn = ipw.Button(description='Apply Filters') + # 
self.apply_filters_btn.on_click(self.apply_filters) + for cb in property_checkboxes: + cb.observe(self.apply_filters, names="value") + self.time_start.observe(self.apply_filters, names="value") + self.time_end.observe(self.apply_filters, names="value") + self.job_state_dropdown.observe(self.apply_filters, names="value") + self.label_search_field.observe(self.apply_filters, names="value") + + self.filters_layout = ipw.VBox( + [ + ipw.HTML("

    Search results:

    "), + ipw.VBox( + [ + ipw.HBox( + [ipw.HTML("Properties: "), self.properties_box] + ), + self.label_search_field, + self.job_state_dropdown, + self.time_box, + # self.apply_filters_btn, + ] + ), + ] + ) + self.get_table_value(self.df) + + def get_table_value(self, display_df): + if display_df.empty: + self.table.value = "

    No results found

    " + return + display_df = display_df.drop(columns=["Properties", "ctime"]) + self.table.value = self.css_style + display_df.to_html( + classes="df", escape=False, index=False + ) + + def apply_filters(self, _): + import pandas as pd + + selected_properties = [ + cb.description for cb in self.properties_box.children if cb.value + ] + filtered_df = self.df.copy() + filtered_df = filtered_df[ + filtered_df["State"].str.contains(self.job_state_dropdown.value) + ] + if self.label_search_field.value: + filtered_df = filtered_df[ + filtered_df["Label"].str.contains( + self.label_search_field.value, case=False, na=False + ) + ] + if selected_properties: + filtered_df = filtered_df[ + filtered_df["Properties"].apply( + lambda x: all(item in x for item in selected_properties) + ) + ] + if self.time_start.value and self.time_end.value: + start_time = pd.to_datetime(self.time_start.value).normalize() + end_time = pd.to_datetime(self.time_end.value).normalize() + pd.Timedelta( + days=1, milliseconds=-1 + ) + start_time = start_time.tz_localize("UTC") + end_time = end_time.tz_localize("UTC") + filtered_df = filtered_df[ + (filtered_df["ctime"] >= start_time) + & (filtered_df["ctime"] <= end_time) + ] + self.get_table_value(filtered_df) + + def display(self): + from IPython.display import display + + display(self.filters_layout) + display(self.table) diff --git a/src/aiidalab_qe/app/wrapper.py b/src/aiidalab_qe/app/wrapper.py new file mode 100644 index 000000000..baddf8c66 --- /dev/null +++ b/src/aiidalab_qe/app/wrapper.py @@ -0,0 +1,212 @@ +from __future__ import annotations + +import ipywidgets as ipw +import traitlets + + +def without_triggering(toggle: str): + """Decorator to prevent the other toggle from triggering its callback.""" + + def decorator(func): + def wrapper(self, change: dict): + """Toggle off other button without triggering its callback.""" + view: AppWrapperView = self._view + button: ipw.ToggleButton = getattr(view, toggle) + callback = getattr(self, f"_on_{toggle}") + button.unobserve(callback, "value") + button.value = False + func(self, change) + button.observe(callback, "value") + + return wrapper + + return decorator + + +class AppWrapperContoller: + """An MVC controller for `AppWrapper`.""" + + def __init__( + self, + model: AppWrapperModel, + view: AppWrapperView, + ) -> None: + """`AppWrapperController` constructor. + + Parameters + ---------- + `model` : `AppWrapperModel` + The associated model. + `view` : `AppWrapperView` + The associated view. 
+ """ + self._model = model + self._view = view + self._set_event_handlers() + + def enable_toggles(self) -> None: + """Enable the toggle buttons.""" + self._view.guide_toggle.disabled = False + self._view.about_toggle.disabled = False + self._view.job_history_toggle.disabled = False + + @without_triggering("about_toggle") + def _on_guide_toggle(self, change: dict): + """Toggle the guide section.""" + if change["new"]: + self._view.job_history_toggle.value = False + self._view.info_container.children = [self._view.guide] if change["new"] else [] + self._view.info_container.layout.display = "flex" if change["new"] else "none" + + @without_triggering("guide_toggle") + def _on_about_toggle(self, change: dict): + """Toggle the about section.""" + if change["new"]: + self._view.job_history_toggle.value = False + self._view.info_container.children = [self._view.about] if change["new"] else [] + self._view.info_container.layout.display = "flex" if change["new"] else "none" + + def _on_job_history_toggle(self, change: dict): + """Toggle the job list section.""" + if change["new"]: + self._view.about_toggle.value = False + self._view.guide_toggle.value = False + self._view.job_history.setup_table() + self._view.main.children = [ + self._view.job_history.filters_layout, + self._view.job_history.table, + ] + else: + self._view.main.children = [self._view.app] + + def _set_event_handlers(self) -> None: + """Set up event handlers.""" + self._view.guide_toggle.observe(self._on_guide_toggle, "value") + self._view.about_toggle.observe(self._on_about_toggle, "value") + self._view.job_history_toggle.observe(self._on_job_history_toggle, "value") + + +class AppWrapperModel(traitlets.HasTraits): + """An MVC model for `AppWrapper`.""" + + def __init__(self): + """`AppWrapperModel` constructor.""" + + +class AppWrapperView(ipw.VBox): + """An MVC view for `AppWrapper`.""" + + def __init__(self) -> None: + """`AppWrapperView` constructor.""" + + ################# LAZY LOADING ################# + + from datetime import datetime + + from importlib_resources import files + from IPython.display import Image, display + from jinja2 import Environment + + from aiidalab_qe.app.static import templates + from aiidalab_qe.app.utils.search_jobs import QueryInterface + from aiidalab_qe.common.infobox import InfoBox + from aiidalab_qe.version import __version__ + + ################################################# + + self.output = ipw.Output() + + logo_img = Image( + filename="docs/source/_static/logo.png", + width="700", + ) + logo = ipw.Output() + with logo: + display(logo_img) + logo.add_class("logo") + + subtitle = ipw.HTML("

    πŸŽ‰ Happy computing πŸŽ‰

    ") + + self.guide_toggle = ipw.ToggleButton( + button_style="", + icon="book", + value=False, + description="Getting Started", + tooltip="Learn how to use the app", + disabled=True, + layout=ipw.Layout(width="140px"), + ) + + self.about_toggle = ipw.ToggleButton( + button_style="", + icon="info", + value=False, + description="About", + tooltip="Learn about the app", + disabled=True, + ) + + self.job_history_toggle = ipw.ToggleButton( + button_style="", + icon="list", + value=False, + description="Job History", + tooltip="View all jobs run with this app", + disabled=True, + layout=ipw.Layout(width="140px"), + ) + + info_toggles = ipw.HBox( + children=[ + self.guide_toggle, + self.about_toggle, + self.job_history_toggle, + ] + ) + info_toggles.add_class("info-toggles") + + env = Environment() + guide_template = files(templates).joinpath("guide.jinja").read_text() + about_template = files(templates).joinpath("about.jinja").read_text() + + self.guide = ipw.HTML(env.from_string(guide_template).render()) + self.about = ipw.HTML(env.from_string(about_template).render()) + + self.info_container = InfoBox() + self.job_history = QueryInterface() + + header = ipw.VBox( + children=[ + logo, + subtitle, + info_toggles, + self.info_container, + ], + ) + header.add_class("app-header") + + loading = ipw.HTML(""" +
    + Loading the app +
    + """) + + self.main = ipw.VBox(children=[loading]) + + current_year = datetime.now().year + footer = ipw.HTML(f""" +
    + Copyright (c) {current_year} AiiDAlab team
    + Version: {__version__} +
    + """) + + super().__init__( + layout={}, + children=[ + self.output, + header, + self.main, + footer, + ], + ) diff --git a/src/aiidalab_qe/common/bandpdoswidget.py b/src/aiidalab_qe/common/bandpdoswidget.py new file mode 100644 index 000000000..ea95f932a --- /dev/null +++ b/src/aiidalab_qe/common/bandpdoswidget.py @@ -0,0 +1,1463 @@ +import base64 +import json +import re + +import ipywidgets as ipw +import numpy as np +import plotly.graph_objects as go +from IPython.display import clear_output, display +from plotly.subplots import make_subplots +from pymatgen.core.periodic_table import Element + +from aiida.orm import ProjectionData +from aiidalab_widgets_base.utils import StatusHTML, string_range_to_list + + +class BandPdosPlotly: + SETTINGS = { + "axis_linecolor": "#111111", + "bands_linecolor": "#111111", + "bands_up_linecolor": "rgba(205, 0, 0, 0.4)", # Red Opacitiy 40% + "bands_down_linecolor": "rgba(72,118,255, 0.4)", # Blue Opacitiy 40% + "combined_plot_height": 600, + "combined_plot_width": 900, + "combined_column_widths": [0.7, 0.3], + "bands_plot_height": 600, + "bands_plot_width": 850, + "pdos_plot_height": 600, + "pdos_plot_width": 850, + "vertical_linecolor": "#111111", + "horizontal_linecolor": "#111111", + "vertical_range_bands": [-10, 10], + "horizontal_range_pdos": [-10, 10], + } + + def __init__(self, bands_data=None, pdos_data=None, project_bands=False): + self.bands_data = bands_data + self.pdos_data = pdos_data + self.fermi_energy = self._get_fermi_energy() + self.project_bands = project_bands and "projected_bands" in self.bands_data + + # Plotly Axis + # Plotly settings + self._bands_xaxis = self._band_xaxis() + self._bands_yaxis = self._band_yaxis() + self._pdos_xaxis = self._pdos_xaxis() + self._pdos_yaxis = self._pdos_yaxis() + + @property + def plot_type(self): + """Define the plot type.""" + if self.bands_data and self.pdos_data: + return "combined" + elif self.bands_data: + return "bands" + elif self.pdos_data: + return "pdos" + + @property + def bandspdosfigure(self): + return self._get_bandspdos_plot() + + def _get_fermi_energy(self): + """Function to return the Fermi energy information depending on the data available.""" + fermi_data = {} + if self.pdos_data: + if "fermi_energy_up" in self.pdos_data: + fermi_data["fermi_energy_up"] = self.pdos_data["fermi_energy_up"] + fermi_data["fermi_energy_down"] = self.pdos_data["fermi_energy_down"] + else: + fermi_data["fermi_energy"] = self.pdos_data["fermi_energy"] + else: + if "fermi_energy_up" in self.bands_data: + fermi_data["fermi_energy_up"] = self.bands_data["fermi_energy_up"] + fermi_data["fermi_energy_down"] = self.bands_data["fermi_energy_down"] + else: + fermi_data["fermi_energy"] = self.bands_data["fermi_energy"] + return fermi_data + + def _band_xaxis(self): + """Function to return the xaxis for the bands plot.""" + + if not self.bands_data: + return None + paths = self.bands_data.get("paths") + slider_bands = go.layout.xaxis.Rangeslider( + thickness=0.08, + range=[0, paths[-1]["x"][-1]], + ) + bandxaxis = go.layout.XAxis( + title="k-points", + range=[0, paths[-1]["x"][-1]], + showgrid=True, + showline=True, + tickmode="array", + rangeslider=slider_bands, + fixedrange=False, + tickvals=self.bands_data["pathlabels"][1], # ,self.band_labels[1], + ticktext=self.bands_data["pathlabels"][0], # self.band_labels[0], + showticklabels=True, + linecolor=self.SETTINGS["axis_linecolor"], + mirror=True, + linewidth=2, + type="linear", + ) + + return bandxaxis + + def _band_yaxis(self): + """Function to return the 
yaxis for the bands plot.""" + + if not self.bands_data: + return None + + bandyaxis = go.layout.YAxis( + title={"text": "Electronic Bands (eV)", "standoff": 1}, + side="left", + showgrid=True, + showline=True, + zeroline=True, + range=self.SETTINGS["vertical_range_bands"], + fixedrange=False, + automargin=True, + ticks="inside", + linewidth=2, + linecolor=self.SETTINGS["axis_linecolor"], + tickwidth=2, + zerolinewidth=2, + ) + + return bandyaxis + + def _pdos_xaxis(self): + """Function to return the xaxis for the pdos plot.""" + + if not self.pdos_data: + return None + # For combined plot + axis_settings = { + "showgrid": True, + "showline": True, + "mirror": "ticks", + "ticks": "inside", + "linewidth": 2, + "tickwidth": 2, + "linecolor": self.SETTINGS["axis_linecolor"], + "title": "Density of states", + "side": "bottom", + "automargin": True, + } + + if self.plot_type != "combined": + axis_settings["title"] = "Density of states (eV)" + axis_settings["range"] = self.SETTINGS["horizontal_range_pdos"] + axis_settings.pop("side") + axis_settings.pop("automargin") + + return go.layout.XAxis(**axis_settings) + + def _pdos_yaxis(self): + """Function to return the yaxis for the pdos plot.""" + + if not self.pdos_data: + return None + + axis_settings = { + "showgrid": True, + "showline": True, + "side": "right" if self.plot_type == "combined" else "left", + "mirror": "ticks", + "ticks": "inside", + "linewidth": 2, + "tickwidth": 2, + "linecolor": self.SETTINGS["axis_linecolor"], + "zerolinewidth": 2, + } + + return go.layout.YAxis(**axis_settings) + + def _get_bandspdos_plot(self): + """Function to return the bands plot widget.""" + + fig = self._create_fig() + if self.bands_data: + self._add_band_traces(fig) + + band_labels = self.bands_data.get("pathlabels") + for label in band_labels[1]: + fig.add_vline( + x=label, + line={"color": self.SETTINGS["vertical_linecolor"], "width": 1}, + ) + + if self.project_bands: + self._add_projection_traces(fig) + + if self.pdos_data: + self._add_pdos_traces(fig) + if self.plot_type == "pdos": + fig.add_vline( + x=0, + line={ + "color": self.SETTINGS["vertical_linecolor"], + "width": 1, + "dash": "dot", + }, + ) + + if self.plot_type == "combined": + self._customize_combined_layout(fig) + else: + self._customize_single_layout(fig) + + return go.FigureWidget(fig) + + def _create_fig(self): + """Create a plotly figure. + + The figure layout is different depending on the plot type. 
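The combined figure described above places the band structure and the PDOS side by side on a shared energy axis. A minimal plotly sketch of that layout with dummy data, using the same values as the SETTINGS defaults:

import plotly.graph_objects as go
from plotly.subplots import make_subplots

fig = make_subplots(
    rows=1, cols=2,
    shared_yaxes=True,         # one energy axis for both panels
    column_widths=[0.7, 0.3],  # bands get ~70% of the width
    horizontal_spacing=0.015,
)
fig.add_trace(go.Scattergl(x=[0, 1, 2], y=[-1, 0, 1], mode="lines"), row=1, col=1)    # bands
fig.add_trace(go.Scattergl(x=[0.5, 0.8, 0.2], y=[-1, 0, 1], mode="lines"), row=1, col=2)  # dos
fig.show()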
+ """ + if self.plot_type != "combined": + return go.Figure() + + fig = make_subplots( + rows=1, + cols=2, + shared_yaxes=True, + column_widths=self.SETTINGS["combined_column_widths"], + horizontal_spacing=0.015, + ) + return fig + + def _add_traces_to_fig(self, fig, traces, col): + """Add a list of traces to a figure.""" + if self.plot_type == "combined": + rows = [1] * len(traces) + cols = [col] * len(traces) + fig.add_traces(traces, rows=rows, cols=cols) + else: + fig.add_traces(traces) + + def _add_band_traces(self, fig): + """Generate the band traces and add them to the figure.""" + colors = { + (True, 0): self.SETTINGS["bands_up_linecolor"], + (True, 1): self.SETTINGS["bands_down_linecolor"], + (False, 0): self.SETTINGS["bands_linecolor"], + } + fermi_energy_mapping = { + (False, 0): self.fermi_energy.get("fermi_energy_up", None), + (False, 1): self.fermi_energy.get("fermi_energy_down", None), + } + + bands_data = self.bands_data + # Convert paths to a list of Scatter objects + scatter_objects = [] + + spin_polarized = 1 in bands_data["band_type_idx"] + for spin in [0, 1]: + # In case of non-spin-polarized or SOC calculations, the spin index is only 0 + if spin not in bands_data["band_type_idx"]: + continue + + x_bands = np.array(bands_data["x"]).reshape(1, -1) + # New shape: (number of bands, number of kpoints) + y_bands = bands_data["y"][:, bands_data["band_type_idx"] == spin].T + # Concatenate the bands and prepare the traces + x_bands_comb, y_bands_comb = _prepare_combined_plotly_traces( + x_bands, y_bands + ) + + fermi_energy = fermi_energy_mapping.get( + ("fermi_energy" in self.fermi_energy, spin), + self.fermi_energy.get("fermi_energy"), + ) + + scatter_objects.append( + go.Scattergl( + x=x_bands_comb, + y=y_bands_comb - fermi_energy, + mode="lines", + line={ + "color": colors[(spin_polarized, spin)], + "shape": "linear", + }, + showlegend=False, + ) + ) + + self._add_traces_to_fig(fig, scatter_objects, 1) + + def _add_pdos_traces(self, fig): + # Extract DOS data + dos_data = self.pdos_data["dos"] + + # Pre-allocate memory for Scatter objects + num_traces = len(dos_data) + scatter_objects = [None] * num_traces + + # dictionary with keys (bool(spin polarized), bool(spin up)) + fermi_energy_spin_mapping = { + (False, True): self.fermi_energy.get("fermi_energy_up", None), + (False, False): self.fermi_energy.get("fermi_energy_down", None), + } + + # Vectorize Scatter object creation + for i, trace in enumerate(dos_data): + dos_np = np.array(trace["x"]) + fill = "tozerox" if self.plot_type == "combined" else "tozeroy" + fermi_energy = fermi_energy_spin_mapping.get( + ("fermi_energy" in self.fermi_energy, trace["label"].endswith("(↑)")), + self.fermi_energy.get("fermi_energy"), + ) + + x_data = ( + trace["y"] if self.plot_type == "combined" else dos_np - fermi_energy + ) + y_data = ( + dos_np - fermi_energy if self.plot_type == "combined" else trace["y"] + ) + scatter_objects[i] = go.Scattergl( + x=x_data, + y=y_data, + fill=fill, + name=trace["label"], + line={ + "color": trace["borderColor"], + "shape": "linear", + }, + legendgroup=trace["label"], + ) + + self._add_traces_to_fig(fig, scatter_objects, 2) + + def _add_projection_traces(self, fig): + """Function to add the projected bands traces to the bands plot.""" + projected_bands = self.bands_data["projected_bands"] + # dictionary with keys (bool(spin polarized), bool(spin up)) + fermi_energy_spin_mapping = { + (False, True): self.fermi_energy.get("fermi_energy_up", None), + (False, False): 
self.fermi_energy.get("fermi_energy_down", None), + } + + scatter_objects = [] + for proj_bands in projected_bands: + fermi_energy = fermi_energy_spin_mapping.get( + ( + "fermi_energy" in self.fermi_energy, + proj_bands["label"].endswith("(↑)"), + ), + self.fermi_energy.get("fermi_energy"), + ) + scatter_objects.append( + go.Scattergl( + x=proj_bands["x"], + y=np.array(proj_bands["y"]) - fermi_energy, + fill="toself", + legendgroup=proj_bands["label"], + mode="lines", + line={"width": 0, "color": proj_bands["color"]}, + name=proj_bands["label"], + # If PDOS is present, use those legend entries + showlegend=True if self.plot_type == "bands" else False, + ) + ) + + self._add_traces_to_fig(fig, scatter_objects, 1) + + def _customize_combined_layout(self, fig): + self._customize_layout(fig, self._bands_xaxis, self._bands_yaxis) + self._customize_layout(fig, self._pdos_xaxis, self._pdos_yaxis, col=2) + fig.update_layout( + legend={"xanchor": "left", "x": 1.06}, + height=self.SETTINGS["combined_plot_height"], + width=self.SETTINGS["combined_plot_width"], + plot_bgcolor="white", + ) + self._update_dos_layout(fig) + + def _customize_layout(self, fig, xaxis, yaxis, row=1, col=1): + fig.update_xaxes(patch=xaxis, row=row, col=col) + fig.update_yaxes(patch=yaxis, row=row, col=col, showticklabels=True) + fig.add_hline( + y=0, + line={ + "color": self.SETTINGS["horizontal_linecolor"], + "width": 1, + "dash": "dot", + }, + row=row, + col=col, + ) + + def _customize_single_layout(self, fig): + xaxis = getattr(self, f"_{self.plot_type}_xaxis") + yaxis = getattr(self, f"_{self.plot_type}_yaxis") + + fig.update_layout( + xaxis=xaxis, + yaxis=yaxis, + plot_bgcolor="white", + height=self.SETTINGS[f"{self.plot_type}_plot_height"], + width=self.SETTINGS[f"{self.plot_type}_plot_width"], + ) + self._update_dos_layout(fig) + + def _update_dos_layout(self, fig): + def update_layout_spin_polarized( + x_data_up, + y_data_up, + x_data_down, + y_data_down, + x_min, + x_max, + update_func, + layout_type, + ): + most_negative_down, max_up = find_max_up_and_down( + x_data_up, y_data_up, x_data_down, y_data_down, x_min, x_max + ) + if layout_type == "layout": + update_func(yaxis={"range": [most_negative_down * 1.10, max_up * 1.10]}) + elif layout_type == "xaxes": + update_func( + patch={"range": [most_negative_down * 1.10, max_up * 1.10]}, + row=1, + col=2, + ) + + def update_layout_non_spin_polarized( + total_dos_xdata, total_dos_ydata, x_min, x_max, update_func, layout_type + ): + max_value = find_max_in_range( + total_dos_xdata, total_dos_ydata, x_min, x_max + ) + if layout_type == "layout": + update_func(yaxis={"range": [0, max_value * 1.10]}) + elif layout_type == "xaxes": + update_func(patch={"range": [0, max_value * 1.10]}, row=1, col=2) + + def get_x_min_max(fermi_energy): + return ( + self.SETTINGS["horizontal_range_pdos"][0] + fermi_energy, + self.SETTINGS["horizontal_range_pdos"][1] + fermi_energy, + ) + + def handle_spin_polarization(fermi_energy, update_func, layout_type): + spin_polarized = "(↑)" in self.pdos_data["dos"][0]["label"] + if not spin_polarized: + total_dos_xdata = self.pdos_data["dos"][0]["x"] + total_dos_ydata = self.pdos_data["dos"][0]["y"] + x_min, x_max = get_x_min_max(fermi_energy) + update_layout_non_spin_polarized( + total_dos_xdata, + total_dos_ydata, + x_min, + x_max, + update_func, + layout_type, + ) + else: + x_data_up = self.pdos_data["dos"][0]["x"] + y_data_up = self.pdos_data["dos"][0]["y"] + x_data_down = self.pdos_data["dos"][1]["x"] + y_data_down = self.pdos_data["dos"][1]["y"] + 
x_min, x_max = get_x_min_max(fermi_energy) + update_layout_spin_polarized( + x_data_up, + y_data_up, + x_data_down, + y_data_down, + x_min, + x_max, + update_func, + layout_type, + ) + + # PDOS plot type + if self.plot_type == "pdos": + fermi_energy = self.fermi_energy.get( + "fermi_energy", self.fermi_energy.get("fermi_energy_up") + ) + handle_spin_polarization(fermi_energy, fig.update_layout, "layout") + + # Combined plot type + if self.plot_type == "combined": + fermi_energy = self.fermi_energy.get( + "fermi_energy", self.fermi_energy.get("fermi_energy_up") + ) + handle_spin_polarization(fermi_energy, fig.update_xaxes, "xaxes") + + +class BandPdosWidget(ipw.VBox): + """ + A widget for plotting band structure and projected density of states (PDOS) data. + + Parameters: + - bands (optional): A node containing band structure data. + - pdos (optional): A node containing PDOS data. + + Attributes: + - description: HTML description of the widget. + - dos_atoms_group: Dropdown widget to select the grouping of atoms for PDOS plotting. + - dos_plot_group: Dropdown widget to select the type of PDOS contributions to plot. + - selected_atoms: Text widget to select specific atoms for PDOS plotting. + - update_plot_button: Button widget to update the plot. + - download_button: Button widget to download the data. + - project_bands_box: Checkbox widget to choose whether projected bands should be plotted. + - dos_data: PDOS data. + - bands_data: Band structure data. + - bandsplot_widget: Plotly widget for band structure and PDOS plot. + - bands_widget: Output widget to display the bandsplot widget. + - pdos_options_out: Output widget to clear specific widgets. + """ + + widget_description = ipw.HTML( + """
    + Hover over the plot to reveal controls for zoom, pan, and downloading the image. Use the zoom tools or your mouse to zoom in on specific regions, and click on the axes for interactive features. The home button resets to the default view, and the autoscale option displays all computed data, including semicore states. +
    """ + ) + + description = ipw.HTML( + """
    + Select the style of plotting the projected density of states. +
    """ + ) + + legend_interaction_description = ipw.HTML( + """
    + The legend entries can be clicked to hide or show the corresponding data. Double-clicking on a legend entry will isolate it. +
    """ + ) + + def __init__(self, bands=None, pdos=None, **kwargs): + if bands is None and pdos is None: + raise ValueError("Either bands or pdos must be provided") + + self.bands = bands # bands node + self.pdos = pdos # pdos node + + self.dos_atoms_group = ipw.Dropdown( + description="Group by:", + options=[ + ("Kinds", "kinds"), + ("Atomic position", "atoms"), + ], + value="kinds", + style={"description_width": "initial"}, + ) + self.dos_plot_group = ipw.Dropdown( + description="Plot contributions:", + options=[ + ("Total", "total"), + ("Orbital", "orbital"), + ("Angular momentum", "angular_momentum"), + ], + value="total", + style={"description_width": "initial"}, + ) + self.selected_atoms = ipw.Text( + placeholder="e.g. 1..5 8 10", + description="Select atoms:", + value="", + style={"description_width": "initial"}, + ) + self._wrong_syntax = StatusHTML(clear_after=8) + self.update_plot_button = ipw.Button( + description="Apply selection", + icon="pencil", + button_style="primary", + disabled=False, + ) + self.download_button = ipw.Button( + description="Download Data", + icon="download", + button_style="primary", + disabled=False, + layout=ipw.Layout(visibility="hidden"), + ) + self.project_bands_box = ipw.Checkbox( + value=False, + description="Add `fat bands` projections", + style={"description_width": "initial"}, + ) + self.proj_bands_width_slider = ipw.FloatSlider( + value=0.5, + min=0.01, + max=2.0, + step=0.01, + description="`Fat bands` max width (eV):", + orientation="horizontal", + readout=True, + readout_format=".2f", + style={"description_width": "initial"}, + layout=ipw.Layout(width="380px", visibility="hidden"), + ) + + # Information for the plot + self.pdos_data = self._get_pdos_data() + self.bands_data = self._get_bands_data() + # Plotly widget + self.bandsplot_widget = BandPdosPlotly( + bands_data=self.bands_data, pdos_data=self.pdos_data + ).bandspdosfigure + # Output widget to display the bandsplot widget + self.bands_widget = ipw.Output() + # Output widget to clear the specific widgets + self.pdos_options_out = ipw.Output() + + pdos_options_list = [ + self.description, + self.dos_atoms_group, + self.dos_plot_group, + ipw.HBox( + [self.selected_atoms, self._wrong_syntax, self.update_plot_button] + ), + ] + # If projections are available in the bands data, include the box to plot fat-bands + if self.bands_data and "projected_bands" in self.bands_data: + pdos_options_list.insert(4, self.project_bands_box) + pdos_options_list.insert(5, self.proj_bands_width_slider) + + self.pdos_options = ipw.VBox(pdos_options_list) + + self._initial_view() + + # Set the event handlers + self.download_button.on_click(self.download_data) + self.update_plot_button.on_click(self._update_plot) + self.proj_bands_width_slider.observe(self._update_plot, names="value") + self.project_bands_box.observe(self._update_plot, names="value") + self.dos_atoms_group.observe(self._update_plot, names="value") + self.dos_plot_group.observe(self._update_plot, names="value") + + super().__init__( + children=[ + self.widget_description, + self.pdos_options_out, + self.download_button, + self.bands_widget, # Add the output widget to the VBox + ], + **kwargs, + ) + + # Plot the options only if the pdos is provided or in case the bands data contains projections + if self.pdos or (self.bands_data and "projected_bands" in self.bands_data): + # Add the legend interaction description after the download button + self.children = ( + self.children[ + :3 + ] # Get the first three children: widget_description, 
pdos_options_out and download_button + + ( + self.legend_interaction_description, + ) # Add the legend interaction description as a tuple + + self.children[3:] # Add the rest of the children + ) + with self.pdos_options_out: + display(self.pdos_options) + + def download_data(self, _=None): + """Function to download the data.""" + file_name_bands = "bands_data.json" + file_name_pdos = "dos_data.json" + if self.bands_data: + bands_data_export = {} + for key, value in self.bands_data.items(): + if isinstance(value, np.ndarray): + bands_data_export[key] = value.tolist() + else: + bands_data_export[key] = value + + json_str = json.dumps(bands_data_export) + b64_str = base64.b64encode(json_str.encode()).decode() + self._download(payload=b64_str, filename=file_name_bands) + if self.pdos_data: + json_str = json.dumps(self.pdos_data) + b64_str = base64.b64encode(json_str.encode()).decode() + self._download(payload=b64_str, filename=file_name_pdos) + + @staticmethod + def _download(payload, filename): + """Download payload as a file named as filename.""" + from IPython.display import Javascript + + javas = Javascript( + f""" + var link = document.createElement('a'); + link.href = 'data:text/json;charset=utf-8;base64,{payload}' + link.download = "{filename}" + document.body.appendChild(link); + link.click(); + document.body.removeChild(link); + """ + ) + display(javas) + + def _get_pdos_data(self): + if not self.pdos: + return None + expanded_selection, syntax_ok = string_range_to_list( + self.selected_atoms.value, shift=-1 + ) + if syntax_ok: + pdos = get_pdos_data( + self.pdos, + group_tag=self.dos_atoms_group.value, + plot_tag=self.dos_plot_group.value, + selected_atoms=expanded_selection, + ) + return pdos + return None + + def _get_bands_data(self): + if not self.bands: + return None + + expanded_selection, syntax_ok = string_range_to_list( + self.selected_atoms.value, shift=-1 + ) + if syntax_ok: + bands = get_bands_projections_data( + self.bands, + group_tag=self.dos_atoms_group.value, + plot_tag=self.dos_plot_group.value, + selected_atoms=expanded_selection, + bands_width=self.proj_bands_width_slider.value, + ) + return bands + return None + + def _initial_view(self): + with self.bands_widget: + self._clear_output_and_display(self.bandsplot_widget) + self.download_button.layout.visibility = "visible" + self.project_bands_box.layout.visibility = "visible" + + def _update_plot(self, _=None): + with self.bands_widget: + expanded_selection, syntax_ok = string_range_to_list( + self.selected_atoms.value, shift=-1 + ) + if not syntax_ok: + self._wrong_syntax.message = """
    ERROR: Invalid syntax for selected atoms
    """ + clear_output(wait=True) + else: + self.pdos_data = self._get_pdos_data() + self.bands_data = self._get_bands_data() + + # Get current axis range + xaxis_range = list(self.bandsplot_widget.layout["xaxis"]["range"]) + yaxis_range = list(self.bandsplot_widget.layout["yaxis"]["range"]) + + self.bandsplot_widget = BandPdosPlotly( + bands_data=self.bands_data, + pdos_data=self.pdos_data, + project_bands=self.project_bands_box.value, + ).bandspdosfigure + self._clear_output_and_display(self.bandsplot_widget) + + # Restore Old axis range. I do it after the plot is displayed to the Reset button always return to the Default SETTINGs + if self.bands_data: + self.bandsplot_widget.plotly_relayout({"yaxis.range": yaxis_range}) + if self.pdos_data and not self.bands_data: + self.bandsplot_widget.plotly_relayout({"xaxis.range": xaxis_range}) + + self.proj_bands_width_slider.layout.visibility = ( + "visible" if self.project_bands_box.value else "hidden" + ) + + def _clear_output_and_display(self, widget=None): + clear_output(wait=True) + if widget: + display(widget) + + +def _prepare_combined_plotly_traces(x_to_conc, y_to_conc): + """Combine multiple lines into a single trace. + + The rows of y are concatenated with a np.nan column as a separator. Moreover, + the x values are ajduced to match the shape of the concatenated y values. These + transfomred arrays, representing multiple datasets/lines, can be plotted in a single trace. + """ + if y_to_conc.ndim != 2: + raise ValueError("y must be a 2D array") + + y_dim0 = y_to_conc.shape[0] + + # Add a np.nan column as a separator + y_transf = np.hstack( + [ + y_to_conc, + np.full((y_dim0, 1), np.nan), + ] + ).flatten() + + # Same logic for the x axis + x_transf = x_to_conc.reshape(1, -1) * np.ones(y_dim0).reshape(-1, 1) + x_transf = np.hstack([x_transf, np.full((y_dim0, 1), np.nan)]).flatten() + + return x_transf, y_transf + + +def _prepare_projections_to_plot(bands_data, projections, bands_width): + """Prepare the projected bands to be plotted. + + This function transforms the projected bands into a format that can be plotted + in a single trace. To use the fill option `toself`, + a band needs to be concatenated with its mirror image, first. 
+ """ + projected_bands = [] + for spin in [0, 1]: + # In case of non-spin-polarized calculations, the spin index is only 0 + if spin not in bands_data["band_type_idx"]: + continue + + x_bands = bands_data["x"] + # New shape: (number of bands, number of kpoints) + y_bands = bands_data["y"][:, bands_data["band_type_idx"] == spin].T + + for proj in projections[spin]: + # Create the upper and lower boundary of the fat bands based on the orbital projections + y_bands_proj_upper = y_bands + bands_width / 2 * proj["projections"].T + y_bands_proj_lower = y_bands - bands_width / 2 * proj["projections"].T + # As mentioned above, the bands need to be concatenated with their mirror image + # to create the filled areas properly + y_bands_mirror = np.hstack( + [y_bands_proj_upper, y_bands_proj_lower[:, ::-1]] + ) + # Same logic for the energy axis + x_bands_mirror = np.concatenate([x_bands, x_bands[::-1]]).reshape(1, -1) + x_bands_comb, y_bands_proj_comb = _prepare_combined_plotly_traces( + x_bands_mirror, y_bands_mirror + ) + + projected_bands.append( + { + "x": x_bands_comb.tolist(), + "y": y_bands_proj_comb.tolist(), + "label": proj["label"], + "color": proj["color"], + } + ) + return projected_bands + + +def get_bands_projections_data( + outputs, group_tag, plot_tag, selected_atoms, bands_width, fermi_energy=None +): + """Extract the bandstructure and possibly the projections along the bands.""" + if "band_structure" not in outputs: + return None + + bands_data = outputs.band_structure._get_bandplot_data( + cartesian=True, prettify_format=None, join_symbol=None, get_segments=True + ) + # The fermi energy from band calculation is not robust. + if "fermi_energy_up" in outputs.band_parameters: + bands_data["fermi_energy_up"] = outputs.band_parameters["fermi_energy_up"] + bands_data["fermi_energy_down"] = outputs.band_parameters["fermi_energy_down"] + else: + bands_data["fermi_energy"] = ( + outputs.band_parameters["fermi_energy"] or fermi_energy + ) + + bands_data["pathlabels"] = get_bands_labeling(bands_data) + + if "projwfc" in outputs: + projections = [] + + if "projections" in outputs.projwfc: + projections.append( + _projections_curated_options( + outputs.projwfc.projections, + spin_type="none", + group_tag=group_tag, + plot_tag=plot_tag, + selected_atoms=selected_atoms, + projections_pdos="projections", + ) + ) + else: + for spin_proj, spin_type in zip( + [ + outputs.projwfc.projections_up, + outputs.projwfc.projections_down, + ], + ["up", "down"], + ): + projections.append( + _projections_curated_options( + spin_proj, + spin_type=spin_type, + group_tag=group_tag, + plot_tag=plot_tag, + selected_atoms=selected_atoms, + projections_pdos="projections", + ) + ) + + bands_data["projected_bands"] = _prepare_projections_to_plot( + bands_data, projections, bands_width + ) + if plot_tag != "total": + bands_data["projected_bands"] = update_pdos_labels( + bands_data["projected_bands"] + ) + return bands_data + + +def get_pdos_data(pdos, group_tag, plot_tag, selected_atoms): + dos = [] + + if "output_dos" not in pdos.dos: + return None + + _, energy_dos, _ = pdos.dos.output_dos.get_x() + tdos_values = {f"{n}": v for n, v, _ in pdos.dos.output_dos.get_y()} + + if "projections" in pdos.projwfc: + # Total DOS + tdos = { + "label": "Total DOS", + "x": energy_dos.tolist(), + "y": tdos_values.get("dos").tolist(), + "borderColor": "#8A8A8A", # dark gray + "backgroundColor": "#999999", # light gray + "backgroundAlpha": "40%", + "lineStyle": "solid", + } + dos.append(tdos) + dos += _projections_curated_options( + 
pdos.projwfc.projections, + spin_type="none", + group_tag=group_tag, + plot_tag=plot_tag, + selected_atoms=selected_atoms, + ) + else: + # Total DOS (↑) and Total DOS (↓) + tdos_up = { + "label": "Total DOS (↑)", + "x": energy_dos.tolist(), + "y": tdos_values.get("dos_spin_up").tolist(), + "borderColor": "#8A8A8A", # dark gray + "backgroundColor": "#999999", # light gray + "backgroundAlpha": "40%", + "lineStyle": "solid", + } + tdos_down = { + "label": "Total DOS (↓)", + "x": energy_dos.tolist(), + "y": (-tdos_values.get("dos_spin_down")).tolist(), + "borderColor": "#8A8A8A", # dark gray + "backgroundColor": "#999999", # light gray + "backgroundAlpha": "40%", + "lineStyle": "dash", + } + dos += [tdos_up, tdos_down] + + # Spin-up (↑) and Spin-down (↓) + dos += _projections_curated_options( + pdos.projwfc.projections_up, + spin_type="up", + group_tag=group_tag, + plot_tag=plot_tag, + selected_atoms=selected_atoms, + ) + dos += _projections_curated_options( + pdos.projwfc.projections_down, + spin_type="down", + line_style="dash", + group_tag=group_tag, + plot_tag=plot_tag, + selected_atoms=selected_atoms, + ) + + data_dict = { + "dos": dos, + } + if "fermi_energy_up" in pdos.nscf.output_parameters: + data_dict["fermi_energy_up"] = pdos.nscf.output_parameters["fermi_energy_up"] + data_dict["fermi_energy_down"] = pdos.nscf.output_parameters[ + "fermi_energy_down" + ] + else: + data_dict["fermi_energy"] = pdos.nscf.output_parameters["fermi_energy"] + + # Updata labels if plot_tag is different than total + if plot_tag != "total": + data_dict = update_pdos_labels(data_dict) + # data_dict = deepcopy(new_dict) + + return json.loads(json.dumps(data_dict)) + + +def _get_grouping_key( + group_tag, + plot_tag, + atom_position, + kind_name, + orbital_name_plotly, + orbital_angular_momentum, +): + """Generates the grouping key based on group_tag and plot_tag.""" + + key_formats = { + ("atoms", "total"): r"{var1}-{var}", + ("kinds", "total"): r"{var1}", + ("atoms", "orbital"): r"{var1}-{var2}
    {var}", + ("kinds", "orbital"): r"{var1}-{var2}", + ("atoms", "angular_momentum"): r"{var1}-{var3}
    {var}", + ("kinds", "angular_momentum"): r"{var1}-{var3}", + } + + key = key_formats.get((group_tag, plot_tag)) + if key is not None: + return key.format( + var=atom_position, + var1=kind_name, + var2=orbital_name_plotly, + var3=orbital_angular_momentum, + ) + else: + return None + + +def _curate_orbitals(orbital): + """Curate and transform the orbital data into the desired format.""" + # Constants for HTML tags + HTML_TAGS = { + "s": "s", + "pz": "pz", + "px": "px", + "py": "py", + "dz2": "dz2", + "dxy": "dxy", + "dxz": "dxz", + "dyz": "dyz", + "dx2-y2": "dx2-y2", + "fz3": "fz3", + "fxz2": "fxz2", + "fyz2": "fyz2", + "fxyz": "fxzy", + "fx(x2-3y2)": "fx(x2-3y2)", + "fy(3x2-y2)": "fy(3x2-y2)", + "fy(x2-z2)": "fy(x2-z2)", + 0.5: "+1/2", + -0.5: "-1/2", + 1.5: "+3/2", + -1.5: "-3/2", + 2.5: "+5/2", + -2.5: "-5/2", + } + + orbital_data = orbital.get_orbital_dict() + kind_name = orbital_data["kind_name"] + atom_position = [round(i, 2) for i in orbital_data["position"]] + radial_node = orbital_data["radial_nodes"] + + try: + orbital_name = orbital.get_name_from_quantum_numbers( + orbital_data["angular_momentum"], orbital_data["magnetic_number"] + ).lower() + orbital_name_plotly = ( + f"r{radial_node} {HTML_TAGS.get(orbital_name, orbital_name)}" + ) + orbital_angular_momentum = f"r{radial_node} {orbital_name[0]}" + + except AttributeError: + # Set quanutum numbers + qn_j = orbital_data["total_angular_momentum"] + qn_l = orbital_data["angular_momentum"] + qn_m_j = orbital_data["magnetic_number"] + orbital_name = f"j {qn_j} l {qn_l} m_j{qn_m_j}" + orbital_name_plotly = f"j={HTML_TAGS.get(qn_j, qn_j)} l={qn_l} mj={HTML_TAGS.get(qn_m_j, qn_m_j)}" + orbital_angular_momentum = f"l {qn_l} " + + return orbital_name_plotly, orbital_angular_momentum, kind_name, atom_position + + +def _projections_curated_options( + projections: ProjectionData, + group_tag, + plot_tag, + selected_atoms, + projections_pdos="pdos", + spin_type="none", + line_style="solid", +): + """Extract and curate the projections. + + This function can be used to extract the PDOS or the projections data. 
+ """ + _proj_pdos = {} + list_positions = [] + + # Constants for spin types + SPIN_LABELS = {"up": "(↑)", "down": "(↓)", "none": ""} + SIGN_MULT_FACTOR = {"up": 1, "down": -1, "none": 1} + + if projections_pdos == "pdos": + proj_data = projections.get_pdos() + elif projections_pdos == "projections": + proj_data = projections.get_projections() + else: + raise ValueError(f"Invalid value for `projections_pdos`: {projections_pdos}") + + for orb_proj in proj_data: + if projections_pdos == "pdos": + orbital, proj_pdos, energy = orb_proj + elif projections_pdos == "projections": + orbital, proj_pdos = orb_proj + energy = None + + ( + orbital_name_plotly, + orbital_angular_momentum, + kind_name, + atom_position, + ) = _curate_orbitals(orbital) + + if atom_position not in list_positions: + list_positions.append(atom_position) + + key = _get_grouping_key( + group_tag, + plot_tag, + atom_position, + kind_name, + orbital_name_plotly, + orbital_angular_momentum, + ) + if not selected_atoms: + if key: + _proj_pdos.setdefault(key, [energy, 0])[1] += proj_pdos + + else: + try: + index = list_positions.index(atom_position) + if index in selected_atoms: + if key: + _proj_pdos.setdefault(key, [energy, 0])[1] += proj_pdos + + except ValueError: + pass + + curated_proj = [] + for label, (energy, proj_pdos) in _proj_pdos.items(): + label += SPIN_LABELS[spin_type] # noqa: PLW2901 + if projections_pdos == "pdos": + orbital_proj_pdos = { + "label": label, + "x": energy.tolist(), + "y": (SIGN_MULT_FACTOR[spin_type] * proj_pdos).tolist(), + "borderColor": cmap(label), + "lineStyle": line_style, + } + else: + orbital_proj_pdos = { + "label": label, + "projections": proj_pdos, + "color": cmap(label), + } + curated_proj.append(orbital_proj_pdos) + + return curated_proj + + +def get_bands_labeling(bandsdata: dict) -> list: + """Function to return two lists containing the labels and values (kpoint) for plotting. + params: + - bandsdata: dictionary from `get_bands_projections_data` function + output: update bandsdata with a new key "pathlabels" including (list of str), label_values (list of float) + """ + UNICODE_SYMBOL = { + "GAMMA": "\u0393", + "DELTA": "\u0394", + "LAMBDA": "\u039b", + "SIGMA": "\u03a3", + "EPSILON": "\u0395", + } + paths = bandsdata.get("paths") + labels = [] + for path in paths: # Remove duplicates + label_a = [path["from"], path["x"][0]] + label_b = [path["to"], path["x"][-1]] + if label_a not in labels: + labels.append(label_a) + if label_b not in labels: + labels.append(label_b) + + clean_labels = [] # Format + for i in labels: + if clean_labels: + if (i not in clean_labels) and (clean_labels[-1][-1] == i[1]): + clean_labels[-1][0] = clean_labels[-1][0] + "|" + i[0] + else: + clean_labels.append(i) + else: + clean_labels.append(i) + + path_labels = [label[0] for label in clean_labels] + for i, label in enumerate(path_labels): + path_labels[i] = re.sub( + r"([A-Z]+)", lambda x: UNICODE_SYMBOL.get(x.group(), x.group()), label + ) + path_values = [label[1] for label in clean_labels] + return [path_labels, path_values] + + +def cmap(label: str) -> str: + """Return RGB string of color for given pseudo info + Hardcoded at the momment. 
+ """ + import random + + # if a unknow type generate random color based on ascii sum + ascn = sum([ord(c) for c in label]) + random.seed(ascn) + + return f"#{random.randint(0, 0xFFFFFF):06x}" + + +def find_extreme_in_range( + x_data, y_data, x_min, x_max, is_max=True, initial_value=float("-inf") +): + """ + General function to find the extreme value (max or min) in a given range. + + Parameters: + - x_data: List of x values. + - y_data: List of y values. + - x_min: Minimum x value for the range. + - x_max: Maximum x value for the range. + - is_max: Boolean to determine whether to find the maximum or minimum value. + - initial_value: Initial value for extreme (default is -inf for max and 0 for min). + + Returns: + - Extreme value found in the range, or None if no valid values are found. + """ + extreme_value = initial_value + + for x, y in zip(x_data, y_data): + if x_min <= x <= x_max: + if (is_max and y > extreme_value) or (not is_max and y < extreme_value): + extreme_value = y + + return extreme_value if extreme_value != initial_value else None + + +def find_max_up_and_down(x_data_up, y_data_up, x_data_down, y_data_down, x_min, x_max): + """ + Function to find the maximum positive value and the most negative value. + + Parameters: + - x_data_up: List of x values for the positive part. + - y_data_up: List of y values for the positive part. + - x_data_down: List of x values for the negative part. + - y_data_down: List of y values for the negative part. + - x_min: Minimum x value for the range. + - x_max: Maximum x value for the range. + + Returns: + - most_negative_down: Most negative value found in the down part. + - max_up: Maximum value found in the up part. + """ + max_up = find_extreme_in_range(x_data_up, y_data_up, x_min, x_max, is_max=True) + most_negative_down = find_extreme_in_range( + x_data_down, y_data_down, x_min, x_max, is_max=False, initial_value=0 + ) + + return most_negative_down, max_up + + +def find_max_in_range(x_data, y_data, x_min, x_max): + """ + Function to find the maximum value in a specified range. + + Parameters: + - x_data: List of x values. + - y_data: List of y values. + - x_min: Minimum x value for the range. + - x_max: Maximum x value for the range. + + Returns: + - Maximum value found in the range, or None if no valid values are found. + """ + return find_extreme_in_range(x_data, y_data, x_min, x_max, is_max=True) + + +def get_labels_radial_nodes(pdos_dict): + """ + Extracts the original labels from the PDOS data and constructs an orbital dictionary. + + Args: + pdos_dict (dict): Dictionary containing PDOS data with 'dos' key representing orbital information. + + Returns: + tuple: + - original_labels (list): List of strings representing the original orbital labels. + - orbital_dict (dict): A nested dictionary mapping atom kinds to orbital types and their corresponding radial nodes. 
+ """ + original_labels = [] + orbital_dict = {} + + label_data_list = pdos_dict["dos"] if "dos" in pdos_dict else pdos_dict + for label_data in label_data_list: + # for label_data in pdos_dict["dos"]: + label_str = label_data["label"] + original_labels.append(label_str) + + parts = label_str.split("-") + if len(parts) < 2: + continue # Skip invalid or non-orbital labels + + atom = parts[0] # Atom type (e.g., 'Fe1') + radial_orbital = parts[1].split() # Splits 'r# orbital' (e.g., 'r0 s') + + if len(radial_orbital) < 2: + continue # Malformed label + + radial_node = int(radial_orbital[0][1:]) # Extract radial index from 'r#' + orbital = radial_orbital[1][0] # Orbital type ('s', 'p', 'd', 'f') + + # Populate orbital_dict with atoms, orbitals, and radial nodes + orbital_dict.setdefault(atom, {}).setdefault(orbital, set()).add(radial_node) + + return original_labels, orbital_dict + + +def assign_orbital_labels(orbital_dict): + """ + Assigns orbital labels to atoms based on their radial nodes and electronic structure. + + Args: + orbital_dict (dict): A nested dictionary mapping atom kinds to orbital types and their corresponding radial nodes. + + Returns: + dict: A dictionary mapping atoms and orbitals to their assigned radial labels. + """ + result = {} + + for atom_with_number, orbitals in orbital_dict.items(): + # Extract element name (remove numeric suffixes) + atom = re.sub(r"\d+", "", atom_with_number) + element = Element(atom) + electronic_structure = list(reversed(element.full_electronic_structure)) + + orbital_assignment = {orb: {} for orb in ["s", "p", "d", "f"]} + + # Map orbitals from electronic structure + orbital_map = { + "s": [ + f"{n}{orbital}" + for n, orbital, _ in electronic_structure + if orbital == "s" + ], + "p": [ + f"{n}{orbital}" + for n, orbital, _ in electronic_structure + if orbital == "p" + ], + "d": [ + f"{n}{orbital}" + for n, orbital, _ in electronic_structure + if orbital == "d" + ], + "f": [ + f"{n}{orbital}" + for n, orbital, _ in electronic_structure + if orbital == "f" + ], + } + + # Assign radial nodes to orbitals in reverse order + for orb_type in ["s", "p", "d", "f"]: + if orb_type in orbitals: + sorted_indices = sorted(orbitals[orb_type], reverse=True) + for idx, radial_node in enumerate(sorted_indices): + if radial_node < len(orbital_map[orb_type]): + orbital_assignment[orb_type][idx] = orbital_map[orb_type][ + radial_node + ][0] + + # Clean up empty orbital assignments + result[atom_with_number] = { + orb: val for orb, val in orbital_assignment.items() if val + } + + return result + + +def get_new_pdos_labels(input_list, orbital_dict): + output_list = [] + + for item in input_list: + # Check if the label contains a '-' to proceed with splitting + if "-" in item: + before_dash, after_dash = item.split("-", 1) + + # Split the part after the dash into words to isolate the radial node (r#) + parts = after_dash.split() + + if parts[0].startswith("r"): + radial_index = int(parts[0][1:]) # Extract the number after 'r' + + # Check if the first element after removing the radial part corresponds to an orbital + orbital = parts[1] + + # If the atom and orbital type exist in the orbital_dict, map the radial node + if ( + before_dash in orbital_dict + and orbital[0] in orbital_dict[before_dash] + ): + if radial_index in orbital_dict[before_dash][orbital[0]]: + # Get the mapped radial value + new_radial_value = orbital_dict[before_dash][orbital[0]][ + radial_index + ] + + # Rebuild the string, removing the space before the orbital + after_dash = after_dash.replace( 
+ f"r{radial_index}", new_radial_value, 1 + ) + after_dash = after_dash.replace( + " ", "", 1 + ) # Remove the space after the radial value + new_item = f"{before_dash}-{after_dash}" + else: + new_item = ( + item # If radial index not found, use the original item + ) + else: + new_item = item # If no match in orbital_dict, use original label + else: + new_item = item # In case there's no valid 'r#' part + else: + new_item = item # If no dash, use the original item + + output_list.append(new_item) + + return output_list + + +def update_pdos_labels(pdos_data): + """ + Updates PDOS labels by assigning correct radial nodes to orbitals based on their electronic structure. + + Args: + pdos_data (dict): PDOS data structure containing 'dos' key with orbital information. + + Returns: + tuple: + - pdos_data (dict): Updated PDOS data with correct orbital labels. + """ + original_labels, orbital_dict = get_labels_radial_nodes(pdos_data) + orbital_assignment = assign_orbital_labels(orbital_dict) + updated_labels = get_new_pdos_labels(original_labels, orbital_assignment) + + label_data_list = pdos_data["dos"] if "dos" in pdos_data else pdos_data + for idx, label in enumerate(updated_labels): + label_data_list[idx]["label"] = label + + return pdos_data diff --git a/src/aiidalab_qe/common/infobox.py b/src/aiidalab_qe/common/infobox.py new file mode 100644 index 000000000..86a7eb26b --- /dev/null +++ b/src/aiidalab_qe/common/infobox.py @@ -0,0 +1,22 @@ +from __future__ import annotations + +import ipywidgets as ipw + + +class InfoBox(ipw.VBox): + """The `InfoBox` component is used to provide additional info regarding a widget or an app.""" + + def __init__(self, classes: list[str] | None = None, **kwargs): + """`InfoBox` constructor. + + Parameters + ---------- + `classes` : `list[str]`, optional + One or more CSS classes. 
+ """ + super().__init__(**kwargs) + self.add_class("info-box") + for custom_classes in classes or []: + for custom_class in custom_classes.split(" "): + if custom_class: + self.add_class(custom_class) diff --git a/src/aiidalab_qe/common/node_view.py b/src/aiidalab_qe/common/node_view.py index 8e7fcdd2f..3b8d49661 100644 --- a/src/aiidalab_qe/common/node_view.py +++ b/src/aiidalab_qe/common/node_view.py @@ -6,9 +6,10 @@ import ipywidgets as ipw import nglview import traitlets as tl +from ase import Atoms + from aiida import orm from aiidalab_widgets_base import register_viewer_widget -from ase import Atoms from .widgets import CalcJobOutputFollower, LogOutputWidget @@ -29,10 +30,10 @@ def __init__(self, structure, *args, **kwargs): self.structure = structure super().__init__( + *args, children=[ self._viewer, ], - *args, **kwargs, ) @@ -45,7 +46,7 @@ def _default_supercell(self): return [1, 1, 1] @tl.validate("structure") - def _valid_structure(self, change): # pylint: disable=no-self-use + def _valid_structure(self, change): """Update structure.""" structure = change["value"] @@ -74,11 +75,9 @@ def _update_displayed_structure(self, change): def _update_structure_viewer(self, change): """Update the view if displayed_structure trait was modified.""" with self.hold_trait_notifications(): - for ( - comp_id - ) in self._viewer._ngl_component_ids: # pylint: disable=protected-access + for comp_id in self._viewer._ngl_component_ids: self._viewer.remove_component(comp_id) - self.selection = list() + self.selection = [] if change["new"] is not None: self._viewer.add_component(nglview.ASEStructure(change["new"])) self._viewer.clear() @@ -89,7 +88,7 @@ def _update_structure_viewer(self, change): class VBoxWithCaption(ipw.VBox): def __init__(self, caption, body, *args, **kwargs): - super().__init__(children=[ipw.HTML(caption), body], *args, **kwargs) + super().__init__(*args, children=[ipw.HTML(caption), body], **kwargs) @register_viewer_widget("process.calculation.calcjob.CalcJobNode.") diff --git a/src/aiidalab_qe/common/panel.py b/src/aiidalab_qe/common/panel.py index 2a88d486e..40a7f98a8 100644 --- a/src/aiidalab_qe/common/panel.py +++ b/src/aiidalab_qe/common/panel.py @@ -1,10 +1,10 @@ -# -*- coding: utf-8 -*- """Class to . Authors: AiiDAlab Team """ + import ipywidgets as ipw DEFAULT_PARAMETERS = {} @@ -71,7 +71,7 @@ def __init__(self, **kwargs): description=self.title, indent=False, value=False, - layout=ipw.Layout(max_width="50%"), + style={"description_width": "initial"}, ) self.description_html = ipw.HTML( f"""
    diff --git a/src/aiidalab_qe/common/process.py b/src/aiidalab_qe/common/process.py index e68a5404f..799de74bc 100644 --- a/src/aiidalab_qe/common/process.py +++ b/src/aiidalab_qe/common/process.py @@ -1,8 +1,10 @@ """Widgets related to process management.""" + from dataclasses import make_dataclass import ipywidgets as ipw import traitlets as tl + from aiida.tools.query.calculation import CalculationQueryBuilder @@ -86,9 +88,9 @@ def __init__(self, process_label, **kwargs): self.refresh_work_chains() # the following is needed to disable the button. - def parse_extra_info(self, pk: int) -> dict: + def parse_extra_info(self, _pk: int) -> dict: """Parse extra information about the work chain.""" - return dict() + return {} def find_work_chains(self): builder = CalculationQueryBuilder() diff --git a/src/aiidalab_qe/common/setup_codes.py b/src/aiidalab_qe/common/setup_codes.py index bfa95a8f3..99aa4e5ec 100644 --- a/src/aiidalab_qe/common/setup_codes.py +++ b/src/aiidalab_qe/common/setup_codes.py @@ -1,176 +1,18 @@ -# -*- coding: utf-8 -*- from pathlib import Path -from shutil import which -from subprocess import CalledProcessError, run from threading import Thread import ipywidgets as ipw import traitlets -from aiida.common.exceptions import NotExistent -from aiida.orm import load_code -from filelock import FileLock, Timeout -from aiidalab_qe.common.widgets import ProgressBar +from ..setup.codes import QE_VERSION, install_and_setup +from .widgets import ProgressBar __all__ = [ "QESetupWidget", ] -FN_LOCKFILE = Path.home().joinpath(".install-qe-on-localhost.lock") FN_DO_NOT_SETUP = Path.cwd().joinpath(".do-not-setup-on-localhost") -QE_VERSION = "7.2" - -CONDA_ENV_PREFIX = Path.home().joinpath( - ".conda", "envs", f"quantum-espresso-{QE_VERSION}" -) - -# Add all QE codes with the calcjob entry point in the aiida-quantumespresso. -CODE_NAMES = ( - "pw", - "projwfc", - "dos", - "cp", - "epw", - "matdyn", - "neb", - "open_grid", - "ph", - "pp", - "pw2gw", - "pw2wannier90", - "q2r", - "xspectra", -) - - -def qe_installed(): - return CONDA_ENV_PREFIX.exists() - - -def install_qe(): - run( - [ - "conda", - "create", - "--yes", - "--override-channels", - "--channel", - "conda-forge", - "--prefix", - str(CONDA_ENV_PREFIX), - f"qe={QE_VERSION}", - ], - capture_output=True, - check=True, - ) - - -def _code_is_setup(name): - try: - load_code(f"{name}-{QE_VERSION}@localhost") - except NotExistent: - return False - else: - return True - - -def codes_are_setup(): - return all(_code_is_setup(code_name) for code_name in CODE_NAMES) - - -def _setup_code(code_name, computer_name="localhost"): - try: - load_code(f"{code_name}-{QE_VERSION}@localhost") - except NotExistent: - run( - [ - "verdi", - "code", - "create", - "core.code.installed", - "--non-interactive", - "--label", - f"{code_name}-{QE_VERSION}", - "--description", - f"{code_name}.x ({QE_VERSION}) setup by AiiDAlab.", - "--default-calc-job-plugin", - f"quantumespresso.{code_name}", - "--computer", - computer_name, - "--prepend-text", - f'eval "$(conda shell.posix hook)"\nconda activate {CONDA_ENV_PREFIX}\nexport OMP_NUM_THREADS=1', - "--filepath-executable", - CONDA_ENV_PREFIX.joinpath("bin", f"{code_name}.x"), - ], - check=True, - capture_output=True, - ) - else: - raise RuntimeError(f"Code {code_name} (v{QE_VERSION}) is already setup!") - - -def setup_codes(): - for code_name in CODE_NAMES: - _setup_code(code_name) - - -def install(force=False): - """Install Quantum ESPRESSO and the corresponding AiiDA codes. 
- - Args: - force: Ignore previously failed attempts and install anyways. - """ - # Check for "do not install file" and skip actual check. The purpose of - # this file is to not re-try this process on every app start in case that - # there are issues. - if not force and FN_DO_NOT_SETUP.exists(): - raise RuntimeError("Installation failed in previous attempt.") - - yield "Checking installation status..." - - conda_installed = which("conda") - try: - with FileLock(FN_LOCKFILE, timeout=5): - # We assume that if the codes are already setup, everything is in - # order. Only if they are not present, should we take action, - # however we only do so if the environment has a conda binary - # present (`which conda`). If that is not the case then we assume - # that this is a custom user environment in which case we also take - # no further action. - if codes_are_setup(): - return # Already setup - - if not conda_installed: - raise RuntimeError( - "Unable to automatically install Quantum ESPRESSO, conda " - "is not available." - ) - - if not qe_installed(): - # First, install Quantum ESPRESSO. - yield "Installing QE..." - try: - install_qe() - except CalledProcessError as error: - raise RuntimeError(f"Failed to create conda environment: {error}") - - # After installing QE, we install the corresponding - # AiiDA codes: - for code_name in CODE_NAMES: - if not _code_is_setup(code_name): - yield f"Setting up AiiDA code ({code_name})..." - _setup_code(code_name) - - except Timeout: - # Assume that the installation was triggered by a different process. - yield "Installation was already started, waiting for it to finish..." - with FileLock(FN_LOCKFILE, timeout=120): - if not codes_are_setup(): - raise RuntimeError( - "Installation process did not finish in the expected time." 
- ) - class QESetupWidget(ipw.VBox): installed = traitlets.Bool(allow_none=True).tag(readonly=True) @@ -224,7 +66,7 @@ def _refresh_installed(self): try: self.set_trait("busy", True) - for msg in install(): + for msg in install_and_setup(): self.set_message(msg) except Exception as error: @@ -282,7 +124,7 @@ def _toggle_error_view(self, change): @traitlets.observe("busy") @traitlets.observe("error") @traitlets.observe("installed") - def _update(self, change): + def _update(self, _change): with self.hold_trait_notifications(): if self.hide_by_default: self.layout.visibility = ( diff --git a/src/aiidalab_qe/common/setup_pseudos.py b/src/aiidalab_qe/common/setup_pseudos.py index 12a556cbc..608a9a45f 100644 --- a/src/aiidalab_qe/common/setup_pseudos.py +++ b/src/aiidalab_qe/common/setup_pseudos.py @@ -1,226 +1,12 @@ -# -*- coding: utf-8 -*- from __future__ import annotations -import os -from dataclasses import dataclass, field -from pathlib import Path -from subprocess import run from threading import Thread -from typing import Iterable import ipywidgets as ipw import traitlets -from aiida.orm import QueryBuilder -from aiida_pseudo.groups.family import PseudoPotentialFamily -from filelock import FileLock, Timeout -from aiidalab_qe.common.widgets import ProgressBar - -SSSP_VERSION = "1.2" -PSEUDODOJO_VERSION = "0.4" - -EXPECTED_PSEUDOS = { - f"SSSP/{SSSP_VERSION}/PBE/efficiency", - f"SSSP/{SSSP_VERSION}/PBE/precision", - f"SSSP/{SSSP_VERSION}/PBEsol/efficiency", - f"SSSP/{SSSP_VERSION}/PBEsol/precision", - f"PseudoDojo/{PSEUDODOJO_VERSION}/PBE/SR/standard/upf", - f"PseudoDojo/{PSEUDODOJO_VERSION}/PBEsol/SR/standard/upf", - f"PseudoDojo/{PSEUDODOJO_VERSION}/PBE/SR/stringent/upf", - f"PseudoDojo/{PSEUDODOJO_VERSION}/PBEsol/SR/stringent/upf", -} - - -FN_LOCKFILE = Path.home().joinpath(".install-sssp.lock") - - -@dataclass -class PseudoFamily: - """The dataclass to deal with pseudo family strings. - - Attributes: - library: the library name of the pseudo family, e.g. SSSP or PseudoDojo. - cmd_library_name: the sub command name used in aiida-pseudo command line. - version: the version of the pseudo family, e.g. 1.2 - functional: the functional of the pseudo family, e.g. PBE, PBEsol. - accuracy: the accuracy of the pseudo family, which is protocol in aiida-pseudo, e.g. efficiency, precision, standard, stringent. - relativistic: the relativistic treatment of the pseudo family, e.g. SR, FR. - file_type: the file type of the pseudo family, e.g. upf, psml, currently only used for PseudoDojo. 
- """ - - library: str - version: str - functional: str - accuracy: str - cmd_library_name: str = field(init=False) - relativistic: str | None = None - file_type: str | None = None - - def __post_init__(self): - """Post init operations and checks.""" - if self.library == "SSSP": - self.cmd_library_name = "sssp" - elif self.library == "PseudoDojo": - self.cmd_library_name = "pseudo-dojo" - else: - raise ValueError(f"Unknown pseudo library {self.library}") - - @classmethod - def from_string(cls, pseudo_family_string: str) -> PseudoFamily: - """Initialize from a pseudo family string.""" - # We support two pseudo families: SSSP and PseudoDojo - # They are formatted as follows: - # SSSP: SSSP/// - # PseudoDojo: PseudoDojo///// - # where is either 'SR' or 'FR' and is either 'upf' or 'psml' - # Before we unify the format of family strings, the conditions below are necessary - # to distinguish between the two families - library = pseudo_family_string.split("/")[0] - if library == "SSSP": - version, functional, accuracy = pseudo_family_string.split("/")[1:] - relativistic = None - file_type = None - elif library == "PseudoDojo": - ( - version, - functional, - relativistic, - accuracy, - file_type, - ) = pseudo_family_string.split("/")[1:] - else: - raise ValueError( - f"Not able to parse valid library name from {pseudo_family_string}" - ) - - return cls( - library=library, - version=version, - functional=functional, - accuracy=accuracy, - relativistic=relativistic, - file_type=file_type, - ) - - -def pseudos_to_install() -> set[str]: - """Query the database and return the list of pseudopotentials that are not installed.""" - qb = QueryBuilder() - qb.append( - PseudoPotentialFamily, - filters={ - "or": [ - {"label": {"like": "SSSP/%"}}, - {"label": {"like": "PseudoDojo/%"}}, - ] - }, - project="label", - ) - labels = set(qb.all(flat=True)) - return EXPECTED_PSEUDOS - labels - - -def _construct_cmd( - pseudo_family_string: str, download_only: bool = False, cwd: Path | None = None -) -> list: - """Construct the command for installation of pseudopotentials. - - If ``cwd`` is not None, and ``download_only`` is True the, only download the - pseudopotential files to the ``cwd`` folder. - If ``download_only`` is False and ``cwd`` is not None, the the pseudos will be installed from the ``cwd`` where the pseudos are downloaded to. - - NOTE: download_only has nothing to do with cwd, it will not download the pseudos to cwd if cwd is specified. - The control to download to cwd is in the ``_install_pseudos`` function below. - """ - pseudo_family = PseudoFamily.from_string(pseudo_family_string) - - # the library used in command line is lowercase - # e.g. 
SSSP -> sssp and PseudoDojo -> pseudo-dojo - library = pseudo_family.cmd_library_name - version = pseudo_family.version - functional = pseudo_family.functional - accuracy = pseudo_family.accuracy - cmd = [ - "aiida-pseudo", - "install", - library, - "--functional", - functional, - "--version", - version, - "-p", # p for protocol which is the accuracy of the library - accuracy, - ] - - # extra arguments for PseudoDojo - if library == "pseudo-dojo": - relativistic = pseudo_family.relativistic - file_type = pseudo_family.file_type - cmd.extend( - [ - "--relativistic", - relativistic, - "--pseudo-format", - file_type, - ] - ) - - if download_only: - cmd.append("--download-only") - - # if cwd source folder specified, then install the pseudos from the folder - # download file name is replace `/` with `_` of the pseudo family string with `.aiida_pseudo` extension - if not download_only and cwd is not None: - file_path = cwd / f"{pseudo_family_string.replace('/', '_')}.aiida_pseudo" - if file_path.exists(): - cmd.extend(["--from-download", str(file_path)]) - - return cmd - - -def run_cmd(cmd: list, env: dict | None = None, cwd: Path | None = None): - """Run the command with specific env in the workdir specified.""" - run(cmd, env=env, cwd=cwd, capture_output=True, check=True) - - -def _install_pseudos( - pseudo_families: set[str], download_only: bool = False, cwd: Path | None = None -) -> Iterable[float]: - """Go through the list of pseudo families and install them.""" - env = os.environ.copy() - env["PATH"] = f"{env['PATH']}:{Path.home() / '.local' / 'bin'}" - - mult = 1.0 / len(pseudo_families) - yield mult * 0 - for i, pseudo_family in enumerate(pseudo_families): - cmd = _construct_cmd(pseudo_family, download_only, cwd=cwd) - - run_cmd(cmd, env=env, cwd=cwd) - - yield mult * (i + 1) - - -def install( - download_only: bool = False, cwd: Path | None = None -) -> Iterable[tuple[str, float]]: - yield "Checking installation status...", 0.1 - try: - with FileLock(FN_LOCKFILE, timeout=5): - if len(pseudos := pseudos_to_install()) > 0: - yield "Installing...", 0.1 - for progress in _install_pseudos(pseudos, download_only, cwd): - yield "Installing...", progress - - except Timeout: - # Assume that the installation was triggered by a different process. - yield "Installation was already started elsewhere, waiting for it to finish...", ProgressBar.AnimationRate( - 1.0 - ) - with FileLock(FN_LOCKFILE, timeout=120): - if len(pseudos_to_install()) > 0: - raise RuntimeError( - "Installation process did not finish in the expected time." 
- ) +from ..setup.pseudos import install, pseudos_to_install +from .widgets import ProgressBar class PseudosInstallWidget(ProgressBar): @@ -282,7 +68,7 @@ def _default_error(self): @traitlets.observe("busy") @traitlets.observe("error") @traitlets.observe("installed") - def _update(self, change): + def _update(self, _change): with self.hold_trait_notifications(): if self.hide_by_default: self.layout.visibility = ( diff --git a/src/aiidalab_qe/common/widgets.py b/src/aiidalab_qe/common/widgets.py index 8360ce783..5aaad1a7c 100644 --- a/src/aiidalab_qe/common/widgets.py +++ b/src/aiidalab_qe/common/widgets.py @@ -15,15 +15,18 @@ import ipywidgets as ipw import numpy as np import traitlets -from aiida.orm import CalcJobNode +from IPython.display import HTML, Javascript, clear_output, display +from pymatgen.core.periodic_table import Element + +from aiida.orm import CalcJobNode, load_code, load_node from aiida.orm import Data as orm_Data -from aiida.orm import load_node +from aiida_quantumespresso.data.hubbard_structure import HubbardStructureData +from aiidalab_widgets_base import ComputationalResourcesWidget from aiidalab_widgets_base.utils import ( StatusHTML, list_to_string_range, string_range_to_list, ) -from IPython.display import HTML, Javascript, clear_output, display __all__ = [ "CalcJobOutputFollower", @@ -39,7 +42,7 @@ class RollingOutput(ipw.VBox): value = traitlets.Unicode() auto_scroll = traitlets.Bool() - def __init__(self, num_min_lines=10, max_output_height="200px", **kwargs): + def __init__(self, num_min_lines=10, max_output_height="200px", **kwargs): # noqa: ARG002 self._num_min_lines = num_min_lines self._output = ipw.HTML(layout=ipw.Layout(min_width="50em")) self._refresh_output() @@ -117,14 +120,14 @@ def __on_click(self, _): digest = hashlib.md5(self.payload).hexdigest() # bypass browser cache payload = base64.b64encode(self.payload).decode() - id = f"dl_{digest}" + link_id = f"dl_{digest}" display( HTML( f""" - + + + + """ + ) + ) + + +def write_csv(dataset): + from pandas import DataFrame + + x_vals = dataset[0]["x"] + df_data = {"energy_ev": x_vals} + for entry in dataset: + if "site" in entry["name"]: + if entry["weighting"] != 1: + df_data[ + f'{entry["name"].capitalize().replace("_", " ")} (Weighted)' + ] = entry["y"] + df_data[ + f'{entry["name"].capitalize().replace("_", " ")} (Unweighted)' + ] = entry["y"] / entry["weighting"] + else: + df_data[entry["name"].capitalize().replace("_", " ")] = entry["y"] + else: + df_data[entry["name"]] = entry["y"] + + df = DataFrame(data=df_data) + df_energy_indexed = df.set_index("energy_ev") + + return df_energy_indexed.to_csv(header=True) + + +def export_xas_data(outputs): + if "final_spectra" in outputs.xas: + final_spectra = outputs.xas.final_spectra + symmetry_analysis_data = outputs.xas.symmetry_analysis_data.get_dict() + equivalent_sites_data = symmetry_analysis_data["equivalent_sites_data"] + + return ( + final_spectra, + equivalent_sites_data, + ) + else: + return None + + +def broaden_xas( + input_array, variable=False, gamma_hole=0.01, gamma_max=5, center_energy=15 +): + """Take an input spectrum and return a broadened spectrum as output using either a constant or variable parameter. + + :param input_array: The 2D array of x/y values to be broadened. Should be plotted with + little or no broadening before using the function. + :param gamma_hole: The broadening parameter for the Lorenzian broadening function. In constant mode (variable=False), + this value is applied to the entire spectrum. 
In variable mode (variable=True), this value defines + the starting broadening parameter of the arctangent function. Refers to the natural linewidth of + the element/XAS edge combination in question and (for elements Z > 10) should be based on reference + values from X-ray spectroscopy. + :param variable: Request a variable-energy broadening of the spectrum instead of the defaultconstant broadening. + Uses the functional form defined in Calandra and Bunau, PRB, 87, 205105 (2013). + :param gamma_max: The maximum lorenzian broadening to be applied in variable energy broadening mode. Refers to the + broadening effects at infinite energy above the main edge. + :param center_energy: The inflection point of the variable broadening function. Does not relate to experimental data + and must be tuned manually. + """ + + if variable: + if not all([gamma_hole, gamma_max, center_energy]): + missing = [ + i[0] + for i in zip( + ["gamma_hole", "gamma_max", "center_energy"], + [gamma_hole, gamma_max, center_energy], + ) + if i[1] is None + ] + raise ValueError( + f"The following variables were not defined {missing} and are required for variable-energy broadening" + ) + + x_vals = input_array[:, 0] + y_vals = input_array[:, 1] + + lorenz_y = np.zeros(len(x_vals)) + + if variable: + for x, y in zip(x_vals, y_vals): + if x < 0: # the function is bounded between gamma_hole and gamma_max + gamma_var = gamma_hole + else: + e = x / center_energy + + gamma_var = gamma_hole + gamma_max * ( + 0.5 + np.arctan((e - 1) / (e**2)) / np.pi + ) + + if y <= 1.0e-6: # do this to skip the calculation for very small values + lorenz_y = y + else: + lorenz_y += ( + gamma_var + / 2.0 + / np.pi + / ((x_vals - x) ** 2 + 0.25 * gamma_var**2) + * y + ) + else: + for x, y in zip(x_vals, y_vals): + lorenz_y += ( + gamma_hole + / 2.0 + / np.pi + / ((x_vals - x) ** 2 + 0.25 * gamma_hole**2) + * y + ) + + return np.column_stack((x_vals, lorenz_y)) + + +def get_aligned_spectra(core_wc_dict, equivalent_sites_dict): + """Return a set of spectra aligned according to the chemical shift (difference in Fermi level). + + Primarily this is a copy of ``get_spectra_by_element`` from AiiDA-QE which operates on only one + element. 
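+ 
+     Each element of the returned list is a tuple of the form (a sketch of
+     the shapes; values are illustrative only):
+ 
+         (site_label, weighting, weighting_string, spectrum)
+         # e.g. ("site_1", 0.25, "1/4", <N x 2 array of (energy, intensity)>)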
+ """ + data_dict = {} + spectrum_dict = { + site: node.outputs.powder_spectrum for site, node in core_wc_dict.items() + } + for key, value in core_wc_dict.items(): + xspectra_out_params = value.outputs.parameters_xspectra__xas_0.get_dict() + energy_zero = xspectra_out_params["energy_zero"] + multiplicity = equivalent_sites_dict[key]["multiplicity"] + + if "total_multiplicity" not in data_dict: + data_dict["total_multiplicity"] = multiplicity + else: + data_dict["total_multiplicity"] += multiplicity + + data_dict[key] = { + "spectrum_node": spectrum_dict[key], + "multiplicity": multiplicity, + "energy_zero": energy_zero, + } + + spectra_list = [] + total_multiplicity = data_dict.pop("total_multiplicity") + for key in data_dict: + spectrum_node = data_dict[key]["spectrum_node"] + site_multiplicity = data_dict[key]["multiplicity"] + weighting = site_multiplicity / total_multiplicity + weighting_string = f"{site_multiplicity}/{total_multiplicity}" + spectrum_x = spectrum_node.get_x()[1] + spectrum_y = spectrum_node.get_y()[0][1] + spline = make_interp_spline(spectrum_x, spectrum_y) + norm_y = spline(spectrum_x) / np.trapz(spline(spectrum_x), spectrum_x) + weighted_spectrum = np.column_stack( + (spectrum_x, norm_y * (site_multiplicity / total_multiplicity)) + ) + spectra_list.append( + ( + weighted_spectrum, + key, + weighting, + weighting_string, + float(data_dict[key]["energy_zero"]), + ) + ) + + # Sort according to Fermi level, then correct to align all spectra to the + # highest value. Note that this is needed because XSpectra automatically aligns the + # final spectrum such that the system's Fermi level is at 0 eV. + spectra_list.sort(key=lambda entry: entry[-1]) + highest_level = spectra_list[0][-1] + energy_zero_corrections = [ + (entry[0], entry[1], entry[2], entry[3], entry[-1] - highest_level) + for entry in spectra_list + ] + aligned_spectra = [ + ( + entry[1], + entry[2], + entry[3], + np.column_stack((entry[0][:, 0] - entry[-1], entry[0][:, 1])), + ) + for entry in energy_zero_corrections + ] + + return aligned_spectra + + +class Result(ResultPanel): + title = "XAS" + workchain_labels = ["xas"] + + def __init__(self, node=None, **kwargs): + super().__init__(node=node, identifier="xas", **kwargs) + + def _update_view(self): + import plotly.graph_objects as go + + gamma_select_prompt = ipw.HTML( + """ +
    + Select parameters for spectrum broadening
    """ + ) + + # PNOG: If someone knows a way to format certain words differently in HTML without causing a line-break, hit me up. + # For now, (17/10/23) we'll just have the function terms be in italics. + # Alternatively, if it's possible to format mathematical terms in HTML without a line-break, let me know + variable_broad_select_help = ipw.HTML( + """ +
    + Broadening parameters: + +

Γhole - Defines a constant Lorentzian broadening width for the whole spectrum. In "variable" mode, defines the initial broadening width of the arctangent function.

    +

Γmax - Maximum Lorentzian broadening parameter at infinite energy in "variable" mode.

    +

    Ecenter - Defines the inflection point of the variable-energy broadening function.

    +
    +
    + Note that setting Γhole to 0 eV will simply plot the raw spectrum. +
    + """ + ) + spectrum_select_prompt = ipw.HTML( + """ +
    + Select spectrum to plot
    """ + ) + final_spectra, equivalent_sites_data = export_xas_data(self.outputs) + xas_wc = next( + n for n in self.node.called if n.process_label == "XspectraCrystalWorkChain" + ) + core_wcs = { + n.get_metadata_inputs()["metadata"]["call_link_label"]: n + for n in xas_wc.called + if n.process_label == "XspectraCoreWorkChain" + } + core_wc_dict = { + key.replace("_xspectra", ""): value for key, value in core_wcs.items() + } + + spectrum_select_options = [key.split("_")[0] for key in final_spectra.keys()] + + spectrum_select = ipw.Dropdown( + description="", + disabled=False, + value=spectrum_select_options[0], + options=spectrum_select_options, + layout=ipw.Layout(width="20%"), + ) + + variable_broad_select = ipw.Checkbox( + value=False, + disabled=False, + description="Use variable energy broadening.", + style={"description_width": "initial", "opacity": 0.5}, + ) + + gamma_hole_select = ipw.FloatSlider( + value=0.0, + min=0.0, + max=5, + step=0.1, + description="$\Gamma_{hole}$", + disabled=False, + continuous_update=False, + orientation="horizontal", + readout=True, + ) + + gamma_max_select = ipw.FloatSlider( + value=5.0, + min=2.0, + max=10, + step=0.5, + continuous_update=False, + description="$\Gamma_{max}$", + disabled=True, + orientation="horizontal", + readout=True, + ) + + center_e_select = ipw.FloatSlider( + value=15.0, + min=5, + max=30, + step=0.5, + continuous_update=False, + description="$E_{center}$", + disabled=True, + orientation="horizontal", + readout=True, + ) + download_data = SpectrumDownloadButton( + filename=f"{spectrum_select.value}_XAS_Spectra.csv", + contents=None, + description="Download CSV", + icon="download", + ) + # # get data + # # init figure + g = go.FigureWidget( + layout=go.Layout( + title={"text": "XAS"}, + barmode="overlay", + ) + ) + + g.layout.xaxis.title = "Relative Photon Energy (eV)" + + chosen_spectrum = spectrum_select.value + chosen_spectrum_label = f"{chosen_spectrum}_xas" + spectra = final_spectra[chosen_spectrum_label] + + raw_spectrum = np.column_stack((spectra.get_x()[1], spectra.get_y()[0][1])) + + x = raw_spectrum[:, 0] + y = raw_spectrum[:, 1] + spline = make_interp_spline(x, y) + norm_y = spline(x) / np.trapz(spline(x), x) + element = chosen_spectrum_label.split("_")[0] + element_sites = [ + key + for key in equivalent_sites_data + if equivalent_sites_data[key]["symbol"] == element + ] + element_core_wcs = {} + total_multiplicity = 0 + for site in element_sites: + site_multiplicity = equivalent_sites_data[site]["multiplicity"] + total_multiplicity += site_multiplicity + element_core_wcs[site] = core_wc_dict[site] + + g.add_scatter(x=x, y=norm_y, name=f"{element} K-edge") + for entry in get_aligned_spectra( + core_wc_dict=element_core_wcs, equivalent_sites_dict=equivalent_sites_data + ): + g.add_scatter( + x=entry[-1][:, 0], + y=entry[-1][:, 1], + name=entry[0].capitalize().replace("_", " "), + ) + + def _update_download_selection(dataset, element): + download_data.contents = lambda: write_csv(dataset) + download_data.filename = f"{element}_XAS_Spectra.csv" + + def response(_change): + chosen_spectrum = spectrum_select.value + chosen_spectrum_label = f"{chosen_spectrum}_xas" + element_sites = [ + key + for key in equivalent_sites_data + if equivalent_sites_data[key]["symbol"] == chosen_spectrum + ] + element_core_wcs = { + key: value + for key, value in core_wc_dict.items() + if key in element_sites + } + spectra = [] + final_spectrum_node = final_spectra[chosen_spectrum_label] + final_spectrum = np.column_stack( + 
(final_spectrum_node.get_x()[1], final_spectrum_node.get_y()[0][1]) + ) + final_x_vals = final_spectrum[:, 0] + final_y_vals = final_spectrum[:, 1] + final_spectrum_spline = make_interp_spline(final_x_vals, final_y_vals) + final_norm_y = final_spectrum_spline(final_x_vals) / np.trapz( + final_spectrum_spline(final_x_vals), final_x_vals + ) + spectra.append( + ( + f"{chosen_spectrum} K-edge", + 1, + "1", + np.column_stack((final_x_vals, final_norm_y)), + ) + ) + datasets = [] + for entry in get_aligned_spectra( + core_wc_dict=element_core_wcs, + equivalent_sites_dict=equivalent_sites_data, + ): + spectra.append(entry) + + for entry in spectra: + label = entry[0] + weighting = entry[1] + weighting_string = entry[2] + raw_spectrum = entry[-1] + x = raw_spectrum[:, 0] + y = raw_spectrum[:, 1] + if not variable_broad_select: + gamma_max_select.disabled = True + center_e_select.disabled = True + else: + gamma_max_select.disabled = False + center_e_select.disabled = False + + if gamma_hole_select.value == 0.0: + x = raw_spectrum[:, 0] + y = raw_spectrum[:, 1] + else: + broad_spectrum = broaden_xas( + raw_spectrum, + gamma_hole=gamma_hole_select.value, + gamma_max=gamma_max_select.value, + center_energy=center_e_select.value, + variable=variable_broad_select.value, + ) + x = broad_spectrum[:, 0] + y = broad_spectrum[:, 1] + + final_spline = make_interp_spline(x, y) + final_y_vals = final_spline(final_x_vals) + datasets.append( + { + "x": final_x_vals, + "y": final_y_vals, + "name": label, + "weighting": weighting, + "weighting_string": weighting_string, + } + ) + _update_download_selection(datasets, chosen_spectrum) + + with g.batch_update(): + # If the number of datasets is different from one update to the next, + # then we need to reset the data already in the Widget. Otherwise, we can + # simply override the data. This also helps since then changing the + # broadening is much smoother. 
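+ # (g.batch_update() is a plotly context manager that applies all trace
+ # mutations to the frontend in a single message; reassigning g.data
+ # recreates the traces, so it is only done when the number of curves
+ # actually changes.)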
+ if len(datasets) == len( + g.data + ): # if the number of entries is the same, just update + for index, entry in enumerate(datasets): + g.data[index].x = entry["x"] + g.data[index].y = entry["y"] + if "site_" in entry["name"]: + g.data[index].name = ( + entry["name"].capitalize().replace("_", " ") + ) + else: + g.data[index].name = entry["name"] + else: # otherwise, reset the figure + g.data = () + for entry in datasets: + if "site_" in entry["name"]: + name = entry["name"].capitalize().replace("_", " ") + else: + name = entry["name"] + g.add_scatter(x=entry["x"], y=entry["y"], name=name) + + spectrum_select.observe(response, names="value") + gamma_hole_select.observe(response, names="value") + gamma_max_select.observe(response, names="value") + center_e_select.observe(response, names="value") + variable_broad_select.observe(response, names="value") + download_data.observe(response, names=["contents", "filename"]) + self.children = [ + ipw.HBox( + [ + ipw.VBox( + [ + spectrum_select_prompt, + spectrum_select, + gamma_select_prompt, + gamma_hole_select, + gamma_max_select, + center_e_select, + ], + layout=ipw.Layout(width="40%"), + ), + ipw.VBox( + [ + variable_broad_select, + variable_broad_select_help, + ], + layout=ipw.Layout(width="60%"), + ), + ] + ), + download_data, + g, + ] diff --git a/src/aiidalab_qe/plugins/xas/setting.py b/src/aiidalab_qe/plugins/xas/setting.py new file mode 100644 index 000000000..3e75bd2ad --- /dev/null +++ b/src/aiidalab_qe/plugins/xas/setting.py @@ -0,0 +1,336 @@ +"""Panel for XAS plugin.""" + +import os +import tarfile +from importlib import resources +from pathlib import Path + +import ipywidgets as ipw +import requests +import traitlets as tl +import yaml + +from aiida import orm +from aiidalab_qe.common.panel import Panel +from aiidalab_qe.plugins import xas as xas_folder + +PSEUDO_TOC = yaml.safe_load(resources.read_text(xas_folder, "pseudo_toc.yaml")) +pseudo_data_dict = PSEUDO_TOC["pseudos"] +xch_elements = PSEUDO_TOC["xas_xch_elements"] + +base_url = "https://github.com/PNOGillespie/Core_Level_Spectra_Pseudos/raw/main" +head_path = f"{Path.home()}/.local/lib" +dir_header = "cls_pseudos" +functionals = ["pbe"] +core_wfc_dir = "core_wfc_data" +gipaw_dir = "gipaw_pseudos" +ch_pseudo_dir = "ch_pseudos/star1s" + + +def _load_or_import_nodes_from_filenames(in_dict, path, core_wfc_data=False): + for filename in in_dict.values(): + try: + orm.load_node(filename) + except BaseException: + if not core_wfc_data: + new_upf = orm.UpfData(f"{path}/{filename}", filename=filename) + new_upf.label = filename + new_upf.store() + else: + new_singlefile = orm.SinglefileData( + f"{path}/{filename}", filename="stdout" + ) + new_singlefile.label = filename + new_singlefile.store() + + +def _download_extract_pseudo_archive(func): + target_dir = f"{head_path}/{dir_header}/{func}" + archive_filename = f"{func}_ch_pseudos.tgz" + remote_archive_filename = f"{base_url}/{func}/{archive_filename}" + local_archive_filename = f"{target_dir}/{archive_filename}" + + env = os.environ.copy() + env["PATH"] = f"{env['PATH']}:{Path.home() / '.local' / 'lib'}" + + response = requests.get(remote_archive_filename, timeout=30) + response.raise_for_status() + with open(local_archive_filename, "wb") as handle: + handle.write(response.content) + handle.flush() + response.close() + + with tarfile.open(local_archive_filename, "r:gz") as tarfil: + tarfil.extractall(target_dir) + + +class Setting(Panel): + title = "XAS Settings" + identifier = "xas" + input_structure = 
tl.Instance(orm.StructureData, allow_none=True) + protocol = tl.Unicode(allow_none=True) + + element_selection_title = ipw.HTML( + """
    +

Element and Core-Hole Treatment Settings.

    """ + ) + + # TODO: The element selection should lock the "Confirm" button if no elements have been + # selected for XAS calculation. + + element_selection_help = ipw.HTML( + """
    + To select elements for calculation of K-edge spectra:
    + (1) Tick the checkbox for each element symbol to select the element for calculation.
    + (2) Select the core-hole treatment scheme from the dropdown box.
    +
    + There are three supported options for core-hole treatment:
+ - FCH: Removes one electron from the system (works with any occupations scheme).
+ - XCH (Smearing): Places the excited electron into the conduction band (smeared occupations).
+ - XCH (Fixed): Places the excited electron into the conduction band (fixed occupations).
    +
+ For XAS calculations of most elements, the FCH treatment is recommended; however, in some cases the XCH treatment should be used instead.
    + The recommended setting will be shown for each available element. + Note that only elements for which core-hole pseudopotential sets are available + will be shown.
    +
    """ + ) + # I will leave these objects here for now (15/11/23), but since the calculation of molecular + # systems is not really supported (neither in terms of XAS nor the main App itself) we should + # not present this option that essentially does nothing. + # structure_title = ipw.HTML( + # """
    + #

    Structure

    """ + # ) + # structure_help = ipw.HTML( + # """
    + # Below you can indicate if the material should be treated as a molecule + # or a crystal. + #
    """ + # ) + supercell_title = ipw.HTML( + """
    +

    Cell size

    """ + ) + supercell_help = ipw.HTML( + """
+ Define the minimum cell length in angstrom for the resulting supercell, and thus all output + structures. The default value of 8.0 angstrom will be used + if no input is given. Setting this value to 0.0 will + instruct the workflow not to scale up the input structure. +
    """ + ) + + def __init__(self, **kwargs): + self.gipaw_pseudos = pseudo_data_dict["pbe"]["gipaw_pseudos"] + self.core_hole_pseudos = pseudo_data_dict["pbe"]["core_hole_pseudos"]["1s"] + self.core_wfc_data_dict = pseudo_data_dict["pbe"]["core_wavefunction_data"] + + self.element_and_ch_treatment = ipw.VBox(layout=ipw.Layout(width="100%")) + + # self.structure_type = ipw.ToggleButtons( + # options=[ + # ("Molecule", "molecule"), + # ("Crystal", "crystal"), + # ], + # value="crystal", + # ) + self.supercell_min_parameter = ipw.FloatText( + value=8.0, + description="The minimum cell length (Γ…):", + disabled=False, + style={"description_width": "initial"}, + ) + + self.children = [ + # self.structure_title, + # self.structure_help, + # ipw.HBox( + # [self.structure_type], + # ), + self.element_selection_title, + self.element_selection_help, + ipw.HBox([self.element_and_ch_treatment], layout=ipw.Layout(width="95%")), + self.supercell_title, + self.supercell_help, + ipw.HBox( + [self.supercell_min_parameter], + ), + ] + + super().__init__(**kwargs) + + def get_panel_value(self): + elements_list = [] + core_hole_treatments = {} + for entry in self.element_and_ch_treatment.children: + if entry.children[0].value is True: + element = entry.children[0].description + ch_treatment = entry.children[1].value + elements_list.append(element) + core_hole_treatments[element] = ch_treatment + + pseudo_labels = {} + core_wfc_data_labels = {} + for element in elements_list: + pseudo_labels[element] = { + "gipaw": self.gipaw_pseudos[element], + "core_hole": self.core_hole_pseudos[element], + } + core_wfc_data_labels[element] = self.core_wfc_data_dict[element] + + parameters = { + "core_hole_treatments": core_hole_treatments, + "elements_list": elements_list, + # "structure_type": self.structure_type.value, + "pseudo_labels": pseudo_labels, + "core_wfc_data_labels": core_wfc_data_labels, + "supercell_min_parameter": self.supercell_min_parameter.value, + } + return parameters + + def set_panel_value(self, input_dict): + """Load a dictionary with the input parameters for the plugin.""" + + # set selected elements and core-hole treatments + elements_list = input_dict.get("elements_list", []) + for entry in self.element_and_ch_treatment.children: + element = entry.children[0].description + if element in elements_list: + entry.children[0].value = True + entry.children[1].value = input_dict["core_hole_treatments"][element] + else: + entry.children[0].value = False + entry.children[1].value = "full" + # set supercell min parameter + self.supercell_min_parameter.value = input_dict.get( + "supercell_min_parameter", 8.0 + ) + # self.structure_type.value = input_dict.get("structure_type", "crystal") + + @tl.observe("input_structure") + def _update_structure(self, _=None): + self._update_element_select_panel() + + for func in functionals: + target_dir = f"{head_path}/{dir_header}/{func}" + os.makedirs(target_dir, exist_ok=True) + archive_filename = f"{func}_ch_pseudos.tgz" + archive_found = False + for entry in os.listdir(target_dir): + if entry == archive_filename: + archive_found = True + if not archive_found: + _download_extract_pseudo_archive(func) + + # Check all the pseudos/core-wfc data files in the TOC dictionary + # and load/check all of them before proceeding. Note that this + # approach relies on there not being multiple instances of nodes + # with the same label. 
+ for func in functionals: + gipaw_pseudo_dict = pseudo_data_dict[func]["gipaw_pseudos"] + core_wfc_dict = pseudo_data_dict[func]["core_wavefunction_data"] + core_hole_pseudo_dict = pseudo_data_dict[func]["core_hole_pseudos"] + main_path = f"{head_path}/{dir_header}/{func}" + core_wfc_dir = f"{main_path}/core_wfc_data" + gipaw_dir = f"{main_path}/gipaw_pseudos" + ch_pseudo_dir = f"{main_path}/ch_pseudos/star1s" + # First, check that the local directories contain what's in the pseudo_toc + for pseudo_dir, pseudo_dict in zip( + [gipaw_dir, core_wfc_dir, ch_pseudo_dir], + [gipaw_pseudo_dict, core_wfc_dict, core_hole_pseudo_dict], + ): + pseudo_toc_mismatch = os.listdir(pseudo_dir) != pseudo_dict.values() + + # Re-download the relevant archive if there is a mismatch + if pseudo_toc_mismatch: + _download_extract_pseudo_archive(func) + + _load_or_import_nodes_from_filenames( + in_dict=gipaw_pseudo_dict, + path=gipaw_dir, + ) + _load_or_import_nodes_from_filenames( + in_dict=core_wfc_dict, path=core_wfc_dir, core_wfc_data=True + ) + _load_or_import_nodes_from_filenames( + in_dict=core_hole_pseudo_dict["1s"], path=ch_pseudo_dir + ) + + def _update_element_select_panel(self): + if self.input_structure is None: + return + + starting_treatment_mapping = {"FCH": "full", "XCH": "xch_smear"} + ch_treatment_options = [ + ("FCH", "full"), + ("XCH (Smearing)", "xch_smear"), + ("XCH (Fixed)", "xch_fixed"), + ] + ch_pseudos = self.core_hole_pseudos + structure = self.input_structure + available_elements = list(ch_pseudos) + elements_to_select = sorted( + [ + kind.symbol + for kind in structure.kinds + if kind.symbol in available_elements + ] + ) + treatment_options = () + + for element in elements_to_select: + if element in xch_elements: + recommended_treatment = "XCH" + else: + recommended_treatment = "FCH" + + treatment_options += ( + ipw.HBox( + [ + ipw.Checkbox( + description=element, + value=False, + disabled=False, + style={"description_width": "initial"}, + layout=ipw.Layout(width="7%"), + ), + ipw.Dropdown( + options=ch_treatment_options, + value=starting_treatment_mapping[recommended_treatment], + disabled=False, + layout=ipw.Layout(width="15%"), + ), + ipw.HTML( + f"Recommended treatment: {recommended_treatment} (PBE Core-Hole Pseudopotential)", + layout=ipw.Layout(width="78%"), + ), + ], + layout=ipw.Layout( + width="100%", + ), + ), + ) + + self.element_and_ch_treatment.children = treatment_options + + # For reference: + # This is the whole widget: + # print(f"{self.element_and_ch_treatment}\n") + + # This is the tuple of selected element and core-hole treatment: + # print(f"{self.element_and_ch_treatment.children[0]}\n") + + # This is the checkbox for the element, giving element name and whether to add it to the elements list + # print(f"{self.element_and_ch_treatment.children[0].children[0]}\n") + # print(f"{self.element_and_ch_treatment.children[0].children[0].value}\n") + # print(f"{self.element_and_ch_treatment.children[0].children[0].description}\n") + + # This is the dropdown for the core-hole treatment option: + # print(f"{self.element_and_ch_treatment.children[0].children[1]}\n") + # print(f"{self.element_and_ch_treatment.children[0].children[1].value}\n") + + def reset(self): + """Reset the panel to its initial state.""" + self.input_structure = None + # self.structure_type.value = "crystal" diff --git a/src/aiidalab_qe/plugins/xas/workchain.py b/src/aiidalab_qe/plugins/xas/workchain.py new file mode 100644 index 000000000..9ad99c207 --- /dev/null +++ 
b/src/aiidalab_qe/plugins/xas/workchain.py @@ -0,0 +1,135 @@ +from importlib import resources + +import yaml + +from aiida import orm +from aiida.plugins import WorkflowFactory +from aiida_quantumespresso.common.types import ElectronicType, SpinType +from aiidalab_qe.plugins import xas as xas_folder +from aiidalab_qe.plugins.utils import set_component_resources + +XspectraCrystalWorkChain = WorkflowFactory("quantumespresso.xspectra.crystal") +PSEUDO_TOC = yaml.safe_load(resources.read_text(xas_folder, "pseudo_toc.yaml")) +pseudo_data_dict = PSEUDO_TOC["pseudos"] +xch_elements = PSEUDO_TOC["xas_xch_elements"] + + +def update_resources(builder, codes): + """Update the resources for the builder.""" + set_component_resources(builder.core.scf.pw, codes.get("pw")) + set_component_resources(builder.core.xs_prod.xspectra, codes.get("xspectra")) + + +def get_builder(codes, structure, parameters, **kwargs): + from copy import deepcopy + + adv_parameters = deepcopy(parameters["advanced"]) + # Setting `tot_charge = 0` will cause FCH calculations to fail due to + # inputs being incorrect, thus we pop this from the overrides + if adv_parameters["pw"]["parameters"]["SYSTEM"].get("tot_charge") == 0: + adv_parameters["pw"]["parameters"]["SYSTEM"].pop("tot_charge") + protocol = parameters["workchain"]["protocol"] + xas_parameters = parameters["xas"] + core_hole_treatments = xas_parameters["core_hole_treatments"] + elements_list = xas_parameters["elements_list"] + supercell_min_parameter = xas_parameters["supercell_min_parameter"] + pseudo_labels = xas_parameters["pseudo_labels"] + core_wfc_data_labels = xas_parameters["core_wfc_data_labels"] + pseudos = {} + # Convert the pseudo and core_wfc_data node labels into nodes: + core_wfc_data = {k: orm.load_node(v) for k, v in core_wfc_data_labels.items()} + for element in elements_list: + pseudos[element] = { + k: orm.load_node(v) for k, v in pseudo_labels[element].items() + } + + # TODO should we override the cutoff_wfc, cutoff_rho by the new pseudo? + # In principle we should, if we know what that value is, but that would + # require testing them first... + + # (13/10/23) I'm keeping the part about molecules in for future reference, + # but we need to establish the protocol & backend code for XAS of molecules + # before thinking about a workflow. + # (22/01/24) Commented out the code for molecules, just so the option doesn't + # appear in the UI and confuse the user. + # is_molecule_input = ( + # True if xas_parameters.get("structure_type") == "molecule" else False + # ) + + structure_preparation_settings = { + "supercell_min_parameter": orm.Float(supercell_min_parameter), + # "is_molecule_input": orm.Bool(is_molecule_input), + } + spglib_settings = orm.Dict({"symprec": 1.0e-3}) + + pw_code = codes["pw"]["code"] + xs_code = codes["xspectra"]["code"] + overrides = { + "core": { + "scf": adv_parameters, + # PG: Here, we set a "variable" broadening scheme, which actually defines a constant broadening + # The reason for this is that in "gamma_mode = constant", the Lorenzian broadening parameter + # is defined by "xgamma" (in "PLOT"), but this parameter *also* controls the broadening value + # used in the Lanczos algorithm to enhance the convergence rate. In order to give the user a + # final spectrum with minimal broadening, we use "gamma_mode = variable", which uses a different + # parameter set ("gamma_energy(1-2)", "gamma_value(1-2)") and thus allows us to decouple spectrum + # broadening from Lanczos broadening and avoid having to re-plot the final spectrum. 
+ "xs_prod": { + "xspectra": { + "parameters": { + "PLOT": { + "gamma_mode": "variable", + "gamma_energy(1)": 0, + "gamma_energy(2)": 1, + "gamma_value(1)": 0.1, + "gamma_value(2)": 0.1, + } + } + } + }, + } + } + + # Ensure that VdW corrections are not applied for the core-hole SCF calculation + # Required to resolve issue #765 (https://github.com/aiidalab/aiidalab-qe/issues/765) + overrides["core"]["scf"]["pw"]["parameters"]["SYSTEM"]["vdw_corr"] = "none" + + builder = XspectraCrystalWorkChain.get_builder_from_protocol( + pw_code=pw_code, + xs_code=xs_code, + structure=structure, + protocol=protocol, + pseudos=pseudos, + elements_list=elements_list, + core_hole_treatments=core_hole_treatments, + core_wfc_data=core_wfc_data, + electronic_type=ElectronicType(parameters["workchain"]["electronic_type"]), + spin_type=SpinType(parameters["workchain"]["spin_type"]), + # TODO: We will need to merge the changes in AiiDA-QE PR#969 in order + # to better handle magnetic and Hubbard data. For now, we can probably + # leave it as it is. + initial_magnetic_moments=parameters["advanced"]["initial_magnetic_moments"], + overrides=overrides, + **kwargs, + ) + builder.pop("relax") + builder.pop("clean_workdir", None) + builder.spglib_settings = spglib_settings + builder.structure_preparation_settings = structure_preparation_settings + # update resources + update_resources(builder, codes) + + return builder + + +def update_inputs(inputs, ctx): + """Update the inputs using context.""" + inputs.structure = ctx.current_structure + + +workchain_and_builder = { + "workchain": XspectraCrystalWorkChain, + "exclude": ("structure", "relax"), + "get_builder": get_builder, + "update_inputs": update_inputs, +} diff --git a/src/aiidalab_qe/plugins/xps/__init__.py b/src/aiidalab_qe/plugins/xps/__init__.py new file mode 100644 index 000000000..56d0eb276 --- /dev/null +++ b/src/aiidalab_qe/plugins/xps/__init__.py @@ -0,0 +1,18 @@ +from aiidalab_qe.common.panel import OutlinePanel + +from .result import Result +from .setting import Setting +from .workchain import workchain_and_builder + + +class XpsOutline(OutlinePanel): + title = "X-ray photoelectron spectroscopy (XPS)" + help = """""" + + +xps = { + "outline": XpsOutline, + "setting": Setting, + "result": Result, + "workchain": workchain_and_builder, +} diff --git a/src/aiidalab_qe/plugins/xps/result.py b/src/aiidalab_qe/plugins/xps/result.py new file mode 100644 index 000000000..57789f555 --- /dev/null +++ b/src/aiidalab_qe/plugins/xps/result.py @@ -0,0 +1,282 @@ +"""XPS results view widgets""" + +import ipywidgets as ipw + +from aiidalab_qe.common.panel import ResultPanel + + +def export_xps_data(outputs): + """Export the data from the XPS workchain""" + + chemical_shifts = {} + symmetry_analysis_data = outputs.symmetry_analysis_data.get_dict() + equivalent_sites_data = symmetry_analysis_data["equivalent_sites_data"] + if "chemical_shifts" in outputs: + for key, data in outputs.chemical_shifts.items(): + ele = key[:-4] + chemical_shifts[ele] = data.get_dict() + binding_energies = {} + if "binding_energies" in outputs: + for key, data in outputs.binding_energies.items(): + ele = key[:-3] + binding_energies[ele] = data.get_dict() + + return ( + chemical_shifts, + binding_energies, + equivalent_sites_data, + ) + + +def xps_spectra_broadening( + points, equivalent_sites_data, gamma=0.3, sigma=0.3, _label="", intensity=1.0 +): + """Broadening the XPS spectra with Voigt function and return the spectra data""" + + import numpy as np + from scipy.special import voigt_profile # 
pylint: disable=no-name-in-module + + result_spectra = {} + fwhm_voigt = gamma / 2 + np.sqrt(gamma**2 / 4 + sigma**2) + for element, point in points.items(): + result_spectra[element] = {} + final_spectra_y_arrays = [] + total_multiplicity = sum( + [equivalent_sites_data[site]["multiplicity"] for site in point] + ) + max_core_level_shift = max(point.values()) + min_core_level_shift = min(point.values()) + # Energy range for the broadening function + x_energy_range = np.linspace( + min_core_level_shift - fwhm_voigt - 1.5, + max_core_level_shift + fwhm_voigt + 1.5, + 500, + ) + for site in point: + # Weight the spectrum of each site by its multiplicity; use a local + # name so the overall intensity factor is not compounded across sites + site_intensity = equivalent_sites_data[site]["multiplicity"] * intensity + relative_core_level_position = point[site] + y = ( + site_intensity + * voigt_profile( + x_energy_range - relative_core_level_position, sigma, gamma + ) + / total_multiplicity + ) + result_spectra[element][site] = [x_energy_range, y] + final_spectra_y_arrays.append(y) + total = sum(final_spectra_y_arrays) + result_spectra[element]["total"] = [x_energy_range, total] + return result_spectra + + +class Result(ResultPanel): + title = "XPS" + workchain_labels = ["xps"] + + def __init__(self, node=None, **kwargs): + super().__init__(node=node, **kwargs) + self.experimental_data = None # Placeholder for experimental data + + def _update_view(self): + import plotly.graph_objects as go + + spectrum_select_prompt = ipw.HTML( + """
    + <div style="line-height: 140%; padding-top: 10px; padding-bottom: 10px"><b>Select spectrum to plot</b></div>
    """ + ) + + voigt_profile_help = ipw.HTML( + """
    + <div style="line-height: 140%; padding-top: 10px; padding-bottom: 10px">Set the Voigt profile to broaden the XPS spectra:</div> +
    """ + ) + + spectra_type = ipw.ToggleButtons( + options=[ + ("Chemical shift", "chemical_shift"), + ("Binding energy", "binding_energy"), + ], + value="chemical_shift", + ) + gamma = ipw.FloatSlider( + value=0.1, + min=0.01, + max=0.5, + description="Lorentzian profile ($\gamma$)", + disabled=False, + style={"description_width": "initial"}, + ) + sigma = ipw.FloatSlider( + value=0.1, + min=0.01, + max=0.5, + description="Gaussian profile ($\sigma$)", + disabled=False, + style={"description_width": "initial"}, + ) + self.intensity = ipw.FloatText( + value=1.0, + min=0.001, + description="Adjustable Intensity Factor", + disabled=False, + style={"description_width": "initial"}, + ) + fill = ipw.Checkbox( + description="Fill", + value=True, + disabled=False, + style={"description_width": "initial"}, + ) + # Create a description label + upload_description = ipw.HTML( + value="Upload Experimental Data (csv format, without header):", + placeholder="", + description="", + ) + + # Create the upload button + upload_btn = ipw.FileUpload( + description="Choose File", + multiple=False, + ) + upload_container = ipw.VBox([upload_description, upload_btn, self.intensity]) + upload_btn.observe(self._handle_upload, names="value") + + paras = ipw.HBox( + children=[ + gamma, + sigma, + ] + ) + # get data + ( + chemical_shifts, + binding_energies, + equivalent_sites_data, + ) = export_xps_data(self.outputs.xps) + self.spectrum_select_options = [ + key.split("_")[0] for key in chemical_shifts.keys() + ] + self.spectrum_select = ipw.Dropdown( + description="", + disabled=False, + value=self.spectrum_select_options[0], + options=self.spectrum_select_options, + layout=ipw.Layout(width="20%"), + ) + # init figure + self.g = go.FigureWidget( + layout=go.Layout( + title={"text": "XPS"}, + barmode="overlay", + ) + ) + self.g.layout.xaxis.title = "Chemical shift (eV)" + self.g.layout.xaxis.autorange = "reversed" + # + self.spectra = xps_spectra_broadening( + chemical_shifts, + equivalent_sites_data, + gamma=gamma.value, + sigma=sigma.value, + intensity=self.intensity.value, + ) + # only plot the selected spectrum + for site, d in self.spectra[self.spectrum_select.value].items(): + self.g.add_scatter( + x=d[0], y=d[1], fill="tozeroy", name=site.replace("_", " ") + ) + + def response(_change): + data = [] + if spectra_type.value == "chemical_shift": + points = chemical_shifts + xaxis = "Chemical Shift (eV)" + else: + points = binding_energies + xaxis = "Binding Energy (eV)" + # + spectra = xps_spectra_broadening( + points, + equivalent_sites_data, + gamma=gamma.value, + sigma=sigma.value, + intensity=self.intensity.value, + ) + + for site, d in spectra[self.spectrum_select.value].items(): + data.append( + { + "x": d[0], + "y": d[1], + "site": site, + } + ) + fill_type = "tozeroy" if fill.value else None + with self.g.batch_update(): + if len(self.g.data) == len(data): + for i in range(len(data)): + self.g.data[i].x = data[i]["x"] + self.g.data[i].y = data[i]["y"] + self.g.data[i].fill = fill_type + self.g.data[i].name = data[i]["site"].replace("_", " ") + + else: + self.g.data = [] + for d in data: + self.g.add_scatter( + x=d["x"], y=d["y"], fill=fill_type, name=d["site"] + ) + self.g.layout.barmode = "overlay" + self.g.layout.xaxis.title = xaxis + self.plot_experimental_data() + + spectra_type.observe(response, names="value") + self.spectrum_select.observe(response, names="value") + gamma.observe(response, names="value") + sigma.observe(response, names="value") + self.intensity.observe(response, names="value") + 
fill.observe(response, names="value") + self.children = [ + spectra_type, + ipw.HBox( + children=[ + spectrum_select_prompt, + self.spectrum_select, + ] + ), + voigt_profile_help, + paras, + fill, + self.g, + upload_container, + ] + + def _handle_upload(self, change): + """Process the uploaded experimental data file.""" + import pandas as pd + + uploaded_file = next(iter(change.new.values())) + content = uploaded_file["content"] + content_str = content.decode("utf-8") + + from io import StringIO + + df = pd.read_csv(StringIO(content_str), header=None) + + self.experimental_data = df + # Calculate an initial guess for the intensity factor + total = self.spectra[self.spectrum_select.value]["total"] + # Align the max value of the total spectra with the max value of the experimental data + max_exp = max(self.experimental_data[1]) + max_total = max(total[1]) + self.intensity.value = max_exp / max_total + + def plot_experimental_data(self): + """Plot the experimental data alongside the calculated data.""" + if self.experimental_data is not None: + x = self.experimental_data[0] + y = self.experimental_data[1] + self.g.add_scatter(x=x, y=y, mode="lines", name="Experimental Data") diff --git a/src/aiidalab_qe/plugins/xps/setting.py b/src/aiidalab_qe/plugins/xps/setting.py new file mode 100644 index 000000000..ecb1b4c9e --- /dev/null +++ b/src/aiidalab_qe/plugins/xps/setting.py @@ -0,0 +1,243 @@ +"""Panel for XPS plugin.""" + +import ipywidgets as ipw +import traitlets as tl + +from aiida.orm import Group, QueryBuilder, StructureData +from aiidalab_qe.common.panel import Panel + +base_url = "https://github.com/superstar54/xps-data/raw/main/pseudo_demo/" + + +def install_pseudos(pseudo_group="pseudo_demo_pbe"): + import os + from pathlib import Path + from subprocess import run + + url = base_url + pseudo_group + ".aiida" + + env = os.environ.copy() + env["PATH"] = f"{env['PATH']}:{Path.home().joinpath('.local', 'bin')}" + + def run_(*args, **kwargs): + return run(*args, env=env, capture_output=True, check=True, **kwargs) + + run_(["verdi", "archive", "import", url, "--no-import-group"]) + + +class Setting(Panel): + title = "XPS Settings" + identifier = "xps" + input_structure = tl.Instance(StructureData, allow_none=True) + protocol = tl.Unicode(allow_none=True) + + core_hole_treatment_title = ipw.HTML( + """
    + <div style="padding-top: 0px; padding-bottom: 0px">
    + <h4>Core hole treatment</h4></div>
    """ + ) + core_hole_treatment_help = ipw.HTML( + """
    + <div style="line-height: 140%; padding-top: 0px; padding-bottom: 10px">
    + You have three options: <br>
    + (1) XCH(smear): places the excited electron into the conduction band with smeared occupations, suitable for extended systems. <br>
    + (2) XCH(fixed): places the excited electron into the conduction band with fixed occupations, suitable for extended systems. <br>
    + (3) Full: removes one electron from the system, suitable for molecules. </div>
    """ + ) + + pseudo_title = ipw.HTML( + """
    + <div style="padding-top: 0px; padding-bottom: 0px">
    + <h4>Core-hole pseudopotential group</h4></div>
    """ + ) + pseudo_help = ipw.HTML( + f"""
    + <div style="line-height: 140%; padding-top: 0px; padding-bottom: 10px">
    + Please select a pseudopotential group, which provides the ground-state and excited-state pseudopotentials for each element. The pseudopotentials are downloaded from this <a href="{base_url}" target="_blank">repository</a>. </div> +
    """ + ) + + core_level_title = ipw.HTML( + """
    + <div style="padding-top: 0px; padding-bottom: 0px">
    + <h4>Select core-level</h4></div>
    """ + ) + core_level_help = ipw.HTML( + """
    + <div style="line-height: 140%; padding-top: 0px; padding-bottom: 10px">
    + The list of core-levels to be considered for analysis. </div> +
    """ + ) + structure_title = ipw.HTML( + """
    + <div style="padding-top: 0px; padding-bottom: 0px">
    + <h4>Structure</h4></div>
    """ + ) + structure_help = ipw.HTML( + """
    + <div style="line-height: 140%; padding-top: 0px; padding-bottom: 10px">
    + Below you can indicate whether the material should be treated as a molecule + or a crystal. </div> +
    """ + ) + supercell_title = ipw.HTML( + """
    + <div style="padding-top: 0px; padding-bottom: 0px">
    + <h4>Cell size</h4></div>
    """ + ) + supercell_help = ipw.HTML( + """
    + <div style="line-height: 140%; padding-top: 0px; padding-bottom: 10px">
    + Define the minimum cell length in angstrom for the resulting supercell, and thus all output + structures. The default value of 8.0 angstrom will be used + if no input is given. Setting this value to 0.0 will + instruct the workflow not to scale up the input structure. </div> +
    """ + ) + binding_energy_title = ipw.HTML( + """
    + <div style="padding-top: 0px; padding-bottom: 0px">
    + <h4>Absolute binding energy</h4></div>
    """ + ) + binding_energy_help = ipw.HTML( + """
    + <div style="line-height: 140%; padding-top: 0px; padding-bottom: 10px">
    + To calculate the absolute binding energy, you need to provide the correction energy for the core electrons. The correction energy is Ecorr = E_core_hole - E_gipaw, where E_core_hole and E_gipaw are each calculated as Etot - Etotps. Etot and Etotps can be found in the output generated when creating the pseudopotential. An offset correction, obtained by fitting to experimental data, is also added. Here is an example: C:339.79,O:668.22,F:955.73,Si:153.19 </div> +
    """ + ) + + def __init__(self, **kwargs): + # Core hole treatment type + self.core_hole_treatment = ipw.ToggleButtons( + options=[ + ("XCH(smear)", "xch_smear"), + ("XCH(fixed)", "xch_fixed"), + ("Full", "full"), + ], + value="xch_smear", + ) + self.pseudo_group = ipw.Dropdown( + options=["pseudo_demo_pbe", "pseudo_demo_pbesol"], + value="pseudo_demo_pbe", + description="Group:", + disabled=False, + style={"description_width": "initial"}, + ) + self.core_level_list = ipw.VBox() + + self.structure_type = ipw.ToggleButtons( + options=[ + ("Molecule", "molecule"), + ("Crystal", "crystal"), + ], + value="crystal", + ) + self.supercell_min_parameter = ipw.FloatText( + value=8.0, + description="The minimum cell length (Γ…):", + disabled=False, + style={"description_width": "initial"}, + ) + self.calc_binding_energy = ipw.Checkbox( + description="Calculate binding energy: ", + indent=False, + value=False, + ) + + self.children = [ + self.structure_title, + self.structure_help, + ipw.HBox( + [self.structure_type], + ), + self.pseudo_title, + self.pseudo_help, + self.pseudo_group, + self.core_level_title, + self.core_level_help, + ipw.HBox( + [self.core_level_list], + ), + ] + self.pseudo_group.observe(self._update_pseudo, names="value") + super().__init__(**kwargs) + + def get_panel_value(self): + """Return a dictionary with the input parameters for the plugin.""" + core_level_list = [ + core_level.description + for core_level in self.core_level_list.children + if core_level.value + ] + # if len(core_level_list) == 0: + # raise Exception("Please select at least one core_level.") + parameters = { + # "core_hole_treatment": self.core_hole_treatment.value, + "structure_type": self.structure_type.value, + "pseudo_group": self.pseudo_group.value, + "correction_energies": self.correction_energies, + "core_level_list": core_level_list, + } + return parameters + + def set_panel_value(self, input_dict): + """Load a dictionary with the input parameters for the plugin.""" + self.pseudo_group.value = input_dict.get("pseudo_group", "pseudo_demo_pbe") + # self.core_hole_treatment.value = input_dict.get( + # "core_hole_treatment", "xch_smear" + # ) + self.structure_type.value = input_dict.get("structure_type", "crystal") + core_level_list = input_dict.get("core_level_list", []) + for core_level in self.core_level_list.children: + if core_level.description in core_level_list: + core_level.value = True + + @tl.observe("input_structure") + def _update_structure(self, _=None): + self._update_core_level_list() + + def _update_core_level_list(self): + if self.input_structure is None: + return + structure = self.input_structure + kind_list = [Kind.symbol for Kind in structure.kinds] + checkbox_list = [] + qb = QueryBuilder() + qb.append(Group, filters={"label": self.pseudo_group.value}) + if len(qb.all()) == 0: + install_pseudos(self.pseudo_group.value) + group = qb.all()[0][0] + self.correction_energies = group.base.extras.get("correction") + supported_core_levels = {} + for key in self.correction_energies: + ele, orbital = key.split("_") + if ele not in supported_core_levels: + supported_core_levels[ele] = [key] + else: + supported_core_levels[ele].append(key) + # print("supported_core_levels: ", supported_core_levels) + for ele in kind_list: + if ele in supported_core_levels: + for orbital in supported_core_levels[ele]: + checkbox_list += ( + ipw.Checkbox( + description=orbital, + indent=False, + value=False, + layout=ipw.Layout(max_width="100%"), + ), + ) + else: + checkbox_list += ( + ipw.Checkbox( + 
description=f"{ele}, not supported by the selected pseudo group", + indent=False, + value=False, + disabled=True, + style={"description_width": "initial"}, + layout=ipw.Layout(max_width="100%"), + ), + ) + self.core_level_list.children = checkbox_list + + def _update_pseudo(self, change): + pseudo_group = change["new"] + qb = QueryBuilder() + qb.append(Group, filters={"label": pseudo_group}) + if len(qb.all()) == 0: + install_pseudos(pseudo_group) + self._update_core_level_list() + + def reset(self): + """Reset the panel to its initial state.""" + self.input_structure = None + self.structure_type.value = "crystal" + self.pseudo_group.value = "pseudo_demo_pbe" diff --git a/src/aiidalab_qe/plugins/xps/workchain.py b/src/aiidalab_qe/plugins/xps/workchain.py new file mode 100644 index 000000000..06379b99c --- /dev/null +++ b/src/aiidalab_qe/plugins/xps/workchain.py @@ -0,0 +1,123 @@ +from aiida.orm import Bool, Dict, Float, Group, QueryBuilder +from aiida.plugins import WorkflowFactory +from aiida_quantumespresso.common.types import ElectronicType, SpinType +from aiidalab_qe.plugins.utils import set_component_resources + +XpsWorkChain = WorkflowFactory("quantumespresso.xps") + +# supercell min parameter for different protocols +supercell_min_parameter_map = { + "fast": 4.0, + "moderate": 8.0, + "precise": 12.0, +} + + +def update_resources(builder, codes): + """Update the resources for the builder.""" + set_component_resources(builder.ch_scf.pw, codes.get("pw")) + + +def get_builder(codes, structure, parameters, **kwargs): + from copy import deepcopy + + protocol = parameters["workchain"]["protocol"] + xps_parameters = parameters.get("xps", {}) + all_correction_energies = xps_parameters.pop("correction_energies", {}) + core_level_list = xps_parameters.pop("core_level_list", None) + # load pseudo for excited-state and group-state. 
+ pseudo_group = xps_parameters.pop("pseudo_group") + pseudo_group = ( + QueryBuilder().append(Group, filters={"label": pseudo_group}).one()[0] + ) + # set pseudo for element + pseudos = {} + elements_list = [] + correction_energies = {} + for label in core_level_list: + element = label.split("_")[0] + pseudos[element] = { + "core_hole": next( + pseudo for pseudo in pseudo_group.nodes if pseudo.label == label + ), + "gipaw": next( + pseudo + for pseudo in pseudo_group.nodes + if pseudo.label == f"{element}_gs" + ), + } + correction_energies[element] = ( + all_correction_energies[label]["core"] + - all_correction_energies[label]["exp"] + ) + elements_list.append(element) + # + is_molecule_input = ( + True if xps_parameters.get("structure_type") == "molecule" else False + ) + # set core hole treatment based on electronic type + if parameters["workchain"]["electronic_type"] == "metal": + core_hole_treatment = "xch_smear" + else: + core_hole_treatment = "xch_fixed" + # if molecule input, set core hole treatment to full + if is_molecule_input: + core_hole_treatment = "full" + core_hole_treatments = {element: core_hole_treatment for element in elements_list} + structure_preparation_settings = { + "supercell_min_parameter": Float(supercell_min_parameter_map[protocol]), + "is_molecule_input": Bool(is_molecule_input), + } + pw_code = codes["pw"]["code"] + overrides_ch_scf = deepcopy(parameters["advanced"]) + if is_molecule_input: + overrides_ch_scf["pw"]["parameters"]["SYSTEM"]["assume_isolated"] = "mt" + overrides = { + "relax": { + "base": deepcopy(parameters["advanced"]), + "base_final_scf": deepcopy(parameters["advanced"]), + }, + "ch_scf": overrides_ch_scf, + } + # Ensure that VdW corrections are not applied for the core-hole SCF calculation + # Required to resolve issue #765 (https://github.com/aiidalab/aiidalab-qe/issues/765) + overrides["ch_scf"]["pw"]["parameters"]["SYSTEM"]["vdw_corr"] = "none" + + builder = XpsWorkChain.get_builder_from_protocol( + code=pw_code, + structure=structure, + protocol=protocol, + pseudos=pseudos, + elements_list=elements_list, + calc_binding_energy=Bool(True), + correction_energies=Dict(correction_energies), + core_hole_treatments=core_hole_treatments, + structure_preparation_settings=structure_preparation_settings, + electronic_type=ElectronicType(parameters["workchain"]["electronic_type"]), + spin_type=SpinType(parameters["workchain"]["spin_type"]), + initial_magnetic_moments=parameters["advanced"]["initial_magnetic_moments"], + overrides=overrides, + **kwargs, + ) + builder.pop("relax") + builder.pop("clean_workdir", None) + # update resources + update_resources(builder, codes) + if is_molecule_input: + # set a large kpoints_distance value to set the kpoints to 1x1x1 + builder.ch_scf.kpoints_distance = Float(5) + builder.ch_scf.pw.settings = Dict(dict={"gamma_only": True}) + return builder + + +def update_inputs(inputs, ctx): + """Update the inputs using context.""" + inputs.structure = ctx.current_structure + + +workchain_and_builder = { + "workchain": XpsWorkChain, + "exclude": ("structure", "relax"), + "get_builder": get_builder, + "update_inputs": update_inputs, +} diff --git a/src/aiidalab_qe/setup/__init__.py b/src/aiidalab_qe/setup/__init__.py new file mode 100644 index 000000000..762d595d7 --- /dev/null +++ b/src/aiidalab_qe/setup/__init__.py @@ -0,0 +1,5 @@ +"""Package for the QE app CLI for setting up codes and pseudos""" + +from .codes import QE_VERSION + +__all__ = ["QE_VERSION"] diff --git a/src/aiidalab_qe/setup/codes.py 
b/src/aiidalab_qe/setup/codes.py new file mode 100644 index 000000000..c63a76bb7 --- /dev/null +++ b/src/aiidalab_qe/setup/codes.py @@ -0,0 +1,247 @@ +import subprocess +from pathlib import Path +from shutil import which + +from filelock import FileLock, Timeout + +from aiida.common.exceptions import NotExistent +from aiida.orm import load_code + +FN_INSTALL_LOCKFILE = Path.home().joinpath(".install-qe-on-localhost.lock") +FN_SETUP_LOCKFILE = Path.home().joinpath(".setup-qe-on-localhost.lock") +FN_DO_NOT_SETUP = Path.cwd().joinpath(".do-not-setup-on-localhost") + +QE_VERSION = "7.2" + + +def get_qe_env(): + # QE is already pre-installed in the QE image + path = Path(f"/opt/conda/envs/quantum-espresso-{QE_VERSION}") + if path.exists(): + return path + else: + return Path.home().joinpath(".conda", "envs", f"quantum-espresso-{QE_VERSION}") + + +# Add all QE codes with the calcjob entry point in the aiida-quantumespresso. +CODE_NAMES = ( + "pw", + "projwfc", + "dos", + "cp", + "epw", + "matdyn", + "neb", + "open_grid", + "ph", + "pp", + "pw2gw", + "pw2wannier90", + "q2r", + "xspectra", + "hp", +) + + +def qe_installed(): + import json + + env_exist = get_qe_env().exists() + proc = subprocess.run( + ["conda", "list", "-n", f"{get_qe_env().name}", "--json", "--full-name", "qe"], + check=True, + capture_output=True, + ) + + info = json.loads(str(proc.stdout.decode()))[0] + + return env_exist and "qe" == info["name"] + + +def install_qe(): + subprocess.run( + [ + "conda", + "create", + "--yes", + "--override-channels", + "--channel", + "conda-forge", + "--prefix", + str(get_qe_env()), + f"qe={QE_VERSION}", + ], + capture_output=True, + check=True, + ) + + +def _code_is_setup(name, computer): + try: + load_code(f"{name}-{QE_VERSION}@{computer}") + except NotExistent: + return False + else: + return True + + +def codes_are_setup(computer): + return all(_code_is_setup(code_name, computer) for code_name in CODE_NAMES) + + +def _generate_header_to_setup_code(): + """Generate the header string to setup a code for a given computer.""" + header_code = """ +from aiida.orm.nodes.data.code.installed import InstalledCode +from aiida.orm import load_computer +from aiida import load_profile +load_profile() + +""" + return header_code + + +def _generate_string_to_setup_code(code_name, computer): + """Generate the Python string to setup an AiiDA code for a given computer. + + Tries to load an existing code and if not existent, + generates Python code to create and store a new code setup.""" + try: + load_code(f"{code_name}-{QE_VERSION}@{computer}") + except NotExistent: + label = f"{code_name}-{QE_VERSION}" + description = f"{code_name}.x ({QE_VERSION}) setup by AiiDAlab." 
+ filepath_executable = get_qe_env().joinpath("bin", f"{code_name}.x") + default_calc_job_plugin = f"quantumespresso.{code_name}" + prepend_text = f'eval "$(conda shell.posix hook)"\\nconda activate {get_qe_env()}\\nexport OMP_NUM_THREADS=1' + python_code = """ +computer = load_computer('{}') +code = InstalledCode(computer=computer, + label='{}', + description='{}', + filepath_executable='{}', + default_calc_job_plugin='{}', + prepend_text='{}' + ) + +code.store() +""".format( # noqa: UP032 + computer, + label, + description, + filepath_executable, + default_calc_job_plugin, + prepend_text, + ) + return python_code + else: + # the code already exists + return "" + + +def setup_codes(computer): + python_code = _generate_header_to_setup_code() + for code_name in CODE_NAMES: + python_code += _generate_string_to_setup_code(code_name, computer) + try: + subprocess.run(["python", "-c", python_code], capture_output=True, check=True) + except subprocess.CalledProcessError as err: + raise RuntimeError( + f"Failed to setup codes, exit_code={err.returncode}, {err.stderr}" + ) from None + + +def install_and_setup(computer="localhost", force=False): + """Install Quantum ESPRESSO and the corresponding AiiDA codes. + + Args: + force: Ignore previously failed attempts and install anyways. + computer: computer label in AiiDA where the code is setup for + """ + # Check for "do not install file" and skip actual check. The purpose of + # this file is to not re-try this process on every app start in case that + # there are issues. + # XXX: use filelock to control `FN_DO_NOT_SETUP` as well + if not force and FN_DO_NOT_SETUP.exists(): + raise RuntimeError("Installation failed in previous attempt.") + + yield from _install() + yield from _setup(computer) + + +def _install(): + """Install Quantum ESPRESSO.""" + yield "Checking installation status..." + + conda_installed = which("conda") + try: + with FileLock(FN_INSTALL_LOCKFILE, timeout=5): + if not conda_installed: + raise RuntimeError( + "Unable to automatically install Quantum ESPRESSO, conda " + "is not available." + ) + + if qe_installed(): + return + + # Install Quantum ESPRESSO. + yield "Installing QE..." + try: + install_qe() + except subprocess.CalledProcessError as error: + raise RuntimeError( + f"Failed to create conda environment: {error}" + ) from None + + except Timeout: + # Assume that the installation was triggered by a different process. + yield "Installation was already started, waiting for it to finish..." + with FileLock(FN_INSTALL_LOCKFILE, timeout=120): + if not qe_installed(): + raise RuntimeError( + "Installation process did not finish in the expected time." + ) from None + + +def _setup(computer): + """Setup the corresponding AiiDA codes after QE installation.""" + yield "Checking setup status..." + + try: + with FileLock(FN_SETUP_LOCKFILE, timeout=5): + # We assume that if the codes are already setup, everything is in + # order. Only if they are not present, should we take action, + # however we only do so if the environment has a conda binary + # present (`which conda`). If that is not the case then we assume + # that this is a custom user environment in which case we also take + # no further action. 
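The try/except structure here follows the same locking idiom used for the QE installation above: grab a short-lived lock and do the work, or, on Timeout, assume another process is already doing it and wait on a longer lock. Isolated as a sketch (the function and its names are illustrative, not part of the patch):

    from filelock import FileLock, Timeout

    def run_once(lockfile, work, already_done, wait=120):
        """Do `work()` exactly once across concurrent processes (sketch)."""
        try:
            with FileLock(lockfile, timeout=5):
                if not already_done():
                    work()
        except Timeout:
            # Another process holds the lock; wait for it to finish instead.
            with FileLock(lockfile, timeout=wait):
                if not already_done():
                    raise RuntimeError("Did not finish in the expected time.")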
+ if codes_are_setup(computer=computer): + return # Already setup + + # After installing QE, we install the corresponding + # AiiDA codes: + python_code = _generate_header_to_setup_code() + for code_name in CODE_NAMES: + if not _code_is_setup(code_name, computer=computer): + yield f"Preparing setup script for ({code_name}) on ({computer})..." + code_string = _generate_string_to_setup_code(code_name, computer) + python_code += code_string + try: + yield "Setting up all codes..." + subprocess.run( + ["python", "-c", python_code], capture_output=True, check=True + ) + except subprocess.CalledProcessError as err: + raise RuntimeError( + f"Failed to setup codes, exit_code={err.returncode}, {err.stderr}" + ) from None + + except Timeout: + # Assume that the installation was triggered by a different process. + yield "Installation was already started, waiting for it to finish..." + with FileLock(FN_SETUP_LOCKFILE, timeout=120): + if not codes_are_setup(computer=computer): + raise RuntimeError( + "Installation process did not finish in the expected time." + ) from None diff --git a/src/aiidalab_qe/setup/pseudos.py b/src/aiidalab_qe/setup/pseudos.py new file mode 100644 index 000000000..2dd0b1323 --- /dev/null +++ b/src/aiidalab_qe/setup/pseudos.py @@ -0,0 +1,225 @@ +from __future__ import annotations + +import os +from collections.abc import Iterable +from dataclasses import dataclass, field +from pathlib import Path +from subprocess import run + +from aiida_pseudo.groups.family import PseudoPotentialFamily +from filelock import FileLock, Timeout + +from aiida.orm import QueryBuilder + +SSSP_VERSION = "1.3" +PSEUDODOJO_VERSION = "0.4" + +EXPECTED_PSEUDOS = { + f"SSSP/{SSSP_VERSION}/PBE/efficiency", + f"SSSP/{SSSP_VERSION}/PBE/precision", + f"SSSP/{SSSP_VERSION}/PBEsol/efficiency", + f"SSSP/{SSSP_VERSION}/PBEsol/precision", + f"PseudoDojo/{PSEUDODOJO_VERSION}/PBE/SR/standard/upf", + f"PseudoDojo/{PSEUDODOJO_VERSION}/PBEsol/SR/standard/upf", + f"PseudoDojo/{PSEUDODOJO_VERSION}/PBE/SR/stringent/upf", + f"PseudoDojo/{PSEUDODOJO_VERSION}/PBEsol/SR/stringent/upf", + f"PseudoDojo/{PSEUDODOJO_VERSION}/PBE/FR/standard/upf", + f"PseudoDojo/{PSEUDODOJO_VERSION}/PBEsol/FR/standard/upf", + f"PseudoDojo/{PSEUDODOJO_VERSION}/PBE/FR/stringent/upf", + f"PseudoDojo/{PSEUDODOJO_VERSION}/PBEsol/FR/stringent/upf", +} + + +FN_LOCKFILE = Path.home().joinpath(".install-sssp.lock") + + +@dataclass +class PseudoFamily: + """The dataclass to deal with pseudo family strings. + + Attributes: + library: the library name of the pseudo family, e.g. SSSP or PseudoDojo. + cmd_library_name: the sub command name used in aiida-pseudo command line. + version: the version of the pseudo family, e.g. 1.2 + functional: the functional of the pseudo family, e.g. PBE, PBEsol. + accuracy: the accuracy of the pseudo family, which is protocol in aiida-pseudo, e.g. efficiency, precision, standard, stringent. + relativistic: the relativistic treatment of the pseudo family, e.g. SR, FR. + file_type: the file type of the pseudo family, e.g. upf, psml, currently only used for PseudoDojo. 
+ """ + + library: str + version: str + functional: str + accuracy: str + cmd_library_name: str = field(init=False) + relativistic: str | None = None + file_type: str | None = None + + def __post_init__(self): + """Post init operations and checks.""" + if self.library == "SSSP": + self.cmd_library_name = "sssp" + elif self.library == "PseudoDojo": + self.cmd_library_name = "pseudo-dojo" + else: + raise ValueError(f"Unknown pseudo library {self.library}") + + @classmethod + def from_string(cls, pseudo_family_string: str) -> PseudoFamily: + """Initialize from a pseudo family string.""" + # We support two pseudo families: SSSP and PseudoDojo + # They are formatted as follows: + # SSSP: SSSP/// + # PseudoDojo: PseudoDojo///// + # where is either 'SR' or 'FR' and is either 'upf' or 'psml' + # Before we unify the format of family strings, the conditions below are necessary + # to distinguish between the two families + library = pseudo_family_string.split("/")[0] + if library == "SSSP": + version, functional, accuracy = pseudo_family_string.split("/")[1:] + relativistic = None + file_type = None + elif library == "PseudoDojo": + ( + version, + functional, + relativistic, + accuracy, + file_type, + ) = pseudo_family_string.split("/")[1:] + else: + raise ValueError( + f"Not able to parse valid library name from {pseudo_family_string}" + ) + + return cls( + library=library, + version=version, + functional=functional, + accuracy=accuracy, + relativistic=relativistic, + file_type=file_type, + ) + + +def pseudos_to_install() -> set[str]: + """Query the database and return the list of pseudopotentials that are not installed.""" + qb = QueryBuilder() + qb.append( + PseudoPotentialFamily, + filters={ + "or": [ + {"label": {"like": "SSSP/%"}}, + {"label": {"like": "PseudoDojo/%"}}, + ] + }, + project="label", + ) + labels = set(qb.all(flat=True)) + return EXPECTED_PSEUDOS - labels + + +def _construct_cmd( + pseudo_family_string: str, download_only: bool = False, cwd: Path | None = None +) -> list: + """Construct the command for installation of pseudopotentials. + + If ``cwd`` is not None, and ``download_only`` is True the, only download the + pseudopotential files to the ``cwd`` folder. + If ``download_only`` is False and ``cwd`` is not None, the the pseudos will be installed from the ``cwd`` where the pseudos are downloaded to. + + NOTE: download_only has nothing to do with cwd, it will not download the pseudos to cwd if cwd is specified. + The control to download to cwd is in the ``_install_pseudos`` function below. + """ + pseudo_family = PseudoFamily.from_string(pseudo_family_string) + + # the library used in command line is lowercase + # e.g. 
SSSP -> sssp and PseudoDojo -> pseudo-dojo + library = pseudo_family.cmd_library_name + version = pseudo_family.version + functional = pseudo_family.functional + accuracy = pseudo_family.accuracy + cmd = [ + "aiida-pseudo", + "install", + library, + "--functional", + functional, + "--version", + version, + "-p", # p for protocol which is the accuracy of the library + accuracy, + ] + + # extra arguments for PseudoDojo + if library == "pseudo-dojo": + relativistic = pseudo_family.relativistic + file_type = pseudo_family.file_type + cmd.extend( + [ + "--relativistic", + relativistic, + "--pseudo-format", + file_type, + ] + ) + + if download_only: + cmd.append("--download-only") + + # if cwd source folder specified, then install the pseudos from the folder + # download file name is replace `/` with `_` of the pseudo family string with `.aiida_pseudo` extension + if not download_only and cwd is not None: + file_path = cwd / f"{pseudo_family_string.replace('/', '_')}.aiida_pseudo" + if file_path.exists(): + cmd.extend(["--from-download", str(file_path)]) + + return cmd + + +def run_cmd(cmd: list, env: dict | None = None, cwd: Path | None = None): + """Run the command with specific env in the workdir specified.""" + run(cmd, env=env, cwd=cwd, capture_output=True, check=True) + + +def _install_pseudos( + pseudo_families: set[str], download_only: bool = False, cwd: Path | None = None +) -> Iterable[float]: + """Go through the list of pseudo families and install them.""" + env = os.environ.copy() + env["PATH"] = f"{env['PATH']}:{Path.home() / '.local' / 'bin'}" + + mult = 1.0 / len(pseudo_families) + yield mult * 0 + for i, pseudo_family in enumerate(pseudo_families): + cmd = _construct_cmd(pseudo_family, download_only, cwd=cwd) + + run_cmd(cmd, env=env, cwd=cwd) + + yield mult * (i + 1) + + +def install( + download_only: bool = False, cwd: Path | None = None +) -> Iterable[tuple[str, float]]: + yield "Checking installation status...", 0.1 + try: + with FileLock(FN_LOCKFILE, timeout=5): + if len(pseudos := pseudos_to_install()) > 0: + yield "Installing...", 0.1 + for progress in _install_pseudos(pseudos, download_only, cwd): + yield "Installing...", progress + + except Timeout: + # Assume that the installation was triggered by a different process. + from aiidalab_qe.common.widgets import ProgressBar + + yield ( + "Installation was already started elsewhere, waiting for it to finish...", + ProgressBar.AnimationRate(1.0), + ) + with FileLock(FN_LOCKFILE, timeout=300): + if len(pseudos_to_install()) > 0: + raise RuntimeError( + "Installation process did not finish in the expected time." + ) from None diff --git a/src/aiidalab_qe/version.py b/src/aiidalab_qe/version.py index 5cc5e7af7..0207c7e47 100644 --- a/src/aiidalab_qe/version.py +++ b/src/aiidalab_qe/version.py @@ -1,6 +1,5 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- """This module contains project version information for both the app and the workflow.""" -__version__ = "v23.11.0rc0" +__version__ = "v24.10.0a3" diff --git a/src/aiidalab_qe/workflows/__init__.py b/src/aiidalab_qe/workflows/__init__.py index 75a74029b..6fc798afc 100644 --- a/src/aiidalab_qe/workflows/__init__.py +++ b/src/aiidalab_qe/workflows/__init__.py @@ -1,11 +1,11 @@ # AiiDA imports. +# AiiDA Quantum ESPRESSO plugin inputs. from aiida import orm from aiida.common import AttributeDict from aiida.engine import ToContext, WorkChain, if_ from aiida.plugins import DataFactory - -# AiiDA Quantum ESPRESSO plugin inputs. 
from aiida_quantumespresso.common.types import ElectronicType, RelaxType, SpinType +from aiida_quantumespresso.data.hubbard_structure import HubbardStructureData from aiida_quantumespresso.utils.mapping import prepare_process_inputs from aiida_quantumespresso.workflows.pw.relax import PwRelaxWorkChain @@ -19,12 +19,20 @@ # because we want to decouple the workflows from the app, so I copied it here # instead of importing it. # load entry points + + def get_entries(entry_point_name="aiidalab_qe.property"): from importlib.metadata import entry_points entries = {} for entry_point in entry_points().get(entry_point_name, []): - entries[entry_point.name] = entry_point.load() + try: + # Attempt to load the entry point + loaded_entry_point = entry_point.load() + entries[entry_point.name] = loaded_entry_point + except Exception as e: + # Handle loading errors + print(f"Failed to load entry point {entry_point.name}: {e}") return entries @@ -111,23 +119,63 @@ def get_builder_from_protocol( parameters = parameters or {} properties = parameters["workchain"].pop("properties", []) codes = parameters.pop("codes", {}) - codes = { - key: orm.load_node(value) - for key, value in codes.items() - if value is not None - } + # load codes from uuid + for _, value in codes.items(): + if value["code"] is not None: + value["code"] = orm.load_node(value["code"]) # update pseudos for kind, uuid in parameters["advanced"]["pw"]["pseudos"].items(): parameters["advanced"]["pw"]["pseudos"][kind] = orm.load_node(uuid) # builder = cls.get_builder() - # Set the structure. - builder.structure = structure + # Set a HubbardStructureData if hubbard_parameters is specified + hubbard_dict = parameters["advanced"].pop("hubbard_parameters", None) + + # Check if hubbard_dict is provided + if hubbard_dict is not None: + hubbard_parameters = hubbard_dict["hubbard_u"] + hubbard_structure = HubbardStructureData.from_structure(structure) + + # Initialize on-site Hubbard values + for key, value in hubbard_parameters.items(): + kind, orbital = key.rsplit(" - ", 1) + hubbard_structure.initialize_onsites_hubbard( + atom_name=kind, + atom_manifold=orbital, + value=value, + hubbard_type="U", + use_kinds=True, + ) + + # Determine whether to store and use hubbard_structure based on conditions + if ( + isinstance(structure, HubbardStructureData) + and hubbard_structure.hubbard == structure.hubbard + ): + # If the structure is HubbardStructureData and hubbard parameters match, assign the original structure + builder.structure = structure + else: + # In all other cases, store and assign hubbard_structure + hubbard_structure.store() + builder.structure = hubbard_structure + + elif isinstance(structure, HubbardStructureData): + # Convert HubbardStructureData to a simple StructureData + temp_structure = structure.get_ase() + new_structure = StructureData(ase=temp_structure) + new_structure.store() + builder.structure = new_structure + else: + builder.structure = structure + # relax - relax_overrides = {"base": parameters["advanced"]} + relax_overrides = { + "base": parameters["advanced"], + "base_final_scf": parameters["advanced"], + } protocol = parameters["workchain"]["protocol"] relax_builder = PwRelaxWorkChain.get_builder_from_protocol( - code=codes.get("pw"), + code=codes.get("pw")["code"], structure=structure, protocol=protocol, relax_type=RelaxType(parameters["workchain"]["relax_type"]), @@ -146,21 +194,22 @@ def get_builder_from_protocol( if properties is None: properties = [] builder.properties = orm.List(list=properties) + # clean workdir + 
clean_workdir = orm.Bool(parameters["advanced"]["clean_workdir"]) + builder.clean_workdir = clean_workdir # add plugin workchain for name, entry_point in plugin_entries.items(): if name in properties: plugin_builder = entry_point["get_builder"]( - codes, structure, copy.deepcopy(parameters), **kwargs + codes, builder.structure, copy.deepcopy(parameters), **kwargs ) + plugin_workchain = entry_point["workchain"] + if plugin_workchain.spec().has_input("clean_workdir"): + plugin_builder.clean_workdir = clean_workdir setattr(builder, name, plugin_builder) else: builder.pop(name, None) - # XXX (unkcpz) I smell not proper design here since I have to look at - # configuration step to know what show be set here. - clean_workdir = parameters["advanced"]["clean_workdir"] - builder.clean_workdir = orm.Bool(clean_workdir) - return builder def setup(self): @@ -224,7 +273,8 @@ def run_plugin(self): self.exposed_inputs(plugin_workchain, namespace=name) ) inputs.metadata.call_link_label = name - inputs.structure = self.ctx.current_structure + if entry_point.get("update_inputs"): + entry_point["update_inputs"](inputs, self.ctx) inputs = prepare_process_inputs(plugin_workchain, inputs) running = self.submit(plugin_workchain, **inputs) self.report(f"launching plugin {name} <{running.pk}>") diff --git a/start.py b/start.py index 322943c15..dfe23aaa0 100644 --- a/start.py +++ b/start.py @@ -1,9 +1,19 @@ import ipywidgets as ipw -def get_start_widget(appbase, jupbase, notebase): +def get_start_widget(appbase, jupbase, notebase): # noqa: ARG001 return ipw.HTML( f""" + + + + + + +
    <table><tr><th style="text-align:center">Utils</th></tr><tr><td valign="top"><ul><li><a href="{appbase}/qe.ipynb" target="_blank">Quantum ESPRESSO</a></li></ul></td></tr></table> + """ + )
    diff --git a/tests/configuration/test_advanced.py b/tests/configuration/test_advanced.py index a860fe618..0a177460c 100644 --- a/tests/configuration/test_advanced.py +++ b/tests/configuration/test_advanced.py @@ -95,6 +95,43 @@ def test_advanced_kpoints_settings(): assert w.value.get("kpoints_distance") == 0.5 +def test_advanced_molecule_settings(generate_structure_data): + """Test kpoint setting of advanced setting widget.""" + from aiidalab_qe.app.configuration.advanced import AdvancedSettings + + w = AdvancedSettings() + + # Check the disable of is bind to override switch + assert w.kpoints_distance.disabled is True + + w.override.value = True + assert w.kpoints_distance.disabled is False + + # create molecule + structure = generate_structure_data(name="H2O", pbc=(False, False, False)) + # Assign the molecule + w.input_structure = structure + + # Check override can not modify the kpoints_distance + assert w.kpoints_distance.disabled is True + w.override.value = True + assert w.kpoints_distance.disabled is True + + # Confirm the value of kpoints_distance is fixed + assert w.value.get("kpoints_distance") == 100.0 + + w.protocol = "fast" + assert w.value.get("kpoints_distance") == 100.0 + + # # Check reset + w.input_structure = None + w.reset() + + # # the protocol will not be reset + assert w.protocol == "fast" + assert w.value.get("kpoints_distance") == 0.5 + + def test_advanced_tot_charge_settings(): """Test TotCharge widget.""" from aiidalab_qe.app.configuration.advanced import AdvancedSettings @@ -121,7 +158,6 @@ def test_advanced_tot_charge_settings(): def test_advanced_kpoints_mesh(): """Test Mesh Grid HTML widget.""" from aiida import orm - from aiidalab_qe.app.configuration.advanced import AdvancedSettings w = AdvancedSettings() @@ -150,4 +186,59 @@ def test_advanced_kpoints_mesh(): # change protocol w.protocol = "fast" - assert w.mesh_grid.value == "Mesh [6, 6, 6]" + assert w.mesh_grid.value == "Mesh [5, 5, 5]" + + +def test_advanced_hubbard_widget(generate_structure_data): + """Test Hubbard widget.""" + + from aiidalab_qe.app.configuration.advanced import AdvancedSettings + + w = AdvancedSettings() + + structure = generate_structure_data(name="LiCoO2") + + w.input_structure = structure + + # Activate Hubbard U widget + w.hubbard_widget.activate_hubbard.value = True + + assert w.hubbard_widget.input_labels == ["Co - 3d", "O - 2p", "Li - 2s"] + + # Change the value of the Hubbard U for Co, O and Li + w.hubbard_widget.hubbard_widget.children[1].children[0].value = 1 + w.hubbard_widget.hubbard_widget.children[2].children[0].value = 2 + w.hubbard_widget.hubbard_widget.children[3].children[0].value = 3 + + assert w.hubbard_widget.hubbard_dict == { + "hubbard_u": {"Co - 3d": 1.0, "O - 2p": 2.0, "Li - 2s": 3.0} + } + + # Check eigenvalues are empty + assert w.hubbard_widget.eigenvalues_dict == {} + + w.hubbard_widget.eigenvalues_label.value = True + + # Check there is only eigenvalues for Co (Transition metal) + + assert len(w.hubbard_widget.eigen_values_widget.children) == 1 + + # The widget hierarchy for eigenvalues: + # - w.hubbard_widget.eigen_values_widget.children[0]: List of eigenvalues for Co + # - w.hubbard_widget.eigen_values_widget.children[0].children[1]: Widgets for up and down spin + # - w.hubbard_widget.eigen_values_widget.children[0].children[1].children[0]: Widget for up spin + # - w.hubbard_widget.eigen_values_widget.children[0].children[1].children[0].children[1]: Widget for eigenvalue 1 (3d range: 1 to 5) + + 
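As a reading aid for the chained indexing below, the hierarchy in the comment above can be captured in a small helper (hypothetical, mirroring only the indexing used in this test):

    def eigenvalue_widget(hubbard_widget, kind_index=0, spin=0, slot=1):
        """Return one eigenvalue input widget (sketch; spin 0 = up, 1 = down)."""
        kind_box = hubbard_widget.eigen_values_widget.children[kind_index]
        spin_box = kind_box.children[1].children[spin]
        return spin_box.children[slot]  # eigenvalue inputs sit at odd slots

    # Equivalent to the first chained-index assignment below:
    # eigenvalue_widget(w.hubbard_widget, 0, 0, 1).value = "1"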
w.hubbard_widget.eigen_values_widget.children[0].children[1].children[0].children[ + 1 + ].value = "1" + w.hubbard_widget.eigen_values_widget.children[0].children[1].children[0].children[ + 3 + ].value = "1" + w.hubbard_widget.eigen_values_widget.children[0].children[1].children[0].children[ + 5 + ].value = "1" + + assert w.hubbard_widget.eigenvalues_dict == { + "starting_ns_eigenvalue": [[1, 1, "Co", 1], [3, 1, "Co", 1], [5, 1, "Co", 1]] + } diff --git a/tests/conftest.py b/tests/conftest.py index d73f258fd..c05067b76 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -5,7 +5,9 @@ import tempfile import pytest + from aiida import orm +from aiidalab_qe.setup.pseudos import SSSP_VERSION pytest_plugins = ["aiida.manage.tests.pytest_fixtures"] @@ -69,7 +71,50 @@ def _generate_structure_data(name="silicon", pbc=(True, True, True)): structure.append_atom(position=(1.28, 1.28, 0.0), symbols="O") structure.append_atom(position=(2.9, 2.9, 0.0), symbols="O") structure.append_atom(position=(0.81, 3.37, 1.33), symbols="O") + + elif name == "LiCoO2": + a, b, c, d = ( + 1.4060463552647, + 0.81178124180108, + 4.6012019181836, + 1.6235624832021, + ) + cell = [[a, -b, c], [0.0, d, c], [-a, -b, c]] + sites = [ + ["Co", "Co", (0, 0, 0)], + ["O", "O", (0, 0, 3.6020728736387)], + ["O", "O", (0, 0, 10.201532881212)], + ["Li", "Li", (0, 0, 6.9018028772754)], + ] + structure = orm.StructureData(cell=cell) + + for site in sites: + structure.append_atom(position=site[2], symbols=site[0], name=site[1]) + + elif name == "MoS2": + cell = [[3.1922, 0, 0], [-1.5961, 2.7646, 0], [0, 0, 13.3783]] + structure = orm.StructureData(cell=cell) + structure.append_atom(position=(-0.0, 1.84, 10.03), symbols="Mo") + structure.append_atom(position=(1.6, 0.92, 8.47), symbols="S") + structure.append_atom(position=(1.6, 0.92, 11.6), symbols="S") + + elif name == "H2O": + cell = [[10.0, 0.0, 0.0], [0.0, 10.0, 0.0], [0.0, 0.0, 10.0]] + structure = orm.StructureData(cell=cell) + structure.append_atom(position=(0.0, 0.0, 0.0), symbols="H") + structure.append_atom(position=(0.0, 0.0, 1.0), symbols="O") + structure.append_atom(position=(0.0, 1.0, 0.0), symbols="H") + + elif name == "H2O-larger": + # just a larger supercell. 
To test the warning messages + cell = [[20.0, 0.0, 0.0], [0.0, 20.0, 0.0], [0.0, 0.0, 20.0]] + structure = orm.StructureData(cell=cell) + structure.append_atom(position=(0.0, 0.0, 0.0), symbols="H") + structure.append_atom(position=(0.0, 0.0, 1.0), symbols="O") + structure.append_atom(position=(0.0, 1.0, 0.0), symbols="H") + structure.pbc = pbc + return structure return _generate_structure_data @@ -85,10 +130,6 @@ def _generate_xy_data(xvals=None, yvals=None, xlabel=None, ylabel=None): """ from aiida.orm import XyData - xvals = xvals - yvals = yvals - xlabel = xlabel - ylabel = ylabel xunits = "n/a" yunits = ["n/a"] * len(ylabel) @@ -108,6 +149,7 @@ def generate_bands_data(): def _generate_bands_data(): """Return a `BandsData` instance with some basic `kpoints` and `bands` arrays.""" import numpy as np + from aiida.plugins import DataFactory BandsData = DataFactory("core.array.bands") @@ -131,6 +173,7 @@ def generate_projection_data(generate_bands_data): def _generate_projection_data(): """Return an ``ProjectionData`` node.""" import numpy as np + from aiida.plugins import DataFactory, OrbitalFactory ProjectionData = DataFactory("core.array.projection") @@ -175,33 +218,34 @@ def sssp(aiida_profile, generate_upf_data): cutoffs = {} stringency = "standard" - with tempfile.TemporaryDirectory() as dirpath: + actinides = ( + "Ac", + "Th", + "Pa", + "U", + "Np", + "Pu", + "Am", + "Cm", + "Bk", + "Cf", + "Es", + "Fm", + "Md", + "No", + "Lr", + ) + + with tempfile.TemporaryDirectory() as d: + dirpath = pathlib.Path(d) + for values in elements.values(): element = values["symbol"] - actinides = ( - "Ac", - "Th", - "Pa", - "U", - "Np", - "Pu", - "Am", - "Cm", - "Bk", - "Cf", - "Es", - "Fm", - "Md", - "No", - "Lr", - ) - if element in actinides: continue upf = generate_upf_data(element) - dirpath = pathlib.Path(dirpath) filename = dirpath / f"{element}.upf" with open(filename, "w+b") as handle: @@ -214,7 +258,7 @@ def sssp(aiida_profile, generate_upf_data): "cutoff_rho": 240.0, } - label = "SSSP/1.2/PBEsol/efficiency" + label = f"SSSP/{SSSP_VERSION}/PBEsol/efficiency" family = SsspFamily.create_from_folder(dirpath, label) family.set_cutoffs(cutoffs, stringency, unit="Ry") @@ -268,6 +312,16 @@ def projwfc_code(aiida_local_code_factory): ) +@pytest.fixture +def projwfc_bands_code(aiida_local_code_factory): + """Return a `Code` configured for the projwfc.x executable.""" + return aiida_local_code_factory( + label="projwfc_bands", + executable="bash", + entry_point="quantumespresso.projwfc", + ) + + @pytest.fixture() def workchain_settings_generator(): """Return a function that generates a workchain settings dictionary.""" @@ -295,7 +349,7 @@ def _smearing_settings_generator(**kwargs): @pytest.fixture -def app(pw_code, dos_code, projwfc_code): +def app(pw_code, dos_code, projwfc_code, projwfc_bands_code): from aiidalab_qe.app.main import App # Since we use `qe_auto_setup=False`, which will skip the pseudo library installation @@ -305,13 +359,15 @@ def app(pw_code, dos_code, projwfc_code): app.submit_step.sssp_installation_status.installed = True # set up codes - app.submit_step.pw_code.refresh() - app.submit_step.codes["dos"].refresh() - app.submit_step.codes["projwfc"].refresh() + app.submit_step.pw_code.code_selection.refresh() + app.submit_step.codes["dos"].code_selection.refresh() + app.submit_step.codes["projwfc"].code_selection.refresh() + app.submit_step.codes["projwfc_bands"].code_selection.refresh() app.submit_step.pw_code.value = pw_code.uuid app.submit_step.codes["dos"].value = dos_code.uuid 
app.submit_step.codes["projwfc"].value = projwfc_code.uuid + app.submit_step.codes["projwfc_bands"].value = projwfc_bands_code.uuid yield app @@ -334,7 +390,9 @@ def _submit_app_generator( smearing="methfessel-paxton", degauss=0.015, tot_charge=0.0, + vdw_corr="none", initial_magnetic_moments=0.0, + electron_maxstep=80, ): configure_step = app.configure_step # Settings @@ -352,10 +410,12 @@ def _submit_app_generator( # Advanced settings configure_step.advanced_settings.override.value = True configure_step.advanced_settings.total_charge.value = tot_charge + configure_step.advanced_settings.van_der_waals.value = vdw_corr configure_step.advanced_settings.kpoints_distance.value = kpoints_distance configure_step.advanced_settings.magnetization._set_magnetization_values( initial_magnetic_moments ) + configure_step.advanced_settings.electron_maxstep.value = electron_maxstep # mimic the behavior of the smearing widget set up configure_step.advanced_settings.smearing.smearing.value = smearing configure_step.advanced_settings.smearing.degauss.value = degauss @@ -363,7 +423,7 @@ def _submit_app_generator( # submit_step = app.submit_step submit_step.input_structure = generate_structure_data() - submit_step.resources_config.num_cpus.value = 2 + submit_step.pw_code.num_cpus.value = 2 return app @@ -419,15 +479,26 @@ def generate_pdos_workchain( def _generate_pdos_workchain(structure, spin_type="none"): import numpy as np + from aiida import engine from aiida.orm import Dict, FolderData, RemoteData from aiida_quantumespresso.workflows.pdos import PdosWorkChain + pseudo_family = f"SSSP/{SSSP_VERSION}/PBEsol/efficiency" + inputs = { "pw_code": fixture_code("quantumespresso.pw"), "dos_code": fixture_code("quantumespresso.dos"), "projwfc_code": fixture_code("quantumespresso.projwfc"), "structure": structure, + "overrides": { + "scf": { + "pseudo_family": pseudo_family, + }, + "nscf": { + "pseudo_family": pseudo_family, + }, + }, } builder = PdosWorkChain.get_builder_from_protocol(**inputs) inputs = builder._inputs() @@ -515,39 +586,66 @@ def _generate_pdos_workchain(structure, spin_type="none"): @pytest.fixture def generate_bands_workchain( - fixture_localhost, fixture_code, - generate_xy_data, generate_bands_data, generate_workchain, ): """Generate an instance of a the WorkChain.""" def _generate_bands_workchain(structure): - from copy import deepcopy - from aiida import engine from aiida.orm import Dict - from aiida_quantumespresso.workflows.pw.bands import PwBandsWorkChain + from aiidalab_qe.plugins.bands.bands_workchain import BandsWorkChain + + pseudo_family = f"SSSP/{SSSP_VERSION}/PBEsol/efficiency" inputs = { - "code": fixture_code("quantumespresso.pw"), + "pw_code": fixture_code("quantumespresso.pw"), + "projwfc_code": fixture_code("quantumespresso.projwfc"), "structure": structure, + "simulation_mode": "normal", + "overrides": { + "scf": { + "pseudo_family": pseudo_family, + }, + "bands": { + "pseudo_family": pseudo_family, + }, + "relax": { + "base": { + "pseudo_family": pseudo_family, + }, + "base_final_scf": { + "pseudo_family": pseudo_family, + }, + }, + }, } - builder = PwBandsWorkChain.get_builder_from_protocol(**inputs) + builder = BandsWorkChain.get_builder_from_protocol(**inputs) inputs = builder._inputs() - inputs["relax"]["base_final_scf"] = deepcopy(inputs["relax"]["base"]) - wkchain = generate_workchain(PwBandsWorkChain, inputs) + wkchain = generate_workchain(BandsWorkChain, inputs) wkchain.setup() # run bands and return the process - output_parameters = Dict(dict={"fermi_energy": 2.0}) 
- output_parameters.store() - wkchain.out("scf_parameters", output_parameters) - wkchain.out("band_parameters", output_parameters) + fermi_dict = Dict(dict={"fermi_energy": 2.0}) + fermi_dict.store() + output_parameters = { + "bands": { + "scf_parameters": fermi_dict, + "band_parameters": fermi_dict, + } + } + + wkchain.out( + "bands.scf_parameters", output_parameters["bands"]["scf_parameters"] + ) + wkchain.out( + "bands.band_parameters", output_parameters["bands"]["band_parameters"] + ) + # band_structure = generate_bands_data() band_structure.store() - wkchain.out("band_structure", band_structure) + wkchain.out("bands.band_structure", band_structure) wkchain.update_outputs() # bands_node = wkchain.node @@ -564,6 +662,7 @@ def generate_qeapp_workchain( generate_workchain, generate_pdos_workchain, generate_bands_workchain, + fixture_code, ): """Generate an instance of the WorkChain.""" @@ -573,12 +672,17 @@ def _generate_qeapp_workchain( run_bands=True, run_pdos=True, spin_type="none", + electronic_type="metal", + magnetization_type="starting_magnetization", # Options: "starting_magnetization", "tot_magnetization" initial_magnetic_moments=0.0, + tot_magnetization=0.0, ): from copy import deepcopy + from aiida.orm import Dict from aiida.orm.utils.serialize import serialize - + from aiidalab_qe.app.configuration import ConfigureQeAppWorkChainStep + from aiidalab_qe.app.submission import SubmitQeAppWorkChainStep from aiidalab_qe.workflows import QeAppWorkChain # Step 1: select structure from example @@ -595,7 +699,7 @@ def _generate_qeapp_workchain( s1.confirm() structure = s1.confirmed_structure # step 2 configure - s2 = app.configure_step + s2: ConfigureQeAppWorkChainStep = app.configure_step s2.workchain_settings.relax_type.value = relax_type # In order to parepare a complete inputs, I set all the properties to true # this can be overrided later @@ -603,20 +707,55 @@ def _generate_qeapp_workchain( s2.workchain_settings.properties["pdos"].run.value = run_pdos s2.workchain_settings.workchain_protocol.value = "fast" s2.workchain_settings.spin_type.value = spin_type - s2.advanced_settings.magnetization._set_magnetization_values( - initial_magnetic_moments - ) + s2.workchain_settings.electronic_type.value = electronic_type + if spin_type == "collinear": + s2.advanced_settings.override.value = True + magnetization_values = ( + initial_magnetic_moments + if magnetization_type == "starting_magnetization" + else tot_magnetization + ) + s2.advanced_settings.magnetization._set_tot_magnetization( + tot_magnetization + ) if electronic_type == "insulator" else s2.advanced_settings.magnetization._set_magnetization_values( + magnetization_values + ) + s2.confirm() # step 3 setup code and resources - s3 = app.submit_step - s3.resources_config.num_cpus.value = 4 + s3: SubmitQeAppWorkChainStep = app.submit_step + s3.pw_code.num_cpus.value = 4 + builder = s3._create_builder() inputs = builder._inputs() inputs["relax"]["base_final_scf"] = deepcopy(inputs["relax"]["base"]) + + # Setting up inputs for bands_projwfc + inputs["bands"]["bands_projwfc"]["scf"]["pw"] = deepcopy( + inputs["bands"]["bands"]["scf"]["pw"] + ) + inputs["bands"]["bands_projwfc"]["bands"]["pw"] = deepcopy( + inputs["bands"]["bands"]["bands"]["pw"] + ) + inputs["bands"]["bands_projwfc"]["bands"]["pw"]["code"] = inputs["bands"][ + "bands" + ]["bands"]["pw"]["code"] + inputs["bands"]["bands_projwfc"]["scf"]["pw"]["code"] = inputs["bands"][ + "bands" + ]["scf"]["pw"]["code"] + + inputs["bands"]["bands_projwfc"]["projwfc"]["projwfc"]["code"] = 
+        inputs["bands"]["bands_projwfc"]["projwfc"]["projwfc"]["code"] = fixture_code(
+            "quantumespresso.projwfc"
+        )
+        inputs["bands"]["bands_projwfc"]["projwfc"]["projwfc"]["parameters"] = Dict(
+            {"PROJWFC": {"DeltaE": 0.01}}
+        ).store()
+
         if run_bands:
             inputs["properties"].append("bands")
         if run_pdos:
             inputs["properties"].append("pdos")
+
         wkchain = generate_workchain(QeAppWorkChain, inputs)
         wkchain.setup()
         # mock output
@@ -630,11 +769,11 @@ def _generate_qeapp_workchain(
                 wkchain.exposed_outputs(pdos.node, PdosWorkChain, namespace="pdos")
             )
         if run_bands:
-            from aiida_quantumespresso.workflows.pw.bands import PwBandsWorkChain
+            from aiidalab_qe.plugins.bands.bands_workchain import BandsWorkChain

             bands = generate_bands_workchain(structure)
             wkchain.out_many(
-                wkchain.exposed_outputs(bands.node, PwBandsWorkChain, namespace="bands")
+                wkchain.exposed_outputs(bands.node, BandsWorkChain, namespace="bands")
             )
         wkchain.update_outputs()
         # set ui_parameters
diff --git a/tests/test_app.py b/tests/test_app.py
index 65bfac6c2..79c586b8f 100644
--- a/tests/test_app.py
+++ b/tests/test_app.py
@@ -12,7 +12,7 @@ def test_reload_and_reset(submit_app_generator, generate_qeapp_workchain):
     )
     app = submit_app_generator()
     # select the pk
-    app.work_chain_selector.value = wkchain.node.pk
+    app.process = wkchain.node.pk
     # check that the values are reloaded correctly
     assert app.configure_step.workchain_settings.relax_type.value == "positions"
     assert app.configure_step.workchain_settings.spin_type.value == "collinear"
@@ -24,8 +24,13 @@ def test_reload_and_reset(submit_app_generator, generate_qeapp_workchain):
         )
         > 0
     )
+    assert app.configure_step.state == app.configure_step.State.SUCCESS
+    # in the reload case, going to the submit step should not
+    # trigger a reset of the previous steps
+    app._wizard_app_widget.selected_index = 2
+    assert app.configure_step.state == app.configure_step.State.SUCCESS
     # new workflow, this will reset the GUI
-    app.work_chain_selector.value = None
+    app.process = None
     # check that the values are reloaded correctly
     assert app.structure_step.manager.structure is None
     assert app.configure_step.workchain_settings.relax_type.value == "positions_cell"
@@ -39,7 +44,7 @@ def test_reload_and_reset(submit_app_generator, generate_qeapp_workchain):
         )
         == 0
     )
-    assert app.submit_step.resources_config.num_cpus.value == 1
+    assert app.submit_step.pw_code.num_cpus.value == 4


 def test_select_new_structure(app_to_submit, generate_structure_data):
diff --git a/tests/test_cli.py b/tests/test_cli.py
index 53c334307..93cb52229 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -1,8 +1,8 @@
 import time

-import aiida
 from click.testing import CliRunner, Result

+import aiida
 import aiidalab_qe.__main__ as cli

 # To learn more about testing click applications, see: https://click.palletsprojects.com/en/8.1.x/testing/
@@ -24,7 +24,7 @@ def test_download_and_install_pseudos(tmp_path, aiida_profile, monkeypatch):

     Note: this test is slow, it takes about ~30 seconds to run.
""" - from aiidalab_qe.common.setup_pseudos import ( + from aiidalab_qe.setup.pseudos import ( PSEUDODOJO_VERSION, SSSP_VERSION, pseudos_to_install, @@ -39,7 +39,7 @@ def test_download_and_install_pseudos(tmp_path, aiida_profile, monkeypatch): } # mock the EXPECTED_PSEUDOS monkeypatch.setattr( - "aiidalab_qe.common.setup_pseudos.EXPECTED_PSEUDOS", + "aiidalab_qe.setup.pseudos.EXPECTED_PSEUDOS", MOCK_EXPECTED_PSEUDOS, ) diff --git a/tests/test_codes.py b/tests/test_codes.py index 37f46dd4c..535d94526 100644 --- a/tests/test_codes.py +++ b/tests/test_codes.py @@ -59,3 +59,23 @@ def test_identify_submission_blockers(app): submit.codes["dos"].value = dos_value blockers = list(submit._identify_submission_blockers()) assert len(blockers) == 0 + + +def test_qeapp_computational_resources_widget(): + """Test QEAppComputationalResourcesWidget.""" + from aiidalab_qe.app.submission import SubmitQeAppWorkChainStep + + new_submit_step = SubmitQeAppWorkChainStep(qe_auto_setup=False) + assert new_submit_step.codes["pw"].parallelization.npool.layout.display == "none" + new_submit_step.codes["pw"].parallelization.override.value = True + new_submit_step.codes["pw"].parallelization.npool.value = 2 + assert new_submit_step.codes["pw"].parallelization.npool.layout.display == "block" + assert new_submit_step.codes["pw"].parameters == { + "code": None, + "cpus": 1, + "cpus_per_task": 1, + "max_wallclock_seconds": 43200, + "nodes": 1, + "ntasks_per_node": 1, + "parallelization": {"npool": 2}, + } diff --git a/tests/test_configure.py b/tests/test_configure.py index 3727280cf..6274e0df2 100644 --- a/tests/test_configure.py +++ b/tests/test_configure.py @@ -1,3 +1,6 @@ +from aiidalab_qe.setup.pseudos import PSEUDODOJO_VERSION, SSSP_VERSION + + def test_protocol(): """Test the protocol. The protocol from workchain_settings will trigger the @@ -31,12 +34,14 @@ def test_set_configuration_parameters(): wg = ConfigureQeAppWorkChainStep() parameters = wg.get_configuration_parameters() parameters["workchain"]["relax_type"] = "positions" - parameters["advanced"]["pseudo_family"] = "SSSP/1.2/PBE/efficiency" + parameters["advanced"]["pseudo_family"] = f"SSSP/{SSSP_VERSION}/PBE/efficiency" wg.set_configuration_parameters(parameters) new_parameters = wg.get_configuration_parameters() assert parameters == new_parameters # test pseudodojo - parameters["advanced"]["pseudo_family"] = "PseudoDojo/0.4/PBEsol/SR/standard/upf" + parameters["advanced"]["pseudo_family"] = ( + f"PseudoDojo/{PSEUDODOJO_VERSION}/PBEsol/SR/standard/upf" + ) wg.set_configuration_parameters(parameters) new_parameters = wg.get_configuration_parameters() assert parameters == new_parameters @@ -54,3 +59,25 @@ def test_panel(): assert len(wg.tab.children) == 3 parameters = wg.get_configuration_parameters() assert "bands" in parameters + + +def test_reminder_info(): + """Dynamic add/remove the reminder text based on the workchain settings.""" + from aiidalab_qe.app.configuration import ConfigureQeAppWorkChainStep + + wg = ConfigureQeAppWorkChainStep() + assert wg.workchain_settings.reminder_info["bands"].value == "" + # select bands + wg.workchain_settings.properties["bands"].run.value = True + for name in wg.workchain_settings.reminder_info: + if name == "bands": + assert ( + wg.workchain_settings.reminder_info["bands"].value + == "Customize bands settings in the panel above if needed." 
+            )
+        else:
+            # all other reminder texts should be empty
+            assert wg.workchain_settings.reminder_info[name].value == ""
+    # unselect bands
+    wg.workchain_settings.properties["bands"].run.value = False
+    assert wg.workchain_settings.reminder_info["bands"].value == ""
diff --git a/tests/test_infobox.py b/tests/test_infobox.py
new file mode 100644
index 000000000..892335da9
--- /dev/null
+++ b/tests/test_infobox.py
@@ -0,0 +1,16 @@
+from aiidalab_qe.common.infobox import InfoBox
+
+
+def test_infobox_classes():
+    """Test `InfoBox` classes."""
+    custom_classes = ["custom-1", "custom-2 custom-3"]
+    infobox = InfoBox(classes=custom_classes)
+    assert all(
+        css_class in infobox._dom_classes
+        for css_class in (
+            "info-box",
+            "custom-1",
+            "custom-2",
+            "custom-3",
+        )
+    )
diff --git a/tests/test_plugins_bands.py b/tests/test_plugins_bands.py
index a819f1ea8..00e7a75c8 100644
--- a/tests/test_plugins_bands.py
+++ b/tests/test_plugins_bands.py
@@ -3,32 +3,58 @@
 @pytest.mark.usefixtures("sssp")
 def test_result(generate_qeapp_workchain):
-    from widget_bandsplot import BandsPlotWidget
+    import plotly.graph_objects as go

-    from aiidalab_qe.plugins.bands.result import Result, export_bands_data
+    from aiidalab_qe.common.bandpdoswidget import BandPdosWidget
+    from aiidalab_qe.plugins.bands.result import Result

     wkchain = generate_qeapp_workchain()
-    data = export_bands_data(wkchain.node.outputs.bands)
-    assert data is not None
     # generate structure for scf calculation
     result = Result(wkchain.node)
     result._update_view()
-    assert isinstance(result.children[0], BandsPlotWidget)
+    assert isinstance(result.children[0], BandPdosWidget)
+    assert isinstance(result.children[0].bandsplot_widget, go.FigureWidget)
+
+    # Check if data is correct
+    assert result.children[0].bands_data is not None
+    assert result.children[0].bands_data["pathlabels"] is not None
+    assert result.children[0].pdos_data is None
+
+    # Check Bands axis
+    assert result.children[0].bandsplot_widget.layout.xaxis.title.text == "k-points"
+    assert (
+        result.children[0].bandsplot_widget.layout.yaxis.title.text
+        == "Electronic Bands (eV)"
+    )
+    assert isinstance(
+        result.children[0].bandsplot_widget.layout.xaxis.rangeslider,
+        go.layout.xaxis.Rangeslider,
+    )
+    assert result.children[0].bands_data["pathlabels"][0] == list(
+        result.children[0].bandsplot_widget.layout.xaxis.ticktext
+    )


 @pytest.mark.usefixtures("sssp")
 def test_structure_1d(generate_qeapp_workchain, generate_structure_data):
     structure = generate_structure_data("silicon", pbc=(True, False, False))
     wkchain = generate_qeapp_workchain(structure=structure)
-    assert "bands_kpoints_distance" not in wkchain.inputs.bands
-    assert "bands_kpoints" in wkchain.inputs.bands
-    assert len(wkchain.inputs.bands.bands_kpoints.labels) == 2
+    assert "bands_kpoints_distance" not in wkchain.inputs.bands.bands
+    assert "bands_kpoints" in wkchain.inputs.bands.bands
+    assert len(wkchain.inputs.bands.bands.bands_kpoints.labels) == 2
+    assert wkchain.inputs.bands.bands.bands_kpoints.labels == [(0, "Ξ“"), (9, "X")]


 @pytest.mark.usefixtures("sssp")
 def test_structure_2d(generate_qeapp_workchain, generate_structure_data):
-    structure = generate_structure_data("silicon", pbc=(True, True, False))
+    structure = generate_structure_data("MoS2", pbc=(True, True, False))
     wkchain = generate_qeapp_workchain(structure=structure)
-    assert "bands_kpoints_distance" not in wkchain.inputs.bands
-    assert "bands_kpoints" in wkchain.inputs.bands
-    assert len(wkchain.inputs.bands.bands_kpoints.labels) == 4
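+    # MoS2 is hexagonal, so the expected 2D band path is Ξ“-M-K-Ξ“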
"bands_kpoints_distance" not in wkchain.inputs.bands.bands + assert "bands_kpoints" in wkchain.inputs.bands.bands + assert len(wkchain.inputs.bands.bands.bands_kpoints.labels) == 4 + assert wkchain.inputs.bands.bands.bands_kpoints.labels == [ + (0, "Ξ“"), + (11, "M"), + (18, "K"), + (31, "Ξ“"), + ] diff --git a/tests/test_plugins_electronic_structure.py b/tests/test_plugins_electronic_structure.py index c9c107f43..f25351563 100644 --- a/tests/test_plugins_electronic_structure.py +++ b/tests/test_plugins_electronic_structure.py @@ -1,20 +1,57 @@ def test_electronic_structure(generate_qeapp_workchain): """Test the electronic structure tab.""" - from aiida import engine + import time + + import plotly.graph_objects as go + from aiida import engine from aiidalab_qe.app.result.workchain_viewer import WorkChainViewer + from aiidalab_qe.common.bandpdoswidget import BandPdosWidget + from aiidalab_qe.plugins.electronic_structure.result import Result wkchain = generate_qeapp_workchain() wkchain.node.set_exit_status(0) wkchain.node.set_process_state(engine.ProcessState.FINISHED) + wkchain.node.seal() wcv = WorkChainViewer(wkchain.node) + # wait for the tabs to be updated by the process monitor + time.sleep(3) # find the tab with the identifier "electronic_structure" # the built-in summary and structure tabs is not a plugin panel, # thus don't have identifiers - tab = [ + tab = next( tab for tab in wcv.result_tabs.children if getattr(tab, "identifier", "") == "electronic_structure" - ][0] - # It should have two children: settings and the _bands_plot_view - assert len(tab.children) == 2 + ) + # It should have one children: the _bands_plot_view + assert len(tab.children) == 1 + + result = Result(node=wkchain.node) + result._update_view() + + assert isinstance(result.children[0], BandPdosWidget) + assert isinstance(result.children[0].bandsplot_widget, go.FigureWidget) + + # Check if data is correct + assert result.children[0].bands_data is not None + assert result.children[0].bands_data["pathlabels"] is not None + assert result.children[0].pdos_data is not None + + # Check Bands axis + assert result.children[0].bandsplot_widget.layout.xaxis.title.text == "k-points" + assert ( + result.children[0].bandsplot_widget.layout.xaxis2.title.text + == "Density of states" + ) + assert ( + result.children[0].bandsplot_widget.layout.yaxis.title.text + == "Electronic Bands (eV)" + ) + assert isinstance( + result.children[0].bandsplot_widget.layout.xaxis.rangeslider, + go.layout.xaxis.Rangeslider, + ) + assert result.children[0].bands_data["pathlabels"][0] == list( + result.children[0].bandsplot_widget.layout.xaxis.ticktext + ) diff --git a/tests/test_plugins_pdos.py b/tests/test_plugins_pdos.py index b625143a9..3fda29b76 100644 --- a/tests/test_plugins_pdos.py +++ b/tests/test_plugins_pdos.py @@ -3,38 +3,27 @@ @pytest.mark.usefixtures("sssp") def test_result(generate_qeapp_workchain): - from aiidalab_qe.plugins.pdos.result import Result, export_pdos_data + import plotly.graph_objects as go - wkchain = generate_qeapp_workchain() - data = export_pdos_data(wkchain.node.outputs.pdos) - assert data is not None - # generate structure for scf calculation - result = Result(node=wkchain.node) - result._update_view() - assert len(result.children) == 2 - - -@pytest.mark.usefixtures("sssp") -def test_result_spin(generate_qeapp_workchain): - from aiidalab_qe.plugins.pdos.result import Result, export_pdos_data + from aiidalab_qe.common.bandpdoswidget import BandPdosWidget + from aiidalab_qe.plugins.pdos.result import Result - 
-    wkchain = generate_qeapp_workchain(spin_type="collinear")
-    data = export_pdos_data(wkchain.node.outputs.pdos)
-    assert data is not None
+    wkchain = generate_qeapp_workchain()
     # generate structure for scf calculation
     result = Result(node=wkchain.node)
     result._update_view()
-    assert len(result.children) == 2
+    assert isinstance(result.children[0], BandPdosWidget)
+    assert isinstance(result.children[0].bandsplot_widget, go.FigureWidget)

+    # Check if data is correct
+    assert result.children[0].bands_data is None
+    assert result.children[0].pdos_data is not None

-@pytest.mark.usefixtures("sssp")
-def test_result_group_by(generate_qeapp_workchain):
-    from aiidalab_qe.plugins.pdos.result import Result, export_pdos_data
+    # Check that the PDOS settings are not None

-    wkchain = generate_qeapp_workchain()
-    data = export_pdos_data(wkchain.node.outputs.pdos)
-    assert data is not None
-    # generate structure for scf calculation
-    result = Result(node=wkchain.node)
-    result._update_view()
-    result.children[0].children[0].children[1].value = "angular"
+    # Check Bands axis
+    assert (
+        result.children[0].bandsplot_widget.layout.xaxis.title.text
+        == "Density of states (eV)"
+    )
+    assert result.children[0].bandsplot_widget.layout.yaxis.title.text is None
diff --git a/tests/test_plugins_xas.py b/tests/test_plugins_xas.py
new file mode 100644
index 000000000..4b45fc43a
--- /dev/null
+++ b/tests/test_plugins_xas.py
@@ -0,0 +1,37 @@
+import pytest
+
+
+@pytest.mark.usefixtures("sssp")
+def test_settings(submit_app_generator):
+    """Test the settings of the xas app."""
+    app = submit_app_generator(properties=["xas"])
+    configure_step = app.configure_step
+    # test get_panel_value
+    # select the first element
+    configure_step.settings["xas"].element_and_ch_treatment.children[0].children[
+        0
+    ].value = True
+    configure_step.settings["xas"].supercell_min_parameter.value = 4.0
+    parameters = configure_step.settings["xas"].get_panel_value()
+    assert parameters["core_hole_treatments"] == {"Si": "full"}
+    assert parameters["pseudo_labels"] == {
+        "Si": {
+            "gipaw": "Si.pbe-van_gipaw.UPF",
+            "core_hole": "Si.star1s-pbe-van_gipaw.UPF",
+        }
+    }
+    assert parameters["core_wfc_data_labels"] == {"Si": "Si.pbe-van_gipaw.dat"}
+    assert parameters["supercell_min_parameter"] == 4.0
+    # test set_panel_value
+    # update the parameters
+    parameters["supercell_min_parameter"] = 5.0
+    parameters["core_hole_treatments"] = {"Si": "xch_smear"}
+    configure_step.settings["xas"].set_panel_value(parameters)
+    assert configure_step.settings["xas"].supercell_min_parameter.value == 5.0
+    assert (
+        configure_step.settings["xas"]
+        .element_and_ch_treatment.children[0]
+        .children[1]
+        .value
+        == "xch_smear"
+    )
diff --git a/tests/test_plugins_xps.py b/tests/test_plugins_xps.py
new file mode 100644
index 000000000..91e0633a5
--- /dev/null
+++ b/tests/test_plugins_xps.py
@@ -0,0 +1,38 @@
+import pytest
+
+
+@pytest.mark.usefixtures("sssp")
+def test_settings():
+    """Test the settings of the xps app."""
+
+    from ase.build import molecule
+
+    from aiida.orm import StructureData
+    from aiidalab_qe.app.configuration import ConfigureQeAppWorkChainStep
+
+    configure_step = ConfigureQeAppWorkChainStep()
+    # set the input structure
+    h2o = molecule("H2O")
+    h2o.center(vacuum=3.0)
+    structure = StructureData(ase=h2o)
+    configure_step.input_structure = structure
+    # select xps
+    configure_step.workchain_settings.properties["xps"].run.value = True
+    # test get_panel_value
+    configure_step.settings["xps"].structure_type.value = "molecule"
+    # select the first element, which is O_1s
+    configure_step.settings["xps"].core_level_list.children[0].value = True
+    parameters = configure_step.settings["xps"].get_panel_value()
+    assert parameters["structure_type"] == "molecule"
+    assert parameters["core_level_list"] == ["O_1s"]
+    assert (
+        "not supported"
+        in configure_step.settings["xps"].core_level_list.children[1].description
+    )
+    # set the parameters
+    configure_step.settings["xps"].structure_type.value = "crystal"
+    configure_step.settings["xps"].core_level_list.children[0].value = False
+    configure_step.settings["xps"].set_panel_value(parameters)
+    assert configure_step.settings["xps"].core_level_list.children[0].value is True
+    assert configure_step.settings["xps"].structure_type.value == "molecule"
diff --git a/tests/test_pseudo.py b/tests/test_pseudo.py
index a6628da64..ce2481928 100644
--- a/tests/test_pseudo.py
+++ b/tests/test_pseudo.py
@@ -1,7 +1,7 @@
 import pytest

-from aiida import orm
-from aiidalab_qe.common.setup_pseudos import (
+from aiida import orm
+from aiidalab_qe.setup.pseudos import (
     PSEUDODOJO_VERSION,
     SSSP_VERSION,
     _construct_cmd,
@@ -11,7 +11,7 @@

 def test_setup_pseudos_cmd(tmp_path):
-    """Test _construct_cmd function in setup_pseudos.py."""
+    """Test the _construct_cmd function in setup.pseudos."""
     # SSSP family
     pseudo_family = f"SSSP/{SSSP_VERSION}/PBE/efficiency"
@@ -75,7 +75,10 @@ def test_setup_pseudos_cmd(tmp_path):
     assert "--from-download" not in cmd

     # mock the source file
-    source_file = tmp_path / "PseudoDojo_0.4_PBEsol_SR_standard_upf.aiida_pseudo"
+    source_file = (
+        tmp_path
+        / f"PseudoDojo_{PSEUDODOJO_VERSION}_PBEsol_SR_standard_upf.aiida_pseudo"
+    )
     source_file.touch()
     cmd = _construct_cmd(pseudo_family, cwd=tmp_path)
     assert cmd == [
@@ -85,7 +88,7 @@ def test_setup_pseudos_cmd(tmp_path):
         "--functional",
         "PBEsol",
         "--version",
-        "0.4",
+        f"{PSEUDODOJO_VERSION}",
         "-p",
         "standard",
         "--relativistic",
@@ -93,7 +96,7 @@ def test_setup_pseudos_cmd(tmp_path):
         "--pseudo-format",
         "upf",
         "--from-download",
-        f"{str(tmp_path)}/PseudoDojo_0.4_PBEsol_SR_standard_upf.aiida_pseudo",
+        f"{tmp_path!s}/PseudoDojo_{PSEUDODOJO_VERSION}_PBEsol_SR_standard_upf.aiida_pseudo",
     ]

@@ -101,39 +104,39 @@
 def test_pseudos_installation():
     """Test install_pseudos"""
     # Test by comparing pseudos_to_install before and after the installation
-    assert len(pseudos_to_install()) == 8
+    assert len(pseudos_to_install()) == 12
     EXPECTED_PSEUDOS = {
         f"PseudoDojo/{PSEUDODOJO_VERSION}/PBE/SR/standard/upf",
         f"SSSP/{SSSP_VERSION}/PBE/efficiency",
     }
     # Install the pseudos
-    [_ for _ in _install_pseudos(EXPECTED_PSEUDOS)]
+    list(_install_pseudos(EXPECTED_PSEUDOS))

     # Two pseudos are installed
-    assert len(pseudos_to_install()) == 6
+    assert len(pseudos_to_install()) == 10


 @pytest.mark.usefixtures("aiida_profile_clean")
 def test_download_and_install_pseudo_from_file(tmp_path):
     """Test download and install pseudo from file."""
-    assert len(pseudos_to_install()) == 8
+    assert len(pseudos_to_install()) == 12
     EXPECTED_PSEUDOS = {
         f"PseudoDojo/{PSEUDODOJO_VERSION}/PBE/SR/standard/upf",
         f"SSSP/{SSSP_VERSION}/PBE/efficiency",
     }

     # Download the pseudos to the tmp_path but not install
-    [_ for _ in _install_pseudos(EXPECTED_PSEUDOS, download_only=True, cwd=tmp_path)]
+    list(_install_pseudos(EXPECTED_PSEUDOS, download_only=True, cwd=tmp_path))

-    assert len(pseudos_to_install()) == 8
+    assert len(pseudos_to_install()) == 12
     assert len(list(tmp_path.iterdir())) == 2

     # Install the pseudos from the tmp_path
-    [_ for _ in _install_pseudos(EXPECTED_PSEUDOS, cwd=tmp_path)]
+    list(_install_pseudos(EXPECTED_PSEUDOS, cwd=tmp_path))

     # Two pseudos are installed
-    assert len(pseudos_to_install()) == 6
+    assert len(pseudos_to_install()) == 10


 def test_pseudos_family_selector_widget():
@@ -144,21 +147,27 @@
     assert w.override.value is False

     w.override.value = True
-
+    w.spin_orbit = "wo_soc"
     # test the default value
-    assert w.value == "SSSP/1.2/PBEsol/efficiency"
+    assert w.value == f"SSSP/{SSSP_VERSION}/PBEsol/efficiency"

     # Test that a protocol change updates the value
     w.protocol = "precise"
-    assert w.value == "SSSP/1.2/PBEsol/precision"
+    assert w.value == f"SSSP/{SSSP_VERSION}/PBEsol/precision"

     # test that a functional change updates the value
     w.dft_functional.value = "PBE"
-    assert w.value == "SSSP/1.2/PBE/precision"
+    assert w.value == f"SSSP/{SSSP_VERSION}/PBE/precision"

     # Test that selecting a new pseudo library updates the value
     w.library_selection.value = "PseudoDojo stringent"
-    assert w.value == "PseudoDojo/0.4/PBE/SR/stringent/upf"
+    assert w.value == f"PseudoDojo/{PSEUDODOJO_VERSION}/PBE/SR/stringent/upf"
+
+    # Test that a spin-orbit change updates the value
+    w.spin_orbit = "soc"
+    w.protocol = "moderate"
+    assert w.value == f"PseudoDojo/{PSEUDODOJO_VERSION}/PBE/FR/standard/upf"


 @pytest.mark.usefixtures("sssp")
@@ -168,7 +177,9 @@ def test_pseudos_setter_widget(generate_structure_data, generate_upf_data):

     # test the widget is set with the elements of the structure
     silicon = generate_structure_data("silicon")
-    w = PseudoSetter(structure=silicon, pseudo_family="SSSP/1.2/PBEsol/efficiency")
+    w = PseudoSetter(
+        structure=silicon, pseudo_family=f"SSSP/{SSSP_VERSION}/PBEsol/efficiency"
+    )
     assert "Si" in w.pseudos.keys()
     assert w.ecutwfc == 30
diff --git a/tests/test_result.py b/tests/test_result.py
index 7fca7ba85..60b1ba34a 100644
--- a/tests/test_result.py
+++ b/tests/test_result.py
@@ -1,3 +1,7 @@
+import pytest
+
+
+@pytest.mark.usefixtures("sssp")
 def test_result_step(app_to_submit, generate_qeapp_workchain):
     """Test the result step is properly updated when the process is running."""
@@ -7,17 +11,37 @@
     assert step.state == step.State.ACTIVE


+@pytest.mark.usefixtures("sssp")
+def test_kill_and_clean_buttons(app_to_submit, generate_qeapp_workchain):
+    """Test that the kill and clean_scratch buttons are properly displayed when
+    the process is in different states."""
+
+    step = app_to_submit.results_step
+    step.process = generate_qeapp_workchain().node.uuid
+    step._update_state()
+    step._update_kill_button_layout()
+    step._update_clean_scratch_button_layout()
+    assert step.kill_button.layout.display == "block"
+    assert step.clean_scratch_button.layout.display == "none"
+
+
+@pytest.mark.usefixtures("sssp")
 def test_workchainview(generate_qeapp_workchain):
     """Test that the result tabs are properly updated."""
+    import time
+
     from aiidalab_qe.app.result.workchain_viewer import WorkChainViewer

     wkchain = generate_qeapp_workchain()
+    wkchain.node.seal()
     wcv = WorkChainViewer(wkchain.node)
+    time.sleep(3)
     assert len(wcv.result_tabs.children) == 5
     assert wcv.result_tabs._titles["0"] == "Workflow Summary"
     assert wcv.result_tabs._titles["1"] == "Final Geometry"


+@pytest.mark.usefixtures("sssp")
 def test_summary_report(data_regression, generate_qeapp_workchain):
     """Test the summary report can be properly generated."""
     from aiidalab_qe.app.result.summary_viewer import SummaryView
@@ -29,18 +53,20 @@ def test_summary_report(data_regression, generate_qeapp_workchain):
     data_regression.check(report)


+@pytest.mark.usefixtures("sssp")
 def test_summary_report_advanced_settings(data_regression, generate_qeapp_workchain):
     """Test advanced settings are properly reported"""
     from aiidalab_qe.app.result.summary_viewer import SummaryView

     wkchain = generate_qeapp_workchain(
-        spin_type="collinear", initial_magnetic_moments=0.1
+        spin_type="collinear", electronic_type="metal", initial_magnetic_moments=0.1
     )
     viewer = SummaryView(wkchain.node)
     report = viewer.report
     assert report["initial_magnetic_moments"]["Si"] == 0.1


+@pytest.mark.usefixtures("sssp")
 def test_summary_view(generate_qeapp_workchain):
     """Test the report html can be properly generated."""
     from bs4 import BeautifulSoup
diff --git a/tests/test_result/test_summary_report.yml b/tests/test_result/test_summary_report.yml
index 334d6ad33..b1ebaa504 100644
--- a/tests/test_result/test_summary_report.yml
+++ b/tests/test_result/test_summary_report.yml
@@ -17,13 +17,16 @@ properties:
 - pdos
 - relax
 protocol: fast
-pseudo_family: SSSP/1.2/PBEsol/efficiency
+pseudo_family: SSSP/1.3/PBEsol/efficiency
 pseudo_library: SSSP
 pseudo_link: https://www.materialscloud.org/discover/sssp/table/efficiency
 pseudo_protocol: efficiency
-pseudo_version: '1.2'
+pseudo_version: '1.3'
 relax_method: positions_cell
 relaxed: positions_cell
 scf_kpoints_distance: 0.5
 smearing: cold
+spin_orbit: false
 tot_charge: 0.0
+tot_magnetization: false
+vdw_corr: none
diff --git a/tests/test_submit_qe_workchain.py b/tests/test_submit_qe_workchain.py
index 40a8560ad..266b81ac8 100644
--- a/tests/test_submit_qe_workchain.py
+++ b/tests/test_submit_qe_workchain.py
@@ -1,3 +1,7 @@
+import pytest
+
+
+@pytest.mark.usefixtures("sssp")
 def test_create_builder_default(
     data_regression,
     submit_app_generator,
@@ -10,15 +14,47 @@
     app = submit_app_generator(properties=["bands", "pdos"])
     submit_step = app.submit_step

-    builder = submit_step._create_builder()
+    submit_step._create_builder()
+    # since the uuid is specific to each run, we remove it from the output
+    ui_parameters = remove_uuid_fields(submit_step.ui_parameters)
+    # regression test for the parameters generated by the app;
+    # these parameters are passed to the workchain
+    data_regression.check(ui_parameters)
+    # test that the builder is created successfully
+    submit_step._create_builder()
+    # In the future, we will check the builder parameters using a regression test

-    # check and validate the builder
-    got = builder_to_readable_dict(builder)
-
-    # regression test
-    data_regression.check(got)
+
+@pytest.mark.usefixtures("sssp")
+def test_create_process_label(
+    submit_app_generator,
+):
+    """Test the creation of the correct process label."""
+    app = submit_app_generator(properties=["bands", "pdos"])
+    submit_step = app.submit_step
+    submit_step._update_process_label()
+    assert (
+        submit_step.process_label.value
+        == "Si2 [relax: atoms+cell, moderate protocol] β†’ bands, pdos"
+    )
+    # suppose we change the label of the structure:
+    submit_step.input_structure.label = "Si2, unit cell"
+    submit_step._update_process_label()
+    assert (
+        submit_step.process_label.value
+        == "Si2, unit cell [relax: atoms+cell, moderate protocol] β†’ bands, pdos"
+    )
+    # suppose by mistake we provide an empty label; we then fall back to the formula:
+    submit_step.input_structure.label = ""
+    submit_step._update_process_label()
+    assert (
+        submit_step.process_label.value
+        == "Si2 [relax: atoms+cell, moderate protocol] β†’ bands, pdos"
+    )
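An aside on the regression setup above: remove_uuid_fields (defined at the bottom of this module) is what keeps the checked data stable across runs, since every node UUID changes on each test invocation. A minimal usage sketch, assuming the helper is importable from this test module (the input dict is illustrative only):

    from test_submit_qe_workchain import remove_uuid_fields

    ui_parameters = {
        "structure": "5f21a693-4a43-4a62-9a5a-3b2c1e7b1a2f",  # UUID value: key is dropped
        "workchain": {"protocol": "fast", "label": "not-a-uuid"},  # kept as-is
    }
    assert remove_uuid_fields(ui_parameters) == {
        "workchain": {"protocol": "fast", "label": "not-a-uuid"}
    }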
+
+@pytest.mark.usefixtures("sssp")
 def test_create_builder_insulator(
     submit_app_generator,
 ):
@@ -37,10 +73,14 @@

     # check and validate the builder
     got = builder_to_readable_dict(builder)

-    assert got["bands"]["scf"]["pw"]["parameters"]["SYSTEM"]["occupations"] == "fixed"
-    assert "smearing" not in got["bands"]["scf"]["pw"]["parameters"]["SYSTEM"]
+    assert (
+        got["bands"]["bands"]["scf"]["pw"]["parameters"]["SYSTEM"]["occupations"]
+        == "fixed"
+    )
+    assert "smearing" not in got["bands"]["bands"]["scf"]["pw"]["parameters"]["SYSTEM"]


+@pytest.mark.usefixtures("sssp")
 def test_create_builder_advanced_settings(
     submit_app_generator,
 ):
@@ -50,13 +90,18 @@
     -collinear
     -tot_charge
     -initial_magnetic_moments
+    -vdw_corr
+    -electron_maxstep
+    -properties: bands, pdos
     """
     app = submit_app_generator(
         electronic_type="metal",
         spin_type="collinear",
         tot_charge=1.0,
+        vdw_corr="dft-d3bj",
         initial_magnetic_moments=0.1,
+        electron_maxstep=100,
         properties=["bands", "pdos"],
     )
     submit_step = app.submit_step
@@ -69,11 +114,14 @@
     # test tot_charge is updated in the three steps
     for parameters in [
         got["relax"]["base"],
-        got["bands"]["scf"],
+        got["bands"]["bands"]["scf"],
         got["pdos"]["scf"],
         got["pdos"]["nscf"],
     ]:
         assert parameters["pw"]["parameters"]["SYSTEM"]["tot_charge"] == 1.0
+        assert parameters["pw"]["parameters"]["SYSTEM"]["vdw_corr"] == "dft-d3"
+        assert parameters["pw"]["parameters"]["SYSTEM"]["dftd3_version"] == 4
+        assert parameters["pw"]["parameters"]["ELECTRONS"]["electron_maxstep"] == 100

     # test initial_magnetic_moments set 'starting_magnetization' in pw.in
     assert (
@@ -84,6 +132,51 @@
     )
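A note on the vdW assertions in the test above: the app's single dft-d3bj option must be expanded into the two Quantum ESPRESSO keywords vdw_corr and dftd3_version, where version 4 selects D3 with Becke-Johnson damping. A minimal sketch of such a mapping, as an illustration only (the app's actual implementation lives in aiidalab_qe and may differ):

    # Illustrative mapping from a single UI option to QE SYSTEM-card keywords.
    VDW_TABLE = {
        "none": {"vdw_corr": "none"},
        "dft-d3": {"vdw_corr": "dft-d3", "dftd3_version": 3},    # zero damping
        "dft-d3bj": {"vdw_corr": "dft-d3", "dftd3_version": 4},  # Becke-Johnson damping
    }

    def apply_vdw_option(system_card: dict, option: str) -> dict:
        """Return a copy of the SYSTEM card with the vdW keywords merged in."""
        return {**system_card, **VDW_TABLE[option]}

    assert apply_vdw_option({"tot_charge": 1.0}, "dft-d3bj") == {
        "tot_charge": 1.0,
        "vdw_corr": "dft-d3",
        "dftd3_version": 4,
    }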
+ """ + import os + + suggestions = { + "more_resources": "Increase the resources", + "change_configuration": "Review the configuration", + "go_remote": "Select a code that runs on a larger machine", + "avoid_overloading": "Reduce the number of CPUs to avoid the overloading of the local machine", + } + app = submit_app_generator(properties=["bands", "pdos"]) + submit_step = app.submit_step + submit_step.codes["pw"].num_cpus.value = 1 + submit_step._check_resources() + # no warning: + assert submit_step._submission_warning_messages.value == "" + + # now we increase the resources, so we should have the Warning-3 + submit_step.codes["pw"].num_cpus.value = len(os.sched_getaffinity(0)) + submit_step._check_resources() + for suggestion in ["avoid_overloading", "go_remote"]: + assert suggestions[suggestion] in submit_step._submission_warning_messages.value + + # now we use a large structure, so we should have the Warning-1 (and 2 if not on localhost) + structure = generate_structure_data("H2O-larger") + submit_step.input_structure = structure + submit_step.codes["pw"].num_cpus.value = 1 + submit_step._check_resources() + num_sites = len(structure.sites) + volume = structure.get_cell_volume() + estimated_CPUs = submit_step._estimate_min_cpus(num_sites, volume) + assert estimated_CPUs == 2 + for suggestion in ["more_resources", "change_configuration"]: + assert suggestions[suggestion] in submit_step._submission_warning_messages.value + + def builder_to_readable_dict(builder): """transverse the builder and return a dictionary with readable values.""" from aiida import orm @@ -112,3 +205,35 @@ def builder_to_readable_dict(builder): readable_dict[k] = v return readable_dict + + +def remove_uuid_fields(data): + """ + Recursively remove fields that contain UUID values from a dictionary. + + :param data: The dictionary to process. + :return: The dictionary with UUID fields removed. 
+ """ + import re + + # Define a UUID pattern + uuid_pattern = re.compile( + r"[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}", re.I + ) + + if isinstance(data, dict): + new_dict = {} + for key, value in data.items(): + # If the value matches the UUID pattern, skip adding it to the new dictionary + if isinstance(value, str) and uuid_pattern.match(value): + continue + # Otherwise, process the value recursively and add it to the new dictionary + else: + new_dict[key] = remove_uuid_fields(value) + return new_dict + elif isinstance(data, list): + # Process each item in the list recursively + return [remove_uuid_fields(item) for item in data] + else: + # Return the value unchanged if it's not a dictionary or list + return data diff --git a/tests/test_submit_qe_workchain/test_create_builder_default.yml b/tests/test_submit_qe_workchain/test_create_builder_default.yml index b2af426fc..3ad9d35b7 100644 --- a/tests/test_submit_qe_workchain/test_create_builder_default.yml +++ b/tests/test_submit_qe_workchain/test_create_builder_default.yml @@ -1,168 +1,62 @@ +advanced: + clean_workdir: false + initial_magnetic_moments: null + kpoints_distance: 0.12 + pseudo_family: SSSP/1.3/PBEsol/efficiency + pw: + parameters: + CONTROL: + etot_conv_thr: 2.0e-05 + forc_conv_thr: 0.0001 + ELECTRONS: + conv_thr: 4.0e-10 + electron_maxstep: 80 + SYSTEM: + degauss: 0.015 + ecutrho: 240.0 + ecutwfc: 30.0 + smearing: methfessel-paxton + tot_charge: 0.0 + vdw_corr: none + pseudos: {} bands: - bands: - pw: - parallelization: - npool: 1 - parameters: - CONTROL: - calculation: bands - etot_conv_thr: 2.0e-05 - forc_conv_thr: 0.0001 - restart_mode: from_scratch - tprnfor: true - tstress: true - ELECTRONS: - conv_thr: 4.0e-10 - diago_full_acc: true - diagonalization: paro - electron_maxstep: 80 - mixing_beta: 0.4 - startingpot: file - SYSTEM: - degauss: 0.01 - ecutrho: 240.0 - ecutwfc: 30.0 - nosym: false - occupations: smearing - smearing: cold - tot_charge: 0.0 - pseudos: - Si: Si.upf - bands_kpoints_distance: 0.025 - nbands_factor: 3.0 - scf: - kpoints_distance: 0.12 - kpoints_force_parity: false - pw: - parallelization: - npool: 1 - parameters: - CONTROL: - calculation: scf - etot_conv_thr: 2.0e-05 - forc_conv_thr: 0.0001 - tprnfor: true - tstress: true - ELECTRONS: - conv_thr: 4.0e-10 - electron_maxstep: 80 - mixing_beta: 0.4 - SYSTEM: - degauss: 0.015 - ecutrho: 240.0 - ecutwfc: 30.0 - nosym: false - occupations: smearing - smearing: methfessel-paxton - tot_charge: 0.0 - pseudos: - Si: Si.upf -clean_workdir: false -pdos: + projwfc_bands: false +codes: dos: - parameters: - DOS: - DeltaE: 0.02 - nscf: - kpoints_distance: 0.1 - kpoints_force_parity: false - pw: - parallelization: - npool: 1 - parameters: - CONTROL: - calculation: nscf - etot_conv_thr: 2.0e-05 - forc_conv_thr: 0.0001 - restart_mode: from_scratch - tprnfor: true - tstress: true - ELECTRONS: - conv_thr: 4.0e-10 - electron_maxstep: 80 - mixing_beta: 0.4 - SYSTEM: - ecutrho: 240.0 - ecutwfc: 30.0 - nosym: true - occupations: tetrahedra - tot_charge: 0.0 - pseudos: - Si: Si.upf + cpus: 1 + cpus_per_task: 1 + max_wallclock_seconds: 43200 + nodes: 1 + ntasks_per_node: 1 projwfc: - parameters: - PROJWFC: - DeltaE: 0.02 - settings: - cmdline: - - -nk - - '1' - scf: - kpoints_distance: 0.12 - kpoints_force_parity: false - pw: - parallelization: - npool: 1 - parameters: - CONTROL: - calculation: scf - etot_conv_thr: 2.0e-05 - forc_conv_thr: 0.0001 - restart_mode: from_scratch - tprnfor: true - tstress: true - ELECTRONS: - conv_thr: 4.0e-10 - 
electron_maxstep: 80 - mixing_beta: 0.4 - SYSTEM: - degauss: 0.015 - ecutrho: 240.0 - ecutwfc: 30.0 - nosym: false - occupations: smearing - smearing: methfessel-paxton - tot_charge: 0.0 - pseudos: - Si: Si.upf -properties: -- bands -- pdos -- relax -relax: - base: - kpoints_distance: 0.12 - kpoints_force_parity: false - pw: - parallelization: - npool: 1 - parameters: - CELL: - cell_dofree: all - press_conv_thr: 0.5 - CONTROL: - calculation: vc-relax - etot_conv_thr: 2.0e-05 - forc_conv_thr: 0.0001 - tprnfor: true - tstress: true - ELECTRONS: - conv_thr: 4.0e-10 - electron_maxstep: 80 - mixing_beta: 0.4 - SYSTEM: - degauss: 0.015 - ecutrho: 240.0 - ecutwfc: 30.0 - nosym: false - occupations: smearing - smearing: methfessel-paxton - tot_charge: 0.0 - pseudos: - Si: Si.upf - base_final_scf: - pw: - pseudos: {} - max_meta_convergence_iterations: 5 - meta_convergence: true - volume_convergence: 0.02 + cpus: 1 + cpus_per_task: 1 + max_wallclock_seconds: 43200 + nodes: 1 + ntasks_per_node: 1 + projwfc_bands: + cpus: 1 + cpus_per_task: 1 + max_wallclock_seconds: 43200 + nodes: 1 + ntasks_per_node: 1 + pw: + cpus: 2 + cpus_per_task: 1 + max_wallclock_seconds: 43200 + nodes: 1 + ntasks_per_node: 2 + parallelization: {} +pdos: + nscf_kpoints_distance: 0.1 + pdos_degauss: 0.005 + use_pdos_degauss: false +workchain: + electronic_type: metal + properties: + - bands + - pdos + - relax + protocol: moderate + relax_type: positions_cell + spin_type: none diff --git a/tests/test_wrapper.py b/tests/test_wrapper.py new file mode 100644 index 000000000..609fa40d8 --- /dev/null +++ b/tests/test_wrapper.py @@ -0,0 +1,64 @@ +from aiidalab_qe.app.wrapper import AppWrapperContoller, AppWrapperModel, AppWrapperView + + +class TestWrapper: + def test_enable_toggles(self): + """Test enable_toggles method.""" + self._instansiate_mvc_components() + assert self.view.guide_toggle.disabled is True + assert self.view.about_toggle.disabled is True + self.controller.enable_toggles() + assert self.view.guide_toggle.disabled is False + assert self.view.about_toggle.disabled is False + + def test_guide_toggle(self): + """Test guide_toggle method.""" + self._instansiate_mvc_components() + self.controller.enable_toggles() + self.controller._on_guide_toggle({"new": True}) + self._assert_guide_is_on() + self.controller._on_guide_toggle({"new": False}) + self._assert_no_guide_info() + + def test_about_toggle(self): + """Test about_toggle method.""" + self._instansiate_mvc_components() + self.controller.enable_toggles() + self.controller._on_about_toggle({"new": True}) + self._assert_about_is_on() + self.controller._on_about_toggle({"new": False}) + self._assert_no_guide_info() + + def test_toggle_switch(self): + """Test toggle_switch method.""" + self._instansiate_mvc_components() + self.controller.enable_toggles() + self._assert_no_guide_info() + self.controller._on_guide_toggle({"new": True}) + self._assert_guide_is_on() + self.controller._on_about_toggle({"new": True}) + self._assert_about_is_on() + self.controller._on_guide_toggle({"new": True}) + self._assert_guide_is_on() + self.controller._on_guide_toggle({"new": False}) + self._assert_no_guide_info() + + def _assert_guide_is_on(self): + """Assert guide is on.""" + assert len(self.view.info_container.children) == 1 + assert self.view.guide in self.view.info_container.children + + def _assert_about_is_on(self): + """Assert about is on.""" + assert len(self.view.info_container.children) == 1 + assert self.view.about in self.view.info_container.children + + def 
_assert_no_guide_info(self):
+        """Assert no info is shown."""
+        assert len(self.view.info_container.children) == 0
+
+    def _instansiate_mvc_components(self):
+        """Instantiate `AppWrapper` MVC components."""
+        self.model = AppWrapperModel()
+        self.view = AppWrapperView()
+        self.controller = AppWrapperContoller(self.model, self.view)
diff --git a/tests_integration/conftest.py b/tests_integration/conftest.py
index f27cc24ac..8c84dd919 100644
--- a/tests_integration/conftest.py
+++ b/tests_integration/conftest.py
@@ -4,7 +4,7 @@
 import pytest
 import requests
-import selenium.webdriver.support.expected_conditions as EC
+import selenium.webdriver.support.expected_conditions as ec
 from requests.exceptions import ConnectionError
 from selenium.webdriver.common.by import By
 from selenium.webdriver.support.wait import WebDriverWait
@@ -39,30 +39,33 @@ def execute(command, user=None, workdir=None, **kwargs):
             opts = f"{opts} --workdir={workdir}"
         command = f"exec {opts} aiidalab {command}"

-        return docker_compose.execute(command, **kwargs)
+        return docker_compose.execute(command, **kwargs).decode().strip()

     return execute


 @pytest.fixture(scope="session")
 def nb_user(aiidalab_exec):
-    return aiidalab_exec("bash -c 'echo \"${NB_USER}\"'").decode().strip()
+    return aiidalab_exec("bash -c 'echo \"${NB_USER}\"'")


 @pytest.fixture(scope="session")
-def notebook_service(docker_ip, docker_services):
+def notebook_service(docker_compose, docker_ip, docker_services):
     """Ensure that HTTP service is up and responsive."""

     # `port_for` takes a container port and returns the corresponding host port
     port = docker_services.port_for("aiidalab", 8888)
     url = f"http://{docker_ip}:{port}"
     token = os.environ.get("JUPYTER_TOKEN", "testtoken")
-    docker_services.wait_until_responsive(
-        # The timeout is very high for this test, because the installation of pseudo libraries.
- timeout=180.0, - pause=0.1, - check=lambda: is_responsive(url), - ) + try: + docker_services.wait_until_responsive( + timeout=60.0, + pause=1.0, + check=lambda: is_responsive(url), + ) + except Exception as e: + print(docker_compose.execute("logs").decode().strip()) + pytest.exit(e) return url, token @@ -85,7 +88,7 @@ def _selenium_driver(nb_path, wait_time=5.0): selenium.find_element(By.ID, "ipython-main-app") selenium.find_element(By.ID, "notebook-container") WebDriverWait(selenium, 100).until( - EC.invisibility_of_element((By.ID, "appmode-busy")) + ec.invisibility_of_element((By.ID, "appmode-busy")) ) return selenium @@ -116,12 +119,6 @@ def screenshot_dir(): return sdir -@pytest.fixture -def firefox_options(firefox_options): - firefox_options.add_argument("--headless") - return firefox_options - - @pytest.fixture def chrome_options(chrome_options): chrome_options.add_argument("--headless") diff --git a/tests_integration/docker-compose.yml b/tests_integration/docker-compose.yml index 5c506843b..cd9e4f03c 100644 --- a/tests_integration/docker-compose.yml +++ b/tests_integration/docker-compose.yml @@ -1,10 +1,8 @@ --- -version: '3.4' - services: aiidalab: - image: ${REGISTRY:-}${QE_IMAGE:-aiidalab/qe}:${TAG:-newly-baked} + image: ${REGISTRY:-}${QE_IMAGE:-aiidalab/qe}${TAG:-} environment: TZ: Europe/Zurich DOCKER_STACKS_JUPYTER_CMD: notebook diff --git a/tests_integration/test_app.py b/tests_integration/test_app.py index e20dd4c79..601e22ebd 100755 --- a/tests_integration/test_app.py +++ b/tests_integration/test_app.py @@ -1,7 +1,7 @@ import time from pathlib import Path -import selenium.webdriver.support.expected_conditions as EC +import selenium.webdriver.support.expected_conditions as EC # noqa: N812 from selenium.webdriver.common.by import By from selenium.webdriver.support.wait import WebDriverWait @@ -20,13 +20,15 @@ def test_qe_app_select_silicon_and_confirm( driver = selenium_driver("qe.ipynb", wait_time=30.0) driver.set_window_size(1920, 1485) - element = WebDriverWait(driver, 60).until( + element = WebDriverWait(driver, 60 * 2).until( EC.presence_of_element_located((By.XPATH, "//*[text()='From Examples']")) ) element.click() try: - driver.find_element(By.XPATH, "//option[@value='Diamond']").click() + driver.find_element( + By.XPATH, "//option[@value='Diamond (primitive cell)']" + ).click() time.sleep(10) element = WebDriverWait(driver, 60).until( EC.element_to_be_clickable((By.XPATH, "//button[text()='Confirm']")) diff --git a/tests_integration/test_image.py b/tests_integration/test_image.py index 1a4841b25..0303ab725 100755 --- a/tests_integration/test_image.py +++ b/tests_integration/test_image.py @@ -12,18 +12,21 @@ def test_notebook_service_available(notebook_service): def test_verdi_status(aiidalab_exec, nb_user): # Check the aiida service is running and connected to RabbitMQ # The notebook_service fixture is needed to wait for the services to be up - output = aiidalab_exec("verdi status", user=nb_user).decode().strip() - assert "Connected to RabbitMQ" in output + output = aiidalab_exec("verdi status", user=nb_user) + for status in ("version", "config", "profile", "storage", "broker", "daemon"): + assert f"βœ” {status}" in output + assert "/home/jovyan/.aiida" in output assert "Daemon is running" in output + assert "Unable to connect to broker" not in output @pytest.mark.usefixtures("notebook_service") def test_pseudos_families_are_installed(aiidalab_exec, nb_user): # Check the aiida service is running and connected to RabbitMQ # The notebook_service fixture is needed to 
wait for the services to be up
-    output = aiidalab_exec("aiida-pseudo list", user=nb_user).decode().strip()
+    output = aiidalab_exec("aiida-pseudo list", user=nb_user)
     assert "SSSP" in output
     assert "PseudoDojo" in output
-    # Two lines of header, 8 pseudos
-    assert len(output.splitlines()) == 10
+    # Two lines of header, 12 pseudos
+    assert len(output.splitlines()) == 14
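A closing note on the integration fixtures: notebook_service polls the Jupyter endpoint through an is_responsive(url) helper whose body is not part of this patch. A minimal sketch of such a check, under the assumption that a plain HTTP 200 is a sufficient readiness signal:

    import requests

    def is_responsive(url: str) -> bool:
        """Return True once the Jupyter server answers an HTTP request."""
        try:
            return requests.get(url, timeout=5).status_code == 200
        except requests.exceptions.ConnectionError:
            return False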