Commit: Add Readme

jan-janssen committed Oct 3, 2024
1 parent 58c35ba commit 307cc9b
Showing 7 changed files with 122 additions and 80 deletions.
33 changes: 33 additions & 0 deletions .github/workflows/format_black.yml
@@ -0,0 +1,33 @@
# This workflow will check out the branch of the PR, apply black formatting, and commit the result to the PR. It does not work for forks.

name: Format black

on:
pull_request:
types: [labeled]

jobs:
build:
if: contains(github.event.pull_request.labels.*.name, 'format_black' )
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
token: ${{ secrets.DEPENDABOT_WORKFLOW_TOKEN }}
ref: ${{ github.event.pull_request.head.ref }} # Check out the head of the actual branch, not the PR
fetch-depth: 0 # otherwise, you will fail to push refs to dest repo
- name: format black
uses: psf/black@stable
with:
options: ""
src: "./${{ github.event.repository.name }}"
- name: commit
run: |
git config --local user.email "[email protected]"
git config --local user.name "pyiron-runner"
git commit -m "Format black" -a
- name: push
uses: ad-m/github-push-action@master
with:
github_token: ${{ secrets.DEPENDABOT_WORKFLOW_TOKEN }}
branch: ${{ github.event.pull_request.head.ref }}
5 changes: 1 addition & 4 deletions .github/workflows/minimal.yml
@@ -21,9 +21,6 @@ jobs:
condarc-file: .condarc
environment-file: .ci_support/environment-mini.yml
- name: Test
shell: bash -l {0}
timeout-minutes: 5
run: |
pip install versioneer[toml]==0.29
pip install versioneer[toml]==0.29
pip install . --no-deps --no-build-isolation
python -m unittest discover tests
21 changes: 21 additions & 0 deletions .github/workflows/pypicheck.yml
@@ -0,0 +1,21 @@
# This workflow tests whether the installation of the PyPI package works

name: Pip check

on:
push:
branches: [ main ]
pull_request:

jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4

- name: Pip check
shell: bash -l {0}
run: |
pip install versioneer[toml]==0.29
pip install . --no-deps --no-build-isolation
pip check
29 changes: 29 additions & 0 deletions README.md
@@ -0,0 +1,29 @@
# Dataclasses for pyiron
The `pyiron_dataclasses` module provides a series of [dataclasses](https://docs.python.org/3/library/dataclasses.html)
for the `pyiron` workflow framework. It can load HDF5 files created by `pyiron_atomistics` and read their content
without depending on `pyiron_atomistics` itself. Furthermore, it is not tied to a single version of
`pyiron_atomistics`; instead, multiple versions of `pyiron_atomistics` map to the same API version of
`pyiron_dataclasses`.

## Usage
Using the `get_dataclass()` function of the built-in converter:
```python
from h5io_browser import read_dict_from_hdf
from pyiron_dataclasses.v1.converter import get_dataclass

job_classes = get_dataclass(
job_dict=read_dict_from_hdf(
file_name=job.project_hdf5.file_name,
h5_path="/",
recursive=True,
slash='ignore',
)[job.job_name]
)
job_classes
```

## Supported Versions
### Version 1 - `v1`
Supported versions of `pyiron_atomistics`:
* `0.6.13`
* `0.6.12`
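
The usage example above returns a plain dataclass instance. As a minimal sketch, assuming the field layout exercised by the tests in this commit (e.g. `calculation_output.generic.energy_tot`), the standard-library `dataclasses` helpers are enough to inspect or serialize the result:

```python
from dataclasses import asdict, fields

# Minimal inspection sketch; only the attribute path
# calculation_output.generic.energy_tot is taken from the tests in this
# commit, everything else is standard library.
print([f.name for f in fields(job_classes)])  # top-level fields of the job dataclass
print(job_classes.calculation_output.generic.energy_tot[-1])  # final total energy
job_as_dict = asdict(job_classes)  # nested plain dict, e.g. for further processing
```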
53 changes: 27 additions & 26 deletions tests/convert.py → pyiron_dataclasses/v1/converter.py
@@ -61,15 +61,24 @@
)


def convert_sphinx_job_dict(job_dict: dict) -> SphinxJob:
def get_dataclass(job_dict):
funct_dict = {
"<class 'pyiron_atomistics.lammps.lammps.Lammps'>": _convert_lammps_job_dict,
"<class 'pyiron_atomistics.sphinx.sphinx.Sphinx'>": _convert_sphinx_job_dict,
"<class 'pyiron_atomistics.vasp.vasp.Vasp'>": _convert_vasp_job_dict,
}
return funct_dict[job_dict["TYPE"]](job_dict=job_dict)


def _convert_sphinx_job_dict(job_dict: dict) -> SphinxJob:
ureg = UnitRegistry()
sphinx_input_parameter_dict = convert_datacontainer_to_dictionary(
sphinx_input_parameter_dict = _convert_datacontainer_to_dictionary(
data_container_dict=job_dict["input"]["parameters"]
)
generic_input_dict = convert_generic_parameters_to_dictionary(
generic_input_dict = _convert_generic_parameters_to_dictionary(
generic_parameter_dict=job_dict["input"]["generic"],
)
output_dict = convert_datacontainer_to_dictionary(
output_dict = _convert_datacontainer_to_dictionary(
data_container_dict=job_dict["output"]["generic"]
)
if "ricQN" in sphinx_input_parameter_dict["sphinx"]["main"]:
@@ -461,9 +470,9 @@ def convert_sphinx_job_dict(job_dict: dict) -> SphinxJob:
)


def convert_lammps_job_dict(job_dict: dict) -> LammpsJob:
def _convert_lammps_job_dict(job_dict: dict) -> LammpsJob:
ureg = UnitRegistry()
generic_input_dict = convert_generic_parameters_to_dictionary(
generic_input_dict = _convert_generic_parameters_to_dictionary(
generic_parameter_dict=job_dict["input"]["generic"],
)
return LammpsJob(
@@ -541,10 +550,10 @@ def convert_lammps_job_dict(job_dict: dict) -> LammpsJob:
species=job_dict["input"]["potential_inp"]["potential"]["Species"],
),
input_files=LammpsInputFiles(
control_inp=convert_generic_parameters_to_string(
control_inp=_convert_generic_parameters_to_string(
generic_parameter_dict=job_dict["input"]["control_inp"]
),
potential_inp=convert_generic_parameters_to_string(
potential_inp=_convert_generic_parameters_to_string(
generic_parameter_dict=job_dict["input"]["potential_inp"]
),
),
@@ -609,9 +618,9 @@ def convert_lammps_job_dict(job_dict: dict) -> LammpsJob:
)


def convert_vasp_job_dict(job_dict):
def _convert_vasp_job_dict(job_dict):
ureg = UnitRegistry()
generic_input_dict = convert_generic_parameters_to_dictionary(
generic_input_dict = _convert_generic_parameters_to_dictionary(
generic_parameter_dict=job_dict["input"]["generic"],
)
return VaspJob(
@@ -693,14 +702,14 @@ def convert_vasp_job_dict(job_dict):
fix_spin_constraint=generic_input_dict.get("fix_spin_constraint", None),
max_iter=generic_input_dict.get("max_iter", None),
),
incar=convert_generic_parameters_to_string(
incar=_convert_generic_parameters_to_string(
generic_parameter_dict=job_dict["input"]["incar"]
),
kpoints=convert_generic_parameters_to_string(
kpoints=_convert_generic_parameters_to_string(
generic_parameter_dict=job_dict["input"]["kpoints"]
),
potcar=PotCar(
xc=convert_generic_parameters_to_dictionary(
xc=_convert_generic_parameters_to_dictionary(
generic_parameter_dict=job_dict["input"]["potcar"]
)["xc"]
),
@@ -915,16 +924,7 @@ def convert_vasp_job_dict(job_dict):
)


def convert(job_dict):
funct_dict = {
"<class 'pyiron_atomistics.lammps.lammps.Lammps'>": convert_lammps_job_dict,
"<class 'pyiron_atomistics.sphinx.sphinx.Sphinx'>": convert_sphinx_job_dict,
"<class 'pyiron_atomistics.vasp.vasp.Vasp'>": convert_vasp_job_dict,
}
return funct_dict[job_dict["TYPE"]](job_dict=job_dict)


def convert_generic_parameters_to_string(generic_parameter_dict: dict) -> str:
def _convert_generic_parameters_to_string(generic_parameter_dict: dict) -> str:
output_str = ""
for p, v in zip(
generic_parameter_dict["data_dict"]["Parameter"],
@@ -934,7 +934,7 @@ def convert_generic_parameters_to_string(generic_parameter_dict: dict) -> str:
return output_str[:-1]


def convert_generic_parameters_to_dictionary(generic_parameter_dict: dict) -> dict:
def _convert_generic_parameters_to_dictionary(generic_parameter_dict: dict) -> dict:
return {
p: v
for p, v in zip(
@@ -976,7 +976,7 @@ def recursive_sort(input_value: dict) -> dict:
else:
ind_dict[int(ind)] = key
content_dict[key] = recursive_sort(input_value=v)
elif k != "DICT_VERSION":
else:
content_dict[k] = recursive_sort(input_value=v)
if content_lst_flag:
return [ind_dict[ind] for ind in sorted(list(ind_dict.keys()))]
@@ -991,14 +991,15 @@ def recursive_sort(input_value: dict) -> dict:
raise KeyError(ind_dict, content_dict)


def convert_datacontainer_to_dictionary(data_container_dict: dict) -> dict:
def _convert_datacontainer_to_dictionary(data_container_dict: dict) -> dict:
return _sort_dictionary_from_datacontainer(
input_dict=_filter_dict(
input_dict=data_container_dict,
remove_keys_lst=[
"NAME",
"TYPE",
"OBJECT",
"DICT_VERSION",
"HDF_VERSION",
"READ_ONLY",
"VERSION",
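
The renamed private helpers at the bottom of the converter both iterate over the `Parameter` and `Value` columns of a generic-parameter dictionary. A minimal sketch of that mapping, with hypothetical input data (only the `data_dict` layout and the zip over `Parameter`/`Value` are taken from the diff; the string separator is assumed):

```python
# Hypothetical generic-parameter dictionary; only the "data_dict" layout
# with "Parameter" and "Value" columns is taken from the diff above.
generic_parameter_dict = {
    "data_dict": {
        "Parameter": ["calc_mode", "n_ionic_steps"],
        "Value": ["static", "100"],
    }
}

# _convert_generic_parameters_to_dictionary zips the two columns into a dict.
as_dict = {
    p: v
    for p, v in zip(
        generic_parameter_dict["data_dict"]["Parameter"],
        generic_parameter_dict["data_dict"]["Value"],
    )
}
print(as_dict)  # {'calc_mode': 'static', 'n_ionic_steps': '100'}

# _convert_generic_parameters_to_string joins the same pairs line by line
# (separator assumed here), which is how INCAR/KPOINTS-style inputs are
# reconstructed for the VASP job.
as_string = "\n".join(f"{p} {v}" for p, v in as_dict.items())
```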
17 changes: 7 additions & 10 deletions tests/test_pyiron_atomistics_live.py
@@ -1,12 +1,9 @@
import unittest

from h5io_browser.base import read_dict_from_hdf
from h5io_browser import read_dict_from_hdf
from pint import UnitRegistry

from convert import (
convert_sphinx_job_dict,
convert_lammps_job_dict,
)
from pyiron_dataclasses.v1.converter import get_dataclass

try:
from pyiron_atomistics import Project
@@ -45,7 +42,7 @@ def test_sphinx_calc_minimize(self):
recursive=True,
slash='ignore',
)
job_sphinx = convert_sphinx_job_dict(job_dict[job.job_name])
job_sphinx = get_dataclass(job_dict[job.job_name])
self.assertEqual(job_sphinx.calculation_output.generic.energy_tot[-1], -228.78315943905295 * ureg.eV)

def test_sphinx_calc_static(self):
@@ -59,7 +56,7 @@ def test_sphinx_calc_static(self):
recursive=True,
slash='ignore',
)
job_sphinx = convert_sphinx_job_dict(job_dict[job.job_name])
job_sphinx = get_dataclass(job_dict[job.job_name])
self.assertEqual(job_sphinx.calculation_output.generic.energy_tot[-1], -228.78315953829286 * ureg.eV)

def test_lammps_calc_static(self):
@@ -74,7 +71,7 @@ def test_lammps_calc_static(self):
recursive=True,
slash='ignore',
)
job_lammps = convert_lammps_job_dict(job_dict[job.job_name])
job_lammps = get_dataclass(job_dict[job.job_name])
self.assertEqual(job_lammps.calculation_output.generic.energy_tot[-1], -13.4486826111902 * ureg.eV)

def test_lammps_calc_md(self):
@@ -89,7 +86,7 @@ def test_lammps_calc_md(self):
recursive=True,
slash='ignore',
)
job_lammps = convert_lammps_job_dict(job_dict[job.job_name])
job_lammps = get_dataclass(job_dict[job.job_name])
self.assertEqual(len(job_lammps.calculation_output.generic.energy_tot), 11)

def test_lammps_calc_minimize(self):
@@ -104,5 +101,5 @@ def test_lammps_calc_minimize(self):
recursive=True,
slash='ignore',
)
job_lammps = convert_lammps_job_dict(job_dict[job.job_name])
job_lammps = get_dataclass(job_dict[job.job_name])
self.assertEqual(job_lammps.calculation_output.generic.energy_tot[-1], -13.4486826111902 * ureg.eV)
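
The live tests guard the `pyiron_atomistics` import with a `try` block; the diff does not show how the guard is completed, but a common pattern, sketched here with an assumed flag name, class name, and skip message, is to skip the whole test case when the import fails:

```python
import unittest

try:
    from pyiron_atomistics import Project

    skip_pyiron_atomistics_test = False
except ImportError:
    skip_pyiron_atomistics_test = True


# Class name and skip message are assumptions; only the import of Project
# appears in the diff above.
@unittest.skipIf(
    skip_pyiron_atomistics_test, "pyiron_atomistics is not installed"
)
class TestPyironAtomisticsLive(unittest.TestCase):
    def test_sphinx_calc_minimize(self):
        ...
```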
44 changes: 4 additions & 40 deletions tests/test_pyiron_atomistics_static.py
@@ -1,51 +1,15 @@
import os
from unittest import TestCase

from h5io_browser.base import read_dict_from_hdf
from h5io_browser import read_dict_from_hdf
from pint import UnitRegistry

from convert import (
convert,
convert_sphinx_job_dict,
convert_lammps_job_dict,
convert_vasp_job_dict,
from pyiron_dataclasses.v1.converter import (
get_dataclass,
)


class TestPyironAtomisticsStatic(TestCase):
def test_sphinx(self):
ureg = UnitRegistry()
job_dict = read_dict_from_hdf(
file_name=os.path.join(os.path.dirname(__file__), "pyiron_atomistics_0_6_13", "sx.h5"),
h5_path="/sx",
recursive=True,
slash='ignore',
)
job_sphinx = convert_sphinx_job_dict(job_dict=job_dict)
self.assertEqual(job_sphinx.calculation_output.generic.energy_tot[-1], -228.7831594379917 * ureg.eV)

def test_lammps(self):
ureg = UnitRegistry()
job_dict = read_dict_from_hdf(
file_name=os.path.join(os.path.dirname(__file__), "pyiron_atomistics_0_6_13", "lmp.h5"),
h5_path="/lmp",
recursive=True,
slash='ignore',
)
job_lammps = convert_lammps_job_dict(job_dict=job_dict)
self.assertEqual(job_lammps.calculation_output.generic.energy_tot[-1], -9428.45286561574 * ureg.eV)

def test_vasp(self):
ureg = UnitRegistry()
job_dict = read_dict_from_hdf(
file_name=os.path.join(os.path.dirname(__file__), "pyiron_atomistics_0_6_13", "vasp.h5"),
h5_path="/vasp",
recursive=True,
slash='ignore',
)
job_vasp = convert_vasp_job_dict(job_dict=job_dict)
self.assertEqual(job_vasp.calculation_output.generic.energy_tot[-1], -14.7459202 * ureg.eV)

def test_all(self):
ureg = UnitRegistry()
static_folder = os.path.join(os.path.dirname(__file__), "pyiron_atomistics_0_6_13")
@@ -62,6 +26,6 @@ def test_all(self):
slash='ignore',
)[hdf5_file.split(".")[0]]
self.assertEqual(
convert(job_dict=job_dict).calculation_output.generic.energy_tot[-1],
get_dataclass(job_dict=job_dict).calculation_output.generic.energy_tot[-1],
energy_dict[hdf5_file]
)

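The three deleted per-code tests are replaced by the single `test_all` loop, of which only a fragment is visible. A hedged reconstruction of that loop, with the expected energies copied from the deleted tests and the dictionary name and loop structure assumed:

```python
import os

from h5io_browser import read_dict_from_hdf
from pint import UnitRegistry

from pyiron_dataclasses.v1.converter import get_dataclass

# Expected energies are copied from the deleted per-code tests above; the
# energy_dict name and the loop itself are assumptions based on the visible
# fragment of test_all.
ureg = UnitRegistry()
static_folder = os.path.join(os.path.dirname(__file__), "pyiron_atomistics_0_6_13")
energy_dict = {
    "sx.h5": -228.7831594379917 * ureg.eV,
    "lmp.h5": -9428.45286561574 * ureg.eV,
    "vasp.h5": -14.7459202 * ureg.eV,
}
for hdf5_file, energy in energy_dict.items():
    job_dict = read_dict_from_hdf(
        file_name=os.path.join(static_folder, hdf5_file),
        h5_path="/" + hdf5_file.split(".")[0],
        recursive=True,
        slash="ignore",
    )[hdf5_file.split(".")[0]]
    assert get_dataclass(job_dict=job_dict).calculation_output.generic.energy_tot[-1] == energy
```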