diff --git a/.github/workflows/format_black.yml b/.github/workflows/format_black.yml new file mode 100644 index 0000000..e3fdafd --- /dev/null +++ b/.github/workflows/format_black.yml @@ -0,0 +1,33 @@ +# This workflow will checkout the branch of the PR, apply black formatting and commit the result to the PR. Does not work for forks. + +name: Format black + +on: + pull_request: + types: [labeled] + +jobs: + build: + if: contains(github.event.pull_request.labels.*.name, 'format_black' ) + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + token: ${{ secrets.DEPENDABOT_WORKFLOW_TOKEN }} + ref: ${{ github.event.pull_request.head.ref }} # Check out the head of the actual branch, not the PR + fetch-depth: 0 # otherwise, you will fail to push refs to dest repo + - name: format black + uses: psf/black@stable + with: + options: "" + src: "./${{ github.event.repository.name }}" + - name: commit + run: | + git config --local user.email "pyiron@mpie.de" + git config --local user.name "pyiron-runner" + git commit -m "Format black" -a + - name: push + uses: ad-m/github-push-action@master + with: + github_token: ${{ secrets.DEPENDABOT_WORKFLOW_TOKEN }} + branch: ${{ github.event.pull_request.head.ref }} \ No newline at end of file diff --git a/LICENSE b/LICENSE index 9557be6..3ce69da 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ BSD 3-Clause License -Copyright (c) 2018, Max-Planck-Institut für Eisenforschung GmbH - Computational Materials Design (CM) Department +Copyright (c) 2024, Jan Janssen All rights reserved. Redistribution and use in source and binary forms, with or without diff --git a/README.md b/README.md new file mode 100644 index 0000000..9d487cf --- /dev/null +++ b/README.md @@ -0,0 +1,29 @@ +# Dataclasses for pyiron +The `pyiron_dataclasses` module provides a series of [dataclasses](https://docs.python.org/3/library/dataclasses.html) +for the `pyiron` workflow framework. 
It can load HDF5 files created by `pyiron_atomistics` and read the content stored +in those files, without depending on `pyiron_atomistics`. Furthermore, it is not fixed to a single version of +`pyiron_atomistics` but rather matches multiple versions of `pyiron_atomistics` to the same API version of +`pyiron_dataclasses`. + +## Usage +Using the `get_dataclass()` function of the built-in converter: +```python +from h5io_browser import read_dict_from_hdf +from pyiron_dataclasses.v1.converter import get_dataclass + +job_classes = get_dataclass( + job_dict=read_dict_from_hdf( + file_name=job.project_hdf5.file_name, + h5_path="/", + recursive=True, + slash='ignore', + )[job.job_name] +) +job_classes +``` + +## Supported Versions +### Version 1 - `v1` +Supported versions of `pyiron_atomistics`: +* `0.6.13` +* `0.6.12` diff --git a/pyiron_dataclasses/v1/atomistic.py b/pyiron_dataclasses/v1/atomistic.py index 4bed6e8..bf53f2e 100644 --- a/pyiron_dataclasses/v1/atomistic.py +++ b/pyiron_dataclasses/v1/atomistic.py @@ -1,6 +1,8 @@ from dataclasses import dataclass +from typing import List, Optional + import numpy as np -from typing import Optional, List + from pyiron_dataclasses.v1.dft import OutputGenericDFT diff --git a/tests/convert.py b/pyiron_dataclasses/v1/converter.py similarity index 92% rename from tests/convert.py rename to pyiron_dataclasses/v1/converter.py index 6dad9ba..3ba6b5f 100644 --- a/tests/convert.py +++ b/pyiron_dataclasses/v1/converter.py @@ -1,84 +1,91 @@ from pint import UnitRegistry -from pyiron_dataclasses.v1.job import ( - Executable, - Interactive, - GenericDict, - Server, -) from pyiron_dataclasses.v1.atomistic import ( + Cell, GenericInput, GenericOutput, Structure, Units, - Cell, ) from pyiron_dataclasses.v1.dft import ( - OutputGenericDFT, - ElectronicStructure, - DensityOfStates, ChargeDensity, + DensityOfStates, + ElectronicStructure, + OutputGenericDFT, +) +from pyiron_dataclasses.v1.job import ( + Executable, + GenericDict, + Interactive, 
def get_dataclass(job_dict):
    """Convert a pyiron job dictionary (as read from HDF5) into its dataclass.

    Dispatches on the job's ``"TYPE"`` entry -- the stringified class path
    (``str(type(job))``) that pyiron stores in the HDF5 file -- to the
    matching private converter function.

    Args:
        job_dict (dict): Nested job dictionary, e.g. one entry of
            ``h5io_browser.read_dict_from_hdf(...)``; must contain a
            ``"TYPE"`` key.

    Returns:
        LammpsJob | SphinxJob | VaspJob: the converted job dataclass.

    Raises:
        KeyError: if ``job_dict["TYPE"]`` is not one of the supported job
            classes.
    """
    # NOTE(review): in the reviewed copy all three keys were the empty string
    # (duplicate dict keys -- only the last entry survives, so LAMMPS/SPhInX
    # jobs could never be dispatched and every lookup raised KeyError). The
    # angle-bracketed class strings were evidently stripped by the extraction;
    # restored below from pyiron's stored ``str(type(job))`` convention --
    # TODO confirm against an actual pyiron_atomistics 0.6.x HDF5 file.
    funct_dict = {
        "<class 'pyiron_atomistics.lammps.lammps.Lammps'>": _convert_lammps_job_dict,
        "<class 'pyiron_atomistics.sphinx.sphinx.Sphinx'>": _convert_sphinx_job_dict,
        "<class 'pyiron_atomistics.vasp.vasp.Vasp'>": _convert_vasp_job_dict,
    }
    return funct_dict[job_dict["TYPE"]](job_dict=job_dict)
ric_qn=SphinxRicQN( max_steps=int( - sphinx_input_parameter_dict["sphinx"]["main"]["ricQN"][ - "maxSteps" - ] + sphinx_input_parameter_dict["sphinx"]["main"]["ricQN"]["maxSteps"] ), max_step_length=float( sphinx_input_parameter_dict["sphinx"]["main"]["ricQN"][ @@ -88,53 +95,39 @@ def convert_sphinx_job_dict(job_dict: dict) -> SphinxJob: born_oppenheimer=BornOppenheimer( scf_diag=ScfDiag( rho_mixing=float( - sphinx_input_parameter_dict["sphinx"]["main"][ - "ricQN" - ]["bornOppenheimer"]["scfDiag"]["rhoMixing"] + sphinx_input_parameter_dict["sphinx"]["main"]["ricQN"][ + "bornOppenheimer" + ]["scfDiag"]["rhoMixing"] ), spin_mixing=float( - sphinx_input_parameter_dict["sphinx"]["main"][ - "ricQN" - ]["bornOppenheimer"]["scfDiag"]["spinMixing"] + sphinx_input_parameter_dict["sphinx"]["main"]["ricQN"][ + "bornOppenheimer" + ]["scfDiag"]["spinMixing"] ), - delta_energy=sphinx_input_parameter_dict["sphinx"][ - "main" - ]["ricQN"]["bornOppenheimer"]["scfDiag"]["dEnergy"], - max_steps=sphinx_input_parameter_dict["sphinx"][ - "main" - ]["ricQN"]["bornOppenheimer"]["scfDiag"][ - "maxSteps" - ], + delta_energy=sphinx_input_parameter_dict["sphinx"]["main"][ + "ricQN" + ]["bornOppenheimer"]["scfDiag"]["dEnergy"], + max_steps=sphinx_input_parameter_dict["sphinx"]["main"][ + "ricQN" + ]["bornOppenheimer"]["scfDiag"]["maxSteps"], preconditioner=SphinxPreConditioner( - type=sphinx_input_parameter_dict["sphinx"][ - "main" - ]["ricQN"]["bornOppenheimer"]["scfDiag"][ - "preconditioner" - ][ - "type" - ], - scaling=sphinx_input_parameter_dict["sphinx"][ - "main" - ]["ricQN"]["bornOppenheimer"]["scfDiag"][ - "preconditioner" - ][ + type=sphinx_input_parameter_dict["sphinx"]["main"]["ricQN"][ + "bornOppenheimer" + ]["scfDiag"]["preconditioner"]["type"], + scaling=sphinx_input_parameter_dict["sphinx"]["main"][ + "ricQN" + ]["bornOppenheimer"]["scfDiag"]["preconditioner"][ "scaling" ], - spin_scaling=sphinx_input_parameter_dict[ - "sphinx" - ]["main"]["ricQN"]["bornOppenheimer"][ - "scfDiag" 
- ][ - "preconditioner" - ][ + spin_scaling=sphinx_input_parameter_dict["sphinx"]["main"][ + "ricQN" + ]["bornOppenheimer"]["scfDiag"]["preconditioner"][ "spinScaling" ], ), - block_ccg=sphinx_input_parameter_dict["sphinx"][ - "main" - ]["ricQN"]["bornOppenheimer"]["scfDiag"][ - "blockCCG" - ], + block_ccg=sphinx_input_parameter_dict["sphinx"]["main"][ + "ricQN" + ]["bornOppenheimer"]["scfDiag"]["blockCCG"], ), ), ), @@ -144,32 +137,46 @@ def convert_sphinx_job_dict(job_dict: dict) -> SphinxJob: else: sphinx_main = SphinxMain( ric_qn=None, - eval_forces=SphinxEvalForces(file=sphinx_input_parameter_dict["sphinx"]["main"]["evalForces"]["file"]), + eval_forces=SphinxEvalForces( + file=sphinx_input_parameter_dict["sphinx"]["main"]["evalForces"]["file"] + ), scf_diag=ScfDiag( - rho_mixing=sphinx_input_parameter_dict["sphinx"]["main"]["scfDiag"][0]["rhoMixing"], - spin_mixing=sphinx_input_parameter_dict["sphinx"]["main"]["scfDiag"][0]["spinMixing"], - delta_energy=sphinx_input_parameter_dict["sphinx"]["main"]["scfDiag"][0]["dEnergy"], - max_steps=sphinx_input_parameter_dict["sphinx"]["main"]["scfDiag"][0]["maxSteps"], + rho_mixing=sphinx_input_parameter_dict["sphinx"]["main"]["scfDiag"][0][ + "rhoMixing" + ], + spin_mixing=sphinx_input_parameter_dict["sphinx"]["main"]["scfDiag"][0][ + "spinMixing" + ], + delta_energy=sphinx_input_parameter_dict["sphinx"]["main"]["scfDiag"][ + 0 + ]["dEnergy"], + max_steps=sphinx_input_parameter_dict["sphinx"]["main"]["scfDiag"][0][ + "maxSteps" + ], preconditioner=SphinxPreConditioner( - type=sphinx_input_parameter_dict["sphinx"]["main"]["scfDiag"][0]["preconditioner"]["type"], - scaling=sphinx_input_parameter_dict["sphinx"]["main"]["scfDiag"][0]["preconditioner"]["scaling"], - spin_scaling=sphinx_input_parameter_dict["sphinx"]["main"]["scfDiag"][0]["preconditioner"]["spinScaling"], - ), - block_ccg=sphinx_input_parameter_dict["sphinx"]["main"]["scfDiag"][0]["blockCCG"], + type=sphinx_input_parameter_dict["sphinx"]["main"]["scfDiag"][0][ 
+ "preconditioner" + ]["type"], + scaling=sphinx_input_parameter_dict["sphinx"]["main"]["scfDiag"][0][ + "preconditioner" + ]["scaling"], + spin_scaling=sphinx_input_parameter_dict["sphinx"]["main"][ + "scfDiag" + ][0]["preconditioner"]["spinScaling"], + ), + block_ccg=sphinx_input_parameter_dict["sphinx"]["main"]["scfDiag"][0][ + "blockCCG" + ], ), ) return SphinxJob( executable=Executable( version=job_dict["executable"]["version"], name=job_dict["executable"]["name"], - operation_system_nt=job_dict["executable"][ - "operation_system_nt" - ], + operation_system_nt=job_dict["executable"]["operation_system_nt"], executable=job_dict["executable"]["executable"], mpi=job_dict["executable"]["mpi"], - accepted_return_codes=job_dict["executable"][ - "accepted_return_codes" - ], + accepted_return_codes=job_dict["executable"]["accepted_return_codes"], ), server=Server( user=job_dict["server"]["user"], @@ -461,9 +468,9 @@ def convert_sphinx_job_dict(job_dict: dict) -> SphinxJob: ) -def convert_lammps_job_dict(job_dict: dict) -> LammpsJob: +def _convert_lammps_job_dict(job_dict: dict) -> LammpsJob: ureg = UnitRegistry() - generic_input_dict = convert_generic_parameters_to_dictionary( + generic_input_dict = _convert_generic_parameters_to_dictionary( generic_parameter_dict=job_dict["input"]["generic"], ) return LammpsJob( @@ -541,10 +548,10 @@ def convert_lammps_job_dict(job_dict: dict) -> LammpsJob: species=job_dict["input"]["potential_inp"]["potential"]["Species"], ), input_files=LammpsInputFiles( - control_inp=convert_generic_parameters_to_string( + control_inp=_convert_generic_parameters_to_string( generic_parameter_dict=job_dict["input"]["control_inp"] ), - potential_inp=convert_generic_parameters_to_string( + potential_inp=_convert_generic_parameters_to_string( generic_parameter_dict=job_dict["input"]["potential_inp"] ), ), @@ -552,14 +559,10 @@ def convert_lammps_job_dict(job_dict: dict) -> LammpsJob: executable=Executable( version=job_dict["executable"]["version"], 
name=job_dict["executable"]["name"], - operation_system_nt=job_dict["executable"][ - "operation_system_nt" - ], + operation_system_nt=job_dict["executable"]["operation_system_nt"], executable=job_dict["executable"]["executable"], mpi=job_dict["executable"]["mpi"], - accepted_return_codes=job_dict["executable"][ - "accepted_return_codes" - ], + accepted_return_codes=job_dict["executable"]["accepted_return_codes"], ), server=Server( user=job_dict["server"]["user"], @@ -609,9 +612,9 @@ def convert_lammps_job_dict(job_dict: dict) -> LammpsJob: ) -def convert_vasp_job_dict(job_dict): +def _convert_vasp_job_dict(job_dict): ureg = UnitRegistry() - generic_input_dict = convert_generic_parameters_to_dictionary( + generic_input_dict = _convert_generic_parameters_to_dictionary( generic_parameter_dict=job_dict["input"]["generic"], ) return VaspJob( @@ -693,14 +696,14 @@ def convert_vasp_job_dict(job_dict): fix_spin_constraint=generic_input_dict.get("fix_spin_constraint", None), max_iter=generic_input_dict.get("max_iter", None), ), - incar=convert_generic_parameters_to_string( + incar=_convert_generic_parameters_to_string( generic_parameter_dict=job_dict["input"]["incar"] ), - kpoints=convert_generic_parameters_to_string( + kpoints=_convert_generic_parameters_to_string( generic_parameter_dict=job_dict["input"]["kpoints"] ), potcar=PotCar( - xc=convert_generic_parameters_to_dictionary( + xc=_convert_generic_parameters_to_dictionary( generic_parameter_dict=job_dict["input"]["potcar"] )["xc"] ), @@ -915,16 +918,7 @@ def convert_vasp_job_dict(job_dict): ) -def convert(job_dict): - funct_dict = { - "": convert_lammps_job_dict, - "": convert_sphinx_job_dict, - "": convert_vasp_job_dict, - } - return funct_dict[job_dict["TYPE"]](job_dict=job_dict) - - -def convert_generic_parameters_to_string(generic_parameter_dict: dict) -> str: +def _convert_generic_parameters_to_string(generic_parameter_dict: dict) -> str: output_str = "" for p, v in zip( 
generic_parameter_dict["data_dict"]["Parameter"], @@ -934,7 +928,7 @@ def convert_generic_parameters_to_string(generic_parameter_dict: dict) -> str: return output_str[:-1] -def convert_generic_parameters_to_dictionary(generic_parameter_dict: dict) -> dict: +def _convert_generic_parameters_to_dictionary(generic_parameter_dict: dict) -> dict: return { p: v for p, v in zip( @@ -976,7 +970,7 @@ def recursive_sort(input_value: dict) -> dict: else: ind_dict[int(ind)] = key content_dict[key] = recursive_sort(input_value=v) - elif k != "DICT_VERSION": + else: content_dict[k] = recursive_sort(input_value=v) if content_lst_flag: return [ind_dict[ind] for ind in sorted(list(ind_dict.keys()))] @@ -991,7 +985,7 @@ def recursive_sort(input_value: dict) -> dict: raise KeyError(ind_dict, content_dict) -def convert_datacontainer_to_dictionary(data_container_dict: dict) -> dict: +def _convert_datacontainer_to_dictionary(data_container_dict: dict) -> dict: return _sort_dictionary_from_datacontainer( input_dict=_filter_dict( input_dict=data_container_dict, @@ -999,6 +993,7 @@ def convert_datacontainer_to_dictionary(data_container_dict: dict) -> dict: "NAME", "TYPE", "OBJECT", + "DICT_VERSION", "HDF_VERSION", "READ_ONLY", "VERSION", diff --git a/pyiron_dataclasses/v1/dft.py b/pyiron_dataclasses/v1/dft.py index 1a3fbf1..002e410 100644 --- a/pyiron_dataclasses/v1/dft.py +++ b/pyiron_dataclasses/v1/dft.py @@ -1,6 +1,7 @@ from dataclasses import dataclass +from typing import List, Optional + import numpy as np -from typing import Optional, List @dataclass diff --git a/pyiron_dataclasses/v1/lammps.py b/pyiron_dataclasses/v1/lammps.py index cd61358..f9fb44c 100644 --- a/pyiron_dataclasses/v1/lammps.py +++ b/pyiron_dataclasses/v1/lammps.py @@ -1,6 +1,5 @@ from dataclasses import dataclass -from typing import Optional, List - +from typing import List, Optional from pyiron_dataclasses.v1.atomistic import ( GenericInput, diff --git a/pyiron_dataclasses/v1/sphinx.py 
b/pyiron_dataclasses/v1/sphinx.py index e061a29..3d46730 100644 --- a/pyiron_dataclasses/v1/sphinx.py +++ b/pyiron_dataclasses/v1/sphinx.py @@ -1,7 +1,7 @@ from dataclasses import dataclass -import numpy as np from typing import List, Optional +import numpy as np from pyiron_dataclasses.v1.atomistic import ( GenericInput, @@ -9,8 +9,8 @@ Structure, ) from pyiron_dataclasses.v1.dft import ( - ElectronicStructure, ChargeDensity, + ElectronicStructure, ) from pyiron_dataclasses.v1.job import ( Executable, diff --git a/pyiron_dataclasses/v1/vasp.py b/pyiron_dataclasses/v1/vasp.py index e6fb3ae..6c6cb0e 100644 --- a/pyiron_dataclasses/v1/vasp.py +++ b/pyiron_dataclasses/v1/vasp.py @@ -1,6 +1,6 @@ from dataclasses import dataclass -import numpy as np +import numpy as np from pyiron_dataclasses.v1.atomistic import ( GenericInput, @@ -8,8 +8,8 @@ Structure, ) from pyiron_dataclasses.v1.dft import ( - ElectronicStructure, ChargeDensity, + ElectronicStructure, ) from pyiron_dataclasses.v1.job import ( Executable, diff --git a/pyproject.toml b/pyproject.toml index e768e1f..da1a672 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,9 +4,9 @@ build-backend = "setuptools.build_meta" [project] name = "pyiron_dataclasses" -description = "Core components of the pyiron integrated development environment (IDE) for computational materials science" +description = "Dataclasses for the pyiron workflow framework" authors = [ - { name = "Max-Planck-Institut für Eisenforschung GmbH - Computational Materials Design (CM) Department", email = "pyiron@mpie.de" }, + { name = "Jan Janssen", email = "janssen@mpie.de" }, ] readme = "README.md" license = { file = "LICENSE" } diff --git a/tests/test_pyiron_atomistics_live.py b/tests/test_pyiron_atomistics_live.py index 57852ee..167ca48 100644 --- a/tests/test_pyiron_atomistics_live.py +++ b/tests/test_pyiron_atomistics_live.py @@ -1,15 +1,13 @@ import unittest -from h5io_browser.base import read_dict_from_hdf +from h5io_browser import 
read_dict_from_hdf from pint import UnitRegistry -from convert import ( - convert_sphinx_job_dict, - convert_lammps_job_dict, -) +from pyiron_dataclasses.v1.converter import get_dataclass try: from pyiron_atomistics import Project + skip_pyiron_atomistics_test = False except ImportError: skip_pyiron_atomistics_test = True @@ -24,7 +22,8 @@ def get_node_from_job_dict(job_dict, node): @unittest.skipIf( - skip_pyiron_atomistics_test, "pyiron_atomistics is not installed, so the pyiron_atomistics tests are skipped." + skip_pyiron_atomistics_test, + "pyiron_atomistics is not installed, so the pyiron_atomistics tests are skipped.", ) class TestPyironAtomisticsLive(unittest.TestCase): def setUp(self): @@ -43,10 +42,13 @@ def test_sphinx_calc_minimize(self): file_name=job.project_hdf5.file_name, h5_path="/", recursive=True, - slash='ignore', + slash="ignore", + ) + job_sphinx = get_dataclass(job_dict[job.job_name]) + self.assertEqual( + job_sphinx.calculation_output.generic.energy_tot[-1], + -228.78315943905295 * ureg.eV, ) - job_sphinx = convert_sphinx_job_dict(job_dict[job.job_name]) - self.assertEqual(job_sphinx.calculation_output.generic.energy_tot[-1], -228.78315943905295 * ureg.eV) def test_sphinx_calc_static(self): ureg = UnitRegistry() @@ -57,52 +59,61 @@ def test_sphinx_calc_static(self): file_name=job.project_hdf5.file_name, h5_path="/", recursive=True, - slash='ignore', + slash="ignore", + ) + job_sphinx = get_dataclass(job_dict[job.job_name]) + self.assertEqual( + job_sphinx.calculation_output.generic.energy_tot[-1], + -228.78315953829286 * ureg.eV, ) - job_sphinx = convert_sphinx_job_dict(job_dict[job.job_name]) - self.assertEqual(job_sphinx.calculation_output.generic.energy_tot[-1], -228.78315953829286 * ureg.eV) def test_lammps_calc_static(self): ureg = UnitRegistry() job = self.project.create.job.Lammps("lmp_static") job.structure = self.project.create.structure.ase.bulk("Al", cubic=True) - job.potential = '2002--Mishin-Y--Ni-Al--LAMMPS--ipr1' + 
job.potential = "2002--Mishin-Y--Ni-Al--LAMMPS--ipr1" job.run() job_dict = read_dict_from_hdf( file_name=job.project_hdf5.file_name, h5_path="/", recursive=True, - slash='ignore', + slash="ignore", + ) + job_lammps = get_dataclass(job_dict[job.job_name]) + self.assertEqual( + job_lammps.calculation_output.generic.energy_tot[-1], + -13.4486826111902 * ureg.eV, ) - job_lammps = convert_lammps_job_dict(job_dict[job.job_name]) - self.assertEqual(job_lammps.calculation_output.generic.energy_tot[-1], -13.4486826111902 * ureg.eV) def test_lammps_calc_md(self): job = self.project.create.job.Lammps("lmp_md") job.structure = self.project.create.structure.ase.bulk("Al", cubic=True) - job.potential = '2002--Mishin-Y--Ni-Al--LAMMPS--ipr1' + job.potential = "2002--Mishin-Y--Ni-Al--LAMMPS--ipr1" job.calc_md(temperature=200.0, n_ionic_steps=1000, n_print=100) job.run() job_dict = read_dict_from_hdf( file_name=job.project_hdf5.file_name, h5_path="/", recursive=True, - slash='ignore', + slash="ignore", ) - job_lammps = convert_lammps_job_dict(job_dict[job.job_name]) + job_lammps = get_dataclass(job_dict[job.job_name]) self.assertEqual(len(job_lammps.calculation_output.generic.energy_tot), 11) def test_lammps_calc_minimize(self): ureg = UnitRegistry() job = self.project.create.job.Lammps("lmp_mini") job.structure = self.project.create.structure.ase.bulk("Al", cubic=True) - job.potential = '2002--Mishin-Y--Ni-Al--LAMMPS--ipr1' + job.potential = "2002--Mishin-Y--Ni-Al--LAMMPS--ipr1" job.run() job_dict = read_dict_from_hdf( file_name=job.project_hdf5.file_name, h5_path="/", recursive=True, - slash='ignore', + slash="ignore", + ) + job_lammps = get_dataclass(job_dict[job.job_name]) + self.assertEqual( + job_lammps.calculation_output.generic.energy_tot[-1], + -13.4486826111902 * ureg.eV, ) - job_lammps = convert_lammps_job_dict(job_dict[job.job_name]) - self.assertEqual(job_lammps.calculation_output.generic.energy_tot[-1], -13.4486826111902 * ureg.eV) diff --git 
a/tests/test_pyiron_atomistics_static.py b/tests/test_pyiron_atomistics_static.py index 8eb58dc..5876b67 100644 --- a/tests/test_pyiron_atomistics_static.py +++ b/tests/test_pyiron_atomistics_static.py @@ -1,54 +1,20 @@ import os from unittest import TestCase -from h5io_browser.base import read_dict_from_hdf +from h5io_browser import read_dict_from_hdf from pint import UnitRegistry -from convert import ( - convert, - convert_sphinx_job_dict, - convert_lammps_job_dict, - convert_vasp_job_dict, +from pyiron_dataclasses.v1.converter import ( + get_dataclass, ) class TestPyironAtomisticsStatic(TestCase): - def test_sphinx(self): - ureg = UnitRegistry() - job_dict = read_dict_from_hdf( - file_name=os.path.join(os.path.dirname(__file__), "pyiron_atomistics_0_6_13", "sx.h5"), - h5_path="/sx", - recursive=True, - slash='ignore', - ) - job_sphinx = convert_sphinx_job_dict(job_dict=job_dict) - self.assertEqual(job_sphinx.calculation_output.generic.energy_tot[-1], -228.7831594379917 * ureg.eV) - - def test_lammps(self): - ureg = UnitRegistry() - job_dict = read_dict_from_hdf( - file_name=os.path.join(os.path.dirname(__file__), "pyiron_atomistics_0_6_13", "lmp.h5"), - h5_path="/lmp", - recursive=True, - slash='ignore', - ) - job_lammps = convert_lammps_job_dict(job_dict=job_dict) - self.assertEqual(job_lammps.calculation_output.generic.energy_tot[-1], -9428.45286561574 * ureg.eV) - - def test_vasp(self): - ureg = UnitRegistry() - job_dict = read_dict_from_hdf( - file_name=os.path.join(os.path.dirname(__file__), "pyiron_atomistics_0_6_13", "vasp.h5"), - h5_path="/vasp", - recursive=True, - slash='ignore', - ) - job_vasp = convert_vasp_job_dict(job_dict=job_dict) - self.assertEqual(job_vasp.calculation_output.generic.energy_tot[-1], -14.7459202 * ureg.eV) - def test_all(self): ureg = UnitRegistry() - static_folder = os.path.join(os.path.dirname(__file__), "pyiron_atomistics_0_6_13") + static_folder = os.path.join( + os.path.dirname(__file__), "pyiron_atomistics_0_6_13" + ) 
energy_dict = { "sx.h5": -228.7831594379917 * ureg.eV, "lmp.h5": -9428.45286561574 * ureg.eV, @@ -59,9 +25,11 @@ def test_all(self): file_name=os.path.join(static_folder, hdf5_file), h5_path="/", recursive=True, - slash='ignore', + slash="ignore", )[hdf5_file.split(".")[0]] self.assertEqual( - convert(job_dict=job_dict).calculation_output.generic.energy_tot[-1], - energy_dict[hdf5_file] + get_dataclass(job_dict=job_dict).calculation_output.generic.energy_tot[ + -1 + ], + energy_dict[hdf5_file], )