diff --git a/.ci_support/environment-mini.yml b/.ci_support/environment-mini.yml new file mode 100644 index 0000000..d10a1a1 --- /dev/null +++ b/.ci_support/environment-mini.yml @@ -0,0 +1,5 @@ +channels: +- conda-forge +dependencies: +- h5io_browser =0.1.2 +- pint =0.24.3 diff --git a/.ci_support/environment-pyiron-atomistics-0-6-12.yml b/.ci_support/environment-pyiron-atomistics-0-6-12.yml new file mode 100644 index 0000000..a146cd3 --- /dev/null +++ b/.ci_support/environment-pyiron-atomistics-0-6-12.yml @@ -0,0 +1,9 @@ +channels: +- conda-forge +dependencies: +- jupyter +- papermill +- pyiron-data =0.0.30 +- pyiron_atomistics =0.6.12 +- lammps =2024.02.07=*_openmpi_* +- sphinxdft =3.1 diff --git a/.ci_support/environment-pyiron-atomistics-0-6-13.yml b/.ci_support/environment-pyiron-atomistics-0-6-13.yml new file mode 100644 index 0000000..06fa42c --- /dev/null +++ b/.ci_support/environment-pyiron-atomistics-0-6-13.yml @@ -0,0 +1,9 @@ +channels: +- conda-forge +dependencies: +- jupyter +- papermill +- pyiron-data =0.0.30 +- pyiron_atomistics =0.6.13 +- lammps =2024.02.07=*_openmpi_* +- sphinxdft =3.1 diff --git a/.github/workflows/minimal.yml b/.github/workflows/minimal.yml new file mode 100644 index 0000000..2700a8c --- /dev/null +++ b/.github/workflows/minimal.yml @@ -0,0 +1,29 @@ +name: Minimal pyiron_atomistics 0.6.13 + +on: + push: + branches: [ main ] + pull_request: + +jobs: + build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Conda config + shell: bash -l {0} + run: echo -e "channels:\n - conda-forge\n" > .condarc + - name: Setup Mambaforge + uses: conda-incubator/setup-miniconda@v3 + with: + python-version: "3.12" + miniforge-version: latest + condarc-file: .condarc + environment-file: .ci_support/environment-mini.yml + - name: Test + shell: bash -l {0} + timeout-minutes: 5 + run: | + pip install versioneer[toml]==0.29 + pip install . 
--no-deps --no-build-isolation + python -m unittest discover tests diff --git a/.github/workflows/pyiron_atomistics_tests_0_6_12.yml b/.github/workflows/pyiron_atomistics_tests_0_6_12.yml new file mode 100644 index 0000000..382f25e --- /dev/null +++ b/.github/workflows/pyiron_atomistics_tests_0_6_12.yml @@ -0,0 +1,29 @@ +name: Tests for pyiron_atomistics 0.6.12 + +on: + push: + branches: [ main ] + pull_request: + +jobs: + build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Conda config + shell: bash -l {0} + run: echo -e "channels:\n - conda-forge\n" > .condarc + - name: Setup Mambaforge + uses: conda-incubator/setup-miniconda@v3 + with: + python-version: "3.12" + miniforge-version: latest + condarc-file: .condarc + environment-file: .ci_support/environment-pyiron-atomistics-0-6-12.yml + - name: Test + shell: bash -l {0} + timeout-minutes: 5 + run: | + pip install versioneer[toml]==0.29 + pip install . --no-deps --no-build-isolation + python -m unittest discover tests diff --git a/.github/workflows/pyiron_atomistics_tests_0_6_13.yml b/.github/workflows/pyiron_atomistics_tests_0_6_13.yml new file mode 100644 index 0000000..cebb678 --- /dev/null +++ b/.github/workflows/pyiron_atomistics_tests_0_6_13.yml @@ -0,0 +1,29 @@ +name: Tests for pyiron_atomistics 0.6.13 + +on: + push: + branches: [ main ] + pull_request: + +jobs: + build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Conda config + shell: bash -l {0} + run: echo -e "channels:\n - conda-forge\n" > .condarc + - name: Setup Mambaforge + uses: conda-incubator/setup-miniconda@v3 + with: + python-version: "3.12" + miniforge-version: latest + condarc-file: .condarc + environment-file: .ci_support/environment-pyiron-atomistics-0-6-13.yml + - name: Test + shell: bash -l {0} + timeout-minutes: 5 + run: | + pip install versioneer[toml]==0.29 + pip install . 
--no-deps --no-build-isolation + python -m unittest discover tests diff --git a/pyiron_dataclasses/__init__.py b/pyiron_dataclasses/__init__.py index e69de29..601103e 100644 --- a/pyiron_dataclasses/__init__.py +++ b/pyiron_dataclasses/__init__.py @@ -0,0 +1,5 @@ +# Internal init +from ._version import get_versions + +# Set version of pyiron_base +__version__ = get_versions()["version"] diff --git a/pyiron_dataclasses/_version.py b/pyiron_dataclasses/_version.py new file mode 100644 index 0000000..6ad06dd --- /dev/null +++ b/pyiron_dataclasses/_version.py @@ -0,0 +1,716 @@ +# This file helps to compute a version number in source trees obtained from +# git-archive tarball (such as those provided by githubs download-from-tag +# feature). Distribution tarballs (built by setup.py sdist) and build +# directories (produced by setup.py build) will contain a much shorter file +# that just contains the computed version number. + +# This file is released into the public domain. +# Generated by versioneer-0.29 +# https://github.com/python-versioneer/python-versioneer + +"""Git implementation of _version.py.""" + +import errno +import functools +import os +import re +import subprocess +import sys +from typing import Any, Callable, Dict, List, Optional, Tuple + + +def get_keywords() -> Dict[str, str]: + """Get the keywords needed to look up the version information.""" + # these strings will be replaced by git during git-archive. + # setup.py/versioneer.py will grep for the variable names, so they must + # each be defined on a line of their own. _version.py will just call + # get_keywords(). 
+ git_refnames = "$Format:%d$" + git_full = "$Format:%H$" + git_date = "$Format:%ci$" + keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} + return keywords + + +class VersioneerConfig: + """Container for Versioneer configuration parameters.""" + + VCS: str + style: str + tag_prefix: str + parentdir_prefix: str + versionfile_source: str + verbose: bool + + +def get_config() -> VersioneerConfig: + """Create, populate and return the VersioneerConfig() object.""" + # these strings are filled in when 'setup.py versioneer' creates + # _version.py + cfg = VersioneerConfig() + cfg.VCS = "git" + cfg.style = "pep440-pre" + cfg.tag_prefix = "pyiron_dataclasses-" + cfg.parentdir_prefix = "pyiron_dataclasses" + cfg.versionfile_source = "pyiron_dataclasses/_version.py" + cfg.verbose = False + return cfg + + +class NotThisMethod(Exception): + """Exception raised if a method is not valid for the current scenario.""" + + +LONG_VERSION_PY: Dict[str, str] = {} +HANDLERS: Dict[str, Dict[str, Callable]] = {} + + +def register_vcs_handler(vcs: str, method: str) -> Callable: # decorator + """Create decorator to mark a method as the handler of a VCS.""" + + def decorate(f: Callable) -> Callable: + """Store f in HANDLERS[vcs][method].""" + if vcs not in HANDLERS: + HANDLERS[vcs] = {} + HANDLERS[vcs][method] = f + return f + + return decorate + + +def run_command( + commands: List[str], + args: List[str], + cwd: Optional[str] = None, + verbose: bool = False, + hide_stderr: bool = False, + env: Optional[Dict[str, str]] = None, +) -> Tuple[Optional[str], Optional[int]]: + """Call the given command(s).""" + assert isinstance(commands, list) + process = None + + popen_kwargs: Dict[str, Any] = {} + if sys.platform == "win32": + # This hides the console window if pythonw.exe is used + startupinfo = subprocess.STARTUPINFO() + startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW + popen_kwargs["startupinfo"] = startupinfo + + for command in commands: + try: + dispcmd = 
str([command] + args) + # remember shell=False, so use git.cmd on windows, not just git + process = subprocess.Popen( + [command] + args, + cwd=cwd, + env=env, + stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr else None), + **popen_kwargs, + ) + break + except OSError as e: + if e.errno == errno.ENOENT: + continue + if verbose: + print("unable to run %s" % dispcmd) + print(e) + return None, None + else: + if verbose: + print("unable to find command, tried %s" % (commands,)) + return None, None + stdout = process.communicate()[0].strip().decode() + if process.returncode != 0: + if verbose: + print("unable to run %s (error)" % dispcmd) + print("stdout was %s" % stdout) + return None, process.returncode + return stdout, process.returncode + + +def versions_from_parentdir( + parentdir_prefix: str, + root: str, + verbose: bool, +) -> Dict[str, Any]: + """Try to determine the version from the parent directory name. + + Source tarballs conventionally unpack into a directory that includes both + the project name and a version string. We will also support searching up + two directory levels for an appropriately named parent directory + """ + rootdirs = [] + + for _ in range(3): + dirname = os.path.basename(root) + if dirname.startswith(parentdir_prefix): + return { + "version": dirname[len(parentdir_prefix) :], + "full-revisionid": None, + "dirty": False, + "error": None, + "date": None, + } + rootdirs.append(root) + root = os.path.dirname(root) # up a level + + if verbose: + print( + "Tried directories %s but none started with prefix %s" + % (str(rootdirs), parentdir_prefix) + ) + raise NotThisMethod("rootdir doesn't start with parentdir_prefix") + + +@register_vcs_handler("git", "get_keywords") +def git_get_keywords(versionfile_abs: str) -> Dict[str, str]: + """Extract version information from the given file.""" + # the code embedded in _version.py can just fetch the value of these + # keywords. 
When used from setup.py, we don't want to import _version.py, + # so we do it with a regexp instead. This function is not used from + # _version.py. + keywords: Dict[str, str] = {} + try: + with open(versionfile_abs, "r") as fobj: + for line in fobj: + if line.strip().startswith("git_refnames ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["refnames"] = mo.group(1) + if line.strip().startswith("git_full ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["full"] = mo.group(1) + if line.strip().startswith("git_date ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["date"] = mo.group(1) + except OSError: + pass + return keywords + + +@register_vcs_handler("git", "keywords") +def git_versions_from_keywords( + keywords: Dict[str, str], + tag_prefix: str, + verbose: bool, +) -> Dict[str, Any]: + """Get version information from git keywords.""" + if "refnames" not in keywords: + raise NotThisMethod("Short version file found") + date = keywords.get("date") + if date is not None: + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] + + # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant + # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 + # -like" string, which we must then edit to make compliant), because + # it's been around since git-1.5.3, and it's too difficult to + # discover which version we're using, or to work around using an + # older one. + date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + refnames = keywords["refnames"].strip() + if refnames.startswith("$Format"): + if verbose: + print("keywords are unexpanded, not using") + raise NotThisMethod("unexpanded keywords, not a git-archive tarball") + refs = {r.strip() for r in refnames.strip("()").split(",")} + # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of + # just "foo-1.0". If we see a "tag: " prefix, prefer those. 
+ TAG = "tag: " + tags = {r[len(TAG) :] for r in refs if r.startswith(TAG)} + if not tags: + # Either we're using git < 1.8.3, or there really are no tags. We use + # a heuristic: assume all version tags have a digit. The old git %d + # expansion behaves like git log --decorate=short and strips out the + # refs/heads/ and refs/tags/ prefixes that would let us distinguish + # between branches and tags. By ignoring refnames without digits, we + # filter out many common branch names like "release" and + # "stabilization", as well as "HEAD" and "master". + tags = {r for r in refs if re.search(r"\d", r)} + if verbose: + print("discarding '%s', no digits" % ",".join(refs - tags)) + if verbose: + print("likely tags: %s" % ",".join(sorted(tags))) + for ref in sorted(tags): + # sorting will prefer e.g. "2.0" over "2.0rc1" + if ref.startswith(tag_prefix): + r = ref[len(tag_prefix) :] + # Filter out refs that exactly match prefix or that don't start + # with a number once the prefix is stripped (mostly a concern + # when prefix is '') + if not re.match(r"\d", r): + continue + if verbose: + print("picking %s" % r) + return { + "version": r, + "full-revisionid": keywords["full"].strip(), + "dirty": False, + "error": None, + "date": date, + } + # no suitable tags, so version is "0+unknown", but full hex is still there + if verbose: + print("no suitable tags, using unknown + full revision id") + return { + "version": "0+unknown", + "full-revisionid": keywords["full"].strip(), + "dirty": False, + "error": "no suitable tags", + "date": None, + } + + +@register_vcs_handler("git", "pieces_from_vcs") +def git_pieces_from_vcs( + tag_prefix: str, root: str, verbose: bool, runner: Callable = run_command +) -> Dict[str, Any]: + """Get version from 'git describe' in the root of the source tree. 
+ + This only gets called if the git-archive 'subst' keywords were *not* + expanded, and _version.py hasn't already been rewritten with a short + version string, meaning we're inside a checked out source tree. + """ + GITS = ["git"] + if sys.platform == "win32": + GITS = ["git.cmd", "git.exe"] + + # GIT_DIR can interfere with correct operation of Versioneer. + # It may be intended to be passed to the Versioneer-versioned project, + # but that should not change where we get our version from. + env = os.environ.copy() + env.pop("GIT_DIR", None) + runner = functools.partial(runner, env=env) + + _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=not verbose) + if rc != 0: + if verbose: + print("Directory %s not under git control" % root) + raise NotThisMethod("'git rev-parse --git-dir' returned error") + + # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] + # if there isn't one, this yields HEX[-dirty] (no NUM) + describe_out, rc = runner( + GITS, + [ + "describe", + "--tags", + "--dirty", + "--always", + "--long", + "--match", + f"{tag_prefix}[[:digit:]]*", + ], + cwd=root, + ) + # --long was added in git-1.5.5 + if describe_out is None: + raise NotThisMethod("'git describe' failed") + describe_out = describe_out.strip() + full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) + if full_out is None: + raise NotThisMethod("'git rev-parse' failed") + full_out = full_out.strip() + + pieces: Dict[str, Any] = {} + pieces["long"] = full_out + pieces["short"] = full_out[:7] # maybe improved later + pieces["error"] = None + + branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], cwd=root) + # --abbrev-ref was added in git-1.6.3 + if rc != 0 or branch_name is None: + raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") + branch_name = branch_name.strip() + + if branch_name == "HEAD": + # If we aren't exactly on a branch, pick a branch which represents + # the current commit. 
If all else fails, we are on a branchless + # commit. + branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) + # --contains was added in git-1.5.4 + if rc != 0 or branches is None: + raise NotThisMethod("'git branch --contains' returned error") + branches = branches.split("\n") + + # Remove the first line if we're running detached + if "(" in branches[0]: + branches.pop(0) + + # Strip off the leading "* " from the list of branches. + branches = [branch[2:] for branch in branches] + if "master" in branches: + branch_name = "master" + elif not branches: + branch_name = None + else: + # Pick the first branch that is returned. Good or bad. + branch_name = branches[0] + + pieces["branch"] = branch_name + + # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] + # TAG might have hyphens. + git_describe = describe_out + + # look for -dirty suffix + dirty = git_describe.endswith("-dirty") + pieces["dirty"] = dirty + if dirty: + git_describe = git_describe[: git_describe.rindex("-dirty")] + + # now we have TAG-NUM-gHEX or HEX + + if "-" in git_describe: + # TAG-NUM-gHEX + mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe) + if not mo: + # unparsable. Maybe git-describe is misbehaving? 
+ pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out + return pieces + + # tag + full_tag = mo.group(1) + if not full_tag.startswith(tag_prefix): + if verbose: + fmt = "tag '%s' doesn't start with prefix '%s'" + print(fmt % (full_tag, tag_prefix)) + pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % ( + full_tag, + tag_prefix, + ) + return pieces + pieces["closest-tag"] = full_tag[len(tag_prefix) :] + + # distance: number of commits since tag + pieces["distance"] = int(mo.group(2)) + + # commit: short hex revision ID + pieces["short"] = mo.group(3) + + else: + # HEX: no tags + pieces["closest-tag"] = None + out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root) + pieces["distance"] = len(out.split()) # total number of commits + + # commit date: see ISO-8601 comment in git_versions_from_keywords() + date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] + pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + + return pieces + + +def plus_or_dot(pieces: Dict[str, Any]) -> str: + """Return a + if we don't already have one, else return a .""" + if "+" in pieces.get("closest-tag", ""): + return "." + return "+" + + +def render_pep440(pieces: Dict[str, Any]) -> str: + """Build up version string, with post-release "local version identifier". + + Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you + get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty + + Exceptions: + 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += plus_or_dot(pieces) + rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_branch(pieces: Dict[str, Any]) -> str: + """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . + + The ".dev0" means not master branch. Note that .dev0 sorts backwards + (a feature branch will appear "older" than the master branch). + + Exceptions: + 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0" + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += "+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def pep440_split_post(ver: str) -> Tuple[str, Optional[int]]: + """Split pep440 version string at the post-release segment. + + Returns the release segments before the post-release and the + post-release version number (or -1 if no post-release segment is present). + """ + vc = str.split(ver, ".post") + return vc[0], int(vc[1] or 0) if len(vc) == 2 else None + + +def render_pep440_pre(pieces: Dict[str, Any]) -> str: + """TAG[.postN.devDISTANCE] -- No -dirty. + + Exceptions: + 1: no tags. 
0.post0.devDISTANCE + """ + if pieces["closest-tag"]: + if pieces["distance"]: + # update the post release segment + tag_version, post_version = pep440_split_post(pieces["closest-tag"]) + rendered = tag_version + if post_version is not None: + rendered += ".post%d.dev%d" % (post_version + 1, pieces["distance"]) + else: + rendered += ".post0.dev%d" % (pieces["distance"]) + else: + # no commits, use the tag as the version + rendered = pieces["closest-tag"] + else: + # exception #1 + rendered = "0.post0.dev%d" % pieces["distance"] + return rendered + + +def render_pep440_post(pieces: Dict[str, Any]) -> str: + """TAG[.postDISTANCE[.dev0]+gHEX] . + + The ".dev0" means dirty. Note that .dev0 sorts backwards + (a dirty tree will appear "older" than the corresponding clean one), + but you shouldn't be releasing software with -dirty anyways. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%s" % pieces["short"] + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += "+g%s" % pieces["short"] + return rendered + + +def render_pep440_post_branch(pieces: Dict[str, Any]) -> str: + """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . + + The ".dev0" means not master branch. + + Exceptions: + 1: no tags. 
0.postDISTANCE[.dev0]+gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%s" % pieces["short"] + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += "+g%s" % pieces["short"] + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_old(pieces: Dict[str, Any]) -> str: + """TAG[.postDISTANCE[.dev0]] . + + The ".dev0" means dirty. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + return rendered + + +def render_git_describe(pieces: Dict[str, Any]) -> str: + """TAG[-DISTANCE-gHEX][-dirty]. + + Like 'git describe --tags --dirty --always'. + + Exceptions: + 1: no tags. HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render_git_describe_long(pieces: Dict[str, Any]) -> str: + """TAG-DISTANCE-gHEX[-dirty]. + + Like 'git describe --tags --dirty --always -long'. + The distance/hash is unconditional. + + Exceptions: + 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render(pieces: Dict[str, Any], style: str) -> Dict[str, Any]: + """Render the given version pieces into the requested style.""" + if pieces["error"]: + return { + "version": "unknown", + "full-revisionid": pieces.get("long"), + "dirty": None, + "error": pieces["error"], + "date": None, + } + + if not style or style == "default": + style = "pep440" # the default + + if style == "pep440": + rendered = render_pep440(pieces) + elif style == "pep440-branch": + rendered = render_pep440_branch(pieces) + elif style == "pep440-pre": + rendered = render_pep440_pre(pieces) + elif style == "pep440-post": + rendered = render_pep440_post(pieces) + elif style == "pep440-post-branch": + rendered = render_pep440_post_branch(pieces) + elif style == "pep440-old": + rendered = render_pep440_old(pieces) + elif style == "git-describe": + rendered = render_git_describe(pieces) + elif style == "git-describe-long": + rendered = render_git_describe_long(pieces) + else: + raise ValueError("unknown style '%s'" % style) + + return { + "version": rendered, + "full-revisionid": pieces["long"], + "dirty": pieces["dirty"], + "error": None, + "date": pieces.get("date"), + } + + +def get_versions() -> Dict[str, Any]: + """Get version information or return default if unable to do so.""" + # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have + # __file__, we can work backwards from there to the root. Some + # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which + # case we can only use expanded keywords. 
+ + cfg = get_config() + verbose = cfg.verbose + + try: + return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose) + except NotThisMethod: + pass + + try: + root = os.path.realpath(__file__) + # versionfile_source is the relative path from the top of the source + # tree (where the .git directory might live) to this file. Invert + # this to find the root from __file__. + for _ in cfg.versionfile_source.split("/"): + root = os.path.dirname(root) + except NameError: + return { + "version": "0+unknown", + "full-revisionid": None, + "dirty": None, + "error": "unable to find root of source tree", + "date": None, + } + + try: + pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) + return render(pieces, cfg.style) + except NotThisMethod: + pass + + try: + if cfg.parentdir_prefix: + return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) + except NotThisMethod: + pass + + return { + "version": "0+unknown", + "full-revisionid": None, + "dirty": None, + "error": "unable to compute version", + "date": None, + } diff --git a/pyiron_dataclasses/v1/calculation.py b/pyiron_dataclasses/v1/calculation.py deleted file mode 100644 index 17dc75f..0000000 --- a/pyiron_dataclasses/v1/calculation.py +++ /dev/null @@ -1,31 +0,0 @@ -from dataclasses import dataclass -from typing import List, Optional, Union - - -@dataclass -class CalculateMolecularDynamics: - temperature: float - pressure: Optional[Union[float, List[float]]] - n_ionic_steps: int - time_step: float - n_print: int - temperature_damping_timescale: float - pressure_damping_timescale: float - seed: Optional[int] - tloop: Optional[float] - initial_temperature: Optional[float] - langevin: bool - delta_temp: Optional[float] - delta_press: Optional[float] - - -@dataclass -class CalculateMinimize: - ionic_energy_tolerance: float - ionic_force_tolerance: float - e_tol: Optional[float] - f_tol: Optional[float] - max_iter: int - pressure: Optional[Union[float, List[float]]] - n_print: int - style: 
str diff --git a/pyiron_dataclasses/v1/sphinx.py b/pyiron_dataclasses/v1/sphinx.py index 8491f8d..e061a29 100644 --- a/pyiron_dataclasses/v1/sphinx.py +++ b/pyiron_dataclasses/v1/sphinx.py @@ -1,6 +1,6 @@ from dataclasses import dataclass import numpy as np -from typing import List +from typing import List, Optional from pyiron_dataclasses.v1.atomistic import ( @@ -123,9 +123,16 @@ class SphinxRicQN: born_oppenheimer: BornOppenheimer +@dataclass +class SphinxEvalForces: + file: str + + @dataclass class SphinxMain: - ric_qn: SphinxRicQN + ric_qn: Optional[SphinxRicQN] + eval_forces: Optional[SphinxEvalForces] + scf_diag: Optional[ScfDiag] @dataclass diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/convert.py b/tests/convert.py new file mode 100644 index 0000000..6dad9ba --- /dev/null +++ b/tests/convert.py @@ -0,0 +1,1007 @@ +from pint import UnitRegistry + +from pyiron_dataclasses.v1.job import ( + Executable, + Interactive, + GenericDict, + Server, +) +from pyiron_dataclasses.v1.atomistic import ( + GenericInput, + GenericOutput, + Structure, + Units, + Cell, +) +from pyiron_dataclasses.v1.dft import ( + OutputGenericDFT, + ElectronicStructure, + DensityOfStates, + ChargeDensity, +) +from pyiron_dataclasses.v1.lammps import ( + LammpsJob, + LammpsInput, + LammpsOutput, + LammpsPotential, + LammpsInputFiles, +) +from pyiron_dataclasses.v1.sphinx import ( + SphinxJob, + SphinxInput, + SphinxInputParameters, + SphinxRho, + SphinxAtom, + SphinxMain, + SphinxBasis, + SphinxWaves, + SphinxOutput, + SphinxKpoint, + SphinxElement, + SphinxEvalForces, + SphinxStructure, + SphinxRicQN, + SphinxInternalInput, + Species, + SphinxInitialGuess, + SphinxPawHamiltonian, + SphinxPreConditioner, + SphinxElectrostaticPotential, + ScfDiag, + PawPot, + BornOppenheimer, +) +from pyiron_dataclasses.v1.vasp import ( + VaspJob, + VaspInput, + VaspOutput, + VaspResources, + PotCar, + OutCar, +) + + +def 
convert_sphinx_job_dict(job_dict: dict) -> SphinxJob: + ureg = UnitRegistry() + sphinx_input_parameter_dict = convert_datacontainer_to_dictionary( + data_container_dict=job_dict["input"]["parameters"] + ) + generic_input_dict = convert_generic_parameters_to_dictionary( + generic_parameter_dict=job_dict["input"]["generic"], + ) + output_dict = convert_datacontainer_to_dictionary( + data_container_dict=job_dict["output"]["generic"] + ) + if "ricQN" in sphinx_input_parameter_dict["sphinx"]["main"]: + sphinx_main = SphinxMain( + ric_qn=SphinxRicQN( + max_steps=int( + sphinx_input_parameter_dict["sphinx"]["main"]["ricQN"][ + "maxSteps" + ] + ), + max_step_length=float( + sphinx_input_parameter_dict["sphinx"]["main"]["ricQN"][ + "maxStepLength" + ] + ), + born_oppenheimer=BornOppenheimer( + scf_diag=ScfDiag( + rho_mixing=float( + sphinx_input_parameter_dict["sphinx"]["main"][ + "ricQN" + ]["bornOppenheimer"]["scfDiag"]["rhoMixing"] + ), + spin_mixing=float( + sphinx_input_parameter_dict["sphinx"]["main"][ + "ricQN" + ]["bornOppenheimer"]["scfDiag"]["spinMixing"] + ), + delta_energy=sphinx_input_parameter_dict["sphinx"][ + "main" + ]["ricQN"]["bornOppenheimer"]["scfDiag"]["dEnergy"], + max_steps=sphinx_input_parameter_dict["sphinx"][ + "main" + ]["ricQN"]["bornOppenheimer"]["scfDiag"][ + "maxSteps" + ], + preconditioner=SphinxPreConditioner( + type=sphinx_input_parameter_dict["sphinx"][ + "main" + ]["ricQN"]["bornOppenheimer"]["scfDiag"][ + "preconditioner" + ][ + "type" + ], + scaling=sphinx_input_parameter_dict["sphinx"][ + "main" + ]["ricQN"]["bornOppenheimer"]["scfDiag"][ + "preconditioner" + ][ + "scaling" + ], + spin_scaling=sphinx_input_parameter_dict[ + "sphinx" + ]["main"]["ricQN"]["bornOppenheimer"][ + "scfDiag" + ][ + "preconditioner" + ][ + "spinScaling" + ], + ), + block_ccg=sphinx_input_parameter_dict["sphinx"][ + "main" + ]["ricQN"]["bornOppenheimer"]["scfDiag"][ + "blockCCG" + ], + ), + ), + ), + eval_forces=None, + scf_diag=None, + ) + else: + sphinx_main 
= SphinxMain( + ric_qn=None, + eval_forces=SphinxEvalForces(file=sphinx_input_parameter_dict["sphinx"]["main"]["evalForces"]["file"]), + scf_diag=ScfDiag( + rho_mixing=sphinx_input_parameter_dict["sphinx"]["main"]["scfDiag"][0]["rhoMixing"], + spin_mixing=sphinx_input_parameter_dict["sphinx"]["main"]["scfDiag"][0]["spinMixing"], + delta_energy=sphinx_input_parameter_dict["sphinx"]["main"]["scfDiag"][0]["dEnergy"], + max_steps=sphinx_input_parameter_dict["sphinx"]["main"]["scfDiag"][0]["maxSteps"], + preconditioner=SphinxPreConditioner( + type=sphinx_input_parameter_dict["sphinx"]["main"]["scfDiag"][0]["preconditioner"]["type"], + scaling=sphinx_input_parameter_dict["sphinx"]["main"]["scfDiag"][0]["preconditioner"]["scaling"], + spin_scaling=sphinx_input_parameter_dict["sphinx"]["main"]["scfDiag"][0]["preconditioner"]["spinScaling"], + ), + block_ccg=sphinx_input_parameter_dict["sphinx"]["main"]["scfDiag"][0]["blockCCG"], + ), + ) + return SphinxJob( + executable=Executable( + version=job_dict["executable"]["version"], + name=job_dict["executable"]["name"], + operation_system_nt=job_dict["executable"][ + "operation_system_nt" + ], + executable=job_dict["executable"]["executable"], + mpi=job_dict["executable"]["mpi"], + accepted_return_codes=job_dict["executable"][ + "accepted_return_codes" + ], + ), + server=Server( + user=job_dict["server"]["user"], + host=job_dict["server"]["host"], + run_mode=job_dict["server"]["run_mode"], + queue=job_dict["server"]["queue"], + qid=job_dict["server"]["qid"], + cores=job_dict["server"]["cores"], + threads=job_dict["server"]["threads"], + new_hdf=job_dict["server"]["new_hdf"], + run_time=job_dict["server"]["run_time"], + memory_limit=job_dict["server"]["memory_limit"], + accept_crash=job_dict["server"]["accept_crash"], + ), + calculation_input=SphinxInput( + generic_dict=GenericDict( + restart_file_list=job_dict["input"]["generic_dict"][ + "restart_file_list" + ], + restart_file_dict=job_dict["input"]["generic_dict"][ + 
"restart_file_dict" + ], + exclude_nodes_hdf=job_dict["input"]["generic_dict"][ + "exclude_nodes_hdf" + ], + exclude_groups_hdf=job_dict["input"]["generic_dict"][ + "exclude_groups_hdf" + ], + ), + interactive=Interactive( + interactive_flush_frequency=job_dict["input"]["interactive"][ + "interactive_flush_frequency" + ], + interactive_write_frequency=job_dict["input"]["interactive"][ + "interactive_flush_frequency" + ], + ), + generic=GenericInput( + calc_mode=generic_input_dict["calc_mode"], + structure=generic_input_dict["structure"], + fix_symmetry=generic_input_dict.get("fix_symmetry", None), + k_mesh_spacing=generic_input_dict.get("k_mesh_spacing", None), + k_mesh_center_shift=generic_input_dict.get("k_mesh_center_shift", None), + reduce_kpoint_symmetry=generic_input_dict.get( + "reduce_kpoint_symmetry", None + ), + restart_for_band_structure=generic_input_dict.get( + "restart_for_band_structure", None + ), + path_name=generic_input_dict.get("path_name", None), + n_path=generic_input_dict.get("n_path", None), + fix_spin_constraint=generic_input_dict.get("fix_spin_constraint", None), + max_iter=generic_input_dict.get("max_iter", None), + temperature=generic_input_dict.get("temperature", None), + n_ionic_steps=generic_input_dict.get("n_ionic_steps", None), + n_print=generic_input_dict.get("n_print", None), + temperature_damping_timescale=generic_input_dict.get( + "temperature_damping_timescale", None + ), + pressure_damping_timescale=generic_input_dict.get( + "pressure_damping_timescale", None + ), + time_step=generic_input_dict.get("time_step", None), + ), + parameters=SphinxInputParameters( + sphinx=SphinxInternalInput( + paw_pot=PawPot( + species=[ + Species( + name=s["name"], + pot_type=s["potType"], + element=s["element"], + potential=s["potential"], + ) + for s in sphinx_input_parameter_dict["sphinx"]["pawPot"][ + "species" + ] + ] + ), + structure=SphinxStructure( + cell=sphinx_input_parameter_dict["sphinx"]["structure"]["cell"], + species=[ + 
SphinxElement( + element=s["element"], + atom=[ + SphinxAtom( + label=a["label"], + coords=a["coords"], + movable=bool(a["movable"]), + ) + for a in s["atom"] + ], + ) + for s in sphinx_input_parameter_dict["sphinx"]["structure"][ + "species" + ] + ], + ), + basis=SphinxBasis( + e_cut=sphinx_input_parameter_dict["sphinx"]["basis"]["eCut"], + k_point=SphinxKpoint( + coords=sphinx_input_parameter_dict["sphinx"]["basis"][ + "kPoint" + ]["coords"], + weight=sphinx_input_parameter_dict["sphinx"]["basis"][ + "kPoint" + ]["weight"], + relative=bool( + sphinx_input_parameter_dict["sphinx"]["basis"][ + "kPoint" + ]["relative"] + ), + ), + folding=sphinx_input_parameter_dict["sphinx"]["basis"][ + "folding" + ], + save_memory=bool( + sphinx_input_parameter_dict["sphinx"]["basis"]["saveMemory"] + ), + ), + paw_hamilton=SphinxPawHamiltonian( + number_empty_states=sphinx_input_parameter_dict["sphinx"][ + "PAWHamiltonian" + ]["nEmptyStates"], + ekt=sphinx_input_parameter_dict["sphinx"]["PAWHamiltonian"][ + "ekt" + ], + methfessel_paxton=bool( + sphinx_input_parameter_dict["sphinx"]["PAWHamiltonian"][ + "MethfesselPaxton" + ] + ), + xc=sphinx_input_parameter_dict["sphinx"]["PAWHamiltonian"][ + "xc" + ], + spin_polarized=sphinx_input_parameter_dict["sphinx"][ + "PAWHamiltonian" + ]["spinPolarized"], + ), + initial_guess=SphinxInitialGuess( + waves=SphinxWaves( + paw_basis=sphinx_input_parameter_dict["sphinx"][ + "initialGuess" + ]["waves"]["pawBasis"], + lcao=sphinx_input_parameter_dict["sphinx"]["initialGuess"][ + "waves" + ]["lcao"], + ), + rho=SphinxRho( + atomic_orbitals=sphinx_input_parameter_dict["sphinx"][ + "initialGuess" + ]["rho"]["atomicOrbitals"] + ), + no_waves_storage=bool( + sphinx_input_parameter_dict["sphinx"]["initialGuess"][ + "noWavesStorage" + ] + ), + ), + main=sphinx_main, + ), + encut=float(sphinx_input_parameter_dict["EnCut"]), + kpointcoords=sphinx_input_parameter_dict["KpointCoords"], + kpointfolding=sphinx_input_parameter_dict["KpointFolding"], + 
empty_states=int(sphinx_input_parameter_dict["EmptyStates"]), + methfessel_paxton=bool(sphinx_input_parameter_dict["MethfesselPaxton"]), + sigma=float(sphinx_input_parameter_dict["Sigma"]), + xcorr=sphinx_input_parameter_dict["Xcorr"], + vasppot=bool(sphinx_input_parameter_dict["VaspPot"]), + e_step=int(sphinx_input_parameter_dict["Estep"]), + ediff=float(sphinx_input_parameter_dict["Ediff"]), + write_waves=bool(sphinx_input_parameter_dict["WriteWaves"]), + kj_xc=bool(sphinx_input_parameter_dict["KJxc"]), + save_memory=bool(sphinx_input_parameter_dict["SaveMemory"]), + rho_mixing=float(sphinx_input_parameter_dict["rhoMixing"]), + spin_mixing=float(sphinx_input_parameter_dict["spinMixing"]), + rho_residual_scaling=float( + sphinx_input_parameter_dict["rhoResidualScaling"] + ), + spin_residual_scaling=float( + sphinx_input_parameter_dict["spinResidualScaling"] + ), + check_overlap=bool(sphinx_input_parameter_dict["CheckOverlap"]), + threads=bool(sphinx_input_parameter_dict["THREADS"]), + use_on_the_fly_cg_optimization=bool( + sphinx_input_parameter_dict["use_on_the_fly_cg_optimization"] + ), + ionic_step=int(sphinx_input_parameter_dict.get("Istep", 0)), + ), + structure=Structure( + dimension=job_dict["input"]["structure"]["dimension"], + indices=job_dict["input"]["structure"]["indices"], + info=job_dict["input"]["structure"]["info"], + positions=job_dict["input"]["structure"]["positions"], + species=job_dict["input"]["structure"]["species"], + cell=Cell( + cell=job_dict["input"]["structure"]["cell"]["cell"], + pbc=job_dict["input"]["structure"]["cell"]["pbc"], + ), + units=Units( + length=job_dict["input"]["structure"]["units"]["length"], + mass=job_dict["input"]["structure"]["units"]["mass"], + ), + ), + ), + calculation_output=SphinxOutput( + charge_density=ChargeDensity( + total=job_dict["output"]["charge_density"]["total"] + ), + electronic_structure=ElectronicStructure( + efermi=job_dict["output"]["electronic_structure"]["efermi"], + 
eig_matrix=job_dict["output"]["electronic_structure"]["eig_matrix"], + k_points=job_dict["output"]["electronic_structure"]["k_points"], + k_weights=job_dict["output"]["electronic_structure"]["k_weights"], + occ_matrix=job_dict["output"]["electronic_structure"]["occ_matrix"], + dos=DensityOfStates( + energies=job_dict["output"]["electronic_structure"]["dos"][ + "energies" + ], + int_densities=job_dict["output"]["electronic_structure"]["dos"][ + "int_densities" + ], + tot_densities=job_dict["output"]["electronic_structure"]["dos"][ + "tot_densities" + ], + ), + ), + electrostatic_potential=SphinxElectrostaticPotential( + total=job_dict["output"]["electrostatic_potential"]["total"] + ), + generic=GenericOutput( + cells=output_dict["cells"] * ureg.angstrom, + energy_pot=output_dict["energy_pot"] * ureg.eV, + energy_tot=output_dict["energy_tot"] * ureg.eV, + forces=output_dict["forces"] * ureg.eV / ureg.angstrom, + indices=output_dict.get("indices", None), + natoms=output_dict.get("natoms", None), + positions=output_dict["positions"] * ureg.angstrom, + pressures=output_dict.get("pressures", None), + steps=output_dict.get("steps", None), + temperature=output_dict.get("temperature", None), + unwrapped_positions=output_dict.get("unwarapped_positions", None), + velocities=output_dict.get("velocities", None), + volume=output_dict["volume"] + * ureg.angstrom + * ureg.angstrom + * ureg.angstrom, + stresses=output_dict.get("stresses", None), + elastic_constants=output_dict.get("elastic_constants", None), + dft=OutputGenericDFT( + energy_free=output_dict["dft"]["energy_free"] * ureg.eV, + n_valence=output_dict["dft"]["n_valence"], + bands_k_weights=output_dict["dft"]["bands_k_weights"], + kpoints_cartesian=output_dict["dft"]["kpoints_cartesian"], + bands_e_fermi=output_dict["dft"]["bands_e_fermi"], + bands_occ=output_dict["dft"]["bands_occ"], + bands_eigen_values=output_dict["dft"]["bands_eigen_values"], + scf_convergence=output_dict["dft"]["scf_convergence"], + 
def convert_lammps_job_dict(job_dict: dict) -> LammpsJob:
    """Convert a pyiron_atomistics LAMMPS job dictionary into a LammpsJob object.

    Args:
        job_dict: Nested dictionary as read from the HDF5 file of a
            pyiron_atomistics LAMMPS job (e.g. via
            h5io_browser.base.read_dict_from_hdf).

    Returns:
        LammpsJob: Structured record of the calculation input, output,
        executable and server configuration, with pint units attached to
        the numeric output arrays.
    """
    # Registry used to attach physical units to the raw output arrays.
    ureg = UnitRegistry()
    # The "generic" input group is stored as a GenericParameters table.
    generic_input_dict = convert_generic_parameters_to_dictionary(
        generic_parameter_dict=job_dict["input"]["generic"],
    )
    interactive_dict = job_dict["input"]["interactive"]
    return LammpsJob(
        calculation_input=LammpsInput(
            generic_dict=GenericDict(
                restart_file_list=job_dict["input"]["generic_dict"][
                    "restart_file_list"
                ],
                restart_file_dict=job_dict["input"]["generic_dict"][
                    "restart_file_dict"
                ],
                exclude_nodes_hdf=job_dict["input"]["generic_dict"][
                    "exclude_nodes_hdf"
                ],
                exclude_groups_hdf=job_dict["input"]["generic_dict"][
                    "exclude_groups_hdf"
                ],
            ),
            interactive=Interactive(
                interactive_flush_frequency=interactive_dict[
                    "interactive_flush_frequency"
                ],
                # BUG FIX: this previously re-read "interactive_flush_frequency"
                # (copy-paste). Use the write-frequency key, falling back to the
                # flush frequency only when the key is absent (old behavior).
                interactive_write_frequency=interactive_dict.get(
                    "interactive_write_frequency",
                    interactive_dict["interactive_flush_frequency"],
                ),
            ),
            generic=GenericInput(
                calc_mode=generic_input_dict["calc_mode"],
                structure=generic_input_dict["structure"],
                temperature=generic_input_dict.get("temperature", None),
                n_ionic_steps=generic_input_dict.get("n_ionic_steps", None),
                n_print=generic_input_dict.get("n_print", None),
                temperature_damping_timescale=generic_input_dict.get(
                    "temperature_damping_timescale", None
                ),
                pressure_damping_timescale=generic_input_dict.get(
                    "pressure_damping_timescale", None
                ),
                time_step=generic_input_dict.get("time_step", None),
                # DFT-specific flags below are normally absent for LAMMPS jobs;
                # they are kept so GenericInput matches the DFT converters.
                fix_symmetry=generic_input_dict.get("fix_symmetry", None),
                k_mesh_spacing=generic_input_dict.get("k_mesh_spacing", None),
                k_mesh_center_shift=generic_input_dict.get(
                    "k_mesh_center_shift", None
                ),
                reduce_kpoint_symmetry=generic_input_dict.get(
                    "reduce_kpoint_symmetry", None
                ),
                restart_for_band_structure=generic_input_dict.get(
                    "restart_for_band_structure", None
                ),
                path_name=generic_input_dict.get("path_name", None),
                n_path=generic_input_dict.get("n_path", None),
                fix_spin_constraint=generic_input_dict.get(
                    "fix_spin_constraint", None
                ),
                max_iter=generic_input_dict.get("max_iter", None),
            ),
            structure=Structure(
                dimension=job_dict["input"]["structure"]["dimension"],
                indices=job_dict["input"]["structure"]["indices"],
                info=job_dict["input"]["structure"]["info"],
                positions=job_dict["input"]["structure"]["positions"],
                species=job_dict["input"]["structure"]["species"],
                cell=Cell(
                    cell=job_dict["input"]["structure"]["cell"]["cell"],
                    pbc=job_dict["input"]["structure"]["cell"]["pbc"],
                ),
                units=Units(
                    length=job_dict["input"]["structure"]["units"]["length"],
                    mass=job_dict["input"]["structure"]["units"]["mass"],
                ),
            ),
            potential=LammpsPotential(
                citation=job_dict["input"]["potential_inp"]["potential"]["Citations"],
                config=job_dict["input"]["potential_inp"]["potential"]["Config"],
                filename=job_dict["input"]["potential_inp"]["potential"]["Filename"],
                model=job_dict["input"]["potential_inp"]["potential"]["Model"],
                name=job_dict["input"]["potential_inp"]["potential"]["Name"],
                species=job_dict["input"]["potential_inp"]["potential"]["Species"],
            ),
            input_files=LammpsInputFiles(
                # The control and potential files are stored as parameter
                # tables; flatten them back to their on-disk text form.
                control_inp=convert_generic_parameters_to_string(
                    generic_parameter_dict=job_dict["input"]["control_inp"]
                ),
                potential_inp=convert_generic_parameters_to_string(
                    generic_parameter_dict=job_dict["input"]["potential_inp"]
                ),
            ),
        ),
        executable=Executable(
            version=job_dict["executable"]["version"],
            name=job_dict["executable"]["name"],
            operation_system_nt=job_dict["executable"]["operation_system_nt"],
            executable=job_dict["executable"]["executable"],
            mpi=job_dict["executable"]["mpi"],
            accepted_return_codes=job_dict["executable"]["accepted_return_codes"],
        ),
        server=Server(
            user=job_dict["server"]["user"],
            host=job_dict["server"]["host"],
            run_mode=job_dict["server"]["run_mode"],
            queue=job_dict["server"]["queue"],
            qid=job_dict["server"]["qid"],
            cores=job_dict["server"]["cores"],
            threads=job_dict["server"]["threads"],
            new_hdf=job_dict["server"]["new_hdf"],
            run_time=job_dict["server"]["run_time"],
            memory_limit=job_dict["server"]["memory_limit"],
            accept_crash=job_dict["server"]["accept_crash"],
        ),
        calculation_output=LammpsOutput(
            generic=GenericOutput(
                cells=job_dict["output"]["generic"]["cells"] * ureg.angstrom,
                energy_pot=job_dict["output"]["generic"]["energy_pot"] * ureg.eV,
                energy_tot=job_dict["output"]["generic"]["energy_tot"] * ureg.eV,
                forces=job_dict["output"]["generic"]["forces"]
                * ureg.eV
                / ureg.angstrom,
                indices=job_dict["output"]["generic"]["indices"],
                natoms=job_dict["output"]["generic"]["natoms"],
                positions=job_dict["output"]["generic"]["positions"]
                * ureg.angstrom,
                pressures=job_dict["output"]["generic"]["pressures"] * ureg.GPa,
                steps=job_dict["output"]["generic"]["steps"],
                temperature=job_dict["output"]["generic"]["temperature"]
                * ureg.kelvin,
                unwrapped_positions=job_dict["output"]["generic"][
                    "unwrapped_positions"
                ]
                * ureg.angstrom,
                velocities=job_dict["output"]["generic"]["velocities"]
                * ureg.angstrom
                / ureg.fs,
                volume=job_dict["output"]["generic"]["volume"] * ureg.angstrom**3,
                dft=None,  # LAMMPS is a classical code: no DFT output group.
                stresses=job_dict["output"]["generic"].get("stresses", None),
                elastic_constants=job_dict["output"]["generic"].get(
                    "elastic_constants", None
                ),
            ),
        ),
        job_id=job_dict["job_id"],
        status=job_dict["status"],
    )
def convert_vasp_job_dict(job_dict):
    """Convert a pyiron_atomistics VASP job dictionary into a VaspJob object.

    Args:
        job_dict: Nested dictionary as read from the HDF5 file of a
            pyiron_atomistics VASP job (e.g. via
            h5io_browser.base.read_dict_from_hdf).

    Returns:
        VaspJob: Structured record of the calculation input, output,
        executable and server configuration, with pint units attached to
        the numeric output arrays.
    """
    ureg = UnitRegistry()
    generic_input_dict = convert_generic_parameters_to_dictionary(
        generic_parameter_dict=job_dict["input"]["generic"],
    )
    interactive_dict = job_dict["input"]["interactive"]
    # Shortcuts into the deeply nested output groups.
    generic_output_dict = job_dict["output"]["generic"]
    dft_dict = generic_output_dict["dft"]
    return VaspJob(
        executable=Executable(
            # NOTE(review): unlike the LAMMPS/SPHInX converters, the executable
            # group is read one level deeper ("executable/executable") here --
            # confirm against the HDF5 layout written by pyiron_atomistics.
            version=job_dict["executable"]["executable"]["version"],
            name=job_dict["executable"]["executable"]["name"],
            operation_system_nt=job_dict["executable"]["executable"][
                "operation_system_nt"
            ],
            executable=job_dict["executable"]["executable"]["executable"],
            mpi=job_dict["executable"]["executable"]["mpi"],
            accepted_return_codes=job_dict["executable"]["executable"][
                "accepted_return_codes"
            ],
        ),
        job_id=job_dict["job_id"],
        server=Server(
            user=job_dict["server"]["user"],
            host=job_dict["server"]["host"],
            run_mode=job_dict["server"]["run_mode"],
            queue=job_dict["server"]["queue"],
            qid=job_dict["server"]["qid"],
            cores=job_dict["server"]["cores"],
            threads=job_dict["server"]["threads"],
            # NOTE(review): the other converters read "new_hdf" here -- confirm
            # that VASP jobs really store this flag under "new_h5".
            new_hdf=job_dict["server"]["new_h5"],
            run_time=job_dict["server"]["run_time"],
            memory_limit=job_dict["server"]["memory_limit"],
            accept_crash=job_dict["server"]["accept_crash"],
        ),
        status=job_dict["status"],
        calculation_input=VaspInput(
            generic_dict=GenericDict(
                restart_file_list=job_dict["input"]["generic_dict"][
                    "restart_file_list"
                ],
                restart_file_dict=job_dict["input"]["generic_dict"][
                    "restart_file_dict"
                ],
                exclude_nodes_hdf=job_dict["input"]["generic_dict"][
                    "exclude_nodes_hdf"
                ],
                exclude_groups_hdf=job_dict["input"]["generic_dict"][
                    "exclude_groups_hdf"
                ],
            ),
            interactive=Interactive(
                interactive_flush_frequency=interactive_dict[
                    "interactive_flush_frequency"
                ],
                # BUG FIX: this previously re-read "interactive_flush_frequency"
                # (copy-paste). Use the write-frequency key, falling back to the
                # flush frequency only when the key is absent (old behavior).
                interactive_write_frequency=interactive_dict.get(
                    "interactive_write_frequency",
                    interactive_dict["interactive_flush_frequency"],
                ),
            ),
            potential_dict=job_dict["input"]["potential_dict"],
            generic=GenericInput(
                calc_mode=generic_input_dict["calc_mode"],
                structure=generic_input_dict["structure"],
                temperature=generic_input_dict.get("temperature", None),
                n_ionic_steps=generic_input_dict.get("n_ionic_steps", None),
                n_print=generic_input_dict.get("n_print", None),
                temperature_damping_timescale=generic_input_dict.get(
                    "temperature_damping_timescale", None
                ),
                pressure_damping_timescale=generic_input_dict.get(
                    "pressure_damping_timescale", None
                ),
                time_step=generic_input_dict.get("time_step", None),
                fix_symmetry=generic_input_dict.get("fix_symmetry", None),
                k_mesh_spacing=generic_input_dict.get("k_mesh_spacing", None),
                k_mesh_center_shift=generic_input_dict.get(
                    "k_mesh_center_shift", None
                ),
                reduce_kpoint_symmetry=generic_input_dict.get(
                    "reduce_kpoint_symmetry", None
                ),
                restart_for_band_structure=generic_input_dict.get(
                    "restart_for_band_structure", None
                ),
                path_name=generic_input_dict.get("path_name", None),
                n_path=generic_input_dict.get("n_path", None),
                fix_spin_constraint=generic_input_dict.get(
                    "fix_spin_constraint", None
                ),
                max_iter=generic_input_dict.get("max_iter", None),
            ),
            # INCAR and KPOINTS are stored as parameter tables; flatten them
            # back to their on-disk text form.
            incar=convert_generic_parameters_to_string(
                generic_parameter_dict=job_dict["input"]["incar"]
            ),
            kpoints=convert_generic_parameters_to_string(
                generic_parameter_dict=job_dict["input"]["kpoints"]
            ),
            potcar=PotCar(
                xc=convert_generic_parameters_to_dictionary(
                    generic_parameter_dict=job_dict["input"]["potcar"]
                )["xc"]
            ),
            structure=Structure(
                dimension=job_dict["input"]["structure"]["dimension"],
                indices=job_dict["input"]["structure"]["indices"],
                info=job_dict["input"]["structure"]["info"],
                positions=job_dict["input"]["structure"]["positions"],
                species=job_dict["input"]["structure"]["species"],
                cell=Cell(
                    cell=job_dict["input"]["structure"]["cell"]["cell"],
                    pbc=job_dict["input"]["structure"]["cell"]["pbc"],
                ),
                units=Units(
                    length=job_dict["input"]["structure"]["units"]["length"],
                    mass=job_dict["input"]["structure"]["units"]["mass"],
                ),
            ),
            vasp_dict=job_dict["input"]["vasp_dict"],
        ),
        calculation_output=VaspOutput(
            description=job_dict["output"]["description"],
            charge_density=ChargeDensity(
                total=job_dict["output"]["charge_density"]["total"]
            ),
            electronic_structure=ElectronicStructure(
                efermi=job_dict["output"]["electronic_structure"]["efermi"]
                * ureg.eV,
                eig_matrix=job_dict["output"]["electronic_structure"]["eig_matrix"],
                k_points=job_dict["output"]["electronic_structure"]["k_points"],
                k_weights=job_dict["output"]["electronic_structure"]["k_weights"],
                occ_matrix=job_dict["output"]["electronic_structure"]["occ_matrix"],
                dos=DensityOfStates(
                    energies=job_dict["output"]["electronic_structure"]["dos"][
                        "energies"
                    ],
                    int_densities=job_dict["output"]["electronic_structure"]["dos"][
                        "int_densities"
                    ],
                    tot_densities=job_dict["output"]["electronic_structure"]["dos"][
                        "tot_densities"
                    ],
                ),
            ),
            generic=GenericOutput(
                cells=generic_output_dict["cells"] * ureg.angstrom,
                energy_pot=generic_output_dict["energy_pot"] * ureg.eV,
                # BUG FIX: energy_tot was previously filled from "energy_pot"
                # (copy-paste); read the dedicated "energy_tot" entry instead.
                energy_tot=generic_output_dict["energy_tot"] * ureg.eV,
                elastic_constants=generic_output_dict["elastic_constants"],
                forces=generic_output_dict["forces"] * ureg.eV / ureg.angstrom,
                indices=generic_output_dict.get("indices", None),
                natoms=generic_output_dict.get("natoms", None),
                positions=generic_output_dict["positions"] * ureg.angstrom,
                pressures=generic_output_dict["pressures"] * ureg.GPa,
                steps=generic_output_dict["steps"],
                stresses=generic_output_dict["stresses"],
                temperature=generic_output_dict["temperature"] * ureg.kelvin,
                unwrapped_positions=generic_output_dict.get(
                    "unwrapped_positions", None
                ),
                velocities=generic_output_dict.get("velocities", None),
                volume=generic_output_dict["volume"] * ureg.angstrom**3,
                dft=OutputGenericDFT(
                    cbm_list=dft_dict["cbm_list"],
                    e_fermi_list=dft_dict["e_fermi_list"],
                    energy_free=dft_dict["energy_free"] * ureg.eV,
                    energy_int=dft_dict["energy_int"] * ureg.eV,
                    energy_zero=dft_dict["energy_zero"] * ureg.eV,
                    final_magmoms=dft_dict["final_magmoms"],
                    magnetization=dft_dict["magnetization"],
                    n_elect=dft_dict["n_elect"],
                    potentiostat_output=dft_dict["potentiostat_output"],
                    n_valence=dft_dict.get("n_valence", None),
                    bands_k_weights=dft_dict.get("bands_k_weights", None),
                    kpoints_cartesian=dft_dict.get("kpoints_cartesian", None),
                    bands_e_fermi=dft_dict.get("bands_e_fermi", None),
                    bands_occ=dft_dict.get("bands_occ", None),
                    bands_eigen_values=dft_dict.get("bands_eigen_values", None),
                    scf_convergence=dft_dict.get("scf_convergence", None),
                    scf_dipole_mom=dft_dict.get("scf_dipole_mom", None),
                    # NOTE(review): scf_energy_int carries no eV unit here while
                    # the SPHInX converter applies one -- confirm intended.
                    scf_energy_int=dft_dict["scf_energy_int"],
                    scf_energy_free=dft_dict["scf_energy_free"] * ureg.eV,
                    scf_computation_time=dft_dict.get("scf_computation_time", None),
                    scf_energy_zero=dft_dict["scf_energy_zero"] * ureg.eV,
                    # NOTE(review): eV looks wrong as a unit for charges --
                    # confirm against the data written by pyiron_atomistics.
                    valence_charges=dft_dict["valence_charges"] * ureg.eV,
                    vbm_list=dft_dict["vbm_list"],
                    bands=ElectronicStructure(
                        efermi=dft_dict["bands"]["efermi"],
                        eig_matrix=dft_dict["bands"]["eig_matrix"],
                        k_points=dft_dict["bands"]["k_points"],
                        k_weights=dft_dict["bands"]["k_weights"],
                        occ_matrix=dft_dict["bands"]["occ_matrix"],
                        dos=DensityOfStates(
                            energies=dft_dict["bands"]["dos"]["energies"],
                            int_densities=dft_dict["bands"]["dos"]["int_densities"],
                            tot_densities=dft_dict["bands"]["dos"]["tot_densities"],
                        ),
                    ),
                    scf_energy_band=dft_dict.get("scf_energy_band", None),
                    scf_electronic_entropy=dft_dict.get(
                        "scf_electronic_entropy", None
                    ),
                    scf_residue=dft_dict.get("scf_residue", None),
                    computation_time=dft_dict.get("computation_time", None),
                    energy_band=dft_dict.get("energy_band", None),
                    electronic_entropy=dft_dict.get("electronic_entropy", None),
                    residue=dft_dict.get("residue", None),
                ),
            ),
            outcar=OutCar(
                broyden_mixing=job_dict["output"]["outcar"]["broyden_mixing"],
                irreducible_kpoint_weights=job_dict["output"]["outcar"][
                    "irreducible_kpoint_weights"
                ],
                irreducible_kpoints=job_dict["output"]["outcar"][
                    "irreducible_kpoints"
                ],
                kin_energy_error=job_dict["output"]["outcar"]["kin_energy_error"],
                number_plane_waves=job_dict["output"]["outcar"][
                    "number_plane_waves"
                ],
                resources=VaspResources(
                    cpu_time=job_dict["output"]["outcar"]["resources"]["cpu_time"],
                    user_time=job_dict["output"]["outcar"]["resources"][
                        "user_time"
                    ],
                    system_time=job_dict["output"]["outcar"]["resources"][
                        "system_time"
                    ],
                    elapsed_time=job_dict["output"]["outcar"]["resources"][
                        "elapsed_time"
                    ],
                    memory_used=job_dict["output"]["outcar"]["resources"][
                        "memory_used"
                    ],
                ),
                stresses=job_dict["output"]["outcar"]["stresses"],
                energy_components=job_dict["output"]["outcar"]["energy_components"],
            ),
            structure=Structure(
                dimension=job_dict["output"]["structure"]["dimension"],
                indices=job_dict["output"]["structure"]["indices"],
                info=job_dict["output"]["structure"]["info"],
                positions=job_dict["output"]["structure"]["positions"],
                species=job_dict["output"]["structure"]["species"],
                cell=Cell(
                    cell=job_dict["output"]["structure"]["cell"]["cell"],
                    pbc=job_dict["output"]["structure"]["cell"]["pbc"],
                ),
                units=Units(
                    length=job_dict["output"]["structure"]["units"]["length"],
                    mass=job_dict["output"]["structure"]["units"]["mass"],
                ),
            ),
        ),
    )


def convert(job_dict):
    """Dispatch a job dictionary to the matching converter based on its TYPE string.

    Args:
        job_dict: Nested job dictionary whose "TYPE" entry holds the
            str(type(job)) of the originating pyiron_atomistics job class.

    Returns:
        The converted job object (LammpsJob, SphinxJob or VaspJob).

    Raises:
        KeyError: If the TYPE string is not one of the supported job classes.
    """
    # NOTE(review): the dispatch keys were lost in this copy of the file (they
    # appeared as three identical empty strings, i.e. duplicate dict keys that
    # would all collapse onto the last entry). The strings below follow the
    # str(type(job)) convention -- verify them against the stored "TYPE" values.
    funct_dict = {
        "<class 'pyiron_atomistics.lammps.lammps.Lammps'>": convert_lammps_job_dict,
        "<class 'pyiron_atomistics.sphinx.sphinx.Sphinx'>": convert_sphinx_job_dict,
        "<class 'pyiron_atomistics.vasp.vasp.Vasp'>": convert_vasp_job_dict,
    }
    return funct_dict[job_dict["TYPE"]](job_dict=job_dict)


def convert_generic_parameters_to_string(generic_parameter_dict: dict) -> str:
    """Render a GenericParameters table as newline-separated "name value" lines.

    "___" in a parameter name is the stored placeholder for a space and is
    converted back. No trailing newline is emitted.

    Args:
        generic_parameter_dict: Dictionary with a "data_dict" group holding
            parallel "Parameter" and "Value" lists.

    Returns:
        str: The flattened parameter file content ("" for an empty table).
    """
    data_dict = generic_parameter_dict["data_dict"]
    return "\n".join(
        parameter.replace("___", " ") + " " + str(value)
        for parameter, value in zip(data_dict["Parameter"], data_dict["Value"])
    )
generic_parameter_dict["data_dict"]["Parameter"], + generic_parameter_dict["data_dict"]["Value"], + ): + output_str += p.replace("___", " ") + " " + str(v) + "\n" + return output_str[:-1] + + +def convert_generic_parameters_to_dictionary(generic_parameter_dict: dict) -> dict: + return { + p: v + for p, v in zip( + generic_parameter_dict["data_dict"]["Parameter"], + generic_parameter_dict["data_dict"]["Value"], + ) + } + + +def _filter_dict(input_dict: dict, remove_keys_lst: list) -> dict: + def recursive_filter(input_value: dict, remove_keys_lst: list) -> dict: + if isinstance(input_value, dict): + return _filter_dict(input_dict=input_value, remove_keys_lst=remove_keys_lst) + else: + return input_value + + return { + k: recursive_filter(input_value=v, remove_keys_lst=remove_keys_lst) + for k, v in input_dict.items() + if k not in remove_keys_lst + } + + +def _sort_dictionary_from_datacontainer(input_dict: dict) -> dict: + def recursive_sort(input_value: dict) -> dict: + if isinstance(input_value, dict): + return _sort_dictionary_from_datacontainer(input_dict=input_value) + else: + return input_value + + ind_dict, content_dict = {}, {} + content_lst_flag = False + for k, v in input_dict.items(): + if "__index_" in k: + key, ind = k.split("__index_") + if key == "": + content_lst_flag = True + ind_dict[int(ind)] = recursive_sort(input_value=v) + else: + ind_dict[int(ind)] = key + content_dict[key] = recursive_sort(input_value=v) + elif k != "DICT_VERSION": + content_dict[k] = recursive_sort(input_value=v) + if content_lst_flag: + return [ind_dict[ind] for ind in sorted(list(ind_dict.keys()))] + elif len(ind_dict) == len(content_dict): + return { + ind_dict[ind]: content_dict[ind_dict[ind]] + for ind in sorted(list(ind_dict.keys())) + } + elif len(ind_dict) == 0: + return content_dict + else: + raise KeyError(ind_dict, content_dict) + + +def convert_datacontainer_to_dictionary(data_container_dict: dict) -> dict: + return _sort_dictionary_from_datacontainer( + 
# --- tests/test_pyiron_atomistics_live.py ---
# (The binary fixtures tests/pyiron_atomistics_0_6_13/{lmp,sx,vasp}.h5 are
# added alongside this file; they are used by the static tests.)
import unittest

from h5io_browser.base import read_dict_from_hdf
from pint import UnitRegistry

from convert import (
    convert_sphinx_job_dict,
    convert_lammps_job_dict,
)

# Live tests require a working pyiron_atomistics installation; detect it once
# at import time so the whole test class can be skipped cleanly.
try:
    from pyiron_atomistics import Project
    skip_pyiron_atomistics_test = False
except ImportError:
    skip_pyiron_atomistics_test = True


def get_node_from_job_dict(job_dict, node):
    """Return the value stored under a "/"-separated path in a nested dict.

    Args:
        job_dict: Nested job dictionary to traverse.
        node: Path such as "output/generic/energy_tot".

    Returns:
        The value found at the end of the path.
    """
    node_name_lst = node.split("/")
    tmp_dict = job_dict
    for group in node_name_lst:
        tmp_dict = tmp_dict[group]
    return tmp_dict


@unittest.skipIf(
    skip_pyiron_atomistics_test, "pyiron_atomistics is not installed, so the pyiron_atomistics tests are skipped."
)
class TestPyironAtomisticsLive(unittest.TestCase):
    """End-to-end tests: run real pyiron_atomistics jobs and convert their HDF5 output."""

    def setUp(self):
        # Fresh pyiron project directory for every test.
        self.project = Project("test")

    def tearDown(self):
        # Remove the project including all job files on disk.
        Project("test").remove(enable=True)

    def test_sphinx_calc_minimize(self):
        """SPHInX minimization: converted final total energy matches the reference."""
        ureg = UnitRegistry()
        job = self.project.create.job.Sphinx("sx_mini")
        job.structure = self.project.create.structure.ase.bulk("Al", cubic=True)
        job.calc_minimize()
        job.run()
        job_dict = read_dict_from_hdf(
            file_name=job.project_hdf5.file_name,
            h5_path="/",
            recursive=True,
            slash='ignore',
        )
        job_sphinx = convert_sphinx_job_dict(job_dict[job.job_name])
        # Reference energy recorded for pyiron_atomistics 0.6.13 / sphinxdft 3.1.
        self.assertEqual(job_sphinx.calculation_output.generic.energy_tot[-1], -228.78315943905295 * ureg.eV)

    def test_sphinx_calc_static(self):
        """SPHInX static run: converted final total energy matches the reference."""
        ureg = UnitRegistry()
        job = self.project.create.job.Sphinx("sx_static")
        job.structure = self.project.create.structure.ase.bulk("Al", cubic=True)
        job.run()
        job_dict = read_dict_from_hdf(
            file_name=job.project_hdf5.file_name,
            h5_path="/",
            recursive=True,
            slash='ignore',
        )
        job_sphinx = convert_sphinx_job_dict(job_dict[job.job_name])
        self.assertEqual(job_sphinx.calculation_output.generic.energy_tot[-1], -228.78315953829286 * ureg.eV)

    def test_lammps_calc_static(self):
        """LAMMPS static run with the Mishin Ni-Al EAM potential."""
        ureg = UnitRegistry()
        job = self.project.create.job.Lammps("lmp_static")
        job.structure = self.project.create.structure.ase.bulk("Al", cubic=True)
        job.potential = '2002--Mishin-Y--Ni-Al--LAMMPS--ipr1'
        job.run()
        job_dict = read_dict_from_hdf(
            file_name=job.project_hdf5.file_name,
            h5_path="/",
            recursive=True,
            slash='ignore',
        )
        job_lammps = convert_lammps_job_dict(job_dict[job.job_name])
        self.assertEqual(job_lammps.calculation_output.generic.energy_tot[-1], -13.4486826111902 * ureg.eV)

    def test_lammps_calc_md(self):
        """LAMMPS MD: 1000 steps printed every 100 gives 11 energy frames (incl. step 0)."""
        job = self.project.create.job.Lammps("lmp_md")
        job.structure = self.project.create.structure.ase.bulk("Al", cubic=True)
        job.potential = '2002--Mishin-Y--Ni-Al--LAMMPS--ipr1'
        job.calc_md(temperature=200.0, n_ionic_steps=1000, n_print=100)
        job.run()
        job_dict = read_dict_from_hdf(
            file_name=job.project_hdf5.file_name,
            h5_path="/",
            recursive=True,
            slash='ignore',
        )
        job_lammps = convert_lammps_job_dict(job_dict[job.job_name])
        self.assertEqual(len(job_lammps.calculation_output.generic.energy_tot), 11)

    def test_lammps_calc_minimize(self):
        """LAMMPS run named "minimize".

        NOTE(review): this test never calls job.calc_minimize(), so it runs a
        static calculation identical to test_lammps_calc_static (same expected
        energy) -- confirm whether a calc_minimize() call was intended.
        """
        ureg = UnitRegistry()
        job = self.project.create.job.Lammps("lmp_mini")
        job.structure = self.project.create.structure.ase.bulk("Al", cubic=True)
        job.potential = '2002--Mishin-Y--Ni-Al--LAMMPS--ipr1'
        job.run()
        job_dict = read_dict_from_hdf(
            file_name=job.project_hdf5.file_name,
            h5_path="/",
            recursive=True,
            slash='ignore',
        )
        job_lammps = convert_lammps_job_dict(job_dict[job.job_name])
        self.assertEqual(job_lammps.calculation_output.generic.energy_tot[-1], -13.4486826111902 * ureg.eV)


# --- tests/test_pyiron_atomistics_static.py ---
import os
from unittest import TestCase

from h5io_browser.base import read_dict_from_hdf
from pint import UnitRegistry

from convert import (
    convert,
    convert_sphinx_job_dict,
    convert_lammps_job_dict,
    convert_vasp_job_dict,
)


class TestPyironAtomisticsStatic(TestCase):
    """Convert pre-computed pyiron_atomistics 0.6.13 HDF5 fixtures (no codes needed)."""

    def test_sphinx(self):
        """Convert the stored SPHInX fixture and check the final total energy."""
        ureg = UnitRegistry()
        job_dict = read_dict_from_hdf(
            file_name=os.path.join(os.path.dirname(__file__), "pyiron_atomistics_0_6_13", "sx.h5"),
            h5_path="/sx",
            recursive=True,
            slash='ignore',
        )
        job_sphinx = convert_sphinx_job_dict(job_dict=job_dict)
        self.assertEqual(job_sphinx.calculation_output.generic.energy_tot[-1], -228.7831594379917 * ureg.eV)

    def test_lammps(self):
        """Convert the stored LAMMPS fixture and check the final total energy."""
        ureg = UnitRegistry()
        job_dict = read_dict_from_hdf(
            file_name=os.path.join(os.path.dirname(__file__), "pyiron_atomistics_0_6_13", "lmp.h5"),
            h5_path="/lmp",
            recursive=True,
            slash='ignore',
        )
        job_lammps = convert_lammps_job_dict(job_dict=job_dict)
        self.assertEqual(job_lammps.calculation_output.generic.energy_tot[-1], -9428.45286561574 * ureg.eV)

    def test_vasp(self):
        """Convert the stored VASP fixture and check the final total energy."""
        ureg = UnitRegistry()
        job_dict = read_dict_from_hdf(
            file_name=os.path.join(os.path.dirname(__file__), "pyiron_atomistics_0_6_13", "vasp.h5"),
            h5_path="/vasp",
            recursive=True,
            slash='ignore',
        )
        job_vasp = convert_vasp_job_dict(job_dict=job_dict)
        self.assertEqual(job_vasp.calculation_output.generic.energy_tot[-1], -14.7459202 * ureg.eV)

    def test_all(self):
        """Exercise the TYPE-based convert() dispatch on every fixture file."""
        ureg = UnitRegistry()
        static_folder = os.path.join(os.path.dirname(__file__), "pyiron_atomistics_0_6_13")
        # Expected final total energy per fixture file.
        energy_dict = {
            "sx.h5": -228.7831594379917 * ureg.eV,
            "lmp.h5": -9428.45286561574 * ureg.eV,
            "vasp.h5": -14.7459202 * ureg.eV,
        }
        for hdf5_file in os.listdir(static_folder):
            # The job group inside each file is named after the file stem.
            job_dict = read_dict_from_hdf(
                file_name=os.path.join(static_folder, hdf5_file),
                h5_path="/",
                recursive=True,
                slash='ignore',
            )[hdf5_file.split(".")[0]]
            self.assertEqual(
                convert(job_dict=job_dict).calculation_output.generic.energy_tot[-1],
                energy_dict[hdf5_file]
            )