From 4e6c391161655c357a73b9dc4df0ce0123e40fbd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Jan=C3=9Fen?= Date: Sat, 16 Dec 2023 20:24:54 +0100 Subject: [PATCH 1/4] GenericJob restructure to_hdf() --- pyiron_base/jobs/job/generic.py | 50 ++++++++++++++++++++++----------- 1 file changed, 33 insertions(+), 17 deletions(-) diff --git a/pyiron_base/jobs/job/generic.py b/pyiron_base/jobs/job/generic.py index 82c836ca2..71736d6aa 100644 --- a/pyiron_base/jobs/job/generic.py +++ b/pyiron_base/jobs/job/generic.py @@ -1012,23 +1012,36 @@ def to_hdf(self, hdf=None, group_name=None): hdf (ProjectHDFio): HDF5 group object - optional group_name (str): HDF5 subgroup name - optional """ + self._set_hdf(hdf=hdf, group_name=group_name) self._executable_activate_mpi() - self._type_to_hdf() - self._hdf5["status"] = self.status.string + + # Build data dictionary + data_dict = self._type_to_dict() + data_dict["status"] = self.status.string + data_dict["input"] = {"generic_dict": { + "restart_file_list": self._restart_file_list, + "restart_file_dict": self._restart_file_dict, + "exclude_nodes_hdf": self._exclude_nodes_hdf, + "exclude_groups_hdf": self._exclude_groups_hdf, + }} if self._import_directory is not None: - self._hdf5["import_directory"] = self._import_directory + data_dict["import_directory"] = self._import_directory + + # Write combined dictionary to HDF5 + group_lst = ["input"] + for k, v in data_dict.items(): + if k not in group_lst: + self._hdf5[k] = v + for group in group_lst: + with self._hdf5.open(group) as hdf_group: + for k, v in data_dict[group].items(): + hdf_group[k] = v + + # Write remaining objects to HDF5 self._server.to_hdf(self._hdf5) if self._executable is not None: self.executable.to_hdf(self._hdf5) - with self._hdf5.open("input") as hdf_input: - generic_dict = { - "restart_file_list": self._restart_file_list, - "restart_file_dict": self._restart_file_dict, - "exclude_nodes_hdf": self._exclude_nodes_hdf, - "exclude_groups_hdf": self._exclude_groups_hdf, - 
} - hdf_input["generic_dict"] = generic_dict @classmethod def from_hdf_args(cls, hdf): @@ -1322,18 +1335,21 @@ def _executable_activate(self, enforce=False, codename=None): path_binary_codes=None, ) - def _type_to_hdf(self): + def _type_to_dict(self): """ Internal helper function to save type and version in HDF5 file root """ - self._hdf5["NAME"] = self.__name__ - self._hdf5["TYPE"] = str(type(self)) + data_dict = { + "NAME": self.__name__, + "TYPE": str(type(self)), + } if self._executable: - self._hdf5["VERSION"] = self.executable.version + data_dict["VERSION"] = self.executable.version else: - self._hdf5["VERSION"] = self.__version__ + data_dict["VERSION"] = self.__version__ if hasattr(self, "__hdf_version__"): - self._hdf5["HDF_VERSION"] = self.__hdf_version__ + data_dict["HDF_VERSION"] = self.__hdf_version__ + return data_dict def _type_from_hdf(self): """ From 3bf049a10f4873d248054b2d32f0660717836e1b Mon Sep 17 00:00:00 2001 From: pyiron-runner Date: Sat, 16 Dec 2023 19:36:22 +0000 Subject: [PATCH 2/4] Format black --- pyiron_base/jobs/job/generic.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/pyiron_base/jobs/job/generic.py b/pyiron_base/jobs/job/generic.py index 71736d6aa..b16867ae6 100644 --- a/pyiron_base/jobs/job/generic.py +++ b/pyiron_base/jobs/job/generic.py @@ -1019,12 +1019,14 @@ def to_hdf(self, hdf=None, group_name=None): # Build data dictionary data_dict = self._type_to_dict() data_dict["status"] = self.status.string - data_dict["input"] = {"generic_dict": { - "restart_file_list": self._restart_file_list, - "restart_file_dict": self._restart_file_dict, - "exclude_nodes_hdf": self._exclude_nodes_hdf, - "exclude_groups_hdf": self._exclude_groups_hdf, - }} + data_dict["input"] = { + "generic_dict": { + "restart_file_list": self._restart_file_list, + "restart_file_dict": self._restart_file_dict, + "exclude_nodes_hdf": self._exclude_nodes_hdf, + "exclude_groups_hdf": self._exclude_groups_hdf, + } + } if 
self._import_directory is not None: data_dict["import_directory"] = self._import_directory From cbee39210c21dd5f692334f841d36c975df43155 Mon Sep 17 00:00:00 2001 From: Jan Janssen Date: Wed, 20 Dec 2023 10:55:58 +0100 Subject: [PATCH 3/4] GenericJob implement to_dict() and from_dict() --- pyiron_base/jobs/job/generic.py | 100 +++++++++++++++++--------------- 1 file changed, 54 insertions(+), 46 deletions(-) diff --git a/pyiron_base/jobs/job/generic.py b/pyiron_base/jobs/job/generic.py index 780acab8a..1451a800b 100644 --- a/pyiron_base/jobs/job/generic.py +++ b/pyiron_base/jobs/job/generic.py @@ -1004,6 +1004,20 @@ def _set_hdf(self, hdf=None, group_name=None): if group_name is not None and self._hdf5 is not None: self._hdf5 = self._hdf5.open(group_name) + def to_dict(self): + data_dict = self._type_to_dict() + data_dict["status"] = self.status.string + data_dict["input/generic_dict"] = { + "restart_file_list": self._restart_file_list, + "restart_file_dict": self._restart_file_dict, + "exclude_nodes_hdf": self._exclude_nodes_hdf, + "exclude_groups_hdf": self._exclude_groups_hdf, + } + data_dict["server"] = self._server.to_dict() + if self._import_directory is not None: + data_dict["import_directory"] = self._import_directory + return data_dict + def to_hdf(self, hdf=None, group_name=None): """ Store the GenericJob in an HDF5 file @@ -1016,32 +1030,11 @@ def to_hdf(self, hdf=None, group_name=None): self._set_hdf(hdf=hdf, group_name=group_name) self._executable_activate_mpi() - # Build data dictionary - data_dict = self._type_to_dict() - data_dict["status"] = self.status.string - data_dict["input"] = { - "generic_dict": { - "restart_file_list": self._restart_file_list, - "restart_file_dict": self._restart_file_dict, - "exclude_nodes_hdf": self._exclude_nodes_hdf, - "exclude_groups_hdf": self._exclude_groups_hdf, - } - } - if self._import_directory is not None: - data_dict["import_directory"] = self._import_directory - # Write combined dictionary to HDF5 - group_lst = 
["input"] - for k, v in data_dict.items(): - if k not in group_lst: - self._hdf5[k] = v - for group in group_lst: - with self._hdf5.open(group) as hdf_group: - for k, v in data_dict[group].items(): - hdf_group[k] = v + for k, v in self.to_dict().items(): + self._hdf5[k] = v # Write remaining objects to HDF5 - self._server.to_hdf(self._hdf5) if self._executable is not None: self.executable.to_hdf(self._hdf5) @@ -1059,6 +1052,28 @@ def from_hdf_args(cls, hdf): ) return {"job_name": job_name, "project": project_hdf5} + def from_dict(self, job_dict): + self._type_from_dict(type_dict=job_dict) + if "import_directory" in job_dict.keys(): + self._import_directory = job_dict["import_directory"] + self._server.from_dict(server_dict=job_dict["server"]) + input_dict = job_dict["input"] + if "generic_dict" in input_dict.keys(): + generic_dict = input_dict["generic_dict"] + self._restart_file_list = generic_dict["restart_file_list"] + self._restart_file_dict = generic_dict["restart_file_dict"] + self._exclude_nodes_hdf = generic_dict["exclude_nodes_hdf"] + self._exclude_groups_hdf = generic_dict["exclude_groups_hdf"] + # Backwards compatbility + if "restart_file_list" in input_dict.keys(): + self._restart_file_list = input_dict["restart_file_list"] + if "restart_file_dict" in input_dict.keys(): + self._restart_file_dict = input_dict["restart_file_dict"] + if "exclude_nodes_hdf" in input_dict.keys(): + self._exclude_nodes_hdf = input_dict["exclude_nodes_hdf"] + if "exclude_groups_hdf" in input_dict.keys(): + self._exclude_groups_hdf = input_dict["exclude_groups_hdf"] + def from_hdf(self, hdf=None, group_name=None): """ Restore the GenericJob from an HDF5 file @@ -1068,28 +1083,13 @@ def from_hdf(self, hdf=None, group_name=None): group_name (str): HDF5 subgroup name - optional """ self._set_hdf(hdf=hdf, group_name=group_name) - self._type_from_hdf() - if "import_directory" in self._hdf5.list_nodes(): - self._import_directory = self._hdf5["import_directory"] - 
self._server.from_hdf(self._hdf5) + job_dict = {k: self._hdf5[k] for k in self._hdf5.list_nodes()} + with self._hdf5.open("input") as hdf_input: + job_dict["input"] = {k: hdf_input[k] for k in hdf_input.list_nodes()} + self.from_dict(job_dict=job_dict) + if "executable" in self._hdf5.list_groups(): self.executable.from_hdf(self._hdf5) - with self._hdf5.open("input") as hdf_input: - if "generic_dict" in hdf_input.list_nodes(): - generic_dict = hdf_input["generic_dict"] - self._restart_file_list = generic_dict["restart_file_list"] - self._restart_file_dict = generic_dict["restart_file_dict"] - self._exclude_nodes_hdf = generic_dict["exclude_nodes_hdf"] - self._exclude_groups_hdf = generic_dict["exclude_groups_hdf"] - # Backwards compatbility - if "restart_file_list" in hdf_input.list_nodes(): - self._restart_file_list = hdf_input["restart_file_list"] - if "restart_file_dict" in hdf_input.list_nodes(): - self._restart_file_dict = hdf_input["restart_file_dict"] - if "exclude_nodes_hdf" in hdf_input.list_nodes(): - self._exclude_nodes_hdf = hdf_input["exclude_nodes_hdf"] - if "exclude_groups_hdf" in hdf_input.list_nodes(): - self._exclude_groups_hdf = hdf_input["exclude_groups_hdf"] def save(self): """ @@ -1353,13 +1353,21 @@ def _type_to_dict(self): data_dict["HDF_VERSION"] = self.__hdf_version__ return data_dict + def _type_from_dict(self, type_dict): + self.__obj_type__ = type_dict["TYPE"] + if self._executable is None: + self.__obj_version__ = type_dict["VERSION"] + def _type_from_hdf(self): """ Internal helper function to load type and version from HDF5 file root """ - self.__obj_type__ = self._hdf5["TYPE"] - if self._executable is None: - self.__obj_version__ = self._hdf5["VERSION"] + self._type_from_dict( + type_dict={ + "TYPE": self._hdf5["TYPE"], + "VERSION": self._hdf5["VERSION"], + } + ) def run_time_to_db(self): """ From d5720d0d1a70e385b6c9275692da834a0bb8d677 Mon Sep 17 00:00:00 2001 From: Jan Janssen Date: Wed, 20 Dec 2023 11:01:33 +0100 Subject: [PATCH 
4/4] move from_dict() function --- pyiron_base/jobs/job/generic.py | 44 ++++++++++++++++----------------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/pyiron_base/jobs/job/generic.py b/pyiron_base/jobs/job/generic.py index 1451a800b..afd0644f4 100644 --- a/pyiron_base/jobs/job/generic.py +++ b/pyiron_base/jobs/job/generic.py @@ -1018,6 +1018,28 @@ def to_dict(self): data_dict["import_directory"] = self._import_directory return data_dict + def from_dict(self, job_dict): + self._type_from_dict(type_dict=job_dict) + if "import_directory" in job_dict.keys(): + self._import_directory = job_dict["import_directory"] + self._server.from_dict(server_dict=job_dict["server"]) + input_dict = job_dict["input"] + if "generic_dict" in input_dict.keys(): + generic_dict = input_dict["generic_dict"] + self._restart_file_list = generic_dict["restart_file_list"] + self._restart_file_dict = generic_dict["restart_file_dict"] + self._exclude_nodes_hdf = generic_dict["exclude_nodes_hdf"] + self._exclude_groups_hdf = generic_dict["exclude_groups_hdf"] + # Backwards compatbility + if "restart_file_list" in input_dict.keys(): + self._restart_file_list = input_dict["restart_file_list"] + if "restart_file_dict" in input_dict.keys(): + self._restart_file_dict = input_dict["restart_file_dict"] + if "exclude_nodes_hdf" in input_dict.keys(): + self._exclude_nodes_hdf = input_dict["exclude_nodes_hdf"] + if "exclude_groups_hdf" in input_dict.keys(): + self._exclude_groups_hdf = input_dict["exclude_groups_hdf"] + def to_hdf(self, hdf=None, group_name=None): """ Store the GenericJob in an HDF5 file @@ -1052,28 +1074,6 @@ def from_hdf_args(cls, hdf): ) return {"job_name": job_name, "project": project_hdf5} - def from_dict(self, job_dict): - self._type_from_dict(type_dict=job_dict) - if "import_directory" in job_dict.keys(): - self._import_directory = job_dict["import_directory"] - self._server.from_dict(server_dict=job_dict["server"]) - input_dict = job_dict["input"] - if 
"generic_dict" in input_dict.keys(): - generic_dict = input_dict["generic_dict"] - self._restart_file_list = generic_dict["restart_file_list"] - self._restart_file_dict = generic_dict["restart_file_dict"] - self._exclude_nodes_hdf = generic_dict["exclude_nodes_hdf"] - self._exclude_groups_hdf = generic_dict["exclude_groups_hdf"] - # Backwards compatbility - if "restart_file_list" in input_dict.keys(): - self._restart_file_list = input_dict["restart_file_list"] - if "restart_file_dict" in input_dict.keys(): - self._restart_file_dict = input_dict["restart_file_dict"] - if "exclude_nodes_hdf" in input_dict.keys(): - self._exclude_nodes_hdf = input_dict["exclude_nodes_hdf"] - if "exclude_groups_hdf" in input_dict.keys(): - self._exclude_groups_hdf = input_dict["exclude_groups_hdf"] - def from_hdf(self, hdf=None, group_name=None): """ Restore the GenericJob from an HDF5 file