Skip to content

Commit

Permalink
DataContainer: open the HDF5 file only once — unless the object implements…
Browse files Browse the repository at this point in the history
… its own to_hdf() function
  • Loading branch information
jan-janssen committed Dec 17, 2023
1 parent 5739f8e commit 511e144
Showing 1 changed file with 8 additions and 2 deletions.
10 changes: 8 additions & 2 deletions pyiron_base/storage/datacontainer.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@

from pyiron_base.storage.fileio import read, write
from pyiron_base.storage.hdfstub import HDFStub
from pyiron_base.storage.helper_functions import open_hdf5, write_hdf5_with_json_support
from pyiron_base.interfaces.has_groups import HasGroups
from pyiron_base.interfaces.has_hdf import HasHDF

Expand Down Expand Up @@ -784,7 +785,7 @@ def _get_hdf_group_name(self):
return self.table_name

def _to_hdf(self, hdf):
hdf["READ_ONLY"] = self.read_only
data_dict = {"READ_ONLY": self.read_only}
written_keys = _internal_hdf_nodes.copy()
for i, (k, v) in enumerate(self.items()):
if isinstance(k, str) and "__index_" in k:
Expand All @@ -803,10 +804,15 @@ def _to_hdf(self, hdf):
del hdf[k]
v.to_hdf(hdf=hdf, group_name=k)
else:
data_dict[k] = v
with open_hdf5(hdf.file_name, mode="a") as store:
for k, v in data_dict.items():
# if the value doesn't know how to serialize itself, assume
# that h5py knows how to
try:
hdf[k] = v
write_hdf5_with_json_support(
file_handle=store, value=v, path=hdf.get_h5_path(k)
)
except TypeError:
raise TypeError(
"Error saving {} (key {}): DataContainer doesn't support saving elements "
Expand Down

0 comments on commit 511e144

Please sign in to comment.