ENH: Peregrine HDF5 Reader Layer Thickness Override & Bug Fixes (#1045)
Enhancements:
* Added an option to override the layer thickness read from the input HDF5 file.

Bug Fixes:
* NumPy dtypes are now properly converted to NX DataTypes (comparison is by dtype equality rather than object identity).
* The filter now iterates properly through the camera data datasets during execute.

Signed-off-by: Joey Kleingers <[email protected]>
joeykleingers authored Aug 9, 2024
1 parent e1f4593 commit 7c39172
Showing 3 changed files with 49 additions and 30 deletions.
26 changes: 14 additions & 12 deletions src/Plugins/SimplnxCore/wrapping/python/simplnxpy.cpp
@@ -456,51 +456,53 @@ PYBIND11_MODULE(simplnx, mod)
mod.def(
"convert_np_dtype_to_datatype",
[](const py::dtype& dtype) {
if(dtype.is(py::dtype::of<int8>()))
if((dtype.attr("__eq__")(py::dtype::of<int8>())).cast<bool>())
{
return DataType::int8;
}
if(dtype.is(py::dtype::of<uint8>()))
if((dtype.attr("__eq__")(py::dtype::of<uint8>())).cast<bool>())
{
return DataType::uint8;
}
if(dtype.is(py::dtype::of<int16>()))
if((dtype.attr("__eq__")(py::dtype::of<int16>())).cast<bool>())
{
return DataType::int16;
}
if(dtype.is(py::dtype::of<uint16>()))
if((dtype.attr("__eq__")(py::dtype::of<uint16>())).cast<bool>())
{
return DataType::uint16;
}
if(dtype.is(py::dtype::of<int32>()))
if((dtype.attr("__eq__")(py::dtype::of<int32>())).cast<bool>())
{
return DataType::int32;
}
if(dtype.is(py::dtype::of<uint32>()))
if((dtype.attr("__eq__")(py::dtype::of<uint32>())).cast<bool>())
{
return DataType::uint32;
}
if(dtype.is(py::dtype::of<int64>()))
if((dtype.attr("__eq__")(py::dtype::of<int64>())).cast<bool>())
{
return DataType::int64;
}
if(dtype.is(py::dtype::of<uint64>()))
if((dtype.attr("__eq__")(py::dtype::of<uint64>())).cast<bool>())
{
return DataType::uint64;
}
if(dtype.is(py::dtype::of<float32>()))
if((dtype.attr("__eq__")(py::dtype::of<float32>())).cast<bool>())
{
return DataType::float32;
}
if(dtype.is(py::dtype::of<float64>()))
if((dtype.attr("__eq__")(py::dtype::of<float64>())).cast<bool>())
{
return DataType::float64;
}
if(dtype.is(py::dtype::of<bool>()))
if((dtype.attr("__eq__")(py::dtype::of<bool>())).cast<bool>())
{
return DataType::boolean;
}
throw std::invalid_argument("Unable to convert dtype to DataType: Unsupported dtype.");

std::string dtypeStr = py::str(static_cast<py::object>(dtype));
throw std::invalid_argument(fmt::format("Unable to convert dtype to DataType: Unsupported dtype '{}'.", dtypeStr));
},
"Convert numpy dtype to simplnx DataType", "dtype"_a);

@@ -9,7 +9,7 @@
import re
from typing import List, Tuple
from pathlib import Path
from NXDataAnalysisToolkit.common.Result import Result, make_error_result
from .common.Result import Result, make_error_result

class Polyline(object):
def __init__(self, layer_id, z_height, data: dict, poly_id, dir, n, xvals, yvals) -> Result:
@@ -78,6 +78,8 @@ def default_tags(self) -> List[str]:

# Parameter Keys
INPUT_FILE_PATH_KEY = 'input_file_path'
OVERRIDE_LAYER_THICKNESS_KEY = 'override_layer_thickness'
LAYER_THICKNESS_KEY = 'layer_thickness'
ENABLE_SLICES_SUBVOLUME_KEY = 'enable_slices_subvolume'
SLICES_SUBVOLUME_MINMAX_X_KEY = 'slices_subvolume_minmax_x'
SLICES_SUBVOLUME_MINMAX_Y_KEY = 'slices_subvolume_minmax_y'
@@ -121,6 +123,8 @@ def parameters(self) -> nx.Parameters:

params.insert(nx.Parameters.Separator("Input Parameters"))
params.insert(nx.FileSystemPathParameter(ReadPeregrineHDF5File.INPUT_FILE_PATH_KEY, 'Input Peregrine HDF5 File', 'The input Peregrine HDF5 file that will be read.', '', {'.hdf5', '.h5'}, nx.FileSystemPathParameter.PathType.InputFile, False))
params.insert_linkable_parameter(nx.BoolParameter(ReadPeregrineHDF5File.OVERRIDE_LAYER_THICKNESS_KEY, 'Override Layer Thickness', 'Specifies whether or not to override the layer thickness found in the input file.', False))
params.insert(nx.Float64Parameter(ReadPeregrineHDF5File.LAYER_THICKNESS_KEY, 'Layer Thickness', 'The layer thickness that will be used to override the layer thickness found in the input file.', 0.05))

params.insert(nx.Parameters.Separator("Slice Data Parameters"))
params.insert(nx.DataGroupCreationParameter(ReadPeregrineHDF5File.SLICE_DATA_KEY, 'Slice Data Geometry', 'The path to the newly created Slice Data image geometry', nx.DataPath(['Slice Data'])))
@@ -162,6 +166,7 @@ def parameters(self) -> nx.Parameters:
params.insert_linkable_parameter(nx.BoolParameter(ReadPeregrineHDF5File.ENABLE_SCAN_DATA_SUBVOLUME_KEY, 'Enable Scan Data Subvolume', 'Specifies whether or not to read a subvolume of the scan data from the input file.', False))
params.insert(nx.VectorUInt64Parameter(ReadPeregrineHDF5File.SCAN_DATA_SUBVOLUME_MINMAX_KEY, 'Scan Data Slice Bounds', 'The min/max slice bounds (inclusive) for the Scan Data subvolume.', [0, 1], ['Min', 'Max']))

params.link_parameters(ReadPeregrineHDF5File.OVERRIDE_LAYER_THICKNESS_KEY, ReadPeregrineHDF5File.LAYER_THICKNESS_KEY, True)
params.link_parameters(ReadPeregrineHDF5File.READ_SEGMENTATION_RESULTS_KEY, ReadPeregrineHDF5File.SEGMENTATION_RESULTS_VALUES_KEY, True)
params.link_parameters(ReadPeregrineHDF5File.ENABLE_SLICES_SUBVOLUME_KEY, ReadPeregrineHDF5File.SLICES_SUBVOLUME_MINMAX_X_KEY, True)
params.link_parameters(ReadPeregrineHDF5File.ENABLE_SLICES_SUBVOLUME_KEY, ReadPeregrineHDF5File.SLICES_SUBVOLUME_MINMAX_Y_KEY, True)
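The override follows the existing linked-parameter pattern: the Layer Thickness value is only consulted when Override Layer Thickness is enabled. A hypothetical standalone helper (not part of the filter) that mirrors the selection logic the preflight and execute code apply to the args dict, using the parameter keys added above:

from typing import Optional

def effective_layer_thickness(args: dict) -> Optional[float]:
    # Return the user's override when the flag is set; otherwise return None
    # so the caller falls back to the layer thickness stored in the HDF5 file.
    if args.get('override_layer_thickness', False):
        return float(args['layer_thickness'])
    return None

print(effective_layer_thickness({'override_layer_thickness': False, 'layer_thickness': 0.05}))  # None
print(effective_layer_thickness({'override_layer_thickness': True, 'layer_thickness': 0.03}))   # 0.03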
@@ -193,6 +198,8 @@ def preflight_impl(self, data_structure: nx.DataStructure, args: dict, message_h
:rtype: nx.IFilter.PreflightResult
"""
input_file_path = args[ReadPeregrineHDF5File.INPUT_FILE_PATH_KEY]
override_layer_thickness: bool = args[ReadPeregrineHDF5File.OVERRIDE_LAYER_THICKNESS_KEY]
layer_thickness: float = args[ReadPeregrineHDF5File.LAYER_THICKNESS_KEY]
read_segmentation_results: bool = args[ReadPeregrineHDF5File.READ_SEGMENTATION_RESULTS_KEY]
read_camera_data: bool = args[ReadPeregrineHDF5File.READ_CAMERA_DATA_KEY]
read_part_ids: bool = args[ReadPeregrineHDF5File.READ_PART_IDS_KEY]
@@ -214,7 +221,7 @@ def preflight_impl(self, data_structure: nx.DataStructure, args: dict, message_h
except Exception as e:
return nx.IFilter.PreflightResult(errors=[nx.Error(-2012, f"Error opening file '{str(input_file_path)}': {e}")])

spacing_result: Result = self._calculate_spacing(h5_file_reader)
spacing_result: Result = self._calculate_spacing(h5_file_reader, layer_thickness if override_layer_thickness else None)
if not spacing_result.valid():
return nx.IFilter.PreflightResult(errors=spacing_result.errors)

@@ -571,7 +578,7 @@ def _read_dataset_dimensions(self, h5_file_reader: h5py.File, h5_dataset_path: s

return Result(value=list(dataset.shape))

def _read_dataset_type(self, h5_file_reader: h5py.File, h5_dataset_path: str) -> Result[List[int]]:
def _read_dataset_type(self, h5_file_reader: h5py.File, h5_dataset_path: str) -> Result[h5py.Datatype]:
result: Result[h5py.Dataset] = self._open_hdf5_data_object(h5_file_reader, h5_dataset_path)
if result.invalid():
return Result(errors=result.errors)
@@ -635,7 +642,7 @@ def _validate_subvolume_dimensions(self, volume_dims: List[int], subvolume_min_m

return Result()

def _calculate_spacing(self, h5_file_reader: h5py.File) -> Result[List[float]]:
def _calculate_spacing(self, h5_file_reader: h5py.File, layer_thickness: float = None) -> Result[List[float]]:
if ReadPeregrineHDF5File.X_REAL_DIMENSION_PATH not in h5_file_reader.attrs:
return make_error_result(code=-3007, message=f"Attribute at path '{ReadPeregrineHDF5File.X_REAL_DIMENSION_PATH}' does not exist, so the X spacing cannot be calculated!")
try:
@@ -664,12 +671,15 @@ def _calculate_spacing(self, h5_file_reader: h5py.File) -> Result[List[float]]:
except KeyError:
return make_error_result(code=-3014, message=f"Attribute at path '{ReadPeregrineHDF5File.Y_CAMERA_DIMENSION_PATH}' cannot be accessed, so the Y spacing cannot be calculated!")

if ReadPeregrineHDF5File.LAYER_THICKNESS_PATH not in h5_file_reader.attrs:
return make_error_result(code=-3015, message=f"Attribute at path '{ReadPeregrineHDF5File.LAYER_THICKNESS_PATH}' does not exist, so the Z spacing cannot be calculated!")
try:
z_spacing = h5_file_reader.attrs[ReadPeregrineHDF5File.LAYER_THICKNESS_PATH]
except KeyError:
return make_error_result(code=-3016, message=f"Attribute at path '{ReadPeregrineHDF5File.LAYER_THICKNESS_PATH}' cannot be accessed, so the Z spacing cannot be calculated!")
if layer_thickness is None:
if ReadPeregrineHDF5File.LAYER_THICKNESS_PATH not in h5_file_reader.attrs:
return make_error_result(code=-3015, message=f"Attribute at path '{ReadPeregrineHDF5File.LAYER_THICKNESS_PATH}' does not exist, so the Z spacing cannot be calculated!")
try:
z_spacing = h5_file_reader.attrs[ReadPeregrineHDF5File.LAYER_THICKNESS_PATH]
except KeyError:
return make_error_result(code=-3016, message=f"Attribute at path '{ReadPeregrineHDF5File.LAYER_THICKNESS_PATH}' cannot be accessed, so the Z spacing cannot be calculated!")
else:
z_spacing = layer_thickness

spacing = [float(x_real_dim / x_camera_dim), float(y_real_dim / y_camera_dim), float(z_spacing)]
return Result(value=spacing)
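To see what the override replaces, the file-level attribute that _calculate_spacing would otherwise read can be inspected with h5py. The attribute key and file name below are placeholders; the real key comes from ReadPeregrineHDF5File.LAYER_THICKNESS_PATH, whose value is not shown in this diff:

import h5py

LAYER_THICKNESS_ATTR = 'layer_thickness'  # placeholder for LAYER_THICKNESS_PATH

with h5py.File('peregrine_build.hdf5', 'r') as f:  # placeholder file name
    if LAYER_THICKNESS_ATTR in f.attrs:
        print('Z spacing from file:', float(f.attrs[LAYER_THICKNESS_ATTR]))
    else:
        print('Attribute missing; the Layer Thickness override would be needed.')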
@@ -719,7 +729,9 @@ def _read_slice_datasets(self, h5_file_reader: h5py.File, data_structure: nx.Dat
if should_cancel:
return Result()

for camera_data_dataset in camera_data_datasets_str:
camera_data_datasets_str = camera_data_datasets_str.strip()
camera_data_datasets = camera_data_datasets_str.split(',')
for camera_data_dataset in camera_data_datasets:
if should_cancel:
return Result()
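The loop change above addresses a common Python pitfall: iterating over a string yields one character at a time, so the comma-delimited list of camera datasets has to be split before iterating. A minimal illustration (the string's value here is made up):

camera_data_datasets_str = '0,1,2'  # hypothetical comma-delimited value

# Bug: iterating the raw string visits individual characters, commas included.
print(list(camera_data_datasets_str))              # ['0', ',', '1', ',', '2']

# Fix, as in the diff: strip the string, then split on ',' before iterating.
camera_data_datasets = camera_data_datasets_str.strip().split(',')
print(camera_data_datasets)                        # ['0', '1', '2']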

@@ -853,6 +865,8 @@ def _read_scan_data(self, h5_file_reader: h5py.File, scan_path: str, z_offset: i
return Result(value=(vertices,edges,tot))

def _read_scan_datasets(self, h5_file_reader: h5py.File, data_structure: nx.DataStructure, filter_args: dict, message_handler: nx.IFilter.MessageHandler, should_cancel: nx.AtomicBoolProxy) -> Result:
override_layer_thickness: bool = filter_args[ReadPeregrineHDF5File.OVERRIDE_LAYER_THICKNESS_KEY]
layer_thickness: float = filter_args[ReadPeregrineHDF5File.LAYER_THICKNESS_KEY]
read_scan_datasets: bool = filter_args[ReadPeregrineHDF5File.READ_SCAN_DATASETS_KEY]
scan_data_edge_geom_path: nx.DataPath = filter_args[ReadPeregrineHDF5File.SCAN_DATA_KEY]
scan_data_vertex_attr_mat_name: str = filter_args[ReadPeregrineHDF5File.SCAN_DATA_VERTEX_ATTR_MAT_KEY]
@@ -889,13 +903,16 @@ def _read_scan_datasets(self, h5_file_reader: h5py.File, data_structure: nx.Data
return Result(errors=result.errors)
scan_group_reader: h5py.Group = result.value

# Read the Z thickness value
if ReadPeregrineHDF5File.LAYER_THICKNESS_PATH not in h5_file_reader.attrs:
return make_error_result(code=-3007, message=f"Attribute at path '{ReadPeregrineHDF5File.LAYER_THICKNESS_PATH}' does not exist in HDF5 file '{h5_file_reader.filename}', so the scan datasets cannot be read!")
try:
z_thickness: float = h5_file_reader.attrs[ReadPeregrineHDF5File.LAYER_THICKNESS_PATH]
except Exception as e:
return make_error_result(code=-3008, message=f"Attribute at path '{ReadPeregrineHDF5File.LAYER_THICKNESS_PATH}' cannot be accessed in HDF5 file '{h5_file_reader.filename}', so the scan datasets cannot be read!\n\n{e}")
if override_layer_thickness:
z_thickness = layer_thickness
else:
# Read the Z thickness value
if ReadPeregrineHDF5File.LAYER_THICKNESS_PATH not in h5_file_reader.attrs:
return make_error_result(code=-3007, message=f"Attribute at path '{ReadPeregrineHDF5File.LAYER_THICKNESS_PATH}' does not exist in HDF5 file '{h5_file_reader.filename}', so the scan datasets cannot be read!")
try:
z_thickness: float = h5_file_reader.attrs[ReadPeregrineHDF5File.LAYER_THICKNESS_PATH]
except Exception as e:
return make_error_result(code=-3008, message=f"Attribute at path '{ReadPeregrineHDF5File.LAYER_THICKNESS_PATH}' cannot be accessed in HDF5 file '{h5_file_reader.filename}', so the scan datasets cannot be read!\n\n{e}")

# Calculate the start and end values for the scans
z_start: int = 0
