From 7c391722e0e2c44ca0b7b7b01951cce6740bfa45 Mon Sep 17 00:00:00 2001
From: Joey Kleingers
Date: Fri, 9 Aug 2024 18:11:06 -0400
Subject: [PATCH] ENH: Peregrine HDF5 Reader Layer Thickness Override & Bug Fixes (#1045)

Enhancements:
* Added an option to override layer thickness from the input HDF5 file.

Bug Fixes:
* Numpy dtypes are now properly converted to NX DataTypes.
* Filter iterates properly through camera data during execute.

Signed-off-by: Joey Kleingers
---
 .../SimplnxCore/wrapping/python/simplnxpy.cpp | 26 +++++-----
 .../NXDataAnalysisToolkit/CliReaderFilter.py  |  2 +-
 .../ReadPeregrineHDF5File.py                  | 51 ++++++++++++-------
 3 files changed, 49 insertions(+), 30 deletions(-)

diff --git a/src/Plugins/SimplnxCore/wrapping/python/simplnxpy.cpp b/src/Plugins/SimplnxCore/wrapping/python/simplnxpy.cpp
index c4219fb6d0..6821e4a83b 100644
--- a/src/Plugins/SimplnxCore/wrapping/python/simplnxpy.cpp
+++ b/src/Plugins/SimplnxCore/wrapping/python/simplnxpy.cpp
@@ -456,51 +456,53 @@ PYBIND11_MODULE(simplnx, mod)
   mod.def(
       "convert_np_dtype_to_datatype",
       [](const py::dtype& dtype) {
-        if(dtype.is(py::dtype::of<int8>()))
+        if((dtype.attr("__eq__")(py::dtype::of<int8>())).cast<bool>())
         {
           return DataType::int8;
         }
-        if(dtype.is(py::dtype::of<uint8>()))
+        if((dtype.attr("__eq__")(py::dtype::of<uint8>())).cast<bool>())
         {
           return DataType::uint8;
         }
-        if(dtype.is(py::dtype::of<int16>()))
+        if((dtype.attr("__eq__")(py::dtype::of<int16>())).cast<bool>())
         {
           return DataType::int16;
         }
-        if(dtype.is(py::dtype::of<uint16>()))
+        if((dtype.attr("__eq__")(py::dtype::of<uint16>())).cast<bool>())
         {
           return DataType::uint16;
         }
-        if(dtype.is(py::dtype::of<int32>()))
+        if((dtype.attr("__eq__")(py::dtype::of<int32>())).cast<bool>())
         {
           return DataType::int32;
         }
-        if(dtype.is(py::dtype::of<uint32>()))
+        if((dtype.attr("__eq__")(py::dtype::of<uint32>())).cast<bool>())
         {
           return DataType::uint32;
         }
-        if(dtype.is(py::dtype::of<int64>()))
+        if((dtype.attr("__eq__")(py::dtype::of<int64>())).cast<bool>())
         {
           return DataType::int64;
         }
-        if(dtype.is(py::dtype::of<uint64>()))
+        if((dtype.attr("__eq__")(py::dtype::of<uint64>())).cast<bool>())
         {
           return DataType::uint64;
         }
-        if(dtype.is(py::dtype::of<float32>()))
+        if((dtype.attr("__eq__")(py::dtype::of<float32>())).cast<bool>())
         {
           return DataType::float32;
         }
-        if(dtype.is(py::dtype::of<float64>()))
+        if((dtype.attr("__eq__")(py::dtype::of<float64>())).cast<bool>())
        {
           return DataType::float64;
         }
-        if(dtype.is(py::dtype::of<bool>()))
+        if((dtype.attr("__eq__")(py::dtype::of<bool>())).cast<bool>())
         {
           return DataType::boolean;
         }
-        throw std::invalid_argument("Unable to convert dtype to DataType: Unsupported dtype.");
+
+        std::string dtypeStr = py::str(static_cast<py::object>(dtype));
+        throw std::invalid_argument(fmt::format("Unable to convert dtype to DataType: Unsupported dtype '{}'.", dtypeStr));
       },
       "Convert numpy dtype to simplnx DataType", "dtype"_a);
 
diff --git a/wrapping/python/plugins/NXDataAnalysisToolkit/src/NXDataAnalysisToolkit/CliReaderFilter.py b/wrapping/python/plugins/NXDataAnalysisToolkit/src/NXDataAnalysisToolkit/CliReaderFilter.py
index a5ef0d5f91..22231afdb6 100644
--- a/wrapping/python/plugins/NXDataAnalysisToolkit/src/NXDataAnalysisToolkit/CliReaderFilter.py
+++ b/wrapping/python/plugins/NXDataAnalysisToolkit/src/NXDataAnalysisToolkit/CliReaderFilter.py
@@ -9,7 +9,7 @@
 import re
 from typing import List, Tuple
 from pathlib import Path
-from NXDataAnalysisToolkit.common.Result import Result, make_error_result
+from .common.Result import Result, make_error_result
 
 class Polyline(object):
   def __init__(self, layer_id, z_height, data: dict, poly_id, dir, n, xvals, yvals) -> Result:
diff --git a/wrapping/python/plugins/NXDataAnalysisToolkit/src/NXDataAnalysisToolkit/ReadPeregrineHDF5File.py b/wrapping/python/plugins/NXDataAnalysisToolkit/src/NXDataAnalysisToolkit/ReadPeregrineHDF5File.py
index ee52e74ec9..62352e601a 100644
--- a/wrapping/python/plugins/NXDataAnalysisToolkit/src/NXDataAnalysisToolkit/ReadPeregrineHDF5File.py
+++ b/wrapping/python/plugins/NXDataAnalysisToolkit/src/NXDataAnalysisToolkit/ReadPeregrineHDF5File.py
@@ -78,6 +78,8 @@ def default_tags(self) -> List[str]:
 
   # Parameter Keys
   INPUT_FILE_PATH_KEY = 'input_file_path'
+  OVERRIDE_LAYER_THICKNESS_KEY = 'override_layer_thickness'
+  LAYER_THICKNESS_KEY = 'layer_thickness'
   ENABLE_SLICES_SUBVOLUME_KEY = 'enable_slices_subvolume'
   SLICES_SUBVOLUME_MINMAX_X_KEY = 'slices_subvolume_minmax_x'
   SLICES_SUBVOLUME_MINMAX_Y_KEY = 'slices_subvolume_minmax_y'
@@ -121,6 +123,8 @@ def parameters(self) -> nx.Parameters:
 
     params.insert(nx.Parameters.Separator("Input Parameters"))
     params.insert(nx.FileSystemPathParameter(ReadPeregrineHDF5File.INPUT_FILE_PATH_KEY, 'Input Peregrine HDF5 File', 'The input Peregrine HDF5 file that will be read.', '', {'.hdf5', '.h5'}, nx.FileSystemPathParameter.PathType.InputFile, False))
+    params.insert_linkable_parameter(nx.BoolParameter(ReadPeregrineHDF5File.OVERRIDE_LAYER_THICKNESS_KEY, 'Override Layer Thickness', 'Specifies whether or not to override the layer thickness found in the input file.', False))
+    params.insert(nx.Float64Parameter(ReadPeregrineHDF5File.LAYER_THICKNESS_KEY, 'Layer Thickness', 'The layer thickness that will be used to override the layer thickness found in the input file.', 0.05))
 
     params.insert(nx.Parameters.Separator("Slice Data Parameters"))
     params.insert(nx.DataGroupCreationParameter(ReadPeregrineHDF5File.SLICE_DATA_KEY, 'Slice Data Geometry', 'The path to the newly created Slice Data image geometry', nx.DataPath(['Slice Data'])))
@@ -162,6 +166,7 @@ def parameters(self) -> nx.Parameters:
     params.insert_linkable_parameter(nx.BoolParameter(ReadPeregrineHDF5File.ENABLE_SCAN_DATA_SUBVOLUME_KEY, 'Enable Scan Data Subvolume', 'Specifies whether or not to read a subvolume of the scan data from the input file.', False))
     params.insert(nx.VectorUInt64Parameter(ReadPeregrineHDF5File.SCAN_DATA_SUBVOLUME_MINMAX_KEY, 'Scan Data Slice Bounds', 'The min/max slice bounds (inclusive) for the Scan Data subvolume.', [0, 1], ['Min', 'Max']))
 
+    params.link_parameters(ReadPeregrineHDF5File.OVERRIDE_LAYER_THICKNESS_KEY, ReadPeregrineHDF5File.LAYER_THICKNESS_KEY, True)
     params.link_parameters(ReadPeregrineHDF5File.READ_SEGMENTATION_RESULTS_KEY, ReadPeregrineHDF5File.SEGMENTATION_RESULTS_VALUES_KEY, True)
     params.link_parameters(ReadPeregrineHDF5File.ENABLE_SLICES_SUBVOLUME_KEY, ReadPeregrineHDF5File.SLICES_SUBVOLUME_MINMAX_X_KEY, True)
     params.link_parameters(ReadPeregrineHDF5File.ENABLE_SLICES_SUBVOLUME_KEY, ReadPeregrineHDF5File.SLICES_SUBVOLUME_MINMAX_Y_KEY, True)
@@ -193,6 +198,8 @@ def preflight_impl(self, data_structure: nx.DataStructure, args: dict, message_h
     :rtype: nx.IFilter.PreflightResult
     """
     input_file_path = args[ReadPeregrineHDF5File.INPUT_FILE_PATH_KEY]
+    override_layer_thickness: bool = args[ReadPeregrineHDF5File.OVERRIDE_LAYER_THICKNESS_KEY]
+    layer_thickness: float = args[ReadPeregrineHDF5File.LAYER_THICKNESS_KEY]
     read_segmentation_results: bool = args[ReadPeregrineHDF5File.READ_SEGMENTATION_RESULTS_KEY]
     read_camera_data: bool = args[ReadPeregrineHDF5File.READ_CAMERA_DATA_KEY]
     read_part_ids: bool = args[ReadPeregrineHDF5File.READ_PART_IDS_KEY]
@@ -214,7 +221,7 @@ def preflight_impl(self, data_structure: nx.DataStructure, args: dict, message_h
     except Exception as e:
       return nx.IFilter.PreflightResult(errors=[nx.Error(-2012, f"Error opening file '{str(input_file_path)}': {e}")])
 
-    spacing_result: Result = self._calculate_spacing(h5_file_reader)
+    spacing_result: Result = self._calculate_spacing(h5_file_reader, layer_thickness if override_layer_thickness else None)
     if not spacing_result.valid():
       return nx.IFilter.PreflightResult(errors=spacing_result.errors)
 
@@ -571,7 +578,7 @@ def _read_dataset_dimensions(self, h5_file_reader: h5py.File, h5_dataset_path: s
 
     return Result(value=list(dataset.shape))
 
-  def _read_dataset_type(self, h5_file_reader: h5py.File, h5_dataset_path: str) -> Result[List[int]]:
+  def _read_dataset_type(self, h5_file_reader: h5py.File, h5_dataset_path: str) -> Result[h5py.Datatype]:
     result: Result[h5py.Dataset] = self._open_hdf5_data_object(h5_file_reader, h5_dataset_path)
     if result.invalid():
       return Result(errors=result.errors)
@@ -635,7 +642,7 @@ def _validate_subvolume_dimensions(self, volume_dims: List[int], subvolume_min_m
 
     return Result()
 
-  def _calculate_spacing(self, h5_file_reader: h5py.File) -> Result[List[float]]:
+  def _calculate_spacing(self, h5_file_reader: h5py.File, layer_thickness: float = None) -> Result[List[float]]:
     if ReadPeregrineHDF5File.X_REAL_DIMENSION_PATH not in h5_file_reader.attrs:
       return make_error_result(code=-3007, message=f"Attribute at path '{ReadPeregrineHDF5File.X_REAL_DIMENSION_PATH}' does not exist, so the X spacing cannot be calculated!")
     try:
@@ -664,12 +671,15 @@ def _calculate_spacing(self, h5_file_reader: h5py.File) -> Result[List[float]]:
     except KeyError:
       return make_error_result(code=-3014, message=f"Attribute at path '{ReadPeregrineHDF5File.Y_CAMERA_DIMENSION_PATH}' cannot be accessed, so the Y spacing cannot be calculated!")
 
-    if ReadPeregrineHDF5File.LAYER_THICKNESS_PATH not in h5_file_reader.attrs:
-      return make_error_result(code=-3015, message=f"Attribute at path '{ReadPeregrineHDF5File.LAYER_THICKNESS_PATH}' does not exist, so the Z spacing cannot be calculated!")
-    try:
-      z_spacing = h5_file_reader.attrs[ReadPeregrineHDF5File.LAYER_THICKNESS_PATH]
-    except KeyError:
-      return make_error_result(code=-3016, message=f"Attribute at path '{ReadPeregrineHDF5File.LAYER_THICKNESS_PATH}' cannot be accessed, so the Z spacing cannot be calculated!")
+    if layer_thickness is None:
+      if ReadPeregrineHDF5File.LAYER_THICKNESS_PATH not in h5_file_reader.attrs:
+        return make_error_result(code=-3015, message=f"Attribute at path '{ReadPeregrineHDF5File.LAYER_THICKNESS_PATH}' does not exist, so the Z spacing cannot be calculated!")
+      try:
+        z_spacing = h5_file_reader.attrs[ReadPeregrineHDF5File.LAYER_THICKNESS_PATH]
+      except KeyError:
+        return make_error_result(code=-3016, message=f"Attribute at path '{ReadPeregrineHDF5File.LAYER_THICKNESS_PATH}' cannot be accessed, so the Z spacing cannot be calculated!")
+    else:
+      z_spacing = layer_thickness
 
     spacing = [float(x_real_dim / x_camera_dim), float(y_real_dim / y_camera_dim), float(z_spacing)]
     return Result(value=spacing)
@@ -719,7 +729,9 @@ def _read_slice_datasets(self, h5_file_reader: h5py.File, data_structure: nx.Dat
       if should_cancel:
         return Result()
 
-    for camera_data_dataset in camera_data_datasets_str:
+    camera_data_datasets_str = camera_data_datasets_str.strip()
+    camera_data_datasets = camera_data_datasets_str.split(',')
+    for camera_data_dataset in camera_data_datasets:
       if should_cancel:
         return Result()
 
@@ -853,6 +865,8 @@ def _read_scan_data(self, h5_file_reader: h5py.File, scan_path: str, z_offset: i
     return Result(value=(vertices,edges,tot))
 
   def _read_scan_datasets(self, h5_file_reader: h5py.File, data_structure: nx.DataStructure, filter_args: dict, message_handler: nx.IFilter.MessageHandler, should_cancel: nx.AtomicBoolProxy) -> Result:
+    override_layer_thickness: bool = filter_args[ReadPeregrineHDF5File.OVERRIDE_LAYER_THICKNESS_KEY]
+    layer_thickness: float = filter_args[ReadPeregrineHDF5File.LAYER_THICKNESS_KEY]
     read_scan_datasets: bool = filter_args[ReadPeregrineHDF5File.READ_SCAN_DATASETS_KEY]
     scan_data_edge_geom_path: nx.DataPath = filter_args[ReadPeregrineHDF5File.SCAN_DATA_KEY]
     scan_data_vertex_attr_mat_name: str = filter_args[ReadPeregrineHDF5File.SCAN_DATA_VERTEX_ATTR_MAT_KEY]
@@ -889,13 +903,16 @@ def _read_scan_datasets(self, h5_file_reader: h5py.File, data_structure: nx.Data
       return Result(errors=result.errors)
     scan_group_reader: h5py.Group = result.value
 
-    # Read the Z thickness value
-    if ReadPeregrineHDF5File.LAYER_THICKNESS_PATH not in h5_file_reader.attrs:
-      return make_error_result(code=-3007, message=f"Attribute at path '{ReadPeregrineHDF5File.LAYER_THICKNESS_PATH}' does not exist in HDF5 file '{h5_file_reader.filename}', so the scan datasets cannot be read!")
-    try:
-      z_thickness: float = h5_file_reader.attrs[ReadPeregrineHDF5File.LAYER_THICKNESS_PATH]
-    except Exception as e:
-      return make_error_result(code=-3008, message=f"Attribute at path '{ReadPeregrineHDF5File.LAYER_THICKNESS_PATH}' cannot be accessed in HDF5 file '{h5_file_reader.filename}', so the scan datasets cannot be read!\n\n{e}")
+    if override_layer_thickness:
+      z_thickness = layer_thickness
+    else:
+      # Read the Z thickness value
+      if ReadPeregrineHDF5File.LAYER_THICKNESS_PATH not in h5_file_reader.attrs:
+        return make_error_result(code=-3007, message=f"Attribute at path '{ReadPeregrineHDF5File.LAYER_THICKNESS_PATH}' does not exist in HDF5 file '{h5_file_reader.filename}', so the scan datasets cannot be read!")
+      try:
+        z_thickness: float = h5_file_reader.attrs[ReadPeregrineHDF5File.LAYER_THICKNESS_PATH]
+      except Exception as e:
+        return make_error_result(code=-3008, message=f"Attribute at path '{ReadPeregrineHDF5File.LAYER_THICKNESS_PATH}' cannot be accessed in HDF5 file '{h5_file_reader.filename}', so the scan datasets cannot be read!\n\n{e}")
 
     # Calculate the start and end values for the scans
     z_start: int = 0
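
The dtype bug fix in simplnxpy.cpp swaps an identity check (.is(), which only succeeds when both sides are the same Python object) for a value-equality check via __eq__, since two equivalent numpy dtypes are frequently distinct objects. A minimal sketch of exercising the bound helper from Python, assuming only the standard simplnx and numpy imports; the dtype values below are arbitrary examples, not taken from the patch:

import numpy as np
import simplnx as nx

# Equivalent dtype objects now convert correctly even when they are distinct
# Python objects; an unsupported dtype raises a ValueError that names the
# offending dtype in its message.
assert nx.convert_np_dtype_to_datatype(np.dtype(np.float32)) == nx.DataType.float32
assert nx.convert_np_dtype_to_datatype(np.dtype('uint16')) == nx.DataType.uint16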