diff --git a/.github/workflows/linux.yml b/.github/workflows/linux.yml index 52528f07db..503f8f3b0d 100644 --- a/.github/workflows/linux.yml +++ b/.github/workflows/linux.yml @@ -43,7 +43,7 @@ jobs: sudo apt-get -y install ninja-build - name: Install Sphinx run: | - sudo pip install sphinx myst-parser sphinx-markdown-tables sphinx_rtd_theme + sudo pip install sphinx myst-parser sphinx-markdown-tables sphinx_rtd_theme numpy - name: Setup NuGet Credentials shell: bash run: | diff --git a/.github/workflows/macos.yml b/.github/workflows/macos.yml index 7a388ab2f0..26f98161c9 100644 --- a/.github/workflows/macos.yml +++ b/.github/workflows/macos.yml @@ -32,7 +32,7 @@ jobs: brew install ninja - name: Install Sphinx run: | - sudo pip3 install sphinx myst-parser sphinx-markdown-tables sphinx_rtd_theme + sudo pip3 install sphinx myst-parser sphinx-markdown-tables sphinx_rtd_theme numpy - name: Setup NuGet Credentials shell: bash run: | diff --git a/.github/workflows/windows.yml b/.github/workflows/windows.yml index fd649a3d4f..ca2981560a 100644 --- a/.github/workflows/windows.yml +++ b/.github/workflows/windows.yml @@ -49,7 +49,7 @@ jobs: -source "https://nuget.pkg.github.com/BlueQuartzSoftware/index.json" - name: Install Sphinx run: | - pip install sphinx myst-parser sphinx-markdown-tables sphinx_rtd_theme + pip install sphinx myst-parser sphinx-markdown-tables sphinx_rtd_theme numpy - name: Configure run: | cmake --preset ci-windows-${{matrix.toolset}} ${{github.workspace}} -T ${{matrix.toolset}} diff --git a/CMakeLists.txt b/CMakeLists.txt index ce69a866e9..d26d2c9bd4 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -63,7 +63,7 @@ option(COMPLEX_ENABLE_INSTALL "Enables COMPLEX install rules" ON) file(TO_CMAKE_PATH "${CMAKE_COMMAND}" CMAKE_COMMAND_NORM) project(complex - VERSION 1.2.0 + VERSION 1.2.1 DESCRIPTION "SIMPL Redesign" HOMEPAGE_URL "https://github.com/BlueQuartzSoftware/complex" LANGUAGES CXX diff --git a/conda/meta.yaml b/conda/meta.yaml index 
543239d37e..35af5d184e 100644 --- a/conda/meta.yaml +++ b/conda/meta.yaml @@ -1,5 +1,5 @@ {% set name = "complex" %} -{% set version = "1.2.0" %} +{% set version = "1.2.1" %} package: name: {{ name|lower }} diff --git a/conda/recipe.yaml b/conda/recipe.yaml index e5b57677f7..78c9c5a7e6 100644 --- a/conda/recipe.yaml +++ b/conda/recipe.yaml @@ -1,5 +1,5 @@ context: - version: "1.2.0" + version: "1.2.1" name: complex package: diff --git a/src/Plugins/ComplexCore/wrapping/python/complexpy.cpp b/src/Plugins/ComplexCore/wrapping/python/complexpy.cpp index 867e00cb9d..288f07a335 100644 --- a/src/Plugins/ComplexCore/wrapping/python/complexpy.cpp +++ b/src/Plugins/ComplexCore/wrapping/python/complexpy.cpp @@ -10,7 +10,6 @@ #include "ComplexCore/ComplexCoreFilterBinding.hpp" #include - #include #include #include @@ -80,6 +79,7 @@ #include #include #include +#include #include @@ -505,12 +505,118 @@ PYBIND11_MODULE(complex, mod) dataObject.def_property_readonly("id", &DataObject::getId); dataObject.def_property_readonly("name", &DataObject::getName); + dataObject.def_property_readonly("type", &DataObject::getDataObjectType); dataStructure.def(py::init<>()); dataStructure.def("__getitem__", py::overload_cast(&DataStructure::getSharedData)); + dataStructure.def("__getitem__", [](DataStructure& self, const std::string& path) { + auto pathConversionResult = DataPath::FromString(path); + if(!pathConversionResult) + { + return std::shared_ptr(nullptr); + } + return self.getSharedData(pathConversionResult.value()); + }); dataStructure.def_property_readonly("size", &DataStructure::getSize); dataStructure.def("__len__", &DataStructure::getSize); dataStructure.def("remove", py::overload_cast(&DataStructure::removeData)); + dataStructure.def("remove", [](DataStructure& self, const std::string& path) { + auto pathConversionResult = DataPath::FromString(path); + if(!pathConversionResult) + { + return false; + } + return self.removeData(pathConversionResult.value()); + }); + 
dataStructure.def("hierarchy_to_str", [](DataStructure& self) { + std::stringstream ss; + self.exportHierarchyAsText(ss); + return ss.str(); + }); + dataStructure.def("hierarchy_to_graphviz", [](DataStructure& self) { + std::stringstream ss; + self.exportHierarchyAsGraphViz(ss); + return ss.str(); + }); + dataStructure.def("get_children", [](DataStructure& self, complex::DataPath& parentPath) { + if(parentPath.empty()) + { + std::vector outputPaths; + for(const auto* object : self.getTopLevelData()) + { + auto topLevelPath = DataPath::FromString(object->getDataPaths()[0].getTargetName()).value(); + outputPaths.push_back(topLevelPath); + } + return outputPaths; + } + else + { + auto result = complex::GetAllChildDataPaths(self, parentPath); + if(result) + { + return result.value(); + } + return std::vector{}; + } + }); + dataStructure.def("get_children", [](DataStructure& self, const std::string& parentPath) { + if(parentPath.empty()) + { + std::vector outputPaths; + for(const auto* object : self.getTopLevelData()) + { + auto topLevelPath = DataPath::FromString(object->getDataPaths()[0].getTargetName()).value(); + outputPaths.push_back(topLevelPath); + } + return outputPaths; + } + else + { + auto pathConversionResult = DataPath::FromString(parentPath); + if(!pathConversionResult) + { + return std::vector{}; + } + + auto result = complex::GetAllChildDataPaths(self, pathConversionResult.value()); + if(result) + { + return result.value(); + } + return std::vector{}; + } + }); + + auto dataObjectType = py::enum_(dataObject, "DataObjectType"); + dataObjectType.value("DataObject", DataObject::Type::DataObject); + dataObjectType.value("DynamicListArray", DataObject::Type::DynamicListArray); + dataObjectType.value("ScalarData", DataObject::Type::ScalarData); + dataObjectType.value("BaseGroup", DataObject::Type::BaseGroup); + dataObjectType.value("AttributeMatrix", DataObject::Type::AttributeMatrix); + dataObjectType.value("DataGroup", DataObject::Type::DataGroup); + 
dataObjectType.value("IDataArray", DataObject::Type::IDataArray); + dataObjectType.value("DataArray", DataObject::Type::DataArray); + dataObjectType.value("IGeometry", DataObject::Type::IGeometry); + dataObjectType.value("IGridGeometry", DataObject::Type::IGridGeometry); + dataObjectType.value("RectGridGeom", DataObject::Type::RectGridGeom); + dataObjectType.value("ImageGeom", DataObject::Type::ImageGeom); + dataObjectType.value("INodeGeometry0D", DataObject::Type::INodeGeometry0D); + dataObjectType.value("VertexGeom", DataObject::Type::VertexGeom); + dataObjectType.value("INodeGeometry1D", DataObject::Type::INodeGeometry1D); + dataObjectType.value("EdgeGeom", DataObject::Type::EdgeGeom); + dataObjectType.value("INodeGeometry2D", DataObject::Type::INodeGeometry2D); + dataObjectType.value("QuadGeom", DataObject::Type::QuadGeom); + dataObjectType.value("TriangleGeom", DataObject::Type::TriangleGeom); + dataObjectType.value("INodeGeometry3D", DataObject::Type::INodeGeometry3D); + dataObjectType.value("HexahedralGeom", DataObject::Type::HexahedralGeom); + dataObjectType.value("TetrahedralGeom", DataObject::Type::TetrahedralGeom); + dataObjectType.value("INeighborList", DataObject::Type::INeighborList); + dataObjectType.value("NeighborList", DataObject::Type::NeighborList); + dataObjectType.value("StringArray", DataObject::Type::StringArray); + dataObjectType.value("AbstractMontage", DataObject::Type::AbstractMontage); + dataObjectType.value("GridMontage", DataObject::Type::GridMontage); + dataObjectType.value("Unknown", DataObject::Type::Unknown); + dataObjectType.value("Any", DataObject::Type::Any); py::class_> baseGroup(mod, "BaseGroup"); baseGroup.def("contains", py::overload_cast(&BaseGroup::contains, py::const_)); @@ -593,6 +699,7 @@ PYBIND11_MODULE(complex, mod) iDataArray.def_property_readonly("store", py::overload_cast<>(&IDataArray::getIDataStore)); iDataArray.def_property_readonly("tdims", &IDataArray::getTupleShape); iDataArray.def_property_readonly("cdims", 
&IDataArray::getComponentShape); + iDataArray.def_property_readonly("data_type", &IDataArray::getDataType); auto dataArrayInt8 = COMPLEX_PY_BIND_DATA_ARRAY(mod, Int8Array); auto dataArrayUInt8 = COMPLEX_PY_BIND_DATA_ARRAY(mod, UInt8Array); diff --git a/src/complex/DataStructure/DataStructure.cpp b/src/complex/DataStructure/DataStructure.cpp index 454c1e7e61..df8b447352 100644 --- a/src/complex/DataStructure/DataStructure.cpp +++ b/src/complex/DataStructure/DataStructure.cpp @@ -908,15 +908,14 @@ void DataStructure::exportHierarchyAsText(std::ostream& outputStream) const { auto topLevelPath = DataPath::FromString(object->getDataPaths()[0].getTargetName()).value(); outputStream << k_Delimiter << topLevelPath.getTargetName() << "\n"; - auto optionalChildPaths = GetAllChildDataPaths(*this, topLevelPath); + auto optionalDataPaths = GetAllChildDataPaths(*this, topLevelPath); - if(optionalChildPaths.has_value() && !optionalChildPaths.value().empty()) + if(optionalDataPaths.has_value() && !optionalDataPaths.value().empty()) { // Begin recursion - recurseHierarchyToText(outputStream, optionalChildPaths.value(), ""); + recurseHierarchyToText(outputStream, optionalDataPaths.value(), ""); } } - outputStream << '\n'; // for readability } void DataStructure::recurseHierarchyToGraphViz(std::ostream& outputStream, const std::vector paths, const std::string& parent) const @@ -929,12 +928,15 @@ void DataStructure::recurseHierarchyToGraphViz(std::ostream& outputStream, const // pull child paths or skip to next iteration auto optionalChildPaths = GetAllChildDataPaths(*this, path); - if(optionalChildPaths.has_value() && !optionalChildPaths.value().empty()) + if(!optionalChildPaths.has_value() || optionalChildPaths.value().empty()) { - // Begin recursion - recurseHierarchyToGraphViz(outputStream, optionalChildPaths.value(), path.getTargetName()); + continue; } + + // recurse + recurseHierarchyToGraphViz(outputStream, optionalChildPaths.value(), path.getTargetName()); } + // outputStream 
<< "\n"; // for readability } void DataStructure::recurseHierarchyToText(std::ostream& outputStream, const std::vector paths, std::string indent) const diff --git a/test/DataStructTest.cpp b/test/DataStructTest.cpp index 4677bfb0c4..a25e87b203 100644 --- a/test/DataStructTest.cpp +++ b/test/DataStructTest.cpp @@ -17,6 +17,7 @@ #include "complex/UnitTest/UnitTestCommon.hpp" #include "complex/Utilities/DataArrayUtilities.hpp" #include "complex/Utilities/DataGroupUtilities.hpp" +#include "complex/unit_test/complex_test_dirs.hpp" #include @@ -42,6 +43,26 @@ constexpr StringLiteral k_SharedPolyhedrons = "SharedPolyhedronList"; constexpr StringLiteral k_HexGeo = "Hex Geometry"; } // namespace +// This test will ensure we don't run into runtime exceptions trying to run the functions +TEST_CASE("ComplexCore::exportHierarchyAsGraphViz") +{ + DataStructure dataStructure = UnitTest::CreateComplexMultiLevelDataGraph(); + auto outputPath = fs::path(fmt::format("{}/exportHierarchyAsGraphViz_test.dot", unit_test::k_BinaryTestOutputDir)); + std::cout << outputPath << std::endl; + std::ofstream output(outputPath, std::ios_base::trunc); + dataStructure.exportHierarchyAsGraphViz(output); +} + +// This test will ensure we don't run into runtime exceptions trying to run the functions +TEST_CASE("ComplexCore::exportHierarchyAsText") +{ + DataStructure dataStructure = UnitTest::CreateComplexMultiLevelDataGraph(); + auto outputPath = fs::path(fmt::format("{}/exportHierarchyAsText_test.txt", unit_test::k_BinaryTestOutputDir)); + std::cout << outputPath << std::endl; + std::ofstream output(outputPath, std::ios_base::trunc); + dataStructure.exportHierarchyAsText(output); +} + DataStructure createTestDataStructure() { DataStructure dataStruct; diff --git a/wrapping/python/ReadMe.md b/wrapping/python/ReadMe.md index b010aba7dd..e8becb2eb5 100644 --- a/wrapping/python/ReadMe.md +++ b/wrapping/python/ReadMe.md @@ -1,5 +1,20 @@ # Complex Python Information +## Checklist when updating Python 
Bindings + +- Update Version number. + + - If you add API then update the third number + - If you break API (anywhere in complex), update the second number + +- Document **ALL** new API in the appropriate documentation file(s) +- Create a ReleaseNotes_1XX.rst file with the appropriate highlights from the release +- Create example python code for any new API +- Update example python codes for any changed API +- Add unit test for any NEW API +- Update Unit test for changed API +- Tag the repository with the correct version in the correct semantic form + ## Creating the Python Bindings ### MacOS: Use Mamba @@ -32,13 +47,16 @@ Create the package from the `complex` sources ```shell [user@host] $ cd complex/conda - (nx-build) [user@host] $ conda mambabuild --python 3.8 .; conda mambabuild --python 3.9 .; conda mambabuild --python 3.10 . + (nx-build) [user@host] $ conda mambabuild --python 3.8 . + (nx-build) [user@host] $ conda mambabuild --python 3.9 . + (nx-build) [user@host] $ conda mambabuild --python 3.10 . ``` ### Windows/Linux ```shell + [user@host] $ conda create -n nx-build python=3.10 mamba boa [user@host] $ cd complex/conda [user@host] $ conda build . ``` @@ -46,8 +64,10 @@ For faster environment solves mamba can also be used. ```shell -conda install boa -conda mambabuild --python 3.8|3.9|3.10 . + [user@host] $ conda install boa + [user@host] $ conda mambabuild --python 3.8 . + [user@host] $ conda mambabuild --python 3.9 . + [user@host] $ conda mambabuild --python 3.10 . ``` ### Uploading to Anaconda.org @@ -56,7 +76,7 @@ Open a "base" anaconda prompt. ```shell [user@host] $ anaconda login - [user@host] $ anaconda upload -c bluequartzsoftware [path/to/tar.bz] + [user@host] $ anaconda upload --user bluequartzsoftware [path/to/tar.bz] ``` ## Using the Python Bindings @@ -64,7 +84,7 @@ Open a "base" anaconda prompt. 
```shell conda config --add channels conda-forge conda config --set channel_priority strict -conda create -n cxpython python=3.8 +conda create -n cxpython python=3.10 conda activate cxpython conda install -c bluequartzsoftware complex ``` diff --git a/wrapping/python/ToDo.md b/wrapping/python/ToDo.md new file mode 100644 index 0000000000..43e8a1368f --- /dev/null +++ b/wrapping/python/ToDo.md @@ -0,0 +1,10 @@ +# ToDo list for python bindings + +- Create each kind of geometry using the python bindings + + - combine that with numpy to generate some nodes for a node geometry + +- Document all wrapped methods in the complexpy.cpp file. +- Example of using MatPlotLib to generate a plot and save the whole plot as an image file +- Example of using Pandas DataFrame +- Example of looping over X number of EBSD files and generate a Pole figure for each file diff --git a/wrapping/python/docs/index_template.rst b/wrapping/python/docs/index_template.rst index 4575a5118e..173cae6475 100644 --- a/wrapping/python/docs/index_template.rst +++ b/wrapping/python/docs/index_template.rst @@ -5,7 +5,7 @@ DREAM3D-NX Python Docs ======================= -Latest Version: 1.2.0 +Latest Version: 1.2.1 --------------------- The *complex* library can be installed through an Anaconda packages from the *BlueQuartzSoftware* channel. This can be achieved @@ -32,6 +32,7 @@ by creating a new virtual environment Reference_Frame_Notes ReleaseNotes_110 ReleaseNotes_120 + ReleaseNotes_121 @PLUGIN_LIST@ Indices and tables diff --git a/wrapping/python/docs/source/API.rst b/wrapping/python/docs/source/API.rst index b4960d0e93..b39254e960 100644 --- a/wrapping/python/docs/source/API.rst +++ b/wrapping/python/docs/source/API.rst @@ -352,7 +352,7 @@ General Parameters .. 
code:: python generated_file_list_value = cx.GeneratedFileListParameter.ValueType() - generated_file_list_value.input_path = "/Users/mjackson/DREAM3DNXData/Data/Porosity_Image" + generated_file_list_value.input_path = "DREAM3DNXData/Data/Porosity_Image" generated_file_list_value.ordering = cx.GeneratedFileListParameter.Ordering.LowToHigh generated_file_list_value.file_prefix = "slice-" diff --git a/wrapping/python/docs/source/DataObjects.rst b/wrapping/python/docs/source/DataObjects.rst index 68a7b34db0..1f03fa864a 100644 --- a/wrapping/python/docs/source/DataObjects.rst +++ b/wrapping/python/docs/source/DataObjects.rst @@ -4,7 +4,7 @@ DataStructure Objects .. _DataStructure: DataStructure --------------- +---------------- The **complex** DataStructure can be filled with various types of objects. Those are all listed below. In the **DREAM3D-NX** user interface, the DataStructure of any @@ -16,10 +16,61 @@ a yellow box at the right side of the user interface. :height: 809 :scale: 45 + +- All DataObjects are stored in a DataStructure. +- Multiple DataStructure objects are allowed in a python program. + + +.. py:class:: DataStructure + + .. py:method:: [data_path] + [string] + + Retrieves the DataObject at the given DataPath_ + + :param DataPath data_path: The DataPath (or string convertable to a DataPath) to retrieve. + + .. py:method:: size() + + :return: An integer that is the total number of all objects that are held by the DataStructure. + :rtype: int + + .. py:method:: remove(data_path) + remove(string) + + :param DataPath data_path: The DataPath (or string convertable to a DataPath) to remove from the DataStructure. + :return: A boolean indicating if the path was removed or not. + :rtype: Bool + + .. py:method:: hierarchy_to_str() + + :return: A string that attempts to show the internal hierarchy of the DataStructure + :rtype: string + + .. 
py:method:: hierarchy_to_graphviz() + + :return: A string that attempts to show the internal hierarchy of the DataStructure formatted in the GraphViz 'dot' language. + :rtype: string + + .. py:method:: get_children(complex.DataPath) + get_children(string) + + :param DataPath data_path: The DataPath (or string convertable to a DataPath) to get the children. An empty DataPath object will return the top level DataPaths. + :return: A string that attempts to show the internal hierarchy of the DataStructure + :rtype: List of DataPath_ + .. code:: python + # this is just sample code. The developer is expected to use these on well + # constructed DataStructure objects. data_structure = cx.DataStructure() - + num_objects = data_structure.size + did_remove = data_structure.remove(complex.DataPath("/Path/to/Object")) + hierarchy = data_structure.hierarchy_to_str() + hierarchy_gv = data_structure.hierarchy_to_graphviz() + top_level_child_paths = data_structure.get_children() + child_paths = data_structure.get_children(complex.DataPath("Group")) + child_paths = data_structure.get_children("/Path/to/Object") .. _DataObject: @@ -30,6 +81,52 @@ This is the abstract base class for all other objects that can be inserted into DataStructure_ . It should never be used as the appropriate class from the list below should be used instead. +.. py:class:: DataObject + + :ivar id: Integer. The internal id value used in the DataStructure + :ivar name: String. 
The name of the object + :var type: complex.DataObject.DataObjectType value + +The possible *type* values are: + +- complex.DataObject::Type::DataObject +- complex.DataObject::Type::DynamicListArray +- complex.DataObject::Type::ScalarData +- complex.DataObject::Type::BaseGroup +- complex.DataObject::Type::AttributeMatrix +- complex.DataObject::Type::DataGroup +- complex.DataObject::Type::IDataArray +- complex.DataObject::Type::DataArray +- complex.DataObject::Type::IGeometry +- complex.DataObject::Type::IGridGeometry +- complex.DataObject::Type::RectGridGeom +- complex.DataObject::Type::ImageGeom +- complex.DataObject::Type::INodeGeometry0D +- complex.DataObject::Type::VertexGeom +- complex.DataObject::Type::INodeGeometry1D +- complex.DataObject::Type::EdgeGeom +- complex.DataObject::Type::INodeGeometry2D +- complex.DataObject::Type::QuadGeom +- complex.DataObject::Type::TriangleGeom +- complex.DataObject::Type::INodeGeometry3D +- complex.DataObject::Type::HexahedralGeom +- complex.DataObject::Type::TetrahedralGeom +- complex.DataObject::Type::INeighborList +- complex.DataObject::Type::NeighborList +- complex.DataObject::Type::StringArray +- complex.DataObject::Type::AbstractMontage +- complex.DataObject::Type::GridMontage +- complex.DataObject::Type::Unknown +- complex.DataObject::Type::Any + + .. code:: python + + data_object = data_structure["Image Geometry"] + if data_object.type == cx.DataObject.DataObjectType.ImageGeom: + print("Image Geometry") + else: + print("NOT Image Geometry") + .. _DataPath: DataPath @@ -79,9 +176,9 @@ codes that are based on the `complex ` filter. + +.. py:class:: DataStore + + + :ivar dtype: The type of Data stored in the DataStore + + .. py:method:: length(data_store) + + Get the number of tuples in the DataStore + + .. py:method:: [index] + + Get a value at a specified index. Use of the numpy view into the DataArray is preferred. + + .. 
code:: python # First get the array from the DataStructure diff --git a/wrapping/python/docs/source/Geometry.rst b/wrapping/python/docs/source/Geometry.rst index 740faf690b..5b69530272 100644 --- a/wrapping/python/docs/source/Geometry.rst +++ b/wrapping/python/docs/source/Geometry.rst @@ -31,6 +31,17 @@ are **ALWAYS** given in "C" order, or slowest to fastest order. Definition of Image Geometry +.. py:class:: ImageGeom + + :ivar dimensions: Returns dimensions of the Image Geometry in [XYZ] as integers + :ivar spacing: Returns the spacing of the Image Geometry in [XYZ] as float32 + :ivar origin: Returns the origin of the Image Geometry in [XYZ] as float32 + :ivar num_x_cells: Returns the number of Cells along the X Axis. + :ivar num_y_cells: Returns the number of Cells along the Y Axis. + :ivar num_z_cells: Returns the number of Cells along the Z Axis. + + + .. _RectGridGeometry: RectilinearGrid Geometry (Semi-Regular Grid) @@ -53,6 +64,59 @@ in each array is +1 from the dimension size. Definition of RectGrid Geometry +.. py:class:: RectilinearGrid + + :ivar dimensions: Returns dimensions of the Image Geometry in [XYZ] as integers + :ivar num_x_cells: Returns the number of Cells along the X Axis. + :ivar num_y_cells: Returns the number of Cells along the Y Axis. + :ivar num_z_cells: Returns the number of Cells along the Z Axis. + :ivar x_bounds: Returns the axis values along the X Axis + :ivar y_bounds: Returns the axis values along the Y Axis + :ivar z_bounds: Returns the axis values along the Z Axis + + +.. code:: python + + # This code snippet assumes the developer has already generated the + # needed DataArrays and added them to the DataStructure through the proper + # CreateDataArray filters (or any other way) + result = cx.CreateGeometryFilter.execute(data_structure=data_structure, + array_handling= 1, # Move the arrays from their original location. 
+ cell_attribute_matrix_name="Cell Data", + geometry_name=cx.DataPath(["RectGrid Geometry"]), + geometry_type=1, + x_bounds=cx.DataPath("RectGridCoords/X Coords"), + y_bounds=cx.DataPath("RectGridCoords/Y Coords"), + z_bounds=cx.DataPath("RectGridCoords/Z Coords") + ) + if len(result.errors) != 0: + print('Errors: {}', result.errors) + print('Warnings: {}', result.warnings) + else: + print("No errors running the CreateGeometryFilter filter") + + rect_grid_geom = data_structure[cx.DataPath(["RectGrid Geometry"])] + x_cell_count = rect_grid_geom.num_x_cells + print(f'num_x_cells: {x_cell_count}') + x_bounds = rect_grid_geom.x_bounds + print(f'x_bounds: {x_bounds.store.npview()}') + +The output produced is: + +:: + + num_x_cells: 9 + x_bounds: [[0.] + [1.] + [2.] + [3.] + [4.] + [5.] + [6.] + [7.] + [8.] + [9.]] + Node Based Geometries (Unstructured Grid) ----------------------------------------- diff --git a/wrapping/python/docs/source/Installation.rst b/wrapping/python/docs/source/Installation.rst index 59a6854c18..7d493d8ae0 100644 --- a/wrapping/python/docs/source/Installation.rst +++ b/wrapping/python/docs/source/Installation.rst @@ -1,7 +1,7 @@ Installation ============ -Latest Version: 1.2.0 +Latest Version: 1.2.1 --------------------- The *complex* library can be installed through an Anaconda packages from the *BlueQuartzSoftware* channel. This can be achieved diff --git a/wrapping/python/docs/source/Python_Introduction.rst b/wrapping/python/docs/source/Python_Introduction.rst index d9b58685da..11ea1cdd9d 100644 --- a/wrapping/python/docs/source/Python_Introduction.rst +++ b/wrapping/python/docs/source/Python_Introduction.rst @@ -39,10 +39,10 @@ If you will need functionality from either of the other *complex* plugins then y Creating the DataStructure -------------------------- -In order to effectively use the *complex* classes and functions, you will need to create at leaset one *DataStructure* object. 
-The *DataStructure* object holds the various *DataGroup*, *AttributeMatrix*, and *DataArray* that will be created. When -then *DataStructure* goes out of scope those items will also be cleaned up. The code to create the -*DataStructure* object is straight forward. +In order to effectively use the *complex* classes and functions, you will need to create at least one :ref:`DataStructure` object. +The :ref:`DataStructure` object holds the various *DataGroup*, *AttributeMatrix*, and *DataArray* that will be created. When +the :ref:`DataStructure` goes out of scope those items will also be cleaned up. The code to create the +:ref:`DataStructure` object is straightforward. .. code:: python @@ -79,7 +79,7 @@ An example of executing a file in immediate mode is the following code snippet. numeric_type=cx.NumericType.float32, output_data_array=cx.DataPath(["3D Array"]), tuple_dimensions= [[3, 2, 5]]) - npdata = data_structure[cx.DataPath(["3D Array"])].store.npview() + npdata = data_structure[cx.DataPath(["3D Array"])].npview() The resulting :ref:`DataArray ` is available for use immediately following the execution of the filter. diff --git a/wrapping/python/docs/source/ReleaseNotes_121.rst b/wrapping/python/docs/source/ReleaseNotes_121.rst new file mode 100644 index 0000000000..8afb8fae46 --- /dev/null +++ b/wrapping/python/docs/source/ReleaseNotes_121.rst @@ -0,0 +1,64 @@ +Release Notes 1.2.1 +=================== + +The `complex` library is under active development and while we strive to maintain a stable API, bugs are +found that necessitate the changing of the API. + +Version 1.2.1 +------------- + +- Documentation has been updated +- Examples updated to use new API + + +API Additions 1.2.1 +^^^^^^^^^^^^^^^^^^^ + +- DataObject adds the "type" property + +- Retrieve the children of a DataObject in the DataStructure using a complex.DataPath or "/" delimited string + + .. 
code:: python + + #------------------------------------------------------------------------------ + # If you want to list out the children at a specific level of the DataStruture + #------------------------------------------------------------------------------ + # Use an empty path for the top level objects + children_paths = data_structure.get_children(cx.DataPath("")) + print(children_paths) + +- Generate a text or GraphViz representation of the DataStructure. + + .. code:: python + + # This will generate the hierarchy as a GraphViz formatted string that you can + # print or save to a file + graphviz_content = data_structure.hierarchy_to_graphviz() + print(graphviz_content) + + # This will generate the hierarchy as an ASCI Formatted string. + hierarchy_as_str = data_structure.hierarchy_to_str() + print(hierarchy_as_str) + +- Get a numpy view of a DataArray directly from the DataStructure by using the a complex.DataPath or "/" delimited string + + .. code:: python + + data_structure = cx.DataStructure() + output_array_path = cx.DataPath(["3D Array"]) + tuple_dims = [[3, 2, 5]] + array_type = cx.NumericType.float32 + create_array_filter = cx.CreateDataArray() + result = create_array_filter.execute(data_structure=data_structure, + component_count=1, + data_format="", + initialization_value="10", + numeric_type=array_type, + output_data_array=output_array_path, + tuple_dimensions=tuple_dims) + npdata = data_structure[output_array_path].npview() + + + +Filter Changes 1.2.1 +^^^^^^^^^^^^^^^^^^^^ diff --git a/wrapping/python/docs/source/conf.py b/wrapping/python/docs/source/conf.py index c609005f14..ac7eabc7d7 100644 --- a/wrapping/python/docs/source/conf.py +++ b/wrapping/python/docs/source/conf.py @@ -9,7 +9,7 @@ project = 'DREAM3D-NX' copyright = '2023, BlueQuartz Software, LLC' author = 'BlueQuartz Software, LLC' -release = '1.2.0' +release = '1.2.1' # -- General configuration --------------------------------------------------- # 
https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration diff --git a/wrapping/python/examples/basic_arrays.py b/wrapping/python/examples/basic_arrays.py index 5ea19f2f43..e41d7330fc 100644 --- a/wrapping/python/examples/basic_arrays.py +++ b/wrapping/python/examples/basic_arrays.py @@ -75,6 +75,12 @@ else: print("No errors running the filter") +data_array = data_structure[output_array_path] +print(f'name: {data_array.name}') +print(f'tuple_shape: {data_array.tuple_shape}') +print(f'component_shape: {data_array.component_shape}') +print(f'dtype: {data_array.dtype}') + npdata = data_structure[output_array_path].npview() print(npdata) @@ -123,4 +129,31 @@ print('Errors: {}', result.errors) print('Warnings: {}', result.warnings) else: - print("No errors running the filter") \ No newline at end of file + print("No errors running the filter") + + +#------------------------------------------------------------------------------ +# If you need to show the hierarchy of the DataStructure you can do one of the +# following: +#------------------------------------------------------------------------------ + +# This will generate the hierarchy as a GraphViz formatted string that you can +# print or save to a file +graphviz_content = data_structure.hierarchy_to_graphviz() +print(graphviz_content) + +# This will generate the hierarchy as an ASCI Formatted string. 
+hierarchy_as_str = data_structure.hierarchy_to_str() +print(hierarchy_as_str) + +#------------------------------------------------------------------------------ +# If you want to list out the children at a specific level of the DataStruture +#------------------------------------------------------------------------------ +# Use an empty path for the top level objects +children_paths = data_structure.get_children(cx.DataPath("")) +print(children_paths) + +children_paths = data_structure.get_children(cx.DataPath("Some/Path/To")) +print(children_paths) + +print(f'data_structure.size: {data_structure.size}') diff --git a/wrapping/python/examples/basic_ebsd_ipf.py b/wrapping/python/examples/basic_ebsd_ipf.py index 10ba833b5a..82bfedc9ba 100644 --- a/wrapping/python/examples/basic_ebsd_ipf.py +++ b/wrapping/python/examples/basic_ebsd_ipf.py @@ -17,7 +17,7 @@ cell_attribute_matrix_name="Scan Data", cell_ensemble_attribute_matrix_name="Phase Data", data_container_name=cx.DataPath(["Small IN100"]), - input_file="/Users/mjackson/Workspace1/DREAM3D_Data/Data/SmallIN100/Slice_1.ang") + input_file="DREAM3D_Data/Data/SmallIN100/Slice_1.ang") if len(result.errors) != 0: print('Errors: {}', result.errors) print('Warnings: {}', result.warnings) diff --git a/wrapping/python/examples/generated_file_list.py b/wrapping/python/examples/generated_file_list.py index 3927c627d9..27518b700b 100644 --- a/wrapping/python/examples/generated_file_list.py +++ b/wrapping/python/examples/generated_file_list.py @@ -11,7 +11,7 @@ generated_file_list_value = cx.GeneratedFileListParameter.ValueType() -generated_file_list_value.input_path = "/Users/mjackson/DREAM3DNXData/Data/Porosity_Image" +generated_file_list_value.input_path = "Data/Porosity_Image" generated_file_list_value.ordering = cx.GeneratedFileListParameter.Ordering.LowToHigh generated_file_list_value.file_prefix = "slice_" diff --git a/wrapping/python/examples/geometry_examples.py b/wrapping/python/examples/geometry_examples.py index 
c609b8b5be..170476d7dc 100644 --- a/wrapping/python/examples/geometry_examples.py +++ b/wrapping/python/examples/geometry_examples.py @@ -13,199 +13,247 @@ """ # Create the DataStructure object data_structure = cx.DataStructure() -ig_dims = [10, 20, 30] # NOTE: These are in XYZ order -result = cx.CreateGeometryFilter.execute( data_structure=data_structure, - array_handling= 0, # This does not matter for Image Geometry - cell_attribute_matrix_name="Cell Data", - dimensions=ig_dims, # Note that the dimensions are list as X, Y, Z - geometry_name=cx.DataPath(["Image Geometry"]), - geometry_type=0, # 0 = Image Geometry - origin=[0.0, 0.0, 0.0], - spacing=[1.0, 1.0, 1.0]) +ig_dims = [10, 20, 30] # NOTE: These are in XYZ order +result = cx.CreateGeometryFilter.execute(data_structure=data_structure, + array_handling=0, # This does not matter for Image Geometry + cell_attribute_matrix_name="Cell Data", + dimensions=ig_dims, # Note that the dimensions are list as X, Y, Z + geometry_name=cx.DataPath("Image Geometry"), + geometry_type=0, # 0 = Image Geometry + origin=[0.0, 0.0, 0.0], + spacing=[1.0, 1.0, 1.0]) if len(result.errors) != 0: print('Errors: {}', result.errors) print('Warnings: {}', result.warnings) else: print("No errors running the CreateGeometryFilter filter") +data_object = data_structure["Image Geometry"] +print(f'data_object: {type(data_object)}') +print(f'data_object: {data_object.type}') +if data_object.type == cx.DataObject.DataObjectType.ImageGeom: + print("Image Geometry") +else: + print("NOT Image Geometry") # Now we can create some (or import from another source) some cell based data # this is data that lives at the center of each cell # NOTE: we do *not* need to set the tuple dimensions because we are adding this array to the # AttributeMatrix that we generated in the last filter. 
-output_array_path = cx.DataPath(["Image Geometry", "Cell Data", "Float Cell Data"]) +output_array_path = cx.DataPath("Image Geometry/Cell Data/Float Cell Data") array_type = cx.NumericType.float32 create_array_filter = cx.CreateDataArray() -result = create_array_filter.execute(data_structure=data_structure, component_count=1, data_format="", initialization_value="10", - numeric_type=array_type, output_data_array=output_array_path) +result = create_array_filter.execute(data_structure=data_structure, component_count=1, data_format="", + initialization_value="10", + numeric_type=array_type, output_data_array=output_array_path) if len(result.errors) != 0: print('Errors: {}', result.errors) print('Warnings: {}', result.warnings) else: print("No errors running the CreateDataArray filter") - - -#------------------------------------------------------------------------------ +# ------------------------------------------------------------------------------ # Lets try a Rectilinear Grid Geometry # We will need 3 arrays for the X, Y, Z created in the group RectGridCoords -#------------------------------------------------------------------------------ +# ------------------------------------------------------------------------------ result = cx.CreateDataGroup.execute(data_structure=data_structure, - Data_Object_Path=cx.DataPath(['RectGridCoords'])) + data_object_path=cx.DataPath('RectGridCoords')) +if len(result.errors) != 0: + print('Errors: {}', result.errors) + print('Warnings: {}', result.warnings) +else: + print("No errors running the CreateDataGroup filter") -output_array_path = cx.DataPath(["RectGridCoords", "X Coords"]) +output_array_path = cx.DataPath("RectGridCoords/X Coords") array_type = cx.NumericType.float32 tuple_dims = [[10]] create_array_filter = cx.CreateDataArray() -result = create_array_filter.execute(data_structure=data_structure, - component_count=1, - data_format="", - initialization_value="0", - numeric_type=array_type, - 
output_data_array=output_array_path, - tuple_dimensions=tuple_dims) +result = create_array_filter.execute(data_structure=data_structure, + component_count=1, + data_format="", + initialization_value="0", + numeric_type=array_type, + output_data_array=output_array_path, + tuple_dimensions=tuple_dims) +if len(result.errors) != 0: + print('Errors: {}', result.errors) + print('Warnings: {}', result.warnings) +else: + print("No errors running the CreateDataArray filter") x_coords = data_structure[output_array_path].npview() x_coords = np.squeeze(x_coords, axis=1) x_coords[:] = np.arange(0, 10, 1) -output_array_path = cx.DataPath(["RectGridCoords", "Y Coords"]) +output_array_path = cx.DataPath("RectGridCoords/Y Coords") array_type = cx.NumericType.float32 tuple_dims = [[10]] create_array_filter = cx.CreateDataArray() -result = create_array_filter.execute(data_structure=data_structure, - component_count=1, - data_format="", - initialization_value="0", - numeric_type=array_type, - output_data_array=output_array_path, - tuple_dimensions=tuple_dims) +result = create_array_filter.execute(data_structure=data_structure, + component_count=1, + data_format="", + initialization_value="0", + numeric_type=array_type, + output_data_array=output_array_path, + tuple_dimensions=tuple_dims) +if len(result.errors) != 0: + print('Errors: {}', result.errors) + print('Warnings: {}', result.warnings) +else: + print("No errors running the CreateDataArray filter") y_coords = data_structure[output_array_path].npview() y_coords = np.squeeze(y_coords, axis=1) y_coords[:] = np.arange(10, 20, 1) -output_array_path = cx.DataPath(["RectGridCoords", "Z Coords"]) +output_array_path = cx.DataPath("RectGridCoords/Z Coords") array_type = cx.NumericType.float32 tuple_dims = [[10]] create_array_filter = cx.CreateDataArray() -result = create_array_filter.execute(data_structure=data_structure, - component_count=1, - data_format="", - initialization_value="0", - numeric_type=array_type, - 
output_data_array=output_array_path, - tuple_dimensions=tuple_dims) +result = create_array_filter.execute(data_structure=data_structure, + component_count=1, + data_format="", + initialization_value="0", + numeric_type=array_type, + output_data_array=output_array_path, + tuple_dimensions=tuple_dims) +if len(result.errors) != 0: + print('Errors: {}', result.errors) + print('Warnings: {}', result.warnings) +else: + print("No errors running the CreateDataArray filter") z_coords = data_structure[output_array_path].npview() z_coords = np.squeeze(z_coords, axis=1) z_coords[:] = np.arange(20, 30, 1) result = cx.CreateGeometryFilter.execute(data_structure=data_structure, - array_handling= 1, # Move the arrays from their original location. - cell_attribute_matrix_name="Cell Data", - geometry_name=cx.DataPath(["RectGrid Geometry"]), - geometry_type=1, - x_bounds=cx.DataPath(["RectGridCoords", "X Coords"]), - y_bounds=cx.DataPath(["RectGridCoords", "Y Coords"]), - z_bounds=cx.DataPath(["RectGridCoords", "Z Coords"]) - ) + array_handling=1, # Move the arrays from their original location. 
+ cell_attribute_matrix_name="Cell Data", + geometry_name=cx.DataPath("RectGrid Geometry"), + geometry_type=1, + x_bounds=cx.DataPath("RectGridCoords/X Coords"), + y_bounds=cx.DataPath("RectGridCoords/Y Coords"), + z_bounds=cx.DataPath("RectGridCoords/Z Coords") + ) if len(result.errors) != 0: print('Errors: {}', result.errors) print('Warnings: {}', result.warnings) else: print("No errors running the CreateGeometryFilter filter") +rect_grid_geom = data_structure["RectGrid Geometry"] +x_cell_count = rect_grid_geom.num_x_cells +print(f'num_x_cells: {x_cell_count}') +x_bounds = rect_grid_geom.x_bounds +print(f'x_bounds: {x_bounds.store.npview()}') +print(f'id: {rect_grid_geom.id}') +print(f'name: {rect_grid_geom.name}') - -#------------------------------------------------------------------------------ +# ------------------------------------------------------------------------------ # Lets try a Triangle Geometry # For this we need the vertex data and the Triangle connectivity data -#------------------------------------------------------------------------------ -array_path = cx.DataPath(['Vertices']) +# ------------------------------------------------------------------------------ +array_path = cx.DataPath('Vertices') result = cx.CreateDataArray.execute(data_structure, numeric_type=cx.NumericType.float32, component_count=3, tuple_dimensions=[[144]], output_data_array=array_path, initialization_value='0') +if len(result.errors) != 0: + print('Errors: {}', result.errors) + print('Warnings: {}', result.warnings) +else: + print("No errors running the CreateDataArray filter") # Read the CSV file into the DataArray using the numpy view vertex_coords = data_structure[array_path].npview() file_path = 'complex/test/Data/VertexCoordinates.csv' vertex_coords[:] = np.loadtxt(file_path, delimiter=',', skiprows=1) - -array_path = cx.DataPath(['Triangles']) +array_path = cx.DataPath('Triangles') result = cx.CreateDataArray.execute(data_structure, numeric_type=cx.NumericType.uint64, 
component_count=3, tuple_dimensions=[[242]], output_data_array=array_path, initialization_value='0') - +if len(result.errors) != 0: + print('Errors: {}', result.errors) + print('Warnings: {}', result.warnings) +else: + print("No errors running the CreateDataArray filter") # Read the CSV file into the DataArray using the numpy view triangles = data_structure[array_path].npview() file_path = 'complex/test/Data/TriangleConnectivity.csv' triangles[:] = np.loadtxt(file_path, delimiter=',', skiprows=1) result = cx.CreateGeometryFilter.execute(data_structure=data_structure, - array_handling= 1, # Move the arrays from their original location. - geometry_name=cx.DataPath(["Triangle Geometry"]), - geometry_type=4, - face_attribute_matrix_name="Triangle Data", - edge_attribute_matrix_name="Triangle Edge Data", - vertex_attribute_matrix_name="Vertex Data", - vertex_list_name=cx.DataPath(['Vertices']), - triangle_list_name=cx.DataPath(['Triangles']) - ) + array_handling=1, # Move the arrays from their original location. 
+ geometry_name=cx.DataPath("Triangle Geometry"), + geometry_type=4, + face_attribute_matrix_name="Triangle Data", + edge_attribute_matrix_name="Triangle Edge Data", + vertex_attribute_matrix_name="Vertex Data", + vertex_list_name=cx.DataPath('Vertices'), + triangle_list_name=cx.DataPath('Triangles') + ) if len(result.errors) != 0: print('Errors: {}', result.errors) print('Warnings: {}', result.warnings) else: print("No errors running the CreateGeometryFilter (Triangle) filter") - -#------------------------------------------------------------------------------ +# ------------------------------------------------------------------------------ # Lets try a Edge Geometry # For this we need the vertex data and the Edge connectivity data -#------------------------------------------------------------------------------ -array_path = cx.DataPath(['Vertices']) +# ------------------------------------------------------------------------------ +array_path = cx.DataPath('Vertices') result = cx.CreateDataArray.execute(data_structure, numeric_type=cx.NumericType.float32, component_count=3, tuple_dimensions=[[144]], output_data_array=array_path, initialization_value='0') +if len(result.errors) != 0: + print('Errors: {}', result.errors) + print('Warnings: {}', result.warnings) +else: + print("No errors running the CreateDataArray filter") # Read the CSV file into the DataArray using the numpy view vertex_coords = data_structure[array_path].npview() file_path = 'complex/test/Data/VertexCoordinates.csv' vertex_coords[:] = np.loadtxt(file_path, delimiter=',', skiprows=1) - -array_path = cx.DataPath(['Edges']) +array_path = cx.DataPath('Edges') result = cx.CreateDataArray.execute(data_structure, numeric_type=cx.NumericType.uint64, component_count=2, tuple_dimensions=[[264]], output_data_array=array_path, initialization_value='0') +if len(result.errors) != 0: + print('Errors: {}', result.errors) + print('Warnings: {}', result.warnings) +else: + print("No errors running the 
CreateDataArray filter") # Read the CSV file into the DataArray using the numpy view -edges = data_structure[array_path].npview() file_path = 'complex/test/Data/EdgeConnectivity.csv' -edges[:] = np.loadtxt(file_path, delimiter=',', skiprows=1) +edges_view = data_structure["Edges"].npview() +edges_view[:] = np.loadtxt(file_path, delimiter=',', skiprows=1) result = cx.CreateGeometryFilter.execute(data_structure=data_structure, - array_handling= 1, # Move the arrays from their original location. - geometry_name=cx.DataPath(["Edge Geometry"]), - geometry_type=3, - edge_attribute_matrix_name="Edge Data", - vertex_attribute_matrix_name="Vertex Data", - vertex_list_name=cx.DataPath(['Vertices']), - edge_list_name=cx.DataPath(['Edges']) - ) + array_handling=1, # Move the arrays from their original location. + geometry_name=cx.DataPath("Edge Geometry"), + geometry_type=3, + edge_attribute_matrix_name="Edge Data", + vertex_attribute_matrix_name="Vertex Data", + vertex_list_name=cx.DataPath('Vertices'), + edge_list_name=cx.DataPath('Edges') + ) if len(result.errors) != 0: print('Errors: {}', result.errors) print('Warnings: {}', result.warnings) diff --git a/wrapping/python/examples/import_d3d.py b/wrapping/python/examples/import_d3d.py index e5f5fd43ea..cd15980581 100644 --- a/wrapping/python/examples/import_d3d.py +++ b/wrapping/python/examples/import_d3d.py @@ -15,18 +15,18 @@ import_data.file_path = "/private/tmp/basic_ebsd.dream3d" import_data.data_paths = None -result = cx.ImportDREAM3DFilter.execute(data_structure=data_structure, import_file_data=import_data) +result = cx.ReadDREAM3DFilter.execute(data_structure=data_structure, import_file_data=import_data) if len(result.errors) != 0: print('Errors: {}', result.errors) print('Warnings: {}', result.warnings) else: - print("No errors running the ImportDREAM3DFilter filter") - + print("No errors running the ReadDREAM3DFilter filter") #------------------------------------------------------------------------------ # Get the 
underlying data from the DataStructure #------------------------------------------------------------------------------ -npview = data_structure[["Small IN100", "Scan Data", "Image Quality"]].npview() +npview_data_path = cx.DataPath("Small IN100/Scan Data/Image Quality") +npview = data_structure[npview_data_path].npview() # Change the underlying data based on some criteria using Numpy npview[npview < 120] = 0 diff --git a/wrapping/python/examples/import_hdf5.py b/wrapping/python/examples/import_hdf5.py index 558c80019a..40e78c7530 100644 --- a/wrapping/python/examples/import_hdf5.py +++ b/wrapping/python/examples/import_hdf5.py @@ -25,27 +25,27 @@ read_h5_ebsd_parameter=h5ebsdParameter ) -dataset1 = cx.ImportHDF5DatasetParameter.DatasetImportInfo() +dataset1 = cx.ReadHDF5DatasetParameter.DatasetImportInfo() dataset1.dataset_path = "/DataStructure/DataContainer/CellData/Confidence Index" dataset1.tuple_dims = "117,201,189" dataset1.component_dims = "1" -dataset2 = cx.ImportHDF5DatasetParameter.DatasetImportInfo() +dataset2 = cx.ReadHDF5DatasetParameter.DatasetImportInfo() dataset2.dataset_path = "/DataStructure/DataContainer/CellData/EulerAngles" dataset2.tuple_dims = "117,201,189" dataset2.component_dims = "3" -import_hdf5_param = cx.ImportHDF5DatasetParameter.ValueType() -import_hdf5_param.input_file = "/Users/mjackson/DREAM3DNXData/Data/Output/Reconstruction/SmallIN100_Final.dream3d" +import_hdf5_param = cx.ReadHDF5DatasetParameter.ValueType() +import_hdf5_param.input_file = "Data/Output/Reconstruction/SmallIN100_Final.dream3d" import_hdf5_param.datasets = [dataset1, dataset2] # import_hdf5_param.parent = cx.DataPath(["Imported Data"]) -result = cx.ImportHDF5Dataset.execute(data_structure=data_structure, +result = cx.ReadHDF5Dataset.execute(data_structure=data_structure, import_hd_f5_file=import_hdf5_param ) if len(result.errors) != 0: print('Errors: {}', result.errors) print('Warnings: {}', result.warnings) else: - print("No errors running the 
ImportHDF5DatasetParameter filter") + print("No errors running the ReadHDF5DatasetParameter filter") diff --git a/wrapping/python/examples/pipeline.py b/wrapping/python/examples/pipeline.py index db66bb66d8..072deced5f 100644 --- a/wrapping/python/examples/pipeline.py +++ b/wrapping/python/examples/pipeline.py @@ -8,7 +8,7 @@ data_structure = cx.DataStructure() -pipeline = cx.Pipeline().from_file("/Users/mjackson/Workspace1/complex/src/Plugins/OrientationAnalysis/pipelines/EBSD Reconstruction/(01) Small IN100 Archive.d3dpipeline") +pipeline = cx.Pipeline().from_file("complex/src/Plugins/OrientationAnalysis/pipelines/EBSD Reconstruction/(01) Small IN100 Archive.d3dpipeline") pipeline.to_file( "test pipeline", "/tmp/python_pipeline.d3dpipeline") diff --git a/wrapping/python/examples/read_esprit_data.py b/wrapping/python/examples/read_esprit_data.py new file mode 100644 index 0000000000..17c19dbf0a --- /dev/null +++ b/wrapping/python/examples/read_esprit_data.py @@ -0,0 +1,55 @@ +# Import the DREAM3D Base library and Plugins +import complex as cx + +import itkimageprocessing as cxitk +import orientationanalysis as cxor +import complex_test_dirs as cxtest + +import numpy as np + + + +# ------------------------------------------------------------------------------ +# Create a DataArray that is as long as my CSV file (99 Rows in this case) +# ------------------------------------------------------------------------------ +# Create a Data Structure +data_structure = cx.DataStructure() + +param1 = cxor.OEMEbsdScanSelectionParameter.ValueType() +param1.input_file_path = "LEROY_0089_Section_382.h5" +param1.stacking_order = 0 +param1.scan_names = ["LEROY_0089_Section_382"] + +result = cxor.ReadH5EspritDataFilter.execute(data_structure = data_structure, + cell_attribute_matrix_name = "Cell Data", + cell_ensemble_attribute_matrix_name = "Cell Ensemble Data", + degrees_to_radians = True, + image_geometry_name = cx.DataPath("ImageGeom"), + origin = [0.0, 0.0, 0.0], + 
read_pattern_data = False, + selected_scan_names = param1, + z_spacing = 1.0) + +if len(result.errors) != 0: + print('Errors: {}', result.errors) + print('Warnings: {}', result.warnings) +else: + print("No errors running the ReadH5EspritDataFilter") + + + + +#------------------------------------------------------------------------------ +# Write the DataStructure to a .dream3d file +#------------------------------------------------------------------------------ +output_file_path = cxtest.GetTestTempDirectory() + "/import_esprit.dream3d" +result = cx.WriteDREAM3DFilter.execute(data_structure=data_structure, + export_file_path=output_file_path, + write_xdmf_file=True) +if len(result.errors) != 0: + print('Errors: {}', result.errors) + print('Warnings: {}', result.warnings) +else: + print("No errors running the WriteDREAM3DFilter") + +print(f'{output_file_path}') diff --git a/wrapping/python/examples/angle_conversion.ipynb b/wrapping/python/notebooks/angle_conversion.ipynb similarity index 100% rename from wrapping/python/examples/angle_conversion.ipynb rename to wrapping/python/notebooks/angle_conversion.ipynb diff --git a/wrapping/python/examples/basic_arrays.ipynb b/wrapping/python/notebooks/basic_arrays.ipynb similarity index 100% rename from wrapping/python/examples/basic_arrays.ipynb rename to wrapping/python/notebooks/basic_arrays.ipynb diff --git a/wrapping/python/examples/basic_ebsd_ipf.ipynb b/wrapping/python/notebooks/basic_ebsd_ipf.ipynb similarity index 98% rename from wrapping/python/examples/basic_ebsd_ipf.ipynb rename to wrapping/python/notebooks/basic_ebsd_ipf.ipynb index 533687a8cd..66c8d6dbc3 100644 --- a/wrapping/python/examples/basic_ebsd_ipf.ipynb +++ b/wrapping/python/notebooks/basic_ebsd_ipf.ipynb @@ -50,7 +50,7 @@ " cell_attribute_matrix_name='Scan Data',\n", " cell_ensemble_attribute_matrix_name='Phase Data',\n", " data_container_name=cx.DataPath(['Small IN100']),\n", - " 
input_file='/Users/mjackson/Workspace1/DREAM3D_Data/Data/SmallIN100/Slice_1.ang',\n", + " input_file='SmallIN100/Slice_1.ang',\n", ")\n", "print(f'Warnings: {result.warnings}')\n", "if not result:\n", diff --git a/wrapping/python/examples/basic_numpy.ipynb b/wrapping/python/notebooks/basic_numpy.ipynb similarity index 100% rename from wrapping/python/examples/basic_numpy.ipynb rename to wrapping/python/notebooks/basic_numpy.ipynb diff --git a/wrapping/python/examples/create_image_geom.ipynb b/wrapping/python/notebooks/create_image_geom.ipynb similarity index 100% rename from wrapping/python/examples/create_image_geom.ipynb rename to wrapping/python/notebooks/create_image_geom.ipynb diff --git a/wrapping/python/examples/import_d3d.ipynb b/wrapping/python/notebooks/import_d3d.ipynb similarity index 100% rename from wrapping/python/examples/import_d3d.ipynb rename to wrapping/python/notebooks/import_d3d.ipynb diff --git a/wrapping/python/examples/output_file.ipynb b/wrapping/python/notebooks/output_file.ipynb similarity index 100% rename from wrapping/python/examples/output_file.ipynb rename to wrapping/python/notebooks/output_file.ipynb diff --git a/wrapping/python/examples/pipeline.ipynb b/wrapping/python/notebooks/pipeline.ipynb similarity index 100% rename from wrapping/python/examples/pipeline.ipynb rename to wrapping/python/notebooks/pipeline.ipynb