diff --git a/CMakeLists.txt b/CMakeLists.txt index 3538c206be..dbccf30416 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -259,7 +259,7 @@ set(CoreParameters MultiPathSelectionParameter NumberParameter NumericTypeParameter - ImportCSVDataParameter + ReadCSVFileParameter ImportHDF5DatasetParameter StringParameter VectorParameter @@ -431,7 +431,7 @@ set(COMPLEX_HDRS ${COMPLEX_SOURCE_DIR}/Filter/Actions/CreateGridMontageAction.hpp ${COMPLEX_SOURCE_DIR}/Parameters/util/DynamicTableInfo.hpp - ${COMPLEX_SOURCE_DIR}/Parameters/util/CSVWizardData.hpp + ${COMPLEX_SOURCE_DIR}/Parameters/util/ReadCSVData.hpp ${COMPLEX_SOURCE_DIR}/Pipeline/AbstractPipelineNode.hpp ${COMPLEX_SOURCE_DIR}/Pipeline/Pipeline.hpp @@ -459,6 +459,7 @@ set(COMPLEX_HDRS ${COMPLEX_SOURCE_DIR}/Utilities/DataObjectUtilities.hpp ${COMPLEX_SOURCE_DIR}/Utilities/FilePathGenerator.hpp ${COMPLEX_SOURCE_DIR}/Utilities/ColorPresetsUtilities.hpp + ${COMPLEX_SOURCE_DIR}/Utilities/FileUtilities.hpp ${COMPLEX_SOURCE_DIR}/Utilities/FilterUtilities.hpp ${COMPLEX_SOURCE_DIR}/Utilities/GeometryUtilities.hpp ${COMPLEX_SOURCE_DIR}/Utilities/GeometryHelpers.hpp @@ -629,7 +630,7 @@ set(COMPLEX_SRCS ${COMPLEX_SOURCE_DIR}/Filter/Actions/CreateAttributeMatrixAction.cpp ${COMPLEX_SOURCE_DIR}/Filter/Actions/CreateGridMontageAction.cpp - ${COMPLEX_SOURCE_DIR}/Parameters/util/CSVWizardData.cpp + ${COMPLEX_SOURCE_DIR}/Parameters/util/ReadCSVData.cpp ${COMPLEX_SOURCE_DIR}/Parameters/util/DynamicTableInfo.cpp ${COMPLEX_SOURCE_DIR}/Pipeline/AbstractPipelineNode.cpp diff --git a/src/Plugins/ComplexCore/CMakeLists.txt b/src/Plugins/ComplexCore/CMakeLists.txt index f3fa19f425..cec636bfdf 100644 --- a/src/Plugins/ComplexCore/CMakeLists.txt +++ b/src/Plugins/ComplexCore/CMakeLists.txt @@ -77,11 +77,11 @@ set(FilterList ImageContouringFilter IdentifySample ImportBinaryCTNorthstarFilter - ImportCSVDataFilter + ReadCSVFileFilter ImportDeformKeyFileV12Filter ImportDREAM3DFilter ImportHDF5Dataset - ImportTextFilter + ReadTextDataArrayFilter 
ImportVolumeGraphicsFileFilter InitializeData InterpolatePointCloudToRegularGridFilter diff --git a/src/Plugins/ComplexCore/docs/Images/Import_CSV_1.png b/src/Plugins/ComplexCore/docs/Images/Import_CSV_1.png deleted file mode 100644 index 26f103e3d6..0000000000 Binary files a/src/Plugins/ComplexCore/docs/Images/Import_CSV_1.png and /dev/null differ diff --git a/src/Plugins/ComplexCore/docs/Images/Import_CSV_2.png b/src/Plugins/ComplexCore/docs/Images/Import_CSV_2.png deleted file mode 100644 index 6625d60211..0000000000 Binary files a/src/Plugins/ComplexCore/docs/Images/Import_CSV_2.png and /dev/null differ diff --git a/src/Plugins/ComplexCore/docs/Images/Import_CSV_3.png b/src/Plugins/ComplexCore/docs/Images/Import_CSV_3.png deleted file mode 100644 index 07c488b7d0..0000000000 Binary files a/src/Plugins/ComplexCore/docs/Images/Import_CSV_3.png and /dev/null differ diff --git a/src/Plugins/ComplexCore/docs/Images/Import_CSV_4.png b/src/Plugins/ComplexCore/docs/Images/Import_CSV_4.png deleted file mode 100644 index bf65a317c0..0000000000 Binary files a/src/Plugins/ComplexCore/docs/Images/Import_CSV_4.png and /dev/null differ diff --git a/src/Plugins/ComplexCore/docs/Images/Read_CSV.png b/src/Plugins/ComplexCore/docs/Images/Read_CSV.png new file mode 100644 index 0000000000..1feaf77c84 Binary files /dev/null and b/src/Plugins/ComplexCore/docs/Images/Read_CSV.png differ diff --git a/src/Plugins/ComplexCore/docs/Images/Read_CSV_1.png b/src/Plugins/ComplexCore/docs/Images/Read_CSV_1.png new file mode 100644 index 0000000000..4484238e05 Binary files /dev/null and b/src/Plugins/ComplexCore/docs/Images/Read_CSV_1.png differ diff --git a/src/Plugins/ComplexCore/docs/Images/Read_CSV_10.png b/src/Plugins/ComplexCore/docs/Images/Read_CSV_10.png new file mode 100644 index 0000000000..299fa1f33c Binary files /dev/null and b/src/Plugins/ComplexCore/docs/Images/Read_CSV_10.png differ diff --git a/src/Plugins/ComplexCore/docs/Images/Read_CSV_2.png 
b/src/Plugins/ComplexCore/docs/Images/Read_CSV_2.png new file mode 100644 index 0000000000..df88c72816 Binary files /dev/null and b/src/Plugins/ComplexCore/docs/Images/Read_CSV_2.png differ diff --git a/src/Plugins/ComplexCore/docs/Images/Read_CSV_3.png b/src/Plugins/ComplexCore/docs/Images/Read_CSV_3.png new file mode 100644 index 0000000000..3f40f5a445 Binary files /dev/null and b/src/Plugins/ComplexCore/docs/Images/Read_CSV_3.png differ diff --git a/src/Plugins/ComplexCore/docs/Images/Read_CSV_4.png b/src/Plugins/ComplexCore/docs/Images/Read_CSV_4.png new file mode 100644 index 0000000000..9c78bd642b Binary files /dev/null and b/src/Plugins/ComplexCore/docs/Images/Read_CSV_4.png differ diff --git a/src/Plugins/ComplexCore/docs/Images/Read_CSV_5.png b/src/Plugins/ComplexCore/docs/Images/Read_CSV_5.png new file mode 100644 index 0000000000..d6bb0ac643 Binary files /dev/null and b/src/Plugins/ComplexCore/docs/Images/Read_CSV_5.png differ diff --git a/src/Plugins/ComplexCore/docs/Images/Read_CSV_6.png b/src/Plugins/ComplexCore/docs/Images/Read_CSV_6.png new file mode 100644 index 0000000000..8f251dee6b Binary files /dev/null and b/src/Plugins/ComplexCore/docs/Images/Read_CSV_6.png differ diff --git a/src/Plugins/ComplexCore/docs/Images/Read_CSV_7.png b/src/Plugins/ComplexCore/docs/Images/Read_CSV_7.png new file mode 100644 index 0000000000..2de01214fe Binary files /dev/null and b/src/Plugins/ComplexCore/docs/Images/Read_CSV_7.png differ diff --git a/src/Plugins/ComplexCore/docs/Images/Read_CSV_8.png b/src/Plugins/ComplexCore/docs/Images/Read_CSV_8.png new file mode 100644 index 0000000000..f66be2126c Binary files /dev/null and b/src/Plugins/ComplexCore/docs/Images/Read_CSV_8.png differ diff --git a/src/Plugins/ComplexCore/docs/Images/Read_CSV_9.png b/src/Plugins/ComplexCore/docs/Images/Read_CSV_9.png new file mode 100644 index 0000000000..b7e1c812b4 Binary files /dev/null and b/src/Plugins/ComplexCore/docs/Images/Read_CSV_9.png differ diff --git 
a/src/Plugins/ComplexCore/docs/ImportCSVDataFilter.md b/src/Plugins/ComplexCore/docs/ImportCSVDataFilter.md deleted file mode 100644 index a6ccb9e57c..0000000000 --- a/src/Plugins/ComplexCore/docs/ImportCSVDataFilter.md +++ /dev/null @@ -1,76 +0,0 @@ -# Import CSV Data - -## Group (Subgroup) ## - -IO (Input) - -## Description ## - -This **Filter** reads CSV data from any text-based file and imports the data into DREAM3D-NX-style arrays. The user uses the **Filter's** wizard to specify which file to import, how the data is formatted, what to call each array, and what type each array should be. - -*Note:* This **Filter** is intended to read data that is column-oriented, such that each created DREAM3D-NX array corresponds to a column of data in the CSV file. Therefore, this **Filter** will only import scalar arrays. If multiple columns are in fact different components of the same array, then the columns may be imported as separate arrays and then combined in the correct order using the Combine Attribute Arrays **Filter**. - -### Separating the Data ### - -After clicking the **Import Data...** button and selecting a file, a wizard appears. The user can choose how the data is delimited: comma (,), tab, semicolon (;) or space ( ). The user may also elect to treat consecutive delimiters as one delimiter. 
- -![Selecting how the data is separated](Images/Import_CSV_1.png) - -### Selecting the Delimiter ### - -![Selecting the delimiting character(s)](Images/Import_CSV_2.png) - -### Selecting Import Rows, Data Representation and Column Headers ### - -On the last page, the user can set the following information: - -+ Header names, either from a line number in the file or manually through a dialog box or select the generated header values -+ Row index to start the import -+ Column data format (choosing the data type or deciding to skip the column) - - -![Setting Numerical Representations, Column Headers and other information](Images/Import_CSV_3.png) - - -If the data does not have any headers, the user can set a string value for each column. These values will be used as the name of the **Data Array** in DREAM3D-NX. - -![Setting Names of each Column which will be used as the name of each **Attribute Array** ](Images/Import_CSV_4.png) - -## Parameters ## - -| Name | Type | Description | -|------|------|-------------| -| Wizard Data Object | CSVWizardData | The object that holds all relevant data collected from the wizard | -| Tuple Dimensions | DynamicTableData | The tuple dimensions for the arrays being imported from the file | -| Use Existing Group | bool | Determines whether or not to store the imported data arrays in an existing group | -| Existing Data Group (Use Existing Group - ON) | DataPath | The data path to the existing group where the imported arrays will be stored | -| New Data Group (Use Existing Group - OFF) | DataPath | The data path to the newly created group where the imported arrays will be stored | - -## Required Geometry ## - -Not Applicable - -## Required Objects ## - -| Kind | Default Name | Description | -|------|--------------|------|----------------------|-------------| -| **Data Group** | None | The existing data group to store the imported data arrays (only if Existing Data Group is turned ON) | - -## Created Objects ## - -| Kind | Default Name 
| Type | Component Dimensions | Description | -|------|--------------|------|----------------------|-------------| -| One or more **Element/Feature/Ensemble/etc. Data Arrays** | None | Any | 1 | One or more arrays that are created due to importing CSV data via the wizard | - -## Example Pipelines ## - - -## License & Copyright ## - -Please see the description file distributed with this plugin. - -## DREAM3DNX Help - -Check out our GitHub community page at [DREAM3DNX-Issues](https://github.com/BlueQuartzSoftware/DREAM3DNX-Issues) to report bugs, ask the community for help, discuss features, or get help from the developers. - - diff --git a/src/Plugins/ComplexCore/docs/ReadCSVFileFilter.md b/src/Plugins/ComplexCore/docs/ReadCSVFileFilter.md new file mode 100644 index 0000000000..7b10ea7611 --- /dev/null +++ b/src/Plugins/ComplexCore/docs/ReadCSVFileFilter.md @@ -0,0 +1,93 @@ +# Read CSV File + +## Group (Subgroup) ## + +IO (Input) + +## Description ## + +This **Filter** reads text data from any text-based file and imports the data into DREAM3D-NX-style arrays. The user specifies which file to import, how the data is formatted, what to call each array, and what type each array should be. + +*Note:* This **Filter** is intended to read data that is column-oriented, such that each created DREAM3D-NX array corresponds to a column of data in the text file. Therefore, this **Filter** will only import scalar arrays. If multiple columns are in fact different components of the same array, then the columns may be imported as separate arrays and then combined in the correct order using the Combine Attribute Arrays **Filter**. + +### Filling Out The Inputs ### + +The user first selects the **Input Text File** path, which then enables the rest of the interface. + +![Input Text File Field](Images/Read_CSV_1.png) + +If the chosen **Input Text File** already has headers inside the file, the user can select the **Input File Has Headers** checkbox. 
This +enables the **Headers Line Number** spin box where the user can select which line of the file contains the headers. + +*NOTE*: The interface only allows importing data starting at the line after the chosen **Headers Line Number**. So, in the example below, the **Headers Line Number** is set to 1, so **Start Import Line Number** defaults to 2 and has a range of 2-297 (this particular input file has 297 total lines). The max range of **Headers Line Number** is, of course, set to 296 so that at least 1 line can be imported. + +![Input Text File Field](Images/Read_CSV_2.png) + +The user can choose how the data is delimited: comma (,), tab, semicolon (;) or space ( ). The user may also elect to ignore consecutive delimiters, which treats consecutive delimiters as one delimiter. + +![Input Text File Field](Images/Read_CSV_3.png) + +The user can select the number of preview lines available by changing the **Number of Preview Lines** spin box. The range in the example is set to 1-296 because the import is currently starting at row 2 (from **Start Import Line Number** spin box). + +![Input Text File Field](Images/Read_CSV_4.png) + +The user can then set the data format for each column. Selecting one or more columns will enable the **Column Data Type** combo box, where you can choose a data type or decide to skip importing specific columns as well. + +![Input Text File Field](Images/Read_CSV_5.png) +![Input Text File Field](Images/Read_CSV_6.png) + +If the **Input File Has Headers** checkbox is OFF, then it is also possible to double-click the headers in the Preview Table to edit them. These values will be used as the name of the **Data Array** in DREAM3D-NX. + +*NOTE:* Editing table headers is only available when the **Input File Has Headers** checkbox is OFF. If the **Input File Has Headers** checkbox is ON, then the headers will be read from the **Headers Line Number** in the data file, and the table headers will not be editable. 
+ +![Input Text File Field](Images/Read_CSV_7.png) + +The user can select the tuple dimensions that will be applied to the imported arrays. + +![Input Text File Field](Images/Read_CSV_8.png) + +The imported arrays can be stored in either an existing attribute matrix or a new attribute matrix can be created. + +![Input Text File Field](Images/Read_CSV_9.png) + +Afterwards, you end up with a data structure that looks like this: + +![Input Text File Field](Images/Read_CSV_10.png) + +## Parameters ## + +| Name | Type | Description | +|----------------------------------------------------------------|------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| Importer Data Object | ReadCSVData | The object that holds all data relevant to importing the data, such as input file path, custom headers, start import line number, data types for all the imported arrays, headers line number, header mode, imported array tuple dimensions, delimiters, etc. 
| +| Use Existing Attribute Matrix | bool | Determines whether or not to store the imported data arrays in an existing attribute matrix | +| Existing Attribute Matrix (Use Existing Attribute Matrix - ON) | DataPath | The data path to the existing attribute matrix where the imported arrays will be stored | +| New Attribute Matrix (Use Existing Attribute Matrix - OFF) | DataPath | The data path to the newly created attribute matrix where the imported arrays will be stored | + +## Required Geometry ## + +Not Applicable + +## Required Objects ## + +| Kind | Default Name | Description | +|------|--------------|------| +| **Attribute Matrix** | None | The existing attribute matrix to store the imported data arrays (only if Existing Attribute Matrix is turned ON) | + +## Created Objects ## + +| Kind | Default Name | Type | Component Dimensions | Description | +|------|--------------|------|----------------------|----------------------------------------------------------------| +| One or more **Element/Feature/Ensemble/etc. Data Arrays** | None | Any | 1 | One or more arrays that are created due to importing text data | + +## Example Pipelines ## + + +## License & Copyright ## + +Please see the description file distributed with this plugin. + +## DREAM3DNX Help + +Check out our GitHub community page at [DREAM3DNX-Issues](https://github.com/BlueQuartzSoftware/DREAM3DNX-Issues) to report bugs, ask the community for help, discuss features, or get help from the developers. 
+ + diff --git a/src/Plugins/ComplexCore/docs/ImportTextFilter.md b/src/Plugins/ComplexCore/docs/ReadTextDataArrayFilter.md similarity index 99% rename from src/Plugins/ComplexCore/docs/ImportTextFilter.md rename to src/Plugins/ComplexCore/docs/ReadTextDataArrayFilter.md index a6dafcbdef..9a5b9c6f63 100644 --- a/src/Plugins/ComplexCore/docs/ImportTextFilter.md +++ b/src/Plugins/ComplexCore/docs/ReadTextDataArrayFilter.md @@ -1,4 +1,4 @@ -# Import ASCII Attribute Array +# Read Text Data Array ## Group (Subgroup) ## diff --git a/src/Plugins/ComplexCore/pipelines/AppendImageGeometryZSlice.d3dpipeline b/src/Plugins/ComplexCore/pipelines/AppendImageGeometryZSlice.d3dpipeline index 9c9f84c8a7..ffe592743f 100644 --- a/src/Plugins/ComplexCore/pipelines/AppendImageGeometryZSlice.d3dpipeline +++ b/src/Plugins/ComplexCore/pipelines/AppendImageGeometryZSlice.d3dpipeline @@ -46,7 +46,7 @@ }, "comments": "", "filter": { - "name": "complex::ImportTextFilter", + "name": "complex::ReadTextDataArrayFilter", "uuid": "25f7df3e-ca3e-4634-adda-732c0e56efd4" }, "isDisabled": false @@ -67,7 +67,7 @@ }, "comments": "", "filter": { - "name": "complex::ImportTextFilter", + "name": "complex::ReadTextDataArrayFilter", "uuid": "25f7df3e-ca3e-4634-adda-732c0e56efd4" }, "isDisabled": false @@ -88,7 +88,7 @@ }, "comments": "", "filter": { - "name": "complex::ImportTextFilter", + "name": "complex::ReadTextDataArrayFilter", "uuid": "25f7df3e-ca3e-4634-adda-732c0e56efd4" }, "isDisabled": false @@ -109,7 +109,7 @@ }, "comments": "", "filter": { - "name": "complex::ImportTextFilter", + "name": "complex::ReadTextDataArrayFilter", "uuid": "25f7df3e-ca3e-4634-adda-732c0e56efd4" }, "isDisabled": false diff --git a/src/Plugins/ComplexCore/pipelines/Import_ASCII.d3dpipeline b/src/Plugins/ComplexCore/pipelines/Import_ASCII.d3dpipeline index 357ad1baae..c3e30aa95c 100644 --- a/src/Plugins/ComplexCore/pipelines/Import_ASCII.d3dpipeline +++ b/src/Plugins/ComplexCore/pipelines/Import_ASCII.d3dpipeline @@ -84,7 
+84,7 @@ }, "comments": "", "filter": { - "name": "complex::ImportCSVDataFilter", + "name": "complex::ReadCSVFileFilter", "uuid": "373be1f8-31cf-49f6-aa5d-e356f4f3f261" }, "isDisabled": false diff --git a/src/Plugins/ComplexCore/pipelines/Import_ASCII_Data.d3dworkflow b/src/Plugins/ComplexCore/pipelines/Import_ASCII_Data.d3dworkflow index 02a60b4a5f..5fac1fefc2 100644 --- a/src/Plugins/ComplexCore/pipelines/Import_ASCII_Data.d3dworkflow +++ b/src/Plugins/ComplexCore/pipelines/Import_ASCII_Data.d3dworkflow @@ -48,7 +48,7 @@ }, "comments": "", "filter": { - "name": "complex::ImportTextFilter", + "name": "complex::ReadTextDataArrayFilter", "uuid": "25f7df3e-ca3e-4634-adda-732c0e56efd4" }, "isDisabled": false diff --git a/src/Plugins/ComplexCore/pipelines/Import_CSV_Data.d3dpipeline b/src/Plugins/ComplexCore/pipelines/Import_CSV_Data.d3dpipeline index e2a4dcdeb0..48d274aacd 100644 --- a/src/Plugins/ComplexCore/pipelines/Import_CSV_Data.d3dpipeline +++ b/src/Plugins/ComplexCore/pipelines/Import_CSV_Data.d3dpipeline @@ -48,7 +48,7 @@ }, "comments": "", "filter": { - "name": "complex::ImportTextFilter", + "name": "complex::ReadTextDataArrayFilter", "uuid": "25f7df3e-ca3e-4634-adda-732c0e56efd4" }, "isDisabled": false @@ -71,7 +71,7 @@ }, "comments": "", "filter": { - "name": "complex::ImportTextFilter", + "name": "complex::ReadTextDataArrayFilter", "uuid": "25f7df3e-ca3e-4634-adda-732c0e56efd4" }, "isDisabled": false @@ -94,7 +94,7 @@ }, "comments": "", "filter": { - "name": "complex::ImportTextFilter", + "name": "complex::ReadTextDataArrayFilter", "uuid": "25f7df3e-ca3e-4634-adda-732c0e56efd4" }, "isDisabled": false @@ -117,7 +117,7 @@ }, "comments": "", "filter": { - "name": "complex::ImportTextFilter", + "name": "complex::ReadTextDataArrayFilter", "uuid": "25f7df3e-ca3e-4634-adda-732c0e56efd4" }, "isDisabled": false diff --git a/src/Plugins/ComplexCore/pipelines/ResampleRectGridToImageGeom.d3dpipeline 
b/src/Plugins/ComplexCore/pipelines/ResampleRectGridToImageGeom.d3dpipeline index b4f329e3c9..0566560cba 100644 --- a/src/Plugins/ComplexCore/pipelines/ResampleRectGridToImageGeom.d3dpipeline +++ b/src/Plugins/ComplexCore/pipelines/ResampleRectGridToImageGeom.d3dpipeline @@ -41,7 +41,7 @@ }, "comments": "", "filter": { - "name": "complex::ImportCSVDataFilter", + "name": "complex::ReadCSVFileFilter", "uuid": "373be1f8-31cf-49f6-aa5d-e356f4f3f261" }, "isDisabled": false @@ -107,7 +107,7 @@ }, "comments": "", "filter": { - "name": "complex::ImportTextFilter", + "name": "complex::ReadTextDataArrayFilter", "uuid": "25f7df3e-ca3e-4634-adda-732c0e56efd4" }, "isDisabled": false @@ -130,7 +130,7 @@ }, "comments": "", "filter": { - "name": "complex::ImportTextFilter", + "name": "complex::ReadTextDataArrayFilter", "uuid": "25f7df3e-ca3e-4634-adda-732c0e56efd4" }, "isDisabled": false @@ -153,7 +153,7 @@ }, "comments": "", "filter": { - "name": "complex::ImportTextFilter", + "name": "complex::ReadTextDataArrayFilter", "uuid": "25f7df3e-ca3e-4634-adda-732c0e56efd4" }, "isDisabled": false @@ -176,7 +176,7 @@ }, "comments": "", "filter": { - "name": "complex::ImportTextFilter", + "name": "complex::ReadTextDataArrayFilter", "uuid": "25f7df3e-ca3e-4634-adda-732c0e56efd4" }, "isDisabled": false @@ -199,7 +199,7 @@ }, "comments": "", "filter": { - "name": "complex::ImportTextFilter", + "name": "complex::ReadTextDataArrayFilter", "uuid": "25f7df3e-ca3e-4634-adda-732c0e56efd4" }, "isDisabled": false @@ -222,7 +222,7 @@ }, "comments": "", "filter": { - "name": "complex::ImportTextFilter", + "name": "complex::ReadTextDataArrayFilter", "uuid": "25f7df3e-ca3e-4634-adda-732c0e56efd4" }, "isDisabled": false diff --git a/src/Plugins/ComplexCore/src/ComplexCore/ComplexCoreLegacyUUIDMapping.hpp b/src/Plugins/ComplexCore/src/ComplexCore/ComplexCoreLegacyUUIDMapping.hpp index ccb6b0bfca..73f387c2b3 100644 --- a/src/Plugins/ComplexCore/src/ComplexCore/ComplexCoreLegacyUUIDMapping.hpp +++ 
b/src/Plugins/ComplexCore/src/ComplexCore/ComplexCoreLegacyUUIDMapping.hpp @@ -55,11 +55,11 @@ #include "ComplexCore/Filters/GenerateColorTableFilter.hpp" #include "ComplexCore/Filters/IdentifySample.hpp" #include "ComplexCore/Filters/ImportBinaryCTNorthstarFilter.hpp" -#include "ComplexCore/Filters/ImportCSVDataFilter.hpp" +#include "ComplexCore/Filters/ReadCSVFileFilter.hpp" #include "ComplexCore/Filters/ImportDeformKeyFileV12Filter.hpp" #include "ComplexCore/Filters/ImportDREAM3DFilter.hpp" #include "ComplexCore/Filters/ImportHDF5Dataset.hpp" -#include "ComplexCore/Filters/ImportTextFilter.hpp" +#include "ComplexCore/Filters/ReadTextDataArrayFilter.hpp" #include "ComplexCore/Filters/ImportVolumeGraphicsFileFilter.hpp" #include "ComplexCore/Filters/InitializeData.hpp" #include "ComplexCore/Filters/InterpolatePointCloudToRegularGridFilter.hpp" @@ -166,10 +166,10 @@ namespace complex {complex::Uuid::FromString("0d0a6535-6565-51c5-a3fc-fbc00008606d").value(), complex::FilterTraits::uuid}, // GenerateColorTable {complex::Uuid::FromString("0e8c0818-a3fb-57d4-a5c8-7cb8ae54a40a").value(), complex::FilterTraits::uuid}, // IdentifySample {complex::Uuid::FromString("f2259481-5011-5f22-9fcb-c92fb6f8be10").value(), complex::FilterTraits::uuid}, // ImportBinaryCTNorthstarFilter - {complex::Uuid::FromString("bdb978bc-96bf-5498-972c-b509c38b8d50").value(), complex::FilterTraits::uuid}, // ReadASCIIData + {complex::Uuid::FromString("bdb978bc-96bf-5498-972c-b509c38b8d50").value(), complex::FilterTraits::uuid}, // ReadASCIIData {complex::Uuid::FromString("043cbde5-3878-5718-958f-ae75714df0df").value(), complex::FilterTraits::uuid}, // DataContainerReader {complex::Uuid::FromString("9e98c3b0-5707-5a3b-b8b5-23ef83b02896").value(), complex::FilterTraits::uuid}, // ImportHDF5Dataset - {complex::Uuid::FromString("a7007472-29e5-5d0a-89a6-1aed11b603f8").value(), complex::FilterTraits::uuid}, // ImportAsciDataArray + 
{complex::Uuid::FromString("a7007472-29e5-5d0a-89a6-1aed11b603f8").value(), complex::FilterTraits::uuid}, // ImportAsciDataArray {complex::Uuid::FromString("5fa10d81-94b4-582b-833f-8eabe659069e").value(), complex::FilterTraits::uuid}, // ImportVolumeGraphicsFileFilter {complex::Uuid::FromString("dfab9921-fea3-521c-99ba-48db98e43ff8").value(), complex::FilterTraits::uuid}, // InitializeData {complex::Uuid::FromString("4b551c15-418d-5081-be3f-d3aeb62408e5").value(), complex::FilterTraits::uuid}, // InterpolatePointCloudToRegularGrid diff --git a/src/Plugins/ComplexCore/src/ComplexCore/Filters/ImportCSVDataFilter.cpp b/src/Plugins/ComplexCore/src/ComplexCore/Filters/ImportCSVDataFilter.cpp deleted file mode 100644 index 687f34ab52..0000000000 --- a/src/Plugins/ComplexCore/src/ComplexCore/Filters/ImportCSVDataFilter.cpp +++ /dev/null @@ -1,492 +0,0 @@ -#include "ImportCSVDataFilter.hpp" - -#include "ComplexCore/utils/CSVDataParser.hpp" - -#include "complex/Common/TypeTraits.hpp" -#include "complex/Common/Types.hpp" -#include "complex/DataStructure/BaseGroup.hpp" -#include "complex/DataStructure/DataArray.hpp" -#include "complex/DataStructure/DataPath.hpp" -#include "complex/DataStructure/IDataArray.hpp" -#include "complex/Filter/Actions/CreateArrayAction.hpp" -#include "complex/Filter/Actions/CreateDataGroupAction.hpp" -#include "complex/Parameters/BoolParameter.hpp" -#include "complex/Parameters/DataGroupCreationParameter.hpp" -#include "complex/Parameters/DataGroupSelectionParameter.hpp" -#include "complex/Parameters/DynamicTableParameter.hpp" -#include "complex/Parameters/ImportCSVDataParameter.hpp" -#include "complex/Utilities/StringUtilities.hpp" - -#include - -using namespace complex; - -using ParsersVector = std::vector>; -using StringVector = std::vector; -using CharVector = std::vector; -using DataTypeVector = std::vector>; -using Dimensions = std::vector; -namespace fs = std::filesystem; - -namespace -{ -enum class IssueCodes -{ - EMPTY_FILE = -100, - 
FILE_DOES_NOT_EXIST = -101, - EMPTY_NEW_DG = -102, - EMPTY_EXISTING_DG = -103, - INCONSISTENT_COLS = -104, - DUPLICATE_NAMES = -105, - INVALID_ARRAY_TYPE = -106, - ILLEGAL_NAMES = -107, - FILE_NOT_OPEN = -108, - MEMORY_ALLOCATION_FAIL = -109, - UNABLE_TO_READ_DATA = -110, - UNPRINTABLE_CHARACTERS = -111, - BINARY_DETECTED = -112, - INCORRECT_TUPLES = -113, - NEW_DG_EXISTS = -114 -}; - -// ----------------------------------------------------------------------------- -Result validateInputFilePath(const std::string& inputFilePath) -{ - if(inputFilePath.empty()) - { - return {MakeErrorResult(to_underlying(IssueCodes::EMPTY_FILE), "A file has not been chosen to import. Please pick a file to import.")}; - } - - fs::path inputFile(inputFilePath); - if(!fs::exists(inputFile)) - { - return {MakeErrorResult(to_underlying(IssueCodes::FILE_DOES_NOT_EXIST), fmt::format("The input file does not exist: '{}'", inputFilePath))}; - } - - return {}; -} - -// ----------------------------------------------------------------------------- -Result validateTupleDimensions(const std::vector& tDims, usize totalLines) -{ - usize tupleTotal = std::accumulate(tDims.begin(), tDims.end(), static_cast(1), std::multiplies()); - if(tupleTotal != totalLines) - { - return {MakeErrorResult(to_underlying(IssueCodes::INCORRECT_TUPLES), - fmt::format("The current number of tuples ({}) do not match the total number of imported lines ({}).", tupleTotal, totalLines))}; - } - - return {}; -} - -// ----------------------------------------------------------------------------- -Result validateExistingGroup(const DataPath& groupPath, const DataStructure& dataStructure, const std::vector& headers) -{ - if(groupPath.empty()) - { - return {MakeErrorResult(to_underlying(IssueCodes::EMPTY_EXISTING_DG), "'Existing Data Group' - Data path is empty.")}; - } - - const BaseGroup& selectedGroup = dataStructure.getDataRefAs(groupPath); - const auto arrays = selectedGroup.findAllChildrenOfType(); - for(const std::shared_ptr& 
array : arrays) - { - std::string arrayName = array->getName(); - for(const std::string& headerName : headers) - { - if(arrayName == headerName) - { - return {MakeErrorResult(to_underlying(IssueCodes::DUPLICATE_NAMES), - fmt::format("The header name \"{}\" matches an array name that already exists in the selected container.", headerName))}; - } - if(StringUtilities::contains(headerName, '&') || StringUtilities::contains(headerName, ':') || StringUtilities::contains(headerName, '/') || StringUtilities::contains(headerName, '\\')) - { - return {MakeErrorResult(to_underlying(IssueCodes::ILLEGAL_NAMES), - fmt::format("The header name \"{}\" contains a character that will cause problems. Do Not use '&',':', '/' or '\\' in the header names.", headerName))}; - } - } - } - - return {}; -} - -// ----------------------------------------------------------------------------- -Result validateNewGroup(const DataPath& groupPath, const DataStructure& dataStructure) -{ - if(groupPath.empty()) - { - return {MakeErrorResult(to_underlying(IssueCodes::EMPTY_NEW_DG), "'New Data Group' - Data path is empty.")}; - } - - if(dataStructure.getData(groupPath) != nullptr) - { - return {MakeErrorResult(to_underlying(IssueCodes::NEW_DG_EXISTS), fmt::format("The group at the path '{}' cannot be created because it already exists.", groupPath.toString()))}; - } - - return {}; -} - -// ----------------------------------------------------------------------------- -Result createParsers(const DataTypeVector& dataTypes, const DataPath& parentPath, const std::vector& headers, DataStructure& dataStructure) -{ - ParsersVector dataParsers(dataTypes.size()); - - for(usize i = 0; i < dataTypes.size() && i < headers.size(); i++) - { - std::optional dataTypeOpt = dataTypes[i]; - if(!dataTypeOpt.has_value()) - { - continue; - } - - std::string name = headers[i]; - - DataPath arrayPath = parentPath; - arrayPath = arrayPath.createChildPath(name); - - DataType dataType = dataTypeOpt.value(); - switch(dataType) - { 
- case complex::DataType::int8: { - Int8Array& data = dataStructure.getDataRefAs(arrayPath); - dataParsers[i] = std::make_unique(data, name, i); - break; - } - case complex::DataType::uint8: { - UInt8Array& data = dataStructure.getDataRefAs(arrayPath); - dataParsers[i] = std::make_unique(data, name, i); - break; - } - case complex::DataType::int16: { - Int16Array& data = dataStructure.getDataRefAs(arrayPath); - dataParsers[i] = std::make_unique(data, name, i); - break; - } - case complex::DataType::uint16: { - UInt16Array& data = dataStructure.getDataRefAs(arrayPath); - dataParsers[i] = std::make_unique(data, name, i); - break; - } - case complex::DataType::int32: { - Int32Array& data = dataStructure.getDataRefAs(arrayPath); - dataParsers[i] = std::make_unique(data, name, i); - break; - } - case complex::DataType::uint32: { - UInt32Array& data = dataStructure.getDataRefAs(arrayPath); - dataParsers[i] = std::make_unique(data, name, i); - break; - } - case complex::DataType::int64: { - Int64Array& data = dataStructure.getDataRefAs(arrayPath); - dataParsers[i] = std::make_unique(data, name, i); - break; - } - case complex::DataType::uint64: { - UInt64Array& data = dataStructure.getDataRefAs(arrayPath); - dataParsers[i] = std::make_unique(data, name, i); - break; - } - case complex::DataType::float32: { - Float32Array& data = dataStructure.getDataRefAs(arrayPath); - dataParsers[i] = std::make_unique(data, name, i); - break; - } - case complex::DataType::float64: { - Float64Array& data = dataStructure.getDataRefAs(arrayPath); - dataParsers[i] = std::make_unique(data, name, i); - break; - } - // case complex::DataType::string: - // { - // StringArray& data = dataStructure.getDataRefAs(arrayPath); - // dataParsers[i] = std::make_unique(data, name, i); - // break; - // } - default: - return {MakeErrorResult(to_underlying(IssueCodes::INVALID_ARRAY_TYPE), - fmt::format("The data type that was chosen for column number {} is not a valid data array type.", std::to_string(i + 
1)))}; - } - } - - return {std::move(dataParsers)}; -} - -// ----------------------------------------------------------------------------- -Result<> parseLine(std::fstream& inStream, const ParsersVector& dataParsers, const CharVector& delimiters, bool consecutiveDelimiters, usize lineNumber, usize beginIndex) -{ - std::string line; - std::getline(inStream, line); - - StringVector tokens = StringUtilities::split(line, delimiters, consecutiveDelimiters); - - if(dataParsers.size() > tokens.size()) - { - return MakeErrorResult(to_underlying(IssueCodes::INCONSISTENT_COLS), fmt::format("Line {} has an inconsistent number of columns.\nExpecting {} but found {}\nInput line was:\n{}", - std::to_string(lineNumber), std::to_string(dataParsers.size()), std::to_string(tokens.size()), line)); - } - - for(const auto& dataParser : dataParsers) - { - if(dataParser == nullptr) - { - continue; - } - - usize index = dataParser->columnIndex(); - - Result<> result = dataParser->parse(tokens[index], lineNumber - beginIndex); - if(result.invalid()) - { - return result; - } - } - - return {}; -} - -// ----------------------------------------------------------------------------- -void notifyProgress(const IFilter::MessageHandler& messageHandler, usize lineNumber, usize numberOfTuples, float32& threshold) -{ - const float32 percentCompleted = (static_cast(lineNumber) / static_cast(numberOfTuples)) * 100.0f; - if(percentCompleted > threshold) - { - // Print the status of the import - messageHandler({IFilter::Message::Type::Info, fmt::format("Importing CSV Data || {:.{}f}% Complete", static_cast(percentCompleted), 1)}); - threshold = threshold + 5.0f; - if(threshold < percentCompleted) - { - threshold = percentCompleted; - } - } -} - -// ----------------------------------------------------------------------------- -void skipNumberOfLines(std::fstream& inStream, usize numberOfLines) -{ - for(usize i = 1; i < numberOfLines; i++) - { - std::string line; - std::getline(inStream, line); - } -} -} 
// namespace - -namespace complex -{ -//------------------------------------------------------------------------------ -ImportCSVDataFilter::ImportCSVDataFilter() = default; - -// ----------------------------------------------------------------------------- -ImportCSVDataFilter::~ImportCSVDataFilter() noexcept = default; - -// ----------------------------------------------------------------------------- -std::string ImportCSVDataFilter::name() const -{ - return FilterTraits::name.str(); -} - -//------------------------------------------------------------------------------ -std::string ImportCSVDataFilter::className() const -{ - return FilterTraits::className; -} - -//------------------------------------------------------------------------------ -Uuid ImportCSVDataFilter::uuid() const -{ - return FilterTraits::uuid; -} - -//------------------------------------------------------------------------------ -std::string ImportCSVDataFilter::humanName() const -{ - return "Import CSV Data"; -} - -//------------------------------------------------------------------------------ -std::vector ImportCSVDataFilter::defaultTags() const -{ - return {className(), "IO", "Input", "Read", "Import", "ASCII", "ascii", "CSV", "csv", "Column"}; -} - -//------------------------------------------------------------------------------ -Parameters ImportCSVDataFilter::parameters() const -{ - Parameters params; - - params.insertSeparator(Parameters::Separator{"Input Parameters"}); - - params.insert(std::make_unique(k_WizardData_Key, "CSV Wizard Data", "Holds all relevant csv file data collected from the wizard", CSVWizardData())); - - DynamicTableInfo tableInfo; - tableInfo.setColsInfo(DynamicTableInfo::DynamicVectorInfo(1, "Value {}")); - tableInfo.setRowsInfo(DynamicTableInfo::StaticVectorInfo({"Dim 0"})); - params.insert( - std::make_unique(k_TupleDims_Key, "CSV Tuple Dimensions", "The tuple dimensions for the imported CSV data arrays", DynamicTableInfo::TableDataType{{1.0}}, tableInfo)); - - 
params.insertLinkableParameter(std::make_unique(k_UseExistingGroup_Key, "Use Existing Group", "Store the imported CSV data arrays in an existing group.", false)); - params.insert(std::make_unique(k_SelectedDataGroup_Key, "Existing Data Group", "Store the imported CSV data arrays in this existing group.", DataPath{}, - BaseGroup::GetAllGroupTypes())); - params.insert(std::make_unique(k_CreatedDataGroup_Key, "New Data Group", "Store the imported CSV data arrays in a newly created group.", DataPath{})); - - // Associate the Linkable Parameter(s) to the children parameters that they control - params.linkParameters(k_UseExistingGroup_Key, k_SelectedDataGroup_Key, true); - params.linkParameters(k_UseExistingGroup_Key, k_CreatedDataGroup_Key, false); - - return params; -} - -//------------------------------------------------------------------------------ -IFilter::UniquePointer ImportCSVDataFilter::clone() const -{ - return std::make_unique(); -} - -//------------------------------------------------------------------------------ -IFilter::PreflightResult ImportCSVDataFilter::preflightImpl(const DataStructure& dataStructure, const Arguments& filterArgs, const MessageHandler& messageHandler, - const std::atomic_bool& shouldCancel) const -{ - CSVWizardData wizardData = filterArgs.value(k_WizardData_Key); - auto tableData = filterArgs.value(k_TupleDims_Key); - bool useExistingGroup = filterArgs.value(k_UseExistingGroup_Key); - DataPath selectedDataGroup = filterArgs.value(k_SelectedDataGroup_Key); - DataPath createdDataGroup = filterArgs.value(k_CreatedDataGroup_Key); - - std::string inputFilePath = wizardData.inputFilePath; - StringVector headers = wizardData.dataHeaders; - DataTypeVector dataTypes = wizardData.dataTypes; - Dimensions cDims = {1}; - complex::Result resultOutputActions; - - // Validate the input file path - Result result = validateInputFilePath(inputFilePath); - if(result.invalid()) - { - return {std::move(result)}; - } - - // Validate the tuple dimensions - 
const auto& row = tableData.at(0); - std::vector tDims; - tDims.reserve(row.size()); - for(auto value : row) - { - tDims.push_back(static_cast(value)); - } - usize totalLines = wizardData.numberOfLines - wizardData.beginIndex + 1; - result = validateTupleDimensions(tDims, totalLines); - if(result.invalid()) - { - return {std::move(result)}; - } - - // Validate the existing/created group - DataPath groupPath; - if(useExistingGroup) - { - result = validateExistingGroup(selectedDataGroup, dataStructure, headers); - if(result.invalid()) - { - return {std::move(result)}; - } - groupPath = selectedDataGroup; - } - else - { - result = validateNewGroup(createdDataGroup, dataStructure); - if(result.invalid()) - { - return {std::move(result)}; - } - groupPath = createdDataGroup; - resultOutputActions.value().appendAction(std::make_unique(createdDataGroup)); - } - - // Create the arrays - for(usize i = 0; i < dataTypes.size() && i < headers.size(); i++) - { - std::optional dataTypeOpt = dataTypes[i]; - if(!dataTypeOpt.has_value()) - { - // This data type optional does not have a value because the user decided to skip importing this array - continue; - } - - DataType dataType = dataTypeOpt.value(); - std::string name = headers[i]; - - DataPath arrayPath = groupPath; - arrayPath = arrayPath.createChildPath(name); - - resultOutputActions.value().appendAction(std::make_unique(dataType, tDims, cDims, arrayPath)); - } - - // Create preflight updated values - std::vector preflightUpdatedValues; - preflightUpdatedValues.push_back({"Input File Path", wizardData.inputFilePath}); - - return {std::move(resultOutputActions), std::move(preflightUpdatedValues)}; -} - -//------------------------------------------------------------------------------ -Result<> ImportCSVDataFilter::executeImpl(DataStructure& dataStructure, const Arguments& filterArgs, const PipelineFilter* pipelineNode, const MessageHandler& messageHandler, - const std::atomic_bool& shouldCancel) const -{ - CSVWizardData 
wizardData = filterArgs.value(k_WizardData_Key); - bool useExistingGroup = filterArgs.value(k_UseExistingGroup_Key); - DataPath selectedDataGroup = filterArgs.value(k_SelectedDataGroup_Key); - DataPath createdDataGroup = filterArgs.value(k_CreatedDataGroup_Key); - - std::string inputFilePath = wizardData.inputFilePath; - StringVector headers = wizardData.dataHeaders; - DataTypeVector dataTypes = wizardData.dataTypes; - CharVector delimiters = wizardData.delimiters; - bool consecutiveDelimiters = wizardData.consecutiveDelimiters; - usize numLines = wizardData.numberOfLines; - usize beginIndex = wizardData.beginIndex; - - DataPath groupPath = createdDataGroup; - if(useExistingGroup) - { - groupPath = selectedDataGroup; - } - - Result parsersResult = createParsers(dataTypes, groupPath, headers, dataStructure); - if(parsersResult.invalid()) - { - return ConvertResult(std::move(parsersResult)); - } - - ParsersVector dataParsers = std::move(parsersResult.value()); - - std::fstream in(inputFilePath.c_str(), std::ios_base::in); - if(!in.is_open()) - { - return MakeErrorResult(to_underlying(IssueCodes::FILE_NOT_OPEN), fmt::format("Could not open file for reading: {}", inputFilePath)); - } - - // Skip to the first data line - skipNumberOfLines(in, beginIndex); - - float32 threshold = 0.0f; - usize numTuples = numLines - beginIndex + 1; - for(usize lineNum = beginIndex; lineNum <= numLines; lineNum++) - { - if(shouldCancel) - { - return {}; - } - - Result<> parsingResult = parseLine(in, dataParsers, delimiters, consecutiveDelimiters, lineNum, beginIndex); - if(parsingResult.invalid()) - { - return std::move(parsingResult); - } - - notifyProgress(messageHandler, lineNum, numTuples, threshold); - } - - return {}; -} -} // namespace complex diff --git a/src/Plugins/ComplexCore/src/ComplexCore/Filters/ReadCSVFileFilter.cpp b/src/Plugins/ComplexCore/src/ComplexCore/Filters/ReadCSVFileFilter.cpp new file mode 100644 index 0000000000..f2b94712a7 --- /dev/null +++ 
b/src/Plugins/ComplexCore/src/ComplexCore/Filters/ReadCSVFileFilter.cpp @@ -0,0 +1,699 @@ +#include "ReadCSVFileFilter.hpp" + +#include "ComplexCore/utils/CSVDataParser.hpp" + +#include "complex/Common/TypeTraits.hpp" +#include "complex/Common/Types.hpp" +#include "complex/DataStructure/BaseGroup.hpp" +#include "complex/DataStructure/DataArray.hpp" +#include "complex/DataStructure/DataPath.hpp" +#include "complex/DataStructure/IDataArray.hpp" +#include "complex/Filter/Actions/CreateArrayAction.hpp" +#include "complex/Filter/Actions/CreateAttributeMatrixAction.hpp" +#include "complex/Parameters/AttributeMatrixSelectionParameter.hpp" +#include "complex/Parameters/BoolParameter.hpp" +#include "complex/Parameters/DataGroupCreationParameter.hpp" +#include "complex/Parameters/DynamicTableParameter.hpp" +#include "complex/Parameters/ReadCSVFileParameter.hpp" +#include "complex/Utilities/FileUtilities.hpp" +#include "complex/Utilities/FilterUtilities.hpp" +#include "complex/Utilities/StringUtilities.hpp" + +#include + +using namespace complex; + +using ParsersVector = std::vector>; +using StringVector = std::vector; +using CharVector = std::vector; +using DataTypeVector = std::vector; +using Dimensions = std::vector; +namespace fs = std::filesystem; + +namespace +{ +struct ReadCSVFileFilterCache +{ + std::string FilePath; + usize TotalLines = 0; + usize HeadersLine = 0; + std::string Headers; +}; + +std::atomic_int32_t s_InstanceId = 0; +std::map s_HeaderCache; + +enum class IssueCodes +{ + EMPTY_FILE = -100, + EMPTY_NEW_DG = -102, + EMPTY_EXISTING_DG = -103, + INCONSISTENT_COLS = -104, + DUPLICATE_NAMES = -105, + INVALID_ARRAY_TYPE = -106, + ILLEGAL_NAMES = -107, + FILE_NOT_OPEN = -108, + INCORRECT_DATATYPE_COUNT = -109, + INCORRECT_MASK_COUNT = -110, + INCORRECT_TUPLES = -113, + NEW_DG_EXISTS = -114, + CANNOT_SKIP_TO_LINE = -115, + EMPTY_NAMES = -116, + EMPTY_LINE = -119, + HEADER_LINE_OUT_OF_RANGE = -120, + START_IMPORT_ROW_OUT_OF_RANGE = -121, + EMPTY_HEADERS = -122, + 
IGNORED_TUPLE_DIMS = -200 +}; + +// ----------------------------------------------------------------------------- +Result validateExistingGroup(const DataPath& groupPath, const DataStructure& dataStructure, const std::vector& headers) +{ + if(groupPath.empty()) + { + return {MakeErrorResult(to_underlying(IssueCodes::EMPTY_EXISTING_DG), "'Existing Attribute Matrix' - Data path is empty.")}; + } + + const BaseGroup& selectedGroup = dataStructure.getDataRefAs(groupPath); + const auto arrays = selectedGroup.findAllChildrenOfType(); + for(const std::shared_ptr& array : arrays) + { + std::string arrayName = array->getName(); + for(const std::string& headerName : headers) + { + if(arrayName == headerName) + { + return {MakeErrorResult(to_underlying(IssueCodes::DUPLICATE_NAMES), + fmt::format("The header name \"{}\" matches an array name that already exists in the selected container.", headerName))}; + } + } + } + + return {}; +} + +// ----------------------------------------------------------------------------- +Result validateNewGroup(const DataPath& groupPath, const DataStructure& dataStructure) +{ + if(groupPath.empty()) + { + return {MakeErrorResult(to_underlying(IssueCodes::EMPTY_NEW_DG), "'New Data Group' - Data path is empty.")}; + } + + if(dataStructure.getData(groupPath) != nullptr) + { + return {MakeErrorResult(to_underlying(IssueCodes::NEW_DG_EXISTS), fmt::format("The group at the path '{}' cannot be created because it already exists.", groupPath.toString()))}; + } + + return {}; +} + +// ----------------------------------------------------------------------------- +Result createParsers(const DataTypeVector& dataTypes, const std::vector& skippedArrays, const DataPath& parentPath, const std::vector& headers, + DataStructure& dataStructure) +{ + ParsersVector dataParsers(dataTypes.size()); + + for(usize i = 0; i < dataTypes.size() && i < headers.size() && i < skippedArrays.size(); i++) + { + DataType dataType = dataTypes[i]; + std::string name = headers[i]; + 
bool skipped = skippedArrays[i]; + + if(skipped) + { + continue; + } + + DataPath arrayPath = parentPath; + arrayPath = arrayPath.createChildPath(name); + + switch(dataType) + { + case complex::DataType::int8: { + Int8Array& data = dataStructure.getDataRefAs(arrayPath); + dataParsers[i] = std::make_unique(data, name, i); + break; + } + case complex::DataType::uint8: { + UInt8Array& data = dataStructure.getDataRefAs(arrayPath); + dataParsers[i] = std::make_unique(data, name, i); + break; + } + case complex::DataType::int16: { + Int16Array& data = dataStructure.getDataRefAs(arrayPath); + dataParsers[i] = std::make_unique(data, name, i); + break; + } + case complex::DataType::uint16: { + UInt16Array& data = dataStructure.getDataRefAs(arrayPath); + dataParsers[i] = std::make_unique(data, name, i); + break; + } + case complex::DataType::int32: { + Int32Array& data = dataStructure.getDataRefAs(arrayPath); + dataParsers[i] = std::make_unique(data, name, i); + break; + } + case complex::DataType::uint32: { + UInt32Array& data = dataStructure.getDataRefAs(arrayPath); + dataParsers[i] = std::make_unique(data, name, i); + break; + } + case complex::DataType::int64: { + Int64Array& data = dataStructure.getDataRefAs(arrayPath); + dataParsers[i] = std::make_unique(data, name, i); + break; + } + case complex::DataType::uint64: { + UInt64Array& data = dataStructure.getDataRefAs(arrayPath); + dataParsers[i] = std::make_unique(data, name, i); + break; + } + case complex::DataType::float32: { + Float32Array& data = dataStructure.getDataRefAs(arrayPath); + dataParsers[i] = std::make_unique(data, name, i); + break; + } + case complex::DataType::float64: { + Float64Array& data = dataStructure.getDataRefAs(arrayPath); + dataParsers[i] = std::make_unique(data, name, i); + break; + } + case complex::DataType::boolean: { + BoolArray& data = dataStructure.getDataRefAs(arrayPath); + dataParsers[i] = std::make_unique(data, name, i); + break; + } + default: + return 
{MakeErrorResult(to_underlying(IssueCodes::INVALID_ARRAY_TYPE), + fmt::format("The data type that was chosen for column number {} is not a valid data array type.", std::to_string(i + 1)))}; + } + } + + return {std::move(dataParsers)}; +} + +// ----------------------------------------------------------------------------- +Result<> parseLine(std::fstream& inStream, const ParsersVector& dataParsers, const StringVector& headers, const CharVector& delimiters, bool consecutiveDelimiters, usize lineNumber, usize beginIndex) +{ + std::string line; + std::getline(inStream, line); + + StringVector tokens = StringUtilities::split(line, delimiters, consecutiveDelimiters); + if(tokens.empty()) + { + // This is an empty line in the middle of the CSV file, which just shouldn't happen + return MakeErrorResult(to_underlying(IssueCodes::EMPTY_LINE), fmt::format("Line #{} is empty! You should not have any empty lines in the file.", std::to_string(lineNumber))); + } + + if(dataParsers.size() != tokens.size()) + { + return MakeErrorResult(to_underlying(IssueCodes::INCONSISTENT_COLS), + fmt::format("Expecting {} tokens but found {} tokens in the file at line #{}.\n\nInput line was:\n{}\n\nThis is because the data-" + "types/headers/skipped-array-mask all have a size of {} but the file data at line #{} has a column count of {}.", + std::to_string(dataParsers.size()), std::to_string(tokens.size()), std::to_string(lineNumber), line, std::to_string(dataParsers.size()), + std::to_string(lineNumber), std::to_string(tokens.size()))); + } + + for(int i = 0; i < dataParsers.size(); i++) + { + const auto& dataParser = dataParsers[i]; + if(dataParser == nullptr) + { + continue; + } + + usize index = dataParser->columnIndex(); + + Result<> result = dataParser->parse(tokens[index], lineNumber - beginIndex); + if(result.invalid()) + { + for(Error& error : result.errors()) + { + error.message = fmt::format("Array \"{}\", Line {}: ", headers[i], lineNumber) + error.message; + } + return result; + } + } 
+ + return {}; +} + +// ----------------------------------------------------------------------------- +void notifyProgress(const IFilter::MessageHandler& messageHandler, usize lineNumber, usize numberOfTuples, float32& threshold) +{ + const float32 percentCompleted = (static_cast(lineNumber) / static_cast(numberOfTuples)) * 100.0f; + if(percentCompleted > threshold) + { + // Print the status of the import + messageHandler({IFilter::Message::Type::Info, fmt::format("Importing CSV Data || {:.{}f}% Complete", static_cast(percentCompleted), 1)}); + threshold = threshold + 5.0f; + if(threshold < percentCompleted) + { + threshold = percentCompleted; + } + } +} + +// ----------------------------------------------------------------------------- +bool skipNumberOfLines(std::fstream& inStream, usize numberOfLines) +{ + for(usize i = 1; i < numberOfLines; i++) + { + if(inStream.eof()) + { + return false; + } + + std::string line; + std::getline(inStream, line); + } + + return true; +} + +std::string tupleDimsToString(const std::vector& tupleDims) +{ + std::string tupleDimsStr; + for(usize i = 0; i < tupleDims.size(); ++i) + { + tupleDimsStr += std::to_string(tupleDims[i]); + if(i != tupleDims.size() - 1) + { + tupleDimsStr += "x"; + } + } + return tupleDimsStr; +} + +//------------------------------------------------------------------------------ +IFilter::PreflightResult readHeaders(const std::string& inputFilePath, usize headersLineNum, ReadCSVFileFilterCache& headerCache) +{ + std::fstream in(inputFilePath.c_str(), std::ios_base::in); + if(!in.is_open()) + { + return {MakeErrorResult(to_underlying(IssueCodes::FILE_NOT_OPEN), fmt::format("Could not open file for reading: {}", inputFilePath)), {}}; + } + + // Skip to the headers line + if(!skipNumberOfLines(in, headersLineNum)) + { + return {MakeErrorResult(to_underlying(IssueCodes::CANNOT_SKIP_TO_LINE), fmt::format("Could not skip to the chosen header line ({}).", headersLineNum)), {}}; + } + + // Read the headers line + 
std::getline(in, headerCache.Headers); + headerCache.HeadersLine = headersLineNum; + return {}; +} +} // namespace + +namespace complex +{ +//------------------------------------------------------------------------------ +ReadCSVFileFilter::ReadCSVFileFilter() +: m_InstanceId(s_InstanceId.fetch_add(1)) +{ + s_HeaderCache[m_InstanceId] = {}; +} + +// ----------------------------------------------------------------------------- +ReadCSVFileFilter::~ReadCSVFileFilter() noexcept +{ + s_HeaderCache.erase(m_InstanceId); +} + +// ----------------------------------------------------------------------------- +std::string ReadCSVFileFilter::name() const +{ + return FilterTraits::name.str(); +} + +//------------------------------------------------------------------------------ +std::string ReadCSVFileFilter::className() const +{ + return FilterTraits::className; +} + +//------------------------------------------------------------------------------ +Uuid ReadCSVFileFilter::uuid() const +{ + return FilterTraits::uuid; +} + +//------------------------------------------------------------------------------ +std::string ReadCSVFileFilter::humanName() const +{ + return "Read CSV File"; +} + +//------------------------------------------------------------------------------ +std::vector ReadCSVFileFilter::defaultTags() const +{ + return {className(), "IO", "Input", "Read", "Import", "ASCII", "CSV", "Column", "Delimited", "Text"}; +} + +//------------------------------------------------------------------------------ +Parameters ReadCSVFileFilter::parameters() const +{ + Parameters params; + + params.insertSeparator(Parameters::Separator{"Input Parameters"}); + + params.insert(std::make_unique(k_ReadCSVData_Key, "CSV Importer Data", "Holds all relevant csv file data collected from the custom interface", ReadCSVData())); + + DynamicTableInfo tableInfo; + tableInfo.setColsInfo(DynamicTableInfo::DynamicVectorInfo(1, "Value {}")); + 
tableInfo.setRowsInfo(DynamicTableInfo::StaticVectorInfo({"Dim 0"})); + + params.insertSeparator(Parameters::Separator{"Existing Attribute Matrix"}); + params.insertLinkableParameter( + std::make_unique(k_UseExistingGroup_Key, "Use Existing Attribute Matrix", "Store the imported CSV data arrays in an existing attribute matrix.", false)); + params.insert( + std::make_unique(k_SelectedDataGroup_Key, "Existing Attribute Matrix", "Store the imported CSV data arrays in this existing attribute matrix.", DataPath{})); + + params.insertSeparator(Parameters::Separator{"Created AttributeMatrix"}); + params.insert(std::make_unique(k_CreatedDataGroup_Key, "New Attribute Matrix", "Store the imported CSV data arrays in a newly created attribute matrix.", + DataPath{{"Imported Data"}})); + + // Associate the Linkable Parameter(s) to the children parameters that they control + params.linkParameters(k_UseExistingGroup_Key, k_SelectedDataGroup_Key, true); + params.linkParameters(k_UseExistingGroup_Key, k_CreatedDataGroup_Key, false); + + return params; +} + +//------------------------------------------------------------------------------ +IFilter::UniquePointer ReadCSVFileFilter::clone() const +{ + return std::make_unique(); +} + +//------------------------------------------------------------------------------ +IFilter::PreflightResult ReadCSVFileFilter::preflightImpl(const DataStructure& dataStructure, const Arguments& filterArgs, const MessageHandler& messageHandler, + const std::atomic_bool& shouldCancel) const +{ + ReadCSVData readCSVData = filterArgs.value(k_ReadCSVData_Key); + bool useExistingAM = filterArgs.value(k_UseExistingGroup_Key); + DataPath selectedAM = filterArgs.value(k_SelectedDataGroup_Key); + DataPath createdDataAM = filterArgs.value(k_CreatedDataGroup_Key); + + std::string inputFilePath = readCSVData.inputFilePath; + ReadCSVData::HeaderMode headerMode = readCSVData.headerMode; + + complex::Result resultOutputActions; + + // Validate the input file path + 
if(inputFilePath.empty()) + { + return {MakeErrorResult(to_underlying(IssueCodes::EMPTY_FILE), "A file has not been chosen to import. Please pick a file to import.")}; + } + + Result<> csvResult = FileUtilities::ValidateCSVFile(inputFilePath); + if(csvResult.invalid()) + { + return {ConvertResultTo(std::move(ConvertResult(std::move(csvResult))), {}), {}}; + } + + StringVector headers; + if(readCSVData.inputFilePath != s_HeaderCache[s_InstanceId].FilePath) + { + std::fstream in(inputFilePath.c_str(), std::ios_base::in); + if(!in.is_open()) + { + return {MakeErrorResult(to_underlying(IssueCodes::FILE_NOT_OPEN), fmt::format("Could not open file for reading: {}", inputFilePath)), {}}; + } + + s_HeaderCache[s_InstanceId].FilePath = readCSVData.inputFilePath; + + usize lineCount = 0; + while(!in.eof()) + { + std::string line; + std::getline(in, line); + lineCount++; + + if(headerMode == ReadCSVData::HeaderMode::LINE && lineCount == readCSVData.headersLine) + { + s_HeaderCache[s_InstanceId].Headers = line; + s_HeaderCache[s_InstanceId].HeadersLine = readCSVData.headersLine; + } + } + + headers = StringUtilities::split(s_HeaderCache[s_InstanceId].Headers, readCSVData.delimiters, readCSVData.consecutiveDelimiters); + s_HeaderCache[s_InstanceId].TotalLines = lineCount; + } + else if(headerMode == ReadCSVData::HeaderMode::LINE) + { + if(readCSVData.headersLine != s_HeaderCache[s_InstanceId].HeadersLine) + { + IFilter::PreflightResult result = readHeaders(readCSVData.inputFilePath, readCSVData.headersLine, s_HeaderCache[s_InstanceId]); + if(result.outputActions.invalid()) + { + return result; + } + } + + headers = StringUtilities::split(s_HeaderCache[s_InstanceId].Headers, readCSVData.delimiters, readCSVData.consecutiveDelimiters); + } + + if(headerMode == ReadCSVData::HeaderMode::CUSTOM) + { + headers = readCSVData.customHeaders; + } + + usize totalLines = s_HeaderCache[s_InstanceId].TotalLines; + + // Check that we have a valid start import row + 
if(readCSVData.startImportRow == 0) + { + std::string errMsg = "'Start import at row' value is out of range. The 'Start import at row' value cannot be set to line #0."; + return {MakeErrorResult(to_underlying(IssueCodes::START_IMPORT_ROW_OUT_OF_RANGE), errMsg), {}}; + } + + if(readCSVData.startImportRow > totalLines) + { + std::string errMsg = fmt::format("'Start import at row' value ({}) is larger than the total number of lines in the file ({}).", readCSVData.startImportRow, totalLines); + return {MakeErrorResult(to_underlying(IssueCodes::START_IMPORT_ROW_OUT_OF_RANGE), errMsg), {}}; + } + + // Check that we have a valid header line number + if(headerMode == ReadCSVData::HeaderMode::LINE && readCSVData.headersLine == 0) + { + std::string errMsg = "The header line number is out of range. The header line number cannot be set to line #0."; + return {MakeErrorResult(to_underlying(IssueCodes::HEADER_LINE_OUT_OF_RANGE), errMsg), {}}; + } + + if(headerMode == ReadCSVData::HeaderMode::LINE && readCSVData.headersLine > totalLines) + { + std::string errMsg = fmt::format("The header line number is out of range. There are {} lines in the file and the header line number is set to line #{}.", totalLines, readCSVData.headersLine); + return {MakeErrorResult(to_underlying(IssueCodes::HEADER_LINE_OUT_OF_RANGE), errMsg), {}}; + } + + if(headerMode == ReadCSVData::HeaderMode::LINE && readCSVData.headersLine > readCSVData.startImportRow) + { + std::string errMsg = fmt::format( + "The header line number is out of range. The start import row is set to line #{} and the header line number is set to line #{}. The header line number must be in the range 1-{}.", + readCSVData.startImportRow, readCSVData.headersLine, readCSVData.startImportRow - 1); + return {MakeErrorResult(to_underlying(IssueCodes::HEADER_LINE_OUT_OF_RANGE), errMsg), {}}; + } + + // Check that we have valid headers + if(headers.empty()) + { + std::string errMsg = "There are 0 imported array headers. 
This is either because there are 0 headers being read from the header line or the custom headers are empty. Please either choose a " + "different header line number or input at least 1 custom header."; + return {MakeErrorResult(to_underlying(IssueCodes::EMPTY_HEADERS), errMsg), {}}; + } + + if(readCSVData.dataTypes.size() != headers.size()) + { + std::string errMsg = + fmt::format("The number of data types ({}) does not match the number of imported array headers ({}). The number of data types must match the number of imported array headers.", + readCSVData.dataTypes.size(), headers.size()); + return {MakeErrorResult(to_underlying(IssueCodes::INCORRECT_DATATYPE_COUNT), errMsg), {}}; + } + + if(readCSVData.skippedArrayMask.size() != headers.size()) + { + std::string errMsg = fmt::format( + "The number of booleans in the skipped array mask ({}) does not match the number of imported array headers ({}). The number of booleans in the skipped array mask must match the number " + "of imported array headers.", + readCSVData.skippedArrayMask.size(), headers.size()); + return {MakeErrorResult(to_underlying(IssueCodes::INCORRECT_MASK_COUNT), errMsg), {}}; + } + + for(int i = 0; i < headers.size(); i++) + { + const auto& headerName = headers[i]; + if(headerName.empty()) + { + std::string errMsg = fmt::format("The header for column #{} is empty. Please fill in a header for column #{}.", i + 1, i + 1); + return {MakeErrorResult(to_underlying(IssueCodes::EMPTY_NAMES), errMsg), {}}; + } + + if(StringUtilities::contains(headerName, '&') || StringUtilities::contains(headerName, ':') || StringUtilities::contains(headerName, '/') || StringUtilities::contains(headerName, '\\')) + { + return {MakeErrorResult(to_underlying(IssueCodes::ILLEGAL_NAMES), + fmt::format("The header name \"{}\" contains a character that will cause problems. 
Do Not use '&',':', '/' or '\\' in the header names.", headerName))}; + } + + for(int j = 0; j < headers.size(); j++) + { + std::string otherHeaderName = headers[j]; + + if(i != j && !headerName.empty() && !otherHeaderName.empty() && headerName == otherHeaderName) + { + std::string errMsg = fmt::format("Header '{}' (column #{}) and header '{}' (column #{}) have the same name. Headers may not have duplicate names.", headerName, i + 1, otherHeaderName, j + 1); + return {MakeErrorResult(to_underlying(IssueCodes::DUPLICATE_NAMES), errMsg), {}}; + } + } + } + + // Check that we have a valid tuple count + usize totalImportedLines = totalLines - readCSVData.startImportRow + 1; + usize tupleTotal = std::accumulate(readCSVData.tupleDims.begin(), readCSVData.tupleDims.end(), static_cast(1), std::multiplies()); + if(tupleTotal == 0) + { + std::string tupleDimsStr = tupleDimsToString(readCSVData.tupleDims); + std::string errMsg = fmt::format("Error: The current tuple dimensions ({}) has 0 total tuples. 
At least 1 tuple is required.", tupleDimsStr, tupleTotal, totalImportedLines); + return {MakeErrorResult(to_underlying(IssueCodes::INCORRECT_TUPLES), errMsg), {}}; + } + else if(tupleTotal > totalImportedLines && !useExistingAM) + { + std::string tupleDimsStr = tupleDimsToString(readCSVData.tupleDims); + std::string errMsg = fmt::format("Error: The current tuple dimensions ({}) has {} total tuples, but this is larger than the total number of available lines to import ({}).", tupleDimsStr, + tupleTotal, totalImportedLines); + return {MakeErrorResult(to_underlying(IssueCodes::INCORRECT_TUPLES), errMsg), {}}; + } + + // Validate the existing/created group + DataPath groupPath; + if(useExistingAM) + { + Result result = validateExistingGroup(selectedAM, dataStructure, headers); + if(result.invalid()) + { + return {std::move(result)}; + } + groupPath = selectedAM; + } + else + { + Result result = validateNewGroup(createdDataAM, dataStructure); + if(result.invalid()) + { + return {std::move(result)}; + } + groupPath = createdDataAM; + resultOutputActions.value().appendAction(std::make_unique(createdDataAM, readCSVData.tupleDims)); + } + + // Create the arrays + std::vector tupleDims(readCSVData.tupleDims.size()); + std::transform(readCSVData.tupleDims.begin(), readCSVData.tupleDims.end(), tupleDims.begin(), [](float64 d) { return static_cast(d); }); + if(useExistingAM) + { + const AttributeMatrix& am = dataStructure.getDataRefAs(groupPath); + tupleDims = am.getShape(); + + auto totalLinesRead = std::accumulate(tupleDims.begin(), tupleDims.end(), 1UL, std::multiplies<>()); + + std::string msg = fmt::format("The Array Tuple Dimensions ({}) will be ignored and the Existing Attribute Matrix tuple dimensions ({}) will be used. 
The total number of lines read will be {}.", + fmt::join(readCSVData.tupleDims, "x"), fmt::join(tupleDims, "x"), totalLinesRead); + resultOutputActions.warnings().push_back(Warning{to_underlying(IssueCodes::IGNORED_TUPLE_DIMS), msg}); + } + + for(usize i = 0; i < headers.size(); i++) + { + if(readCSVData.skippedArrayMask[i]) + { + // The user decided to skip importing this array + continue; + } + + DataType dataType = readCSVData.dataTypes[i]; + std::string name = headers[i]; + + DataPath arrayPath = groupPath; + arrayPath = arrayPath.createChildPath(name); + resultOutputActions.value().appendAction(std::make_unique(dataType, tupleDims, std::vector{1}, arrayPath)); + } + + return {std::move(resultOutputActions), {}}; +} + +//------------------------------------------------------------------------------ +Result<> ReadCSVFileFilter::executeImpl(DataStructure& dataStructure, const Arguments& filterArgs, const PipelineFilter* pipelineNode, const MessageHandler& messageHandler, + const std::atomic_bool& shouldCancel) const +{ + ReadCSVData readCSVData = filterArgs.value(k_ReadCSVData_Key); + bool useExistingGroup = filterArgs.value(k_UseExistingGroup_Key); + DataPath selectedDataGroup = filterArgs.value(k_SelectedDataGroup_Key); + DataPath createdDataGroup = filterArgs.value(k_CreatedDataGroup_Key); + + std::string inputFilePath = readCSVData.inputFilePath; + StringVector headers = StringUtilities::split(s_HeaderCache[s_InstanceId].Headers, readCSVData.delimiters, readCSVData.consecutiveDelimiters); + DataTypeVector dataTypes = readCSVData.dataTypes; + std::vector skippedArrays = readCSVData.skippedArrayMask; + bool consecutiveDelimiters = readCSVData.consecutiveDelimiters; + usize startImportRow = readCSVData.startImportRow; + + if(readCSVData.headerMode == ReadCSVData::HeaderMode::CUSTOM) + { + headers = readCSVData.customHeaders; + } + + DataPath groupPath = createdDataGroup; + if(useExistingGroup) + { + groupPath = selectedDataGroup; + } + + Result parsersResult = 
createParsers(dataTypes, skippedArrays, groupPath, headers, dataStructure); + if(parsersResult.invalid()) + { + return ConvertResult(std::move(parsersResult)); + } + + std::fstream in(inputFilePath.c_str(), std::ios_base::in); + if(!in.is_open()) + { + return MakeErrorResult(to_underlying(IssueCodes::FILE_NOT_OPEN), fmt::format("Could not open file for reading: {}", inputFilePath)); + } + + // Skip to the first data line + if(!skipNumberOfLines(in, startImportRow)) + { + return MakeErrorResult(to_underlying(IssueCodes::CANNOT_SKIP_TO_LINE), fmt::format("Could not skip to the first line in the file to import ({}).", startImportRow)); + } + + float32 threshold = 0.0f; + usize numTuples = std::accumulate(readCSVData.tupleDims.cbegin(), readCSVData.tupleDims.cend(), static_cast(1), std::multiplies<>()); + if(useExistingGroup) + { + const AttributeMatrix& am = dataStructure.getDataRefAs(groupPath); + numTuples = std::accumulate(am.getShape().cbegin(), am.getShape().cend(), static_cast(1), std::multiplies<>()); + } + usize lineNum = startImportRow; + for(usize i = 0; i < numTuples && !in.eof(); i++) + { + if(shouldCancel) + { + return {}; + } + + Result<> parsingResult = parseLine(in, parsersResult.value(), headers, readCSVData.delimiters, consecutiveDelimiters, lineNum, startImportRow); + if(parsingResult.invalid()) + { + return std::move(parsingResult); + } + + notifyProgress(messageHandler, lineNum, numTuples, threshold); + lineNum++; + } + + return {}; +} +} // namespace complex diff --git a/src/Plugins/ComplexCore/src/ComplexCore/Filters/ImportCSVDataFilter.hpp b/src/Plugins/ComplexCore/src/ComplexCore/Filters/ReadCSVFileFilter.hpp similarity index 82% rename from src/Plugins/ComplexCore/src/ComplexCore/Filters/ImportCSVDataFilter.hpp rename to src/Plugins/ComplexCore/src/ComplexCore/Filters/ReadCSVFileFilter.hpp index 701eb6a1c3..74ea8d8459 100644 --- a/src/Plugins/ComplexCore/src/ComplexCore/Filters/ImportCSVDataFilter.hpp +++ 
b/src/Plugins/ComplexCore/src/ComplexCore/Filters/ReadCSVFileFilter.hpp @@ -11,9 +11,9 @@ class AbstractDataParser; namespace complex { /** - * @class ImportCSVDataFilter + * @class ReadCSVFileFilter * @brief This filter reads CSV data from any text-based file and imports the data into complex-style arrays. - * The user uses the filter's wizard to specify which file to import, how the data is formatted, what to call + * The user uses the parameter user interface to specify which file to import, how the data is formatted, what to call * each array, and what type each array should be. * * Note:* This filter is intended to read data that is column-oriented, such that each created complex array @@ -21,21 +21,20 @@ namespace complex * If multiple columns are in fact different components of the same array, then the columns may be imported as * separate arrays and then combined in the correct order using the Combine Attribute Arrays filter. */ -class COMPLEXCORE_EXPORT ImportCSVDataFilter : public IFilter +class COMPLEXCORE_EXPORT ReadCSVFileFilter : public IFilter { public: - ImportCSVDataFilter(); - ~ImportCSVDataFilter() noexcept override; + ReadCSVFileFilter(); + ~ReadCSVFileFilter() noexcept override; - ImportCSVDataFilter(const ImportCSVDataFilter&) = delete; - ImportCSVDataFilter(ImportCSVDataFilter&&) noexcept = delete; + ReadCSVFileFilter(const ReadCSVFileFilter&) = delete; + ReadCSVFileFilter(ReadCSVFileFilter&&) noexcept = delete; - ImportCSVDataFilter& operator=(const ImportCSVDataFilter&) = delete; - ImportCSVDataFilter& operator=(ImportCSVDataFilter&&) noexcept = delete; + ReadCSVFileFilter& operator=(const ReadCSVFileFilter&) = delete; + ReadCSVFileFilter& operator=(ReadCSVFileFilter&&) noexcept = delete; // Parameter Keys - static inline constexpr StringLiteral k_WizardData_Key = "wizard_data"; - static inline constexpr StringLiteral k_TupleDims_Key = "tuple_dimensions"; + static inline constexpr StringLiteral k_ReadCSVData_Key = "read_csv_data"; static 
inline constexpr StringLiteral k_UseExistingGroup_Key = "use_existing_group"; static inline constexpr StringLiteral k_SelectedDataGroup_Key = "selected_data_group"; static inline constexpr StringLiteral k_CreatedDataGroup_Key = "created_data_group"; @@ -104,7 +103,10 @@ class COMPLEXCORE_EXPORT ImportCSVDataFilter : public IFilter */ Result<> executeImpl(DataStructure& dataStructure, const Arguments& filterArgs, const PipelineFilter* pipelineNode, const MessageHandler& messageHandler, const std::atomic_bool& shouldCancel) const override; + +private: + int32 m_InstanceId; }; } // namespace complex -COMPLEX_DEF_FILTER_TRAITS(complex, ImportCSVDataFilter, "373be1f8-31cf-49f6-aa5d-e356f4f3f261"); +COMPLEX_DEF_FILTER_TRAITS(complex, ReadCSVFileFilter, "373be1f8-31cf-49f6-aa5d-e356f4f3f261"); diff --git a/src/Plugins/ComplexCore/src/ComplexCore/Filters/ImportTextFilter.cpp b/src/Plugins/ComplexCore/src/ComplexCore/Filters/ReadTextDataArrayFilter.cpp similarity index 84% rename from src/Plugins/ComplexCore/src/ComplexCore/Filters/ImportTextFilter.cpp rename to src/Plugins/ComplexCore/src/ComplexCore/Filters/ReadTextDataArrayFilter.cpp index 084bef8bc6..4296c06575 100644 --- a/src/Plugins/ComplexCore/src/ComplexCore/Filters/ImportTextFilter.cpp +++ b/src/Plugins/ComplexCore/src/ComplexCore/Filters/ReadTextDataArrayFilter.cpp @@ -1,4 +1,4 @@ -#include "ImportTextFilter.hpp" +#include "ReadTextDataArrayFilter.hpp" #include "complex/Common/StringLiteral.hpp" #include "complex/Common/TypeTraits.hpp" @@ -22,32 +22,32 @@ using namespace complex; namespace complex { -std::string ImportTextFilter::name() const +std::string ReadTextDataArrayFilter::name() const { - return FilterTraits::name; + return FilterTraits::name; } -std::string ImportTextFilter::className() const +std::string ReadTextDataArrayFilter::className() const { - return FilterTraits::className; + return FilterTraits::className; } -Uuid ImportTextFilter::uuid() const +Uuid ReadTextDataArrayFilter::uuid() const { - 
return FilterTraits::uuid; + return FilterTraits::uuid; } -std::vector ImportTextFilter::defaultTags() const +std::vector ReadTextDataArrayFilter::defaultTags() const { - return {className(), "IO", "Input", "Read", "Import", "Text"}; + return {className(), "IO", "Input", "Read", "Import", "Text", "ASCII", "Attribute"}; } -std::string ImportTextFilter::humanName() const +std::string ReadTextDataArrayFilter::humanName() const { - return "Import ASCII Data Array"; + return "Read Text Data Array"; } -Parameters ImportTextFilter::parameters() const +Parameters ReadTextDataArrayFilter::parameters() const { Parameters params; @@ -81,12 +81,12 @@ Parameters ImportTextFilter::parameters() const return params; } -IFilter::UniquePointer ImportTextFilter::clone() const +IFilter::UniquePointer ReadTextDataArrayFilter::clone() const { - return std::make_unique(); + return std::make_unique(); } -IFilter::PreflightResult ImportTextFilter::preflightImpl(const DataStructure& data, const Arguments& args, const MessageHandler& messageHandler, const std::atomic_bool& shouldCancel) const +IFilter::PreflightResult ReadTextDataArrayFilter::preflightImpl(const DataStructure& data, const Arguments& args, const MessageHandler& messageHandler, const std::atomic_bool& shouldCancel) const { auto numericType = args.value(k_ScalarTypeKey); auto arrayPath = args.value(k_DataArrayKey); @@ -141,7 +141,8 @@ IFilter::PreflightResult ImportTextFilter::preflightImpl(const DataStructure& da return {std::move(resultOutputActions)}; } -Result<> ImportTextFilter::executeImpl(DataStructure& data, const Arguments& args, const PipelineFilter* pipelineNode, const MessageHandler& messageHandler, const std::atomic_bool& shouldCancel) const +Result<> ReadTextDataArrayFilter::executeImpl(DataStructure& data, const Arguments& args, const PipelineFilter* pipelineNode, const MessageHandler& messageHandler, + const std::atomic_bool& shouldCancel) const { auto inputFilePath = args.value(k_InputFileKey); auto numericType 
= args.value(k_ScalarTypeKey); @@ -194,7 +195,7 @@ Result<> ImportTextFilter::executeImpl(DataStructure& data, const Arguments& arg return CsvParser::ReadFile(inputFilePath, *dataArray, skipLines, delimiter); } default: - return MakeErrorResult(-1001, fmt::format("ImportTextFilter: Parameter NumericType which has a value of '{}' does not match any in complex.", to_underlying(numericType))); + return MakeErrorResult(-1001, fmt::format("ReadTextDataArrayFilter: Parameter NumericType which has a value of '{}' does not match any in complex.", to_underlying(numericType))); } } } // namespace complex diff --git a/src/Plugins/ComplexCore/src/ComplexCore/Filters/ImportTextFilter.hpp b/src/Plugins/ComplexCore/src/ComplexCore/Filters/ReadTextDataArrayFilter.hpp similarity index 78% rename from src/Plugins/ComplexCore/src/ComplexCore/Filters/ImportTextFilter.hpp rename to src/Plugins/ComplexCore/src/ComplexCore/Filters/ReadTextDataArrayFilter.hpp index a583347be7..fe348533fa 100644 --- a/src/Plugins/ComplexCore/src/ComplexCore/Filters/ImportTextFilter.hpp +++ b/src/Plugins/ComplexCore/src/ComplexCore/Filters/ReadTextDataArrayFilter.hpp @@ -7,17 +7,17 @@ namespace complex { -class COMPLEXCORE_EXPORT ImportTextFilter : public IFilter +class COMPLEXCORE_EXPORT ReadTextDataArrayFilter : public IFilter { public: - ImportTextFilter() = default; - ~ImportTextFilter() noexcept override = default; + ReadTextDataArrayFilter() = default; + ~ReadTextDataArrayFilter() noexcept override = default; - ImportTextFilter(const ImportTextFilter&) = delete; - ImportTextFilter(ImportTextFilter&&) noexcept = delete; + ReadTextDataArrayFilter(const ReadTextDataArrayFilter&) = delete; + ReadTextDataArrayFilter(ReadTextDataArrayFilter&&) noexcept = delete; - ImportTextFilter& operator=(const ImportTextFilter&) = delete; - ImportTextFilter& operator=(ImportTextFilter&&) noexcept = delete; + ReadTextDataArrayFilter& operator=(const ReadTextDataArrayFilter&) = delete; + ReadTextDataArrayFilter& 
operator=(ReadTextDataArrayFilter&&) noexcept = delete; static inline constexpr StringLiteral k_InputFileKey = "input_file"; static inline constexpr StringLiteral k_ScalarTypeKey = "scalar_type"; @@ -93,4 +93,4 @@ class COMPLEXCORE_EXPORT ImportTextFilter : public IFilter }; } // namespace complex -COMPLEX_DEF_FILTER_TRAITS(complex, ImportTextFilter, "25f7df3e-ca3e-4634-adda-732c0e56efd4"); +COMPLEX_DEF_FILTER_TRAITS(complex, ReadTextDataArrayFilter, "25f7df3e-ca3e-4634-adda-732c0e56efd4"); diff --git a/src/Plugins/ComplexCore/src/ComplexCore/utils/CSVDataParser.hpp b/src/Plugins/ComplexCore/src/ComplexCore/utils/CSVDataParser.hpp index fe24a2e000..023de3207b 100644 --- a/src/Plugins/ComplexCore/src/ComplexCore/utils/CSVDataParser.hpp +++ b/src/Plugins/ComplexCore/src/ComplexCore/utils/CSVDataParser.hpp @@ -125,4 +125,6 @@ using UInt64Parser = CSVDataParser; using Float32Parser = CSVDataParser; using Float64Parser = CSVDataParser; +using BoolParser = CSVDataParser; + // using StringParser = Parser; diff --git a/src/Plugins/ComplexCore/test/AlignSectionsFeatureCentroidTest.cpp b/src/Plugins/ComplexCore/test/AlignSectionsFeatureCentroidTest.cpp index 3088384b00..76dfd093e8 100644 --- a/src/Plugins/ComplexCore/test/AlignSectionsFeatureCentroidTest.cpp +++ b/src/Plugins/ComplexCore/test/AlignSectionsFeatureCentroidTest.cpp @@ -1,6 +1,6 @@ #include "ComplexCore/ComplexCore_test_dirs.hpp" #include "ComplexCore/Filters/AlignSectionsFeatureCentroidFilter.hpp" -#include "ComplexCore/Filters/ImportTextFilter.hpp" +#include "ComplexCore/Filters/ReadTextDataArrayFilter.hpp" #include "complex/Core/Application.hpp" #include "complex/Parameters/ArraySelectionParameter.hpp" @@ -95,7 +95,7 @@ TEST_CASE("ComplexCore::AlignSectionsFeatureCentroidFilter", "[Reconstruction][A // Compare the output of the shifts file with the exemplar file - ImportTextFilter filter; + ReadTextDataArrayFilter filter; Arguments args; // read in the exemplar shift data file @@ -129,7 +129,7 @@ 
TEST_CASE("ComplexCore::AlignSectionsFeatureCentroidFilter", "[Reconstruction][A // Compare the output of the shifts file with the exemplar file - ImportTextFilter filter; + ReadTextDataArrayFilter filter; Arguments args; args.insertOrAssign(k_InputFileKey, std::make_any(computedShiftsFile)); diff --git a/src/Plugins/ComplexCore/test/CMakeLists.txt b/src/Plugins/ComplexCore/test/CMakeLists.txt index 4693917832..3d7c1fc089 100644 --- a/src/Plugins/ComplexCore/test/CMakeLists.txt +++ b/src/Plugins/ComplexCore/test/CMakeLists.txt @@ -77,11 +77,11 @@ set(${PLUGIN_NAME}UnitTest_SRCS ImageContouringTest.cpp ImageGeomTest.cpp ImportBinaryCTNorthstarTest.cpp - ImportCSVDataTest.cpp + ReadCSVFileTest.cpp # ImportDeformKeyFileV12Test.cpp ImportHDF5DatasetTest.cpp - ImportTextTest.cpp + ReadTextDataArrayTest.cpp ImportVolumeGraphicsFileTest.cpp InitializeDataTest.cpp InterpolatePointCloudToRegularGridTest.cpp diff --git a/src/Plugins/ComplexCore/test/CoreFilterTest.cpp b/src/Plugins/ComplexCore/test/CoreFilterTest.cpp index 16e960a826..46f8d14a27 100644 --- a/src/Plugins/ComplexCore/test/CoreFilterTest.cpp +++ b/src/Plugins/ComplexCore/test/CoreFilterTest.cpp @@ -1,6 +1,6 @@ #include "ComplexCore/ComplexCore_test_dirs.hpp" #include "ComplexCore/Filters/CreateDataGroup.hpp" -#include "ComplexCore/Filters/ImportTextFilter.hpp" +#include "ComplexCore/Filters/ReadTextDataArrayFilter.hpp" #include "complex/Common/StringLiteral.hpp" #include "complex/Core/Application.hpp" @@ -76,13 +76,13 @@ TEST_CASE("CoreFilterTest:RunCoreFilter") file << i << "," << i + 1 << "," << i + 2 << "\n"; } } - SECTION("Run ImportTextFilter") + SECTION("Run ReadTextDataArrayFilter") { static constexpr uint64 k_NComp = 3; static constexpr uint64 k_NSkipLines = 0; const static DynamicTableInfo::TableDataType k_TupleDims = {{static_cast(k_NLines)}}; - ImportTextFilter filter; + ReadTextDataArrayFilter filter; DataStructure dataStructure; Arguments args; DataPath dataPath({"foo"}); diff --git 
a/src/Plugins/ComplexCore/test/GenerateColorTableTest.cpp b/src/Plugins/ComplexCore/test/GenerateColorTableTest.cpp index 6857d2ca05..ba49b17de8 100644 --- a/src/Plugins/ComplexCore/test/GenerateColorTableTest.cpp +++ b/src/Plugins/ComplexCore/test/GenerateColorTableTest.cpp @@ -2,7 +2,7 @@ #include "ComplexCore/ComplexCore_test_dirs.hpp" #include "ComplexCore/Filters/GenerateColorTableFilter.hpp" -#include "ComplexCore/Filters/ImportTextFilter.hpp" +#include "ComplexCore/Filters/ReadTextDataArrayFilter.hpp" #include "complex/Parameters/ArrayCreationParameter.hpp" #include "complex/Parameters/DynamicTableParameter.hpp" @@ -79,14 +79,14 @@ TEST_CASE("ComplexCore::GenerateColorTableFilter: Valid filter execution") // Read Image File { - const ImportTextFilter filter; + const ReadTextDataArrayFilter filter; Arguments args; - args.insertOrAssign(ImportTextFilter::k_InputFileKey, std::make_any(k_InputImageFilePath)); - args.insertOrAssign(ImportTextFilter::k_ScalarTypeKey, std::make_any(NumericType::float32)); - args.insertOrAssign(ImportTextFilter::k_NCompKey, std::make_any(1)); - args.insertOrAssign(ImportTextFilter::k_NTuplesKey, std::make_any(DynamicTableInfo::TableDataType{{static_cast(37989)}})); - args.insertOrAssign(ImportTextFilter::k_DataArrayKey, std::make_any(DataPath{{Constants::k_Confidence_Index.str()}})); + args.insertOrAssign(ReadTextDataArrayFilter::k_InputFileKey, std::make_any(k_InputImageFilePath)); + args.insertOrAssign(ReadTextDataArrayFilter::k_ScalarTypeKey, std::make_any(NumericType::float32)); + args.insertOrAssign(ReadTextDataArrayFilter::k_NCompKey, std::make_any(1)); + args.insertOrAssign(ReadTextDataArrayFilter::k_NTuplesKey, std::make_any(DynamicTableInfo::TableDataType{{static_cast(37989)}})); + args.insertOrAssign(ReadTextDataArrayFilter::k_DataArrayKey, std::make_any(DataPath{{Constants::k_Confidence_Index.str()}})); IFilter::ExecuteResult executeResult = filter.execute(dataStructure, args); 
COMPLEX_RESULT_REQUIRE_VALID(executeResult.result); diff --git a/src/Plugins/ComplexCore/test/ImportCSVDataTest.cpp b/src/Plugins/ComplexCore/test/ImportCSVDataTest.cpp deleted file mode 100644 index b845fdd15f..0000000000 --- a/src/Plugins/ComplexCore/test/ImportCSVDataTest.cpp +++ /dev/null @@ -1,343 +0,0 @@ -#include "ComplexCore/ComplexCore_test_dirs.hpp" -#include "ComplexCore/Filters/CreateDataGroup.hpp" -#include "ComplexCore/Filters/ImportCSVDataFilter.hpp" - -#include "complex/Common/TypesUtility.hpp" -#include "complex/DataStructure/DataArray.hpp" -#include "complex/Parameters/DynamicTableParameter.hpp" -#include "complex/Parameters/ImportCSVDataParameter.hpp" -#include "complex/UnitTest/UnitTestCommon.hpp" -#include "complex/Utilities/DataArrayUtilities.hpp" -#include "complex/Utilities/StringUtilities.hpp" - -#include - -#include - -namespace fs = std::filesystem; -using namespace complex; - -namespace -{ -const fs::path k_TestInput = fs::path(unit_test::k_BinaryDir.view()) / "ImportCSVDataTest" / "Input.txt"; -constexpr int32 k_InvalidArgumentErrorCode = -100; -constexpr int32 k_OverflowErrorCode = -101; -} // namespace - -// ----------------------------------------------------------------------------- -void CreateTestDataFile(nonstd::span colValues, const std::string& header) -{ - std::ofstream file(k_TestInput); - REQUIRE(file.is_open()); - - file << header << "\n"; - - usize rowCount = colValues.size(); - for(int i = 0; i < rowCount; i++) - { - file << colValues[i] << "\n"; - } -} - -// ----------------------------------------------------------------------------- -DataStructure createDataStructure(const std::string& dummyGroupName) -{ - // Instantiate the filter, a DataStructure object and an Arguments Object - DataStructure dataStructure; - - // Create a dummy group so that Existing Group parameter doesn't error out. This should be removed once - // disabled linked parameters are no longer automatically validated! 
- { - CreateDataGroup filter; - Arguments args; - - args.insertOrAssign(CreateDataGroup::k_DataObjectPath, std::make_any(DataPath({dummyGroupName}))); - - auto executeResult = filter.execute(dataStructure, args); - REQUIRE(executeResult.result.valid()); - } - - return dataStructure; -} - -// ----------------------------------------------------------------------------- -Arguments createArguments(const std::string& arrayName, std::optional dataType, nonstd::span values, const std::string& newGroupName, const std::string& dummyGroupName) -{ - Arguments args; - - CSVWizardData data; - data.inputFilePath = k_TestInput.string(); - data.dataHeaders = {arrayName}; - data.dataTypes = {dataType}; - data.beginIndex = 2; - data.commaAsDelimiter = true; - data.delimiters = {','}; - data.headerLine = 1; - data.numberOfLines = values.size() + 1; - - args.insertOrAssign(ImportCSVDataFilter::k_WizardData_Key, std::make_any(data)); - args.insertOrAssign(ImportCSVDataFilter::k_TupleDims_Key, std::make_any(DynamicTableInfo::TableDataType{{static_cast(values.size())}})); - args.insertOrAssign(ImportCSVDataFilter::k_UseExistingGroup_Key, std::make_any(false)); - args.insertOrAssign(ImportCSVDataFilter::k_CreatedDataGroup_Key, std::make_any(DataPath({newGroupName}))); - args.insertOrAssign(ImportCSVDataFilter::k_SelectedDataGroup_Key, std::make_any(DataPath({dummyGroupName}))); - - return args; -} - -// ----------------------------------------------------------------------------- -template -void TestCase_TestPrimitives(nonstd::span values) -{ - INFO(fmt::format("T = {}", DataTypeToString(GetDataType()))) - INFO(fmt::format("Values = {}", values)) - - std::string newGroupName = "New Group"; - std::string dummyGroupName = "Dummy Group"; - - std::string arrayName = "Array"; - DataPath arrayPath = DataPath({newGroupName, arrayName}); - - ImportCSVDataFilter filter; - DataStructure dataStructure = createDataStructure(dummyGroupName); - Arguments args = createArguments(arrayName, 
GetDataType(), values, newGroupName, dummyGroupName); - - // Create the test input data file - CreateTestDataFile(values, arrayName); - - // Preflight the filter and check result - auto preflightResult = filter.preflight(dataStructure, args); - COMPLEX_RESULT_REQUIRE_VALID(preflightResult.outputActions); - - // Execute the filter and check the result - auto executeResult = filter.execute(dataStructure, args); - COMPLEX_RESULT_REQUIRE_VALID(executeResult.result); - - // Check the results - const DataArray* array = dataStructure.getDataAs>(arrayPath); - REQUIRE(array != nullptr); - - REQUIRE(values.size() == array->getSize()); - for(int i = 0; i < values.size(); i++) - { - Result parseResult = ConvertTo::convert(values[i]); - COMPLEX_RESULT_REQUIRE_VALID(parseResult); - const auto& exemplaryValue = parseResult.value(); - const auto& testValue = array->at(i); - REQUIRE(testValue == exemplaryValue); - } -} - -// ----------------------------------------------------------------------------- -template -void TestCase_TestPrimitives_Error(nonstd::span values, int32 expectedErrorCode) -{ - INFO(fmt::format("T = {}", DataTypeToString(GetDataType()))) - INFO(fmt::format("Values = {}", values)) - - std::string newGroupName = "New Group"; - std::string dummyGroupName = "Dummy Group"; - - std::string arrayName = "Array"; - DataPath arrayPath = DataPath({newGroupName, arrayName}); - - ImportCSVDataFilter filter; - DataStructure dataStructure = createDataStructure(dummyGroupName); - Arguments args = createArguments(arrayName, GetDataType(), values, newGroupName, dummyGroupName); - - // Create the test input data file - fs::create_directories(k_TestInput.parent_path()); - CreateTestDataFile(values, arrayName); - - // Preflight the filter and check result - auto preflightResult = filter.preflight(dataStructure, args); - COMPLEX_RESULT_REQUIRE_VALID(preflightResult.outputActions); - - // Execute the filter and check the result - auto executeResult = filter.execute(dataStructure, 
args); - COMPLEX_RESULT_REQUIRE_INVALID(executeResult.result); - REQUIRE(executeResult.result.errors().size() == 1); - REQUIRE(executeResult.result.errors()[0].code == expectedErrorCode); -} - -TEST_CASE("ComplexCore::ImportCSVDataFilter (Case 1): Valid filter execution") -{ - // Create the parent directory path - fs::create_directories(k_TestInput.parent_path()); - - std::vector v = {std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max())}; - TestCase_TestPrimitives(v); - - v = {std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max())}; - TestCase_TestPrimitives(v); - - v = {std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max())}; - TestCase_TestPrimitives(v); - - v = {std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max())}; - TestCase_TestPrimitives(v); - - v = {std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max())}; - TestCase_TestPrimitives(v); - - v = {std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max())}; - TestCase_TestPrimitives(v); - - v = {std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max())}; - TestCase_TestPrimitives(v); - - v = {std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max())}; - TestCase_TestPrimitives(v); - - v = {std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max())}; - TestCase_TestPrimitives(v); - - v = {std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max())}; - TestCase_TestPrimitives(v); -} - -TEST_CASE("ComplexCore::ImportCSVDataFilter (Case 2): Valid filter execution - Skipped Array") -{ - std::string newGroupName = "New Group"; - std::string dummyGroupName = "Dummy Group"; - - std::string arrayName = "Array"; - DataPath arrayPath = DataPath({newGroupName, arrayName}); - - ImportCSVDataFilter filter; - DataStructure 
dataStructure = createDataStructure(dummyGroupName); - std::vector values = {"0"}; - Arguments args = createArguments(arrayName, {}, values, newGroupName, dummyGroupName); - - // Create the test input data file - CreateTestDataFile(values, arrayName); - - // Preflight the filter and check result - auto preflightResult = filter.preflight(dataStructure, args); - COMPLEX_RESULT_REQUIRE_VALID(preflightResult.outputActions); - - // Execute the filter and check the result - auto executeResult = filter.execute(dataStructure, args); - COMPLEX_RESULT_REQUIRE_VALID(executeResult.result); - - // Check that the array does not exist - const IDataArray* array = dataStructure.getDataAs(arrayPath); - REQUIRE(array == nullptr); -} - -TEST_CASE("ComplexCore::ImportCSVDataFilter (Case 3): Invalid filter execution - Out of Bounds") -{ - // Create the parent directory path - fs::create_directories(k_TestInput.parent_path()); - - // Int8 - Out of bounds - std::vector v = {"-129"}; - TestCase_TestPrimitives_Error(v, k_OverflowErrorCode); - - v = {"128"}; - TestCase_TestPrimitives_Error(v, k_OverflowErrorCode); - - // Int16 - Out of bounds - v = {"-32769"}; - TestCase_TestPrimitives_Error(v, k_OverflowErrorCode); - - v = {"32768"}; - TestCase_TestPrimitives_Error(v, k_OverflowErrorCode); - - // Int32 - Out of bounds - v = {"-2147483649"}; - TestCase_TestPrimitives_Error(v, k_OverflowErrorCode); - - v = {"2147483648"}; - TestCase_TestPrimitives_Error(v, k_OverflowErrorCode); - - // Int64 - Out of bounds - v = {"-9223372036854775809"}; - TestCase_TestPrimitives_Error(v, k_OverflowErrorCode); - - v = {"9223372036854775808"}; - TestCase_TestPrimitives_Error(v, k_OverflowErrorCode); - - // UInt8 - Out of bounds - v = {"-1"}; - TestCase_TestPrimitives_Error(v, k_OverflowErrorCode); - - v = {"256"}; - TestCase_TestPrimitives_Error(v, k_OverflowErrorCode); - - // UInt16 - Out of bounds - v = {"-1"}; - TestCase_TestPrimitives_Error(v, k_OverflowErrorCode); - - v = {"65536"}; - 
TestCase_TestPrimitives_Error(v, k_OverflowErrorCode); - - // UInt32 - Out of bounds - v = {"-1"}; - TestCase_TestPrimitives_Error(v, k_OverflowErrorCode); - - v = {"4294967296"}; - TestCase_TestPrimitives_Error(v, k_OverflowErrorCode); - - // UInt64 - Out of bounds - v = {"-1"}; - TestCase_TestPrimitives_Error(v, k_OverflowErrorCode); - - v = {"18446744073709551616"}; - TestCase_TestPrimitives_Error(v, k_OverflowErrorCode); - - // Float32 - Out of bounds - v = {"-3.5E38"}; - TestCase_TestPrimitives_Error(v, k_OverflowErrorCode); - - v = {"3.5E38"}; - TestCase_TestPrimitives_Error(v, k_OverflowErrorCode); - - // Float64 - Out of bounds - v = {"-1.8E308"}; - TestCase_TestPrimitives_Error(v, k_OverflowErrorCode); - - v = {"1.8E308"}; - TestCase_TestPrimitives_Error(v, k_OverflowErrorCode); -} - -TEST_CASE("ComplexCore::ImportCSVDataFilter (Case 4): Invalid filter execution - Invalid arguments") -{ - // Create the parent directory path - fs::create_directories(k_TestInput.parent_path()); - - std::vector v = {" "}; - TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); - TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); - TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); - TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); - TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); - TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); - TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); - TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); - TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); - TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); - - v = {"a"}; - TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); - TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); - TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); - TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); - TestCase_TestPrimitives_Error(v, 
k_InvalidArgumentErrorCode); - TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); - TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); - TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); - TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); - TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); - - v = {"&"}; - TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); - TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); - TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); - TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); - TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); - TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); - TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); - TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); - TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); - TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); -} diff --git a/src/Plugins/ComplexCore/test/ReadCSVFileTest.cpp b/src/Plugins/ComplexCore/test/ReadCSVFileTest.cpp new file mode 100644 index 0000000000..e2ee5022f2 --- /dev/null +++ b/src/Plugins/ComplexCore/test/ReadCSVFileTest.cpp @@ -0,0 +1,578 @@ +#include "ComplexCore/ComplexCore_test_dirs.hpp" +#include "ComplexCore/Filters/CreateDataGroup.hpp" +#include "ComplexCore/Filters/ReadCSVFileFilter.hpp" + +#include "complex/Common/TypesUtility.hpp" +#include "complex/DataStructure/DataArray.hpp" +#include "complex/Parameters/DynamicTableParameter.hpp" +#include "complex/Parameters/ReadCSVFileParameter.hpp" +#include "complex/UnitTest/UnitTestCommon.hpp" +#include "complex/Utilities/DataArrayUtilities.hpp" +#include "complex/Utilities/StringUtilities.hpp" + +#include + +#include + +namespace fs = std::filesystem; +using namespace complex; + +namespace +{ +const fs::path k_TestInput = fs::path(unit_test::k_BinaryDir.view()) / "ReadCSVFileTest" / "Input.txt"; +constexpr int32 
k_InvalidArgumentErrorCode = -100; +constexpr int32 k_OverflowErrorCode = -101; +constexpr int32 k_BlankLineErrorCode = -119; +constexpr int32 k_EmptyFile = -100; +constexpr int32 k_InconsistentCols = -104; +constexpr int32 k_DuplicateNames = -105; +constexpr int32 k_InvalidArrayType = -106; +constexpr int32 k_IllegalNames = -107; +constexpr int32 k_IncorrectDataTypeCount = -109; +constexpr int32 k_IncorrectMaskCount = -110; +constexpr int32 k_IncorrectTuples = -113; +constexpr int32 k_EmptyNames = -116; +constexpr int32 k_HeaderLineOutOfRange = -120; +constexpr int32 k_StartImportRowOutOfRange = -121; +constexpr int32 k_EmptyHeaders = -122; +constexpr int32 k_FileDoesNotExist = -300; +} // namespace + +// ----------------------------------------------------------------------------- +void CreateTestDataFile(const fs::path& inputFilePath, nonstd::span colValues, const std::vector& headers) +{ + if(fs::exists(inputFilePath)) + { + fs::remove(inputFilePath); + } + + std::ofstream file(inputFilePath); + REQUIRE(file.is_open()); + + for(int i = 0; i < headers.size(); i++) + { + file << headers[i]; + if(i < headers.size() - 1) + { + file << ","; + } + } + + file << "\n"; + + usize rowCount = colValues.size(); + for(int i = 0; i < rowCount; i++) + { + for(int j = 0; j < headers.size(); j++) + { + file << colValues[i]; + if(j < headers.size() - 1) + { + file << ","; + } + } + + if(i < rowCount - 1) + { + file << "\n"; + } + } +} + +// ----------------------------------------------------------------------------- +Arguments createArguments(const std::string& inputFilePath, usize startImportRow, ReadCSVData::HeaderMode headerMode, usize headersLine, const std::vector& delimiters, + const std::vector& customHeaders, const std::vector& dataTypes, const std::vector& skippedArrayMask, const std::vector& tupleDims, + nonstd::span values, const std::string& newGroupName) +{ + Arguments args; + + ReadCSVData data; + data.inputFilePath = inputFilePath; + data.customHeaders = 
customHeaders; + data.dataTypes = dataTypes; + data.startImportRow = startImportRow; + data.delimiters = delimiters; + data.headersLine = headersLine; + data.headerMode = headerMode; + data.tupleDims = tupleDims; + data.skippedArrayMask = skippedArrayMask; + + args.insertOrAssign(ReadCSVFileFilter::k_ReadCSVData_Key, std::make_any(data)); + args.insertOrAssign(ReadCSVFileFilter::k_UseExistingGroup_Key, std::make_any(false)); + args.insertOrAssign(ReadCSVFileFilter::k_CreatedDataGroup_Key, std::make_any(DataPath({newGroupName}))); + + return args; +} + +// ----------------------------------------------------------------------------- +template +void TestCase_TestPrimitives(nonstd::span values) +{ + INFO(fmt::format("T = {}", DataTypeToString(GetDataType()))) + INFO(fmt::format("Values = {}", values)) + + std::string newGroupName = "New Group"; + + std::string arrayName = "Array"; + DataPath arrayPath = DataPath({newGroupName, arrayName}); + + ReadCSVFileFilter filter; + DataStructure dataStructure; + Arguments args = + createArguments(k_TestInput.string(), 2, ReadCSVData::HeaderMode::LINE, 1, {','}, {arrayName}, {GetDataType()}, {false}, {static_cast(values.size())}, values, newGroupName); + + // Create the test input data file + CreateTestDataFile(k_TestInput, values, {arrayName}); + + // Preflight the filter and check result + auto preflightResult = filter.preflight(dataStructure, args); + COMPLEX_RESULT_REQUIRE_VALID(preflightResult.outputActions); + + // Execute the filter and check the result + auto executeResult = filter.execute(dataStructure, args); + COMPLEX_RESULT_REQUIRE_VALID(executeResult.result); + + // Check the results + const DataArray* array = dataStructure.getDataAs>(arrayPath); + REQUIRE(array != nullptr); + + REQUIRE(values.size() == array->getSize()); + for(int i = 0; i < values.size(); i++) + { + Result parseResult = ConvertTo::convert(values[i]); + COMPLEX_RESULT_REQUIRE_VALID(parseResult); + const auto& exemplaryValue = parseResult.value(); + 
const auto& testValue = array->at(i); + REQUIRE(testValue == exemplaryValue); + } +} + +// ----------------------------------------------------------------------------- +template +void TestCase_TestPrimitives_Error(nonstd::span values, int32 expectedErrorCode) +{ + INFO(fmt::format("T = {}", DataTypeToString(GetDataType()))) + INFO(fmt::format("Values = {}", values)) + + std::string newGroupName = "New Group"; + + std::string arrayName = "Array"; + DataPath arrayPath = DataPath({newGroupName, arrayName}); + + ReadCSVFileFilter filter; + DataStructure dataStructure; + Arguments args = + createArguments(k_TestInput.string(), 2, ReadCSVData::HeaderMode::LINE, 1, {','}, {arrayName}, {GetDataType()}, {false}, {static_cast(values.size())}, values, newGroupName); + + // Create the test input data file + fs::create_directories(k_TestInput.parent_path()); + CreateTestDataFile(k_TestInput, values, {arrayName}); + + // Preflight the filter and check result + auto preflightResult = filter.preflight(dataStructure, args); + COMPLEX_RESULT_REQUIRE_VALID(preflightResult.outputActions); + + // Execute the filter and check the result + auto executeResult = filter.execute(dataStructure, args); + COMPLEX_RESULT_REQUIRE_INVALID(executeResult.result); + REQUIRE(executeResult.result.errors().size() == 1); + REQUIRE(executeResult.result.errors()[0].code == expectedErrorCode); +} + +// ----------------------------------------------------------------------------- +void TestCase_TestImporterData_Error(const std::string& inputFilePath, usize startImportRow, ReadCSVData::HeaderMode headerMode, usize headersLine, const std::vector& delimiters, + const std::vector& headers, const std::vector& dataTypes, const std::vector& skippedArrayMask, const std::vector& tupleDims, + nonstd::span values, int32 expectedErrorCode) +{ + std::string newGroupName = "New Group"; + ReadCSVFileFilter filter; + DataStructure dataStructure; + Arguments args = createArguments(inputFilePath, startImportRow, headerMode, 
headersLine, delimiters, headers, dataTypes, skippedArrayMask, tupleDims, values, newGroupName); + + // Execute the filter and check the result + auto executeResult = filter.execute(dataStructure, args); + COMPLEX_RESULT_REQUIRE_INVALID(executeResult.result); + REQUIRE(executeResult.result.errors().size() == 1); + REQUIRE(executeResult.result.errors()[0].code == expectedErrorCode); +} + +TEST_CASE("ComplexCore::ReadCSVFileFilter (Case 1): Valid filter execution") +{ + // Create the parent directory path + fs::create_directories(k_TestInput.parent_path()); + + std::vector v = {std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max())}; + TestCase_TestPrimitives(v); + + v = {std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max())}; + TestCase_TestPrimitives(v); + + v = {std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max())}; + TestCase_TestPrimitives(v); + + v = {std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max())}; + TestCase_TestPrimitives(v); + + v = {std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max())}; + TestCase_TestPrimitives(v); + + v = {std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max())}; + TestCase_TestPrimitives(v); + + v = {std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max())}; + TestCase_TestPrimitives(v); + + v = {std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max())}; + TestCase_TestPrimitives(v); + + v = {std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max())}; + TestCase_TestPrimitives(v); + + v = {std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max())}; + TestCase_TestPrimitives(v); + + v = {std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max())}; + TestCase_TestPrimitives(v); +} + 
+TEST_CASE("ComplexCore::ReadCSVFileFilter (Case 2): Valid filter execution - Skipped Array") +{ + std::string newGroupName = "New Group"; + + std::string arrayName = "Array"; + DataPath arrayPath = DataPath({newGroupName, arrayName}); + + ReadCSVFileFilter filter; + DataStructure dataStructure; + std::vector values = {"0"}; + Arguments args = createArguments(k_TestInput.string(), 2, ReadCSVData::HeaderMode::LINE, 1, {','}, {arrayName}, {DataType::int8}, {true}, {static_cast(values.size())}, values, newGroupName); + + // Create the test input data file + CreateTestDataFile(k_TestInput, values, {arrayName}); + + // Preflight the filter and check result + auto preflightResult = filter.preflight(dataStructure, args); + COMPLEX_RESULT_REQUIRE_VALID(preflightResult.outputActions); + + // Execute the filter and check the result + auto executeResult = filter.execute(dataStructure, args); + COMPLEX_RESULT_REQUIRE_VALID(executeResult.result); + + // Check that the array does not exist + const IDataArray* array = dataStructure.getDataAs(arrayPath); + REQUIRE(array == nullptr); +} + +TEST_CASE("ComplexCore::ReadCSVFileFilter (Case 3): Invalid filter execution - Out of Bounds") +{ + // Create the parent directory path + fs::create_directories(k_TestInput.parent_path()); + + // Int8 - Out of bounds + std::vector v = {"-129"}; + TestCase_TestPrimitives_Error(v, k_OverflowErrorCode); + + v = {"128"}; + TestCase_TestPrimitives_Error(v, k_OverflowErrorCode); + + // Int16 - Out of bounds + v = {"-32769"}; + TestCase_TestPrimitives_Error(v, k_OverflowErrorCode); + + v = {"32768"}; + TestCase_TestPrimitives_Error(v, k_OverflowErrorCode); + + // Int32 - Out of bounds + v = {"-2147483649"}; + TestCase_TestPrimitives_Error(v, k_OverflowErrorCode); + + v = {"2147483648"}; + TestCase_TestPrimitives_Error(v, k_OverflowErrorCode); + + // Int64 - Out of bounds + v = {"-9223372036854775809"}; + TestCase_TestPrimitives_Error(v, k_OverflowErrorCode); + + v = {"9223372036854775808"}; + 
TestCase_TestPrimitives_Error(v, k_OverflowErrorCode); + + // UInt8 - Out of bounds + v = {"-1"}; + TestCase_TestPrimitives_Error(v, k_OverflowErrorCode); + + v = {"256"}; + TestCase_TestPrimitives_Error(v, k_OverflowErrorCode); + + // UInt16 - Out of bounds + v = {"-1"}; + TestCase_TestPrimitives_Error(v, k_OverflowErrorCode); + + v = {"65536"}; + TestCase_TestPrimitives_Error(v, k_OverflowErrorCode); + + // UInt32 - Out of bounds + v = {"-1"}; + TestCase_TestPrimitives_Error(v, k_OverflowErrorCode); + + v = {"4294967296"}; + TestCase_TestPrimitives_Error(v, k_OverflowErrorCode); + + // UInt64 - Out of bounds + v = {"-1"}; + TestCase_TestPrimitives_Error(v, k_OverflowErrorCode); + + v = {"18446744073709551616"}; + TestCase_TestPrimitives_Error(v, k_OverflowErrorCode); + + // Float32 - Out of bounds + v = {"-3.5E38"}; + TestCase_TestPrimitives_Error(v, k_OverflowErrorCode); + + v = {"3.5E38"}; + TestCase_TestPrimitives_Error(v, k_OverflowErrorCode); + + // Float64 - Out of bounds + v = {"-1.8E308"}; + TestCase_TestPrimitives_Error(v, k_OverflowErrorCode); + + v = {"1.8E308"}; + TestCase_TestPrimitives_Error(v, k_OverflowErrorCode); +} + +TEST_CASE("ComplexCore::ReadCSVFileFilter (Case 4): Invalid filter execution - Invalid arguments") +{ + // Create the parent directory path + fs::create_directories(k_TestInput.parent_path()); + + std::vector v = {" "}; + TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); + TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); + TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); + TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); + TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); + TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); + TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); + TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); + TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); + TestCase_TestPrimitives_Error(v, 
k_InvalidArgumentErrorCode); + + v = {"a"}; + TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); + TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); + TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); + TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); + TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); + TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); + TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); + TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); + TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); + TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); + + v = {"&"}; + TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); + TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); + TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); + TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); + TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); + TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); + TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); + TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); + TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); + TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); +} + +TEST_CASE("ComplexCore::ReadCSVFileFilter (Case 5): Invalid filter execution - Invalid ReadCSVData values") +{ + std::vector v = {std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max())}; + fs::create_directories(k_TestInput.parent_path()); + CreateTestDataFile(k_TestInput, v, {"Array"}); + std::vector tupleDims = {static_cast(v.size())}; + + // Empty input file path + TestCase_TestImporterData_Error("", 2, ReadCSVData::HeaderMode::LINE, 1, {','}, {}, {DataType::int8}, {false}, tupleDims, v, k_EmptyFile); + + // Input file does not exist + fs::path tmp_file = fs::temp_directory_path() / "ThisFileDoesNotExist.txt"; + 
TestCase_TestImporterData_Error(tmp_file.string(), 2, ReadCSVData::HeaderMode::LINE, 1, {','}, {}, {DataType::int8}, {false}, tupleDims, v, k_FileDoesNotExist); + + // Start Import Row Out-of-Range + TestCase_TestImporterData_Error(k_TestInput.string(), 0, ReadCSVData::HeaderMode::LINE, 1, {','}, {}, {DataType::int8}, {false}, tupleDims, v, k_StartImportRowOutOfRange); + TestCase_TestImporterData_Error(k_TestInput.string(), 500, ReadCSVData::HeaderMode::LINE, 1, {','}, {}, {DataType::int8}, {false}, tupleDims, v, k_StartImportRowOutOfRange); + + // Header Line Number Out-of-Range + TestCase_TestImporterData_Error(k_TestInput.string(), 2, ReadCSVData::HeaderMode::LINE, 0, {','}, {}, {DataType::int8}, {false}, tupleDims, v, k_HeaderLineOutOfRange); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, ReadCSVData::HeaderMode::LINE, 600, {','}, {}, {DataType::int8}, {false}, tupleDims, v, k_HeaderLineOutOfRange); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, ReadCSVData::HeaderMode::LINE, 3, {','}, {}, {DataType::int8}, {false}, tupleDims, v, k_HeaderLineOutOfRange); + + // Empty array headers + tmp_file = fs::temp_directory_path() / "BlankLines.txt"; + v = {std::to_string(std::numeric_limits::min()), "", std::to_string(std::numeric_limits::max())}; + CreateTestDataFile(tmp_file, v, {"Array"}); + TestCase_TestImporterData_Error(tmp_file.string(), 4, ReadCSVData::HeaderMode::LINE, 3, {','}, {}, {DataType::int8}, {false}, {static_cast(v.size())}, v, k_EmptyHeaders); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, ReadCSVData::HeaderMode::CUSTOM, 1, {','}, {}, {DataType::int8}, {false}, {static_cast(v.size())}, v, k_EmptyHeaders); + fs::remove(tmp_file); + v = {std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max())}; + + // Incorrect Data Type Count + TestCase_TestImporterData_Error(k_TestInput.string(), 2, ReadCSVData::HeaderMode::LINE, 1, {','}, {}, {}, {false}, tupleDims, v, k_IncorrectDataTypeCount); + 
TestCase_TestImporterData_Error(k_TestInput.string(), 2, ReadCSVData::HeaderMode::LINE, 1, {','}, {}, {DataType::int8, DataType::int32}, {false}, tupleDims, v, k_IncorrectDataTypeCount); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, ReadCSVData::HeaderMode::CUSTOM, 1, {','}, {"Custom Array"}, {}, {false}, tupleDims, v, k_IncorrectDataTypeCount); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, ReadCSVData::HeaderMode::CUSTOM, 1, {','}, {"Custom Array"}, {DataType::int8, DataType::int32}, {false}, tupleDims, v, + k_IncorrectDataTypeCount); + + // Incorrect Skipped Array Mask Count + TestCase_TestImporterData_Error(k_TestInput.string(), 2, ReadCSVData::HeaderMode::LINE, 1, {','}, {}, {DataType::int8}, {}, tupleDims, v, k_IncorrectMaskCount); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, ReadCSVData::HeaderMode::LINE, 1, {','}, {}, {DataType::int8}, {false, false}, tupleDims, v, k_IncorrectMaskCount); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, ReadCSVData::HeaderMode::CUSTOM, 1, {','}, {"Custom Array"}, {DataType::int8}, {}, tupleDims, v, k_IncorrectMaskCount); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, ReadCSVData::HeaderMode::CUSTOM, 1, {','}, {"Custom Array"}, {DataType::int8}, {false, false}, tupleDims, v, k_IncorrectMaskCount); + + // Empty Header Names + TestCase_TestImporterData_Error(k_TestInput.string(), 2, ReadCSVData::HeaderMode::CUSTOM, 1, {','}, {""}, {DataType::int8}, {false}, tupleDims, v, k_EmptyNames); + + // Duplicate Header Names + tmp_file = fs::temp_directory_path() / "DuplicateHeaders.txt"; + std::vector duplicateHeaders = {"Custom Array", "Custom Array"}; + CreateTestDataFile(tmp_file, v, duplicateHeaders); + TestCase_TestImporterData_Error(tmp_file.string(), 2, ReadCSVData::HeaderMode::CUSTOM, 1, {','}, duplicateHeaders, {DataType::int8, DataType::int8}, {false, false}, tupleDims, v, k_DuplicateNames); + fs::remove(tmp_file); + + // Illegal Header Names + tmp_file = 
fs::temp_directory_path() / "IllegalHeaders.txt"; + + std::vector illegalHeaders = {"Illegal/Header"}; + CreateTestDataFile(tmp_file, v, illegalHeaders); + TestCase_TestImporterData_Error(tmp_file.string(), 2, ReadCSVData::HeaderMode::LINE, 1, {','}, {}, {DataType::int8}, {false}, tupleDims, v, k_IllegalNames); + TestCase_TestImporterData_Error(tmp_file.string(), 2, ReadCSVData::HeaderMode::CUSTOM, 1, {','}, illegalHeaders, {DataType::int8}, {false}, tupleDims, v, k_IllegalNames); + + illegalHeaders = {"Illegal\\Header"}; + CreateTestDataFile(tmp_file, v, illegalHeaders); + TestCase_TestImporterData_Error(tmp_file.string(), 2, ReadCSVData::HeaderMode::LINE, 1, {','}, {}, {DataType::int8}, {false}, tupleDims, v, k_IllegalNames); + TestCase_TestImporterData_Error(tmp_file.string(), 2, ReadCSVData::HeaderMode::CUSTOM, 1, {','}, illegalHeaders, {DataType::int8}, {false}, tupleDims, v, k_IllegalNames); + + illegalHeaders = {"Illegal&Header"}; + CreateTestDataFile(tmp_file, v, illegalHeaders); + TestCase_TestImporterData_Error(tmp_file.string(), 2, ReadCSVData::HeaderMode::LINE, 1, {','}, {}, {DataType::int8}, {false}, tupleDims, v, k_IllegalNames); + TestCase_TestImporterData_Error(tmp_file.string(), 2, ReadCSVData::HeaderMode::CUSTOM, 1, {','}, illegalHeaders, {DataType::int8}, {false}, tupleDims, v, k_IllegalNames); + + illegalHeaders = {"Illegal:Header"}; + CreateTestDataFile(tmp_file, v, illegalHeaders); + TestCase_TestImporterData_Error(tmp_file.string(), 2, ReadCSVData::HeaderMode::LINE, 1, {','}, {}, {DataType::int8}, {false}, tupleDims, v, k_IllegalNames); + TestCase_TestImporterData_Error(tmp_file.string(), 2, ReadCSVData::HeaderMode::CUSTOM, 1, {','}, illegalHeaders, {DataType::int8}, {false}, tupleDims, v, k_IllegalNames); + + fs::remove(tmp_file); + + // Incorrect Tuple Dimensions + TestCase_TestImporterData_Error(k_TestInput.string(), 2, ReadCSVData::HeaderMode::CUSTOM, 1, {','}, {"Custom Array"}, {DataType::int8}, {false}, {0}, v, k_IncorrectTuples); + 
TestCase_TestImporterData_Error(k_TestInput.string(), 2, ReadCSVData::HeaderMode::CUSTOM, 1, {','}, {"Custom Array"}, {DataType::int8}, {false}, {30}, v, k_IncorrectTuples); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, ReadCSVData::HeaderMode::CUSTOM, 1, {','}, {"Custom Array"}, {DataType::int8}, {false}, {30, 2}, v, k_IncorrectTuples); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, ReadCSVData::HeaderMode::CUSTOM, 1, {','}, {"Custom Array"}, {DataType::int8}, {false}, {30, 5, 7}, v, k_IncorrectTuples); + + // Inconsistent Columns + TestCase_TestImporterData_Error(k_TestInput.string(), 2, ReadCSVData::HeaderMode::CUSTOM, 1, {','}, {"Custom Array", "Custom Array2"}, {DataType::int8, DataType::int8}, {false, false}, tupleDims, v, + k_InconsistentCols); +} + +TEST_CASE("ComplexCore::ReadCSVFileFilter (Case 6): Invalid filter execution - Blank Lines") +{ + // Create the parent directory path + fs::create_directories(k_TestInput.parent_path()); + + // First line blank tests + std::vector v = {"", std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max())}; + TestCase_TestPrimitives_Error(v, k_BlankLineErrorCode); + + v = {"", std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max())}; + TestCase_TestPrimitives_Error(v, k_BlankLineErrorCode); + + v = {"", std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max())}; + TestCase_TestPrimitives_Error(v, k_BlankLineErrorCode); + + v = {"", std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max())}; + TestCase_TestPrimitives_Error(v, k_BlankLineErrorCode); + + v = {"", std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max())}; + TestCase_TestPrimitives_Error(v, k_BlankLineErrorCode); + + v = {"", std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max())}; + TestCase_TestPrimitives_Error(v, k_BlankLineErrorCode); + + v = {"", 
std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max())}; + TestCase_TestPrimitives_Error(v, k_BlankLineErrorCode); + + v = {"", std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max())}; + TestCase_TestPrimitives_Error(v, k_BlankLineErrorCode); + + v = {"", std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max())}; + TestCase_TestPrimitives_Error(v, k_BlankLineErrorCode); + + v = {"", std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max())}; + TestCase_TestPrimitives_Error(v, k_BlankLineErrorCode); + + // Middle line blank tests + v = {std::to_string(std::numeric_limits::min()), "", std::to_string(std::numeric_limits::max())}; + TestCase_TestPrimitives_Error(v, k_BlankLineErrorCode); + + v = {std::to_string(std::numeric_limits::min()), "", std::to_string(std::numeric_limits::max())}; + TestCase_TestPrimitives_Error(v, k_BlankLineErrorCode); + + v = {std::to_string(std::numeric_limits::min()), "", std::to_string(std::numeric_limits::max())}; + TestCase_TestPrimitives_Error(v, k_BlankLineErrorCode); + + v = {std::to_string(std::numeric_limits::min()), "", std::to_string(std::numeric_limits::max())}; + TestCase_TestPrimitives_Error(v, k_BlankLineErrorCode); + + v = {std::to_string(std::numeric_limits::min()), "", std::to_string(std::numeric_limits::max())}; + TestCase_TestPrimitives_Error(v, k_BlankLineErrorCode); + + v = {std::to_string(std::numeric_limits::min()), "", std::to_string(std::numeric_limits::max())}; + TestCase_TestPrimitives_Error(v, k_BlankLineErrorCode); + + v = {std::to_string(std::numeric_limits::min()), "", std::to_string(std::numeric_limits::max())}; + TestCase_TestPrimitives_Error(v, k_BlankLineErrorCode); + + v = {std::to_string(std::numeric_limits::min()), "", std::to_string(std::numeric_limits::max())}; + TestCase_TestPrimitives_Error(v, k_BlankLineErrorCode); + + v = {std::to_string(std::numeric_limits::min()), "", 
std::to_string(std::numeric_limits::max())}; + TestCase_TestPrimitives_Error(v, k_BlankLineErrorCode); + + v = {std::to_string(std::numeric_limits::min()), "", std::to_string(std::numeric_limits::max())}; + TestCase_TestPrimitives_Error(v, k_BlankLineErrorCode); + + v = {std::to_string(std::numeric_limits::min()), "", std::to_string(std::numeric_limits::max())}; + TestCase_TestPrimitives_Error(v, k_BlankLineErrorCode); + + // End line blank tests + v = {std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max()), ""}; + TestCase_TestPrimitives_Error(v, k_BlankLineErrorCode); + + v = {std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max()), ""}; + TestCase_TestPrimitives_Error(v, k_BlankLineErrorCode); + + v = {std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max()), ""}; + TestCase_TestPrimitives_Error(v, k_BlankLineErrorCode); + + v = {std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max()), ""}; + TestCase_TestPrimitives_Error(v, k_BlankLineErrorCode); + + v = {std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max()), ""}; + TestCase_TestPrimitives_Error(v, k_BlankLineErrorCode); + + v = {std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max()), ""}; + TestCase_TestPrimitives_Error(v, k_BlankLineErrorCode); + + v = {std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max()), ""}; + TestCase_TestPrimitives_Error(v, k_BlankLineErrorCode); + + v = {std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max()), ""}; + TestCase_TestPrimitives_Error(v, k_BlankLineErrorCode); + + v = {std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max()), ""}; + TestCase_TestPrimitives_Error(v, k_BlankLineErrorCode); + + v = {std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max()), ""}; + 
TestCase_TestPrimitives_Error(v, k_BlankLineErrorCode); + + v = {std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max()), ""}; + TestCase_TestPrimitives_Error(v, k_BlankLineErrorCode); +} diff --git a/src/Plugins/ComplexCore/test/ImportTextTest.cpp b/src/Plugins/ComplexCore/test/ReadTextDataArrayTest.cpp similarity index 63% rename from src/Plugins/ComplexCore/test/ImportTextTest.cpp rename to src/Plugins/ComplexCore/test/ReadTextDataArrayTest.cpp index ba165bde40..8bd296fbac 100644 --- a/src/Plugins/ComplexCore/test/ImportTextTest.cpp +++ b/src/Plugins/ComplexCore/test/ReadTextDataArrayTest.cpp @@ -1,11 +1,11 @@ #include "ComplexCore/ComplexCore_test_dirs.hpp" -#include "ComplexCore/Filters/ImportTextFilter.hpp" +#include "ComplexCore/Filters/ReadTextDataArrayFilter.hpp" -#include "ComplexCore/Filters/ImportCSVDataFilter.hpp" +#include "ComplexCore/Filters/ReadCSVFileFilter.hpp" #include "complex/Common/TypesUtility.hpp" #include "complex/DataStructure/DataArray.hpp" #include "complex/Parameters/DynamicTableParameter.hpp" -#include "complex/Parameters/ImportCSVDataParameter.hpp" +#include "complex/Parameters/ReadCSVFileParameter.hpp" #include "complex/UnitTest/UnitTestCommon.hpp" #include "complex/Utilities/DataArrayUtilities.hpp" #include "complex/Utilities/StringUtilities.hpp" @@ -87,18 +87,18 @@ void RunInvalidTest() { writeInvalidFile(inputFilePath, inputCharErrorVector, delimiter); - ImportTextFilter filter; + ReadTextDataArrayFilter filter; DataStructure dataStructure; AttributeMatrix* am = AttributeMatrix::Create(dataStructure, k_GroupAName, tupleDims); Arguments args; - args.insertOrAssign(ImportTextFilter::k_InputFileKey, std::make_any(fs::path(inputFilePath))); - args.insertOrAssign(ImportTextFilter::k_ScalarTypeKey, std::make_any(GetNumericType())); - args.insertOrAssign(ImportTextFilter::k_NCompKey, std::make_any(1)); - args.insertOrAssign(ImportTextFilter::k_NSkipLinesKey, std::make_any(0)); - 
args.insertOrAssign(ImportTextFilter::k_DelimiterChoiceKey, std::make_any(4)); - args.insertOrAssign(ImportTextFilter::k_DataArrayKey, std::make_any(createdArrayPath)); - args.insertOrAssign(ImportTextFilter::k_DataFormat_Key, std::make_any("")); - args.insertOrAssign(ImportTextFilter::k_AdvancedOptions_Key, std::make_any(false)); + args.insertOrAssign(ReadTextDataArrayFilter::k_InputFileKey, std::make_any(fs::path(inputFilePath))); + args.insertOrAssign(ReadTextDataArrayFilter::k_ScalarTypeKey, std::make_any(GetNumericType())); + args.insertOrAssign(ReadTextDataArrayFilter::k_NCompKey, std::make_any(1)); + args.insertOrAssign(ReadTextDataArrayFilter::k_NSkipLinesKey, std::make_any(0)); + args.insertOrAssign(ReadTextDataArrayFilter::k_DelimiterChoiceKey, std::make_any(4)); + args.insertOrAssign(ReadTextDataArrayFilter::k_DataArrayKey, std::make_any(createdArrayPath)); + args.insertOrAssign(ReadTextDataArrayFilter::k_DataFormat_Key, std::make_any("")); + args.insertOrAssign(ReadTextDataArrayFilter::k_AdvancedOptions_Key, std::make_any(false)); // Preflight the filter and check result auto preflightResult = filter.preflight(dataStructure, args); @@ -120,18 +120,18 @@ void RunInvalidTest() tupleDims = {1}; - ImportTextFilter filter; + ReadTextDataArrayFilter filter; DataStructure dataStructure; AttributeMatrix* am = AttributeMatrix::Create(dataStructure, k_GroupAName, tupleDims); Arguments args; - args.insertOrAssign(ImportTextFilter::k_InputFileKey, std::make_any(fs::path(inputFilePath))); - args.insertOrAssign(ImportTextFilter::k_ScalarTypeKey, std::make_any(GetNumericType())); - args.insertOrAssign(ImportTextFilter::k_NCompKey, std::make_any(1)); - args.insertOrAssign(ImportTextFilter::k_NSkipLinesKey, std::make_any(0)); - args.insertOrAssign(ImportTextFilter::k_DelimiterChoiceKey, std::make_any(4)); - args.insertOrAssign(ImportTextFilter::k_DataArrayKey, std::make_any(createdArrayPath)); - args.insertOrAssign(ImportTextFilter::k_DataFormat_Key, std::make_any("")); 
- args.insertOrAssign(ImportTextFilter::k_AdvancedOptions_Key, std::make_any(false)); + args.insertOrAssign(ReadTextDataArrayFilter::k_InputFileKey, std::make_any(fs::path(inputFilePath))); + args.insertOrAssign(ReadTextDataArrayFilter::k_ScalarTypeKey, std::make_any(GetNumericType())); + args.insertOrAssign(ReadTextDataArrayFilter::k_NCompKey, std::make_any(1)); + args.insertOrAssign(ReadTextDataArrayFilter::k_NSkipLinesKey, std::make_any(0)); + args.insertOrAssign(ReadTextDataArrayFilter::k_DelimiterChoiceKey, std::make_any(4)); + args.insertOrAssign(ReadTextDataArrayFilter::k_DataArrayKey, std::make_any(createdArrayPath)); + args.insertOrAssign(ReadTextDataArrayFilter::k_DataFormat_Key, std::make_any("")); + args.insertOrAssign(ReadTextDataArrayFilter::k_AdvancedOptions_Key, std::make_any(false)); // Preflight the filter and check result auto preflightResult = filter.preflight(dataStructure, args); @@ -170,18 +170,18 @@ void RunInvalidTest() tupleDims = {1}; - ImportTextFilter filter; + ReadTextDataArrayFilter filter; DataStructure dataStructure; AttributeMatrix* am = AttributeMatrix::Create(dataStructure, k_GroupAName, tupleDims); Arguments args; - args.insertOrAssign(ImportTextFilter::k_InputFileKey, std::make_any(fs::path(inputFilePath))); - args.insertOrAssign(ImportTextFilter::k_ScalarTypeKey, std::make_any(GetNumericType())); - args.insertOrAssign(ImportTextFilter::k_NCompKey, std::make_any(1)); - args.insertOrAssign(ImportTextFilter::k_NSkipLinesKey, std::make_any(0)); - args.insertOrAssign(ImportTextFilter::k_DelimiterChoiceKey, std::make_any(4)); - args.insertOrAssign(ImportTextFilter::k_DataArrayKey, std::make_any(createdArrayPath)); - args.insertOrAssign(ImportTextFilter::k_DataFormat_Key, std::make_any("")); - args.insertOrAssign(ImportTextFilter::k_AdvancedOptions_Key, std::make_any(false)); + args.insertOrAssign(ReadTextDataArrayFilter::k_InputFileKey, std::make_any(fs::path(inputFilePath))); + 
args.insertOrAssign(ReadTextDataArrayFilter::k_ScalarTypeKey, std::make_any(GetNumericType())); + args.insertOrAssign(ReadTextDataArrayFilter::k_NCompKey, std::make_any(1)); + args.insertOrAssign(ReadTextDataArrayFilter::k_NSkipLinesKey, std::make_any(0)); + args.insertOrAssign(ReadTextDataArrayFilter::k_DelimiterChoiceKey, std::make_any(4)); + args.insertOrAssign(ReadTextDataArrayFilter::k_DataArrayKey, std::make_any(createdArrayPath)); + args.insertOrAssign(ReadTextDataArrayFilter::k_DataFormat_Key, std::make_any("")); + args.insertOrAssign(ReadTextDataArrayFilter::k_AdvancedOptions_Key, std::make_any(false)); // Preflight the filter and check result auto preflightResult = filter.preflight(dataStructure, args); @@ -210,16 +210,16 @@ void RunTest(char sep, int delimiter) int numberOfComponents = 1; int skipHeaderLines = 0; - ImportTextFilter filter; + ReadTextDataArrayFilter filter; Arguments args; - args.insertOrAssign(ImportTextFilter::k_InputFileKey, std::make_any(fs::path(inputFilePath))); - args.insertOrAssign(ImportTextFilter::k_ScalarTypeKey, std::make_any(scalarType)); - args.insertOrAssign(ImportTextFilter::k_NCompKey, std::make_any(numberOfComponents)); - args.insertOrAssign(ImportTextFilter::k_NSkipLinesKey, std::make_any(skipHeaderLines)); - args.insertOrAssign(ImportTextFilter::k_DelimiterChoiceKey, std::make_any(delimiter)); - args.insertOrAssign(ImportTextFilter::k_DataArrayKey, std::make_any(createdAttributeArrayPath)); - args.insertOrAssign(ImportTextFilter::k_DataFormat_Key, std::make_any("")); - args.insertOrAssign(ImportTextFilter::k_AdvancedOptions_Key, std::make_any(false)); + args.insertOrAssign(ReadTextDataArrayFilter::k_InputFileKey, std::make_any(fs::path(inputFilePath))); + args.insertOrAssign(ReadTextDataArrayFilter::k_ScalarTypeKey, std::make_any(scalarType)); + args.insertOrAssign(ReadTextDataArrayFilter::k_NCompKey, std::make_any(numberOfComponents)); + args.insertOrAssign(ReadTextDataArrayFilter::k_NSkipLinesKey, 
std::make_any(skipHeaderLines)); + args.insertOrAssign(ReadTextDataArrayFilter::k_DelimiterChoiceKey, std::make_any(delimiter)); + args.insertOrAssign(ReadTextDataArrayFilter::k_DataArrayKey, std::make_any(createdAttributeArrayPath)); + args.insertOrAssign(ReadTextDataArrayFilter::k_DataFormat_Key, std::make_any("")); + args.insertOrAssign(ReadTextDataArrayFilter::k_AdvancedOptions_Key, std::make_any(false)); // Preflight the filter and check result auto preflightResult = filter.preflight(dataStructure, args); @@ -243,7 +243,7 @@ void RunTest(char sep, int delimiter) } } -TEST_CASE("ComplexCore::ImportTextFilter: Valid filter execution", "[ComplexCore][ImportTextFilter]") +TEST_CASE("ComplexCore::ReadTextDataArrayFilter: Valid filter execution", "[ComplexCore][ReadTextDataArrayFilter]") { RunTest(',', 0); RunTest(',', 0); @@ -301,7 +301,7 @@ TEST_CASE("ComplexCore::ImportTextFilter: Valid filter execution", "[ComplexCore RunTest('\t', 4); } -TEST_CASE("ComplexCore::ImportTextFilter: Invalid filter execution", "[ComplexCore][ImportTextFilter]") +TEST_CASE("ComplexCore::ReadTextDataArrayFilter: Invalid filter execution", "[ComplexCore][ReadTextDataArrayFilter]") { // Reading alphabetical/special characters, and min/max overflow RunInvalidTest(); @@ -320,18 +320,18 @@ TEST_CASE("ComplexCore::ImportTextFilter: Invalid filter execution", "[ComplexCo // DataGroup parent but no tuple dimensions set { - ImportTextFilter filter; + ReadTextDataArrayFilter filter; DataStructure dataStructure; DataGroup* dataGroup = DataGroup::Create(dataStructure, k_GroupAName); Arguments args; - args.insertOrAssign(ImportTextFilter::k_InputFileKey, std::make_any(fs::path(inputFilePath))); - args.insertOrAssign(ImportTextFilter::k_ScalarTypeKey, std::make_any(NumericType::int32)); - args.insertOrAssign(ImportTextFilter::k_NCompKey, std::make_any(1)); - args.insertOrAssign(ImportTextFilter::k_NSkipLinesKey, std::make_any(0)); - args.insertOrAssign(ImportTextFilter::k_DelimiterChoiceKey, 
std::make_any(4)); - args.insertOrAssign(ImportTextFilter::k_DataArrayKey, std::make_any(createdArrayPath)); - args.insertOrAssign(ImportTextFilter::k_DataFormat_Key, std::make_any("")); - args.insertOrAssign(ImportTextFilter::k_AdvancedOptions_Key, std::make_any(false)); + args.insertOrAssign(ReadTextDataArrayFilter::k_InputFileKey, std::make_any(fs::path(inputFilePath))); + args.insertOrAssign(ReadTextDataArrayFilter::k_ScalarTypeKey, std::make_any(NumericType::int32)); + args.insertOrAssign(ReadTextDataArrayFilter::k_NCompKey, std::make_any(1)); + args.insertOrAssign(ReadTextDataArrayFilter::k_NSkipLinesKey, std::make_any(0)); + args.insertOrAssign(ReadTextDataArrayFilter::k_DelimiterChoiceKey, std::make_any(4)); + args.insertOrAssign(ReadTextDataArrayFilter::k_DataArrayKey, std::make_any(createdArrayPath)); + args.insertOrAssign(ReadTextDataArrayFilter::k_DataFormat_Key, std::make_any("")); + args.insertOrAssign(ReadTextDataArrayFilter::k_AdvancedOptions_Key, std::make_any(false)); // Preflight the filter and check result auto preflightResult = filter.preflight(dataStructure, args); @@ -344,19 +344,19 @@ TEST_CASE("ComplexCore::ImportTextFilter: Invalid filter execution", "[ComplexCo // DataGroup parent with tuple dimension of 0 { - ImportTextFilter filter; + ReadTextDataArrayFilter filter; DataStructure dataStructure; DataGroup* dataGroup = DataGroup::Create(dataStructure, k_GroupAName); Arguments args; - args.insertOrAssign(ImportTextFilter::k_InputFileKey, std::make_any(fs::path(inputFilePath))); - args.insertOrAssign(ImportTextFilter::k_ScalarTypeKey, std::make_any(NumericType::int32)); - args.insertOrAssign(ImportTextFilter::k_NCompKey, std::make_any(1)); - args.insertOrAssign(ImportTextFilter::k_NSkipLinesKey, std::make_any(0)); - args.insertOrAssign(ImportTextFilter::k_DelimiterChoiceKey, std::make_any(4)); - args.insertOrAssign(ImportTextFilter::k_DataArrayKey, std::make_any(createdArrayPath)); - args.insertOrAssign(ImportTextFilter::k_DataFormat_Key, 
std::make_any("")); - args.insertOrAssign(ImportTextFilter::k_AdvancedOptions_Key, std::make_any(false)); - args.insertOrAssign(ImportTextFilter::k_NTuplesKey, std::make_any(DynamicTableInfo::TableDataType{{0}})); + args.insertOrAssign(ReadTextDataArrayFilter::k_InputFileKey, std::make_any(fs::path(inputFilePath))); + args.insertOrAssign(ReadTextDataArrayFilter::k_ScalarTypeKey, std::make_any(NumericType::int32)); + args.insertOrAssign(ReadTextDataArrayFilter::k_NCompKey, std::make_any(1)); + args.insertOrAssign(ReadTextDataArrayFilter::k_NSkipLinesKey, std::make_any(0)); + args.insertOrAssign(ReadTextDataArrayFilter::k_DelimiterChoiceKey, std::make_any(4)); + args.insertOrAssign(ReadTextDataArrayFilter::k_DataArrayKey, std::make_any(createdArrayPath)); + args.insertOrAssign(ReadTextDataArrayFilter::k_DataFormat_Key, std::make_any("")); + args.insertOrAssign(ReadTextDataArrayFilter::k_AdvancedOptions_Key, std::make_any(false)); + args.insertOrAssign(ReadTextDataArrayFilter::k_NTuplesKey, std::make_any(DynamicTableInfo::TableDataType{{0}})); // Preflight the filter and check result auto preflightResult = filter.preflight(dataStructure, args); diff --git a/src/Plugins/ComplexCore/wrapping/python/complexpy.cpp b/src/Plugins/ComplexCore/wrapping/python/complexpy.cpp index 66e5f8a904..78de6fea30 100644 --- a/src/Plugins/ComplexCore/wrapping/python/complexpy.cpp +++ b/src/Plugins/ComplexCore/wrapping/python/complexpy.cpp @@ -68,13 +68,13 @@ #include #include #include -#include #include #include #include #include #include #include +#include #include #include #include @@ -428,28 +428,24 @@ PYBIND11_MODULE(complex, mod) arrayThresholdSet.def_property("thresholds", &ArrayThresholdSet::getArrayThresholds, &ArrayThresholdSet::setArrayThresholds); arrayThresholdSet.def("__repr__", [](const ArrayThresholdSet& self) { return "ArrayThresholdSet()"; }); - py::class_ csvWizardData(mod, "CSVWizardData"); - - py::enum_ csvHeaderMode(csvWizardData, "HeaderMode"); - 
csvHeaderMode.value("Line", CSVWizardData::HeaderMode::LINE); - csvHeaderMode.value("Custom", CSVWizardData::HeaderMode::CUSTOM); - csvHeaderMode.value("Defaults", CSVWizardData::HeaderMode::DEFAULTS); - - csvWizardData.def(py::init<>()); - csvWizardData.def_readwrite("input_file_path", &CSVWizardData::inputFilePath); - csvWizardData.def_readwrite("data_headers", &CSVWizardData::dataHeaders); - csvWizardData.def_readwrite("begin_index", &CSVWizardData::beginIndex); - csvWizardData.def_readwrite("number_of_lines", &CSVWizardData::numberOfLines); - csvWizardData.def_readwrite("data_types", &CSVWizardData::dataTypes); - csvWizardData.def_readwrite("delimiters", &CSVWizardData::delimiters); - csvWizardData.def_readwrite("header_line", &CSVWizardData::headerLine); - csvWizardData.def_readwrite("header_mode", &CSVWizardData::headerMode); - csvWizardData.def_readwrite("tab_as_delimiter", &CSVWizardData::tabAsDelimiter); - csvWizardData.def_readwrite("semicolon_as_delimiter", &CSVWizardData::semicolonAsDelimiter); - csvWizardData.def_readwrite("comma_as_delimiter", &CSVWizardData::commaAsDelimiter); - csvWizardData.def_readwrite("space_as_delimiter", &CSVWizardData::spaceAsDelimiter); - csvWizardData.def_readwrite("consecutive_delimiters", &CSVWizardData::consecutiveDelimiters); - csvWizardData.def("__repr__", [](const CSVWizardData& self) { return "CSVWizardData()"; }); + py::class_ readCSVData(mod, "ReadCSVData"); + + py::enum_ csvHeaderMode(readCSVData, "HeaderMode"); + csvHeaderMode.value("Line", ReadCSVData::HeaderMode::LINE); + csvHeaderMode.value("Custom", ReadCSVData::HeaderMode::CUSTOM); + + readCSVData.def(py::init<>()); + readCSVData.def_readwrite("input_file_path", &ReadCSVData::inputFilePath); + readCSVData.def_readwrite("custom_headers", &ReadCSVData::customHeaders); + readCSVData.def_readwrite("start_import_row", &ReadCSVData::startImportRow); + readCSVData.def_readwrite("column_data_types", &ReadCSVData::dataTypes); + 
readCSVData.def_readwrite("skipped_array_mask", &ReadCSVData::skippedArrayMask); + readCSVData.def_readwrite("headers_line", &ReadCSVData::headersLine); + readCSVData.def_readwrite("header_mode", &ReadCSVData::headerMode); + readCSVData.def_readwrite("tuple_dims", &ReadCSVData::tupleDims); + readCSVData.def_readwrite("delimiters", &ReadCSVData::delimiters); + readCSVData.def_readwrite("consecutive_delimiters", &ReadCSVData::consecutiveDelimiters); + readCSVData.def("__repr__", [](const ReadCSVData& self) { return "ReadCSVData()"; }); py::class_> abstractPlugin(mod, "AbstractPlugin"); py::class_> pythonPlugin(mod, "PythonPlugin"); @@ -716,7 +712,7 @@ PYBIND11_MODULE(complex, mod) auto generateColorTableParameter = COMPLEX_PY_BIND_PARAMETER(mod, GenerateColorTableParameter); auto generatedFileListParameter = COMPLEX_PY_BIND_PARAMETER(mod, GeneratedFileListParameter); auto geometrySelectionParameter = COMPLEX_PY_BIND_PARAMETER(mod, GeometrySelectionParameter); - auto importCSVDataParameter = COMPLEX_PY_BIND_PARAMETER(mod, ImportCSVDataParameter); + auto importTextDataParameter = COMPLEX_PY_BIND_PARAMETER(mod, ReadCSVFileParameter); auto importHDF5DatasetParameter = COMPLEX_PY_BIND_PARAMETER(mod, ImportHDF5DatasetParameter); auto multiArraySelectionParameter = COMPLEX_PY_BIND_PARAMETER(mod, MultiArraySelectionParameter); auto multiPathSelectionParameter = COMPLEX_PY_BIND_PARAMETER(mod, MultiPathSelectionParameter); @@ -876,7 +872,7 @@ PYBIND11_MODULE(complex, mod) geometrySelectionParameter.def(py::init(), "name"_a, "human_name"_a, "help_text"_a, "default_value"_a, "allowed_types"_a); - BindParameterConstructor(importCSVDataParameter); + BindParameterConstructor(importTextDataParameter); BindParameterConstructor(importHDF5DatasetParameter); @@ -1057,7 +1053,7 @@ PYBIND11_MODULE(complex, mod) internals->addConversion(); internals->addConversion(); internals->addConversion(); - internals->addConversion(); + internals->addConversion(); internals->addConversion(); 
internals->addConversion(); internals->addConversion(); diff --git a/src/Plugins/OrientationAnalysis/test/AlignSectionsMisorientationTest.cpp b/src/Plugins/OrientationAnalysis/test/AlignSectionsMisorientationTest.cpp index 1e793c67f0..9f0d733c0a 100644 --- a/src/Plugins/OrientationAnalysis/test/AlignSectionsMisorientationTest.cpp +++ b/src/Plugins/OrientationAnalysis/test/AlignSectionsMisorientationTest.cpp @@ -79,7 +79,7 @@ TEST_CASE("OrientationAnalysis::AlignSectionsMisorientation Small IN100 Pipeline // Compare the output of the shifts file with the exemplar file - auto filter = filterList->createFilter(k_ImportTextFilterHandle); + auto filter = filterList->createFilter(k_ReadTextDataArrayFilterHandle); REQUIRE(nullptr != filter); Arguments args; @@ -150,7 +150,7 @@ TEST_CASE("OrientationAnalysis::AlignSectionsMisorientation Small IN100 Pipeline // Compare the output of the shifts file with the exemplar file - auto filter = filterList->createFilter(k_ImportTextFilterHandle); + auto filter = filterList->createFilter(k_ReadTextDataArrayFilterHandle); REQUIRE(nullptr != filter); Arguments args; diff --git a/src/Plugins/OrientationAnalysis/test/AlignSectionsMutualInformationTest.cpp b/src/Plugins/OrientationAnalysis/test/AlignSectionsMutualInformationTest.cpp index 699e0a4014..c3a263df7c 100644 --- a/src/Plugins/OrientationAnalysis/test/AlignSectionsMutualInformationTest.cpp +++ b/src/Plugins/OrientationAnalysis/test/AlignSectionsMutualInformationTest.cpp @@ -85,7 +85,7 @@ TEST_CASE("OrientationAnalysis::AlignSectionsMutualInformationFilter: Valid filt // Compare the output of the shifts file with the exemplar file - auto filter = filterList->createFilter(k_ImportTextFilterHandle); + auto filter = filterList->createFilter(k_ReadTextDataArrayFilterHandle); REQUIRE(nullptr != filter); Arguments args; @@ -120,7 +120,7 @@ TEST_CASE("OrientationAnalysis::AlignSectionsMutualInformationFilter: Valid filt // Compare the output of the shifts file with the exemplar file - 
auto filter = filterList->createFilter(k_ImportTextFilterHandle); + auto filter = filterList->createFilter(k_ReadTextDataArrayFilterHandle); REQUIRE(nullptr != filter); Arguments args; diff --git a/src/Plugins/OrientationAnalysis/test/ExportGBCDGMTFileTest.cpp b/src/Plugins/OrientationAnalysis/test/ExportGBCDGMTFileTest.cpp index 05d8013a96..26798af9b3 100644 --- a/src/Plugins/OrientationAnalysis/test/ExportGBCDGMTFileTest.cpp +++ b/src/Plugins/OrientationAnalysis/test/ExportGBCDGMTFileTest.cpp @@ -11,7 +11,7 @@ #include "complex/Parameters/GeometrySelectionParameter.hpp" #include "complex/Parameters/NumberParameter.hpp" #include "complex/Parameters/VectorParameter.hpp" -#include "complex/Parameters/util/CSVWizardData.hpp" +#include "complex/Parameters/util/ReadCSVData.hpp" #include "complex/UnitTest/UnitTestCommon.hpp" #include @@ -24,7 +24,7 @@ namespace { inline constexpr StringLiteral k_FaceEnsembleDataPath("FaceEnsembleData [NX]"); -inline constexpr StringLiteral k_WizardData_Key = "wizard_data"; +inline constexpr StringLiteral k_ReadCSVData_Key = "read_csv_data"; inline constexpr StringLiteral k_TupleDims_Key = "tuple_dimensions"; inline constexpr StringLiteral k_UseExistingGroup_Key = "use_existing_group"; inline constexpr StringLiteral k_SelectedDataGroup_Key = "selected_data_group"; @@ -94,22 +94,21 @@ TEST_CASE("OrientationAnalysis::ExportGBCDGMTFileFilter", "[OrientationAnalysis] } // Compare the Output Pole Figure - auto importDataFilter = filterList->createFilter(k_ImportCSVDataFilterHandle); + auto importDataFilter = filterList->createFilter(k_ReadCSVFileFilterHandle); REQUIRE(nullptr != importDataFilter); { Arguments args; - CSVWizardData data; + ReadCSVData data; data.inputFilePath = fmt::format("{}/6_6_Small_IN100_GBCD/small_in100_sigma_3_1.dat", unit_test::k_TestFilesDir); - data.dataHeaders = {k_ExemplarGMT1, k_ExemplarGMT2, k_ExemplarGMT3}; + data.customHeaders = {k_ExemplarGMT1, k_ExemplarGMT2, k_ExemplarGMT3}; data.dataTypes = 
{DataType::float32, DataType::float32, DataType::float32}; - data.beginIndex = 2; - data.spaceAsDelimiter = true; + data.skippedArrayMask = {false, false, false}; + data.startImportRow = 2; data.delimiters = {' '}; - data.numberOfLines = 3752; + data.tupleDims = {3751}; - args.insertOrAssign(k_WizardData_Key, std::make_any(data)); - args.insertOrAssign(k_TupleDims_Key, std::make_any(DynamicTableInfo::TableDataType{{static_cast(3751)}})); + args.insertOrAssign(k_ReadCSVData_Key, std::make_any(data)); args.insertOrAssign(k_UseExistingGroup_Key, std::make_any(false)); args.insertOrAssign(k_CreatedDataGroup_Key, std::make_any(gmtGroupPath)); args.insertOrAssign(k_SelectedDataGroup_Key, std::make_any(faceEnsemblePath)); @@ -120,17 +119,16 @@ TEST_CASE("OrientationAnalysis::ExportGBCDGMTFileFilter", "[OrientationAnalysis] { Arguments args; - CSVWizardData data; + ReadCSVData data; data.inputFilePath = outputFile.string(); - data.dataHeaders = {k_GMT1, k_GMT2, k_GMT3}; + data.customHeaders = {k_GMT1, k_GMT2, k_GMT3}; data.dataTypes = {DataType::float32, DataType::float32, DataType::float32}; - data.beginIndex = 2; - data.spaceAsDelimiter = true; + data.skippedArrayMask = {false, false, false}; + data.startImportRow = 2; data.delimiters = {' '}; - data.numberOfLines = 3752; + data.tupleDims = {3751}; - args.insertOrAssign(k_WizardData_Key, std::make_any(data)); - args.insertOrAssign(k_TupleDims_Key, std::make_any(DynamicTableInfo::TableDataType{{static_cast(3751)}})); + args.insertOrAssign(k_ReadCSVData_Key, std::make_any(data)); args.insertOrAssign(k_UseExistingGroup_Key, std::make_any(true)); args.insertOrAssign(k_CreatedDataGroup_Key, std::make_any(gmtGroupPath)); args.insertOrAssign(k_SelectedDataGroup_Key, std::make_any(gmtGroupPath)); @@ -182,22 +180,21 @@ TEST_CASE("OrientationAnalysis::ExportGBCDGMTFileFilter", "[OrientationAnalysis] } // Compare the Output Pole Figure - auto importDataFilter = filterList->createFilter(k_ImportCSVDataFilterHandle); + auto 
importDataFilter = filterList->createFilter(k_ReadCSVFileFilterHandle); REQUIRE(nullptr != importDataFilter); { Arguments args; - CSVWizardData data; + ReadCSVData data; data.inputFilePath = fmt::format("{}/6_6_Small_IN100_GBCD/small_in100_sigma_9_1.dat", unit_test::k_TestFilesDir); - data.dataHeaders = {k_ExemplarGMT1, k_ExemplarGMT2, k_ExemplarGMT3}; + data.customHeaders = {k_ExemplarGMT1, k_ExemplarGMT2, k_ExemplarGMT3}; data.dataTypes = {DataType::float32, DataType::float32, DataType::float32}; - data.beginIndex = 2; - data.spaceAsDelimiter = true; + data.skippedArrayMask = {false, false, false}; + data.startImportRow = 2; data.delimiters = {' '}; - data.numberOfLines = 3752; + data.tupleDims = {3751}; - args.insertOrAssign(k_WizardData_Key, std::make_any(data)); - args.insertOrAssign(k_TupleDims_Key, std::make_any(DynamicTableInfo::TableDataType{{static_cast(3751)}})); + args.insertOrAssign(k_ReadCSVData_Key, std::make_any(data)); args.insertOrAssign(k_UseExistingGroup_Key, std::make_any(false)); args.insertOrAssign(k_CreatedDataGroup_Key, std::make_any(gmtGroupPath)); args.insertOrAssign(k_SelectedDataGroup_Key, std::make_any(faceEnsemblePath)); @@ -208,17 +205,16 @@ TEST_CASE("OrientationAnalysis::ExportGBCDGMTFileFilter", "[OrientationAnalysis] { Arguments args; - CSVWizardData data; + ReadCSVData data; data.inputFilePath = outputFile.string(); - data.dataHeaders = {k_GMT1, k_GMT2, k_GMT3}; + data.customHeaders = {k_GMT1, k_GMT2, k_GMT3}; data.dataTypes = {DataType::float32, DataType::float32, DataType::float32}; - data.beginIndex = 2; - data.spaceAsDelimiter = true; + data.skippedArrayMask = {false, false, false}; + data.startImportRow = 2; data.delimiters = {' '}; - data.numberOfLines = 3752; + data.tupleDims = {3751}; - args.insertOrAssign(k_WizardData_Key, std::make_any(data)); - args.insertOrAssign(k_TupleDims_Key, std::make_any(DynamicTableInfo::TableDataType{{static_cast(3751)}})); + args.insertOrAssign(k_ReadCSVData_Key, std::make_any(data)); 
args.insertOrAssign(k_UseExistingGroup_Key, std::make_any(true)); args.insertOrAssign(k_CreatedDataGroup_Key, std::make_any(gmtGroupPath)); args.insertOrAssign(k_SelectedDataGroup_Key, std::make_any(gmtGroupPath)); @@ -270,22 +266,21 @@ TEST_CASE("OrientationAnalysis::ExportGBCDGMTFileFilter", "[OrientationAnalysis] } // Compare the Output Pole Figure - auto importDataFilter = filterList->createFilter(k_ImportCSVDataFilterHandle); + auto importDataFilter = filterList->createFilter(k_ReadCSVFileFilterHandle); REQUIRE(nullptr != importDataFilter); { Arguments args; - CSVWizardData data; + ReadCSVData data; data.inputFilePath = fmt::format("{}/6_6_Small_IN100_GBCD/small_in100_sigma_11_1.dat", unit_test::k_TestFilesDir); - data.dataHeaders = {k_ExemplarGMT1, k_ExemplarGMT2, k_ExemplarGMT3}; + data.customHeaders = {k_ExemplarGMT1, k_ExemplarGMT2, k_ExemplarGMT3}; data.dataTypes = {DataType::float32, DataType::float32, DataType::float32}; - data.beginIndex = 2; - data.spaceAsDelimiter = true; + data.skippedArrayMask = {false, false, false}; + data.startImportRow = 2; data.delimiters = {' '}; - data.numberOfLines = 3752; + data.tupleDims = {3751}; - args.insertOrAssign(k_WizardData_Key, std::make_any(data)); - args.insertOrAssign(k_TupleDims_Key, std::make_any(DynamicTableInfo::TableDataType{{static_cast(3751)}})); + args.insertOrAssign(k_ReadCSVData_Key, std::make_any(data)); args.insertOrAssign(k_UseExistingGroup_Key, std::make_any(false)); args.insertOrAssign(k_CreatedDataGroup_Key, std::make_any(gmtGroupPath)); args.insertOrAssign(k_SelectedDataGroup_Key, std::make_any(faceEnsemblePath)); @@ -296,17 +291,16 @@ TEST_CASE("OrientationAnalysis::ExportGBCDGMTFileFilter", "[OrientationAnalysis] { Arguments args; - CSVWizardData data; + ReadCSVData data; data.inputFilePath = outputFile.string(); - data.dataHeaders = {k_GMT1, k_GMT2, k_GMT3}; + data.customHeaders = {k_GMT1, k_GMT2, k_GMT3}; data.dataTypes = {DataType::float32, DataType::float32, DataType::float32}; - 
data.beginIndex = 2; - data.spaceAsDelimiter = true; + data.skippedArrayMask = {false, false, false}; + data.startImportRow = 2; data.delimiters = {' '}; - data.numberOfLines = 3752; + data.tupleDims = {3751}; - args.insertOrAssign(k_WizardData_Key, std::make_any(data)); - args.insertOrAssign(k_TupleDims_Key, std::make_any(DynamicTableInfo::TableDataType{{static_cast(3751)}})); + args.insertOrAssign(k_ReadCSVData_Key, std::make_any(data)); args.insertOrAssign(k_UseExistingGroup_Key, std::make_any(true)); args.insertOrAssign(k_CreatedDataGroup_Key, std::make_any(gmtGroupPath)); args.insertOrAssign(k_SelectedDataGroup_Key, std::make_any(gmtGroupPath)); diff --git a/src/Plugins/OrientationAnalysis/test/ExportGBCDTriangleDataTest.cpp b/src/Plugins/OrientationAnalysis/test/ExportGBCDTriangleDataTest.cpp index 7de88be315..a5c274507b 100644 --- a/src/Plugins/OrientationAnalysis/test/ExportGBCDTriangleDataTest.cpp +++ b/src/Plugins/OrientationAnalysis/test/ExportGBCDTriangleDataTest.cpp @@ -6,7 +6,7 @@ #include "complex/Parameters/DynamicTableParameter.hpp" #include "complex/Parameters/FileSystemPathParameter.hpp" -#include "complex/Parameters/util/CSVWizardData.hpp" +#include "complex/Parameters/util/ReadCSVData.hpp" #include "complex/UnitTest/UnitTestCommon.hpp" #include @@ -17,7 +17,7 @@ using namespace complex::UnitTest; namespace { -inline constexpr StringLiteral k_WizardData_Key = "wizard_data"; +inline constexpr StringLiteral k_ReadCSVData_Key = "read_csv_data"; inline constexpr StringLiteral k_TupleDims_Key = "tuple_dimensions"; inline constexpr StringLiteral k_UseExistingGroup_Key = "use_existing_group"; inline constexpr StringLiteral k_SelectedDataGroup_Key = "selected_data_group"; @@ -84,24 +84,23 @@ TEST_CASE("OrientationAnalysis::ExportGBCDTriangleDataFilter: Valid filter execu } // Compare the Output triangles files - auto importDataFilter = filterList->createFilter(k_ImportCSVDataFilterHandle); + auto importDataFilter = 
filterList->createFilter(k_ReadCSVFileFilterHandle); REQUIRE(nullptr != importDataFilter); // read in exemplar { Arguments args; - CSVWizardData data; + ReadCSVData data; data.inputFilePath = fmt::format("{}/6_6_Small_IN100_GBCD/6_6_Small_IN100_GBCD_Triangles.ph", unit_test::k_TestFilesDir); - data.dataHeaders = {k_Phi1Right, k_PhiRight, k_Phi2Right, k_Phi1Left, k_PhiLeft, k_Phi2Left, k_TriangleNormal0, k_TriangleNormal1, k_TriangleNormal2, k_SurfaceArea}; + data.customHeaders = {k_Phi1Right, k_PhiRight, k_Phi2Right, k_Phi1Left, k_PhiLeft, k_Phi2Left, k_TriangleNormal0, k_TriangleNormal1, k_TriangleNormal2, k_SurfaceArea}; data.dataTypes = {DataType::float32, DataType::float32, DataType::float32, DataType::float32, DataType::float32, DataType::float32, DataType::float64, DataType::float64, DataType::float64, DataType::float64}; - data.beginIndex = 6; - data.spaceAsDelimiter = true; + data.skippedArrayMask = {false, false, false, false, false, false, false, false, false, false}; + data.startImportRow = 6; data.delimiters = {' '}; - data.numberOfLines = 636479; + data.tupleDims = {636474}; - args.insertOrAssign(k_WizardData_Key, std::make_any(data)); - args.insertOrAssign(k_TupleDims_Key, std::make_any(DynamicTableInfo::TableDataType{{static_cast(636474)}})); + args.insertOrAssign(k_ReadCSVData_Key, std::make_any(data)); args.insertOrAssign(k_UseExistingGroup_Key, std::make_any(false)); args.insertOrAssign(k_CreatedDataGroup_Key, std::make_any(exemplarResultsGroupPath)); args.insertOrAssign(k_SelectedDataGroup_Key, std::make_any(DataPath{})); @@ -113,18 +112,17 @@ TEST_CASE("OrientationAnalysis::ExportGBCDTriangleDataFilter: Valid filter execu // read in generated { Arguments args; - CSVWizardData data; + ReadCSVData data; data.inputFilePath = outputFile.string(); - data.dataHeaders = {k_Phi1Right, k_PhiRight, k_Phi2Right, k_Phi1Left, k_PhiLeft, k_Phi2Left, k_TriangleNormal0, k_TriangleNormal1, k_TriangleNormal2, k_SurfaceArea}; + data.customHeaders = {k_Phi1Right, 
k_PhiRight, k_Phi2Right, k_Phi1Left, k_PhiLeft, k_Phi2Left, k_TriangleNormal0, k_TriangleNormal1, k_TriangleNormal2, k_SurfaceArea}; data.dataTypes = {DataType::float32, DataType::float32, DataType::float32, DataType::float32, DataType::float32, DataType::float32, DataType::float64, DataType::float64, DataType::float64, DataType::float64}; - data.beginIndex = 5; - data.spaceAsDelimiter = true; + data.skippedArrayMask = {false, false, false, false, false, false, false, false, false, false}; + data.startImportRow = 5; data.delimiters = {' '}; - data.numberOfLines = 636478; + data.tupleDims = {636474}; - args.insertOrAssign(k_WizardData_Key, std::make_any(data)); - args.insertOrAssign(k_TupleDims_Key, std::make_any(DynamicTableInfo::TableDataType{{static_cast(636474)}})); + args.insertOrAssign(k_ReadCSVData_Key, std::make_any(data)); args.insertOrAssign(k_UseExistingGroup_Key, std::make_any(false)); args.insertOrAssign(k_CreatedDataGroup_Key, std::make_any(generatedResultsGroupPath)); args.insertOrAssign(k_SelectedDataGroup_Key, std::make_any(generatedResultsGroupPath)); diff --git a/src/Plugins/OrientationAnalysis/test/FindAvgOrientationsTest.cpp b/src/Plugins/OrientationAnalysis/test/FindAvgOrientationsTest.cpp index 7ccfc7815f..d92bfedd7b 100644 --- a/src/Plugins/OrientationAnalysis/test/FindAvgOrientationsTest.cpp +++ b/src/Plugins/OrientationAnalysis/test/FindAvgOrientationsTest.cpp @@ -58,8 +58,8 @@ static constexpr StringLiteral k_DelimiterChoiceKey = "delimiter_choice"; static constexpr StringLiteral k_DataArrayKey = "output_data_array"; } // namespace FindAvgOrientationsTest -void runImportTextFilter(const std::string k_InputFileName, complex::NumericType k_NumericType, const uint64 k_NumTuples, const uint64 k_NumComponents, const DataPath k_InputFileDataPath, - DataStructure& dataStructure) +void runReadTextDataArrayFilter(const std::string k_InputFileName, complex::NumericType k_NumericType, const uint64 k_NumTuples, const uint64 k_NumComponents, const 
DataPath k_InputFileDataPath, + DataStructure& dataStructure) { auto* filterList = Application::Instance()->getFilterList(); @@ -73,7 +73,7 @@ void runImportTextFilter(const std::string k_InputFileName, complex::NumericType args.insertOrAssign(FindAvgOrientationsTest::k_DelimiterChoiceKey, std::make_any(0)); args.insertOrAssign(FindAvgOrientationsTest::k_DataArrayKey, std::make_any(k_InputFileDataPath)); - auto filter = filterList->createFilter(k_ImportTextFilterHandle); + auto filter = filterList->createFilter(k_ReadTextDataArrayFilterHandle); REQUIRE(nullptr != filter); // Preflight the filter and check result @@ -126,11 +126,11 @@ TEST_CASE("OrientationAnalysis::FindAvgOrientations", "[OrientationAnalysis][Fin (*crystalStructuresPtr)[1] = 1; // Cubic Laue Class // Run the "Import Text" Filter to import the data for the FeatureIds, Phases, Quats and Exemplar AvgQuats and AvgEulers - runImportTextFilter(k_Phases, NumericType::int32, k_NumTuples, 1, k_PhasesDataPath, dataStructure); - runImportTextFilter(k_Quats, NumericType::float32, k_NumTuples, 4, k_QuatsDataPath, dataStructure); - runImportTextFilter(k_FeatureIds, NumericType::int32, k_NumTuples, 1, k_FeatureIdsDataPath, dataStructure); - runImportTextFilter(k_AvgQuats, NumericType::float32, k_FeatureNumTuples, 4, k_ExemplarAvgQuatsDataPath, dataStructure); - runImportTextFilter(k_AvgEulers, NumericType::float32, k_FeatureNumTuples, 3, k_ExemplarAvgEulersDataPath, dataStructure); + runReadTextDataArrayFilter(k_Phases, NumericType::int32, k_NumTuples, 1, k_PhasesDataPath, dataStructure); + runReadTextDataArrayFilter(k_Quats, NumericType::float32, k_NumTuples, 4, k_QuatsDataPath, dataStructure); + runReadTextDataArrayFilter(k_FeatureIds, NumericType::int32, k_NumTuples, 1, k_FeatureIdsDataPath, dataStructure); + runReadTextDataArrayFilter(k_AvgQuats, NumericType::float32, k_FeatureNumTuples, 4, k_ExemplarAvgQuatsDataPath, dataStructure); + runReadTextDataArrayFilter(k_AvgEulers, NumericType::float32, 
k_FeatureNumTuples, 3, k_ExemplarAvgEulersDataPath, dataStructure); // Create the cell feature attribute matrix where the output arrays will be stored const Int32Array& featureIds = dataStructure.getDataRefAs(k_FeatureIdsDataPath); diff --git a/src/Plugins/OrientationAnalysis/test/FindGBCDMetricBasedTest.cpp b/src/Plugins/OrientationAnalysis/test/FindGBCDMetricBasedTest.cpp index 9c20b4ecb3..8869edc1f3 100644 --- a/src/Plugins/OrientationAnalysis/test/FindGBCDMetricBasedTest.cpp +++ b/src/Plugins/OrientationAnalysis/test/FindGBCDMetricBasedTest.cpp @@ -102,7 +102,7 @@ TEST_CASE("OrientationAnalysis::FindGBCDMetricBasedFilter: Valid Filter Executio static constexpr StringLiteral k_NSkipLinesKey = "n_skip_lines"; static constexpr StringLiteral k_DelimiterChoiceKey = "delimiter_choice"; static constexpr StringLiteral k_DataArrayKey = "output_data_array"; - auto filter = filterListPtr->createFilter(k_ImportTextFilterHandle); + auto filter = filterListPtr->createFilter(k_ReadTextDataArrayFilterHandle); REQUIRE(nullptr != filter); // exemplar distribution { diff --git a/src/Plugins/OrientationAnalysis/test/FindGBPDMetricBasedTest.cpp b/src/Plugins/OrientationAnalysis/test/FindGBPDMetricBasedTest.cpp index d179cd49ec..9876ff2114 100644 --- a/src/Plugins/OrientationAnalysis/test/FindGBPDMetricBasedTest.cpp +++ b/src/Plugins/OrientationAnalysis/test/FindGBPDMetricBasedTest.cpp @@ -99,7 +99,7 @@ TEST_CASE("OrientationAnalysis::FindGBPDMetricBasedFilter: Valid Filter Executio static constexpr StringLiteral k_NSkipLinesKey = "n_skip_lines"; static constexpr StringLiteral k_DelimiterChoiceKey = "delimiter_choice"; static constexpr StringLiteral k_DataArrayKey = "output_data_array"; - auto filter = filterList->createFilter(k_ImportTextFilterHandle); + auto filter = filterList->createFilter(k_ReadTextDataArrayFilterHandle); REQUIRE(nullptr != filter); // exemplar distribution { diff --git a/src/Plugins/OrientationAnalysis/test/OrientationAnalysisTestUtils.hpp 
b/src/Plugins/OrientationAnalysis/test/OrientationAnalysisTestUtils.hpp index d32c59e6bb..6a94b2b96c 100644 --- a/src/Plugins/OrientationAnalysis/test/OrientationAnalysisTestUtils.hpp +++ b/src/Plugins/OrientationAnalysis/test/OrientationAnalysisTestUtils.hpp @@ -40,8 +40,8 @@ inline const std::string MaterialName("MaterialName"); namespace complex { // Make sure we can instantiate the Import Text Filter -const Uuid k_ImportTextFilterId = *Uuid::FromString("25f7df3e-ca3e-4634-adda-732c0e56efd4"); -const FilterHandle k_ImportTextFilterHandle(k_ImportTextFilterId, k_ComplexCorePluginId); +const Uuid k_ReadTextDataArrayFilterId = *Uuid::FromString("25f7df3e-ca3e-4634-adda-732c0e56efd4"); +const FilterHandle k_ReadTextDataArrayFilterHandle(k_ReadTextDataArrayFilterId, k_ComplexCorePluginId); // Make sure we can instantiate the Read DREAM3D Data File const Uuid k_ImportDream3dFilterId = *Uuid::FromString("0dbd31c7-19e0-4077-83ef-f4a6459a0e2d"); const FilterHandle k_ImportDream3dFilterHandle(k_ImportDream3dFilterId, k_ComplexCorePluginId); @@ -60,8 +60,8 @@ const FilterHandle k_RemoveMinimumSizeFeaturesFilterHandle(k_RemoveMinimumSizeFe // Make sure we can instantiate the CalculateFeatureSizesFilter const Uuid k_CalculateFeatureSizesFilterId = *Uuid::FromString("c666ee17-ca58-4969-80d0-819986c72485"); const FilterHandle k_CalculateFeatureSizesFilterHandle(k_CalculateFeatureSizesFilterId, k_ComplexCorePluginId); -const Uuid k_ImportCSVDataFilterId = *Uuid::FromString("373be1f8-31cf-49f6-aa5d-e356f4f3f261"); -const FilterHandle k_ImportCSVDataFilterHandle(k_ImportCSVDataFilterId, k_ComplexCorePluginId); +const Uuid k_ReadCSVFileFilterId = *Uuid::FromString("373be1f8-31cf-49f6-aa5d-e356f4f3f261"); +const FilterHandle k_ReadCSVFileFilterHandle(k_ReadCSVFileFilterId, k_ComplexCorePluginId); const Uuid k_OrientationAnalysisPluginId = *Uuid::FromString("c09cf01b-014e-5adb-84eb-ea76fc79eeb1"); // Make sure we can instantiate the Convert Orientations diff --git 
a/src/Plugins/OrientationAnalysis/test/RotateEulerRefFrameTest.cpp b/src/Plugins/OrientationAnalysis/test/RotateEulerRefFrameTest.cpp index 1fb1727cd3..2d2b0724a9 100644 --- a/src/Plugins/OrientationAnalysis/test/RotateEulerRefFrameTest.cpp +++ b/src/Plugins/OrientationAnalysis/test/RotateEulerRefFrameTest.cpp @@ -66,7 +66,7 @@ TEST_CASE("OrientationAnalysis::RotateEulerRefFrame", "[OrientationAnalysis]") args.insertOrAssign(k_DelimiterChoiceKey, std::make_any(0)); args.insertOrAssign(k_DataArrayKey, std::make_any(k_EulerAnglesDataPath)); - auto filter = filterList->createFilter(k_ImportTextFilterHandle); + auto filter = filterList->createFilter(k_ReadTextDataArrayFilterHandle); REQUIRE(nullptr != filter); // Preflight the filter and check result @@ -88,7 +88,7 @@ TEST_CASE("OrientationAnalysis::RotateEulerRefFrame", "[OrientationAnalysis]") args.insertOrAssign(k_DelimiterChoiceKey, std::make_any(0)); args.insertOrAssign(k_DataArrayKey, std::make_any(k_EulersRotatedDataPath)); - auto filter = filterList->createFilter(k_ImportTextFilterHandle); + auto filter = filterList->createFilter(k_ReadTextDataArrayFilterHandle); REQUIRE(nullptr != filter); // Preflight the filter and check result diff --git a/src/complex/Common/TypesUtility.hpp b/src/complex/Common/TypesUtility.hpp index 84fdbc516b..74d4cf7b9e 100644 --- a/src/complex/Common/TypesUtility.hpp +++ b/src/complex/Common/TypesUtility.hpp @@ -260,6 +260,63 @@ inline const std::vector& GetAllDataTypesAsStrings() return dataTypes; } +inline constexpr StringLiteral DataTypeToHumanString(DataType dataType) +{ + switch(dataType) + { + case DataType::int8: { + return "signed int 8 bit"; + } + case DataType::uint8: { + return "unsigned int 8 bit"; + } + case DataType::int16: { + return "signed int 16 bit"; + } + case DataType::uint16: { + return "unsigned int 16 bit"; + } + case DataType::int32: { + return "signed int 32 bit"; + } + case DataType::uint32: { + return "unsigned int 32 bit"; + } + case DataType::int64: { + 
return "signed int 64 bit"; + } + case DataType::uint64: { + return "unsigned int 64 bit"; + } + case DataType::float32: { + return "float 32"; + } + case DataType::float64: { + return "double 64"; + } + case DataType::boolean: { + return "boolean"; + } + default: + throw std::runtime_error("complex::DataTypeToString: Unknown DataType"); + } +} + +/** + * + * @param humanReadable Strings that would be good for a User interface + * @return + */ +inline const std::vector& GetAllDataTypesAsHumanStrings() +{ + static const std::vector dataTypes = { + DataTypeToHumanString(complex::DataType::int8), DataTypeToHumanString(complex::DataType::uint8), DataTypeToHumanString(complex::DataType::int16), + DataTypeToHumanString(complex::DataType::uint16), DataTypeToHumanString(complex::DataType::int32), DataTypeToHumanString(complex::DataType::uint32), + DataTypeToHumanString(complex::DataType::int64), DataTypeToHumanString(complex::DataType::uint64), DataTypeToHumanString(complex::DataType::float32), + DataTypeToHumanString(complex::DataType::float64), DataTypeToHumanString(complex::DataType::boolean)}; + return dataTypes; +} + /** * @brief Returns a DataType for the passed in string representation * @param dataTypeString diff --git a/src/complex/Parameters/ImportCSVDataParameter.cpp b/src/complex/Parameters/ReadCSVFileParameter.cpp similarity index 71% rename from src/complex/Parameters/ImportCSVDataParameter.cpp rename to src/complex/Parameters/ReadCSVFileParameter.cpp index 6449fb8b95..2373226b7e 100644 --- a/src/complex/Parameters/ImportCSVDataParameter.cpp +++ b/src/complex/Parameters/ReadCSVFileParameter.cpp @@ -28,59 +28,59 @@ * * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ -#include "ImportCSVDataParameter.hpp" +#include "ReadCSVFileParameter.hpp" namespace complex { // ----------------------------------------------------------------------------- -ImportCSVDataParameter::ImportCSVDataParameter(const std::string& name, const std::string& 
humanName, const std::string& helpText, const ValueType& defaultValue) +ReadCSVFileParameter::ReadCSVFileParameter(const std::string& name, const std::string& humanName, const std::string& helpText, const ValueType& defaultValue) : ValueParameter(name, humanName, helpText) , m_DefaultValue(defaultValue) { } // ----------------------------------------------------------------------------- -Uuid ImportCSVDataParameter::uuid() const +Uuid ReadCSVFileParameter::uuid() const { - return ParameterTraits::uuid; + return ParameterTraits::uuid; } // ----------------------------------------------------------------------------- -IParameter::AcceptedTypes ImportCSVDataParameter::acceptedTypes() const +IParameter::AcceptedTypes ReadCSVFileParameter::acceptedTypes() const { return {typeid(ValueType)}; } // ----------------------------------------------------------------------------- -nlohmann::json ImportCSVDataParameter::toJson(const std::any& value) const +nlohmann::json ReadCSVFileParameter::toJson(const std::any& value) const { - const auto& CSVWizardData = GetAnyRef(value); - nlohmann::json json = CSVWizardData.writeJson(); + const auto& ReadCSVData = GetAnyRef(value); + nlohmann::json json = ReadCSVData.writeJson(); return json; } // ----------------------------------------------------------------------------- -Result ImportCSVDataParameter::fromJson(const nlohmann::json& json) const +Result ReadCSVFileParameter::fromJson(const nlohmann::json& json) const { - return {ConvertResultTo(CSVWizardData::ReadJson(json))}; + return {ConvertResultTo(ReadCSVData::ReadJson(json))}; } // ----------------------------------------------------------------------------- -IParameter::UniquePointer ImportCSVDataParameter::clone() const +IParameter::UniquePointer ReadCSVFileParameter::clone() const { - return std::make_unique(name(), humanName(), helpText(), m_DefaultValue); + return std::make_unique(name(), humanName(), helpText(), m_DefaultValue); } // 
----------------------------------------------------------------------------- -std::any ImportCSVDataParameter::defaultValue() const +std::any ReadCSVFileParameter::defaultValue() const { return m_DefaultValue; } // ----------------------------------------------------------------------------- -Result<> ImportCSVDataParameter::validate(const std::any& value) const +Result<> ReadCSVFileParameter::validate(const std::any& value) const { - [[maybe_unused]] auto data = std::any_cast(value); + [[maybe_unused]] auto data = std::any_cast(value); return {}; } } // namespace complex diff --git a/src/complex/Parameters/ImportCSVDataParameter.hpp b/src/complex/Parameters/ReadCSVFileParameter.hpp similarity index 79% rename from src/complex/Parameters/ImportCSVDataParameter.hpp rename to src/complex/Parameters/ReadCSVFileParameter.hpp index f9abbcd1b1..6c98ece45d 100644 --- a/src/complex/Parameters/ImportCSVDataParameter.hpp +++ b/src/complex/Parameters/ReadCSVFileParameter.hpp @@ -32,25 +32,25 @@ #include "complex/Filter/ParameterTraits.hpp" #include "complex/Filter/ValueParameter.hpp" -#include "complex/Parameters/util/CSVWizardData.hpp" +#include "complex/Parameters/util/ReadCSVData.hpp" #include "complex/complex_export.hpp" namespace complex { -class COMPLEX_EXPORT ImportCSVDataParameter : public ValueParameter +class COMPLEX_EXPORT ReadCSVFileParameter : public ValueParameter { public: - using ValueType = CSVWizardData; + using ValueType = ReadCSVData; - ImportCSVDataParameter() = delete; - ImportCSVDataParameter(const std::string& name, const std::string& humanName, const std::string& helpText, const ValueType& defaultValue); - ~ImportCSVDataParameter() override = default; + ReadCSVFileParameter() = delete; + ReadCSVFileParameter(const std::string& name, const std::string& humanName, const std::string& helpText, const ValueType& defaultValue); + ~ReadCSVFileParameter() override = default; - ImportCSVDataParameter(const ImportCSVDataParameter&) = delete; - 
ImportCSVDataParameter(ImportCSVDataParameter&&) noexcept = delete; + ReadCSVFileParameter(const ReadCSVFileParameter&) = delete; + ReadCSVFileParameter(ReadCSVFileParameter&&) noexcept = delete; - ImportCSVDataParameter& operator=(const ImportCSVDataParameter&) = delete; - ImportCSVDataParameter& operator=(ImportCSVDataParameter&&) noexcept = delete; + ReadCSVFileParameter& operator=(const ReadCSVFileParameter&) = delete; + ReadCSVFileParameter& operator=(ReadCSVFileParameter&&) noexcept = delete; /** * @brief Returns the parameter's uuid. @@ -103,4 +103,4 @@ class COMPLEX_EXPORT ImportCSVDataParameter : public ValueParameter }; } // namespace complex -COMPLEX_DEF_PARAMETER_TRAITS(complex::ImportCSVDataParameter, "4f6d6a33-48da-427a-8b17-61e07d1d5b45"); +COMPLEX_DEF_PARAMETER_TRAITS(complex::ReadCSVFileParameter, "4f6d6a33-48da-427a-8b17-61e07d1d5b45"); diff --git a/src/complex/Parameters/util/CSVWizardData.cpp b/src/complex/Parameters/util/CSVWizardData.cpp deleted file mode 100644 index 22f81b5109..0000000000 --- a/src/complex/Parameters/util/CSVWizardData.cpp +++ /dev/null @@ -1,273 +0,0 @@ -/* ============================================================================ - * Copyright (c) 2022-2022 BlueQuartz Software, LLC - * - * Redistribution and use in source and binary forms, with or without modification, - * are permitted provided that the following conditions are met: - * - * Redistributions of source code must retain the above copyright notice, this - * list of conditions and the following disclaimer. - * - * Redistributions in binary form must reproduce the above copyright notice, this - * list of conditions and the following disclaimer in the documentation and/or - * other materials provided with the distribution. - * - * Neither the name of BlueQuartz Software, the US Air Force, nor the names of its - * contributors may be used to endorse or promote products derived from this software - * without specific prior written permission. 
- * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THEs - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE - * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE - * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL - * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR - * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER - * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, - * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE - * USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - * - * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ - -#include "CSVWizardData.hpp" - -#include - -using namespace complex; - -namespace -{ -const std::string k_DataHeadersKey = "Data Headers"; -const std::string k_DataTypesKey = "Data Types"; -const std::string k_DelimitersKey = "Delimiters"; -const std::string k_TupleDimensionsKey = "Tuple Dimensions"; -const std::string k_InputFilePathKey = "Input File Path"; -const std::string k_BeginIndexKey = "Begin Index"; -const std::string k_NumberOfLinesKey = "Number of Lines"; -const std::string k_HeaderLineKey = "Header Line"; -const std::string k_HeaderModeKey = "Header Mode"; -const std::string k_TabAsDelimiterKey = "Tab As Delimiter"; -const std::string k_SemicolonAsDelimiterKey = "Semicolon As Delimiter"; -const std::string k_CommaAsDelimiterKey = "Comma As Delimiter"; -const std::string k_SpaceAsDelimiterKey = "Space As Delimiter"; -const std::string k_ConsecutiveDelimitersKey = "Consecutive Delimiters"; -} // namespace - -// ----------------------------------------------------------------------------- -nlohmann::json CSVWizardData::writeJson() const -{ - nlohmann::json json; - - nlohmann::json 
dHeaders; - for(const auto& header : dataHeaders) - { - dHeaders.push_back(header); - } - json[k_DataHeadersKey] = dHeaders; - - nlohmann::json dTypes; - for(const auto& dType : dataTypes) - { - if(!dType.has_value()) - { - dTypes.push_back(k_SkipDataTypeString); - } - else - { - dTypes.push_back(dType.value()); - } - } - json[k_DataTypesKey] = dTypes; - - nlohmann::json delimitersObj; - for(const auto& delimiter : delimiters) - { - delimitersObj.push_back(delimiter); - } - json[k_DelimitersKey] = delimitersObj; - - json[k_InputFilePathKey] = inputFilePath; - json[k_BeginIndexKey] = beginIndex; - json[k_NumberOfLinesKey] = numberOfLines; - json[k_HeaderLineKey] = headerLine; - json[k_HeaderModeKey] = headerMode; - json[k_TabAsDelimiterKey] = tabAsDelimiter; - json[k_SemicolonAsDelimiterKey] = semicolonAsDelimiter; - json[k_SpaceAsDelimiterKey] = spaceAsDelimiter; - json[k_CommaAsDelimiterKey] = commaAsDelimiter; - json[k_ConsecutiveDelimitersKey] = consecutiveDelimiters; - - return json; -} - -// ----------------------------------------------------------------------------- -Result CSVWizardData::ReadJson(const nlohmann::json& json) -{ - CSVWizardData data; - - if(!json.contains(k_DataHeadersKey)) - { - return MakeErrorResult(-100, fmt::format("CSVWizardData: Cannot find the Data Headers key \"{}\" in the CSVWizardData json object.", k_DataHeadersKey)); - } - - nlohmann::json dHeaders = json[k_DataHeadersKey]; - for(usize i = 0; i < dHeaders.size(); i++) - { - auto header = dHeaders[i]; - if(!header.is_string()) - { - return MakeErrorResult(-101, fmt::format("CSVWizardData: Data header at index {} is of type {} and is not a string.", std::to_string(i), header.type_name())); - } - - data.dataHeaders.push_back(header); - } - - if(!json.contains(k_DataTypesKey)) - { - return MakeErrorResult(-102, fmt::format("CSVWizardData: Cannot find the Data Types key \"{}\" in the CSVWizardData json object.", k_DataTypesKey)); - } - - nlohmann::json dTypes = json[k_DataTypesKey]; - 
for(usize i = 0; i < dTypes.size(); i++) - { - auto dType = dTypes[i]; - if(dType.is_string()) - { - if(dType != k_SkipDataTypeString) - { - return MakeErrorResult(-103, fmt::format("CSVWizardData: Data type at index {} is not a valid data type and is not marked as \"skipped\".", std::to_string(i))); - } - } - else if(!dType.is_number_integer()) - { - return MakeErrorResult(-104, fmt::format("CSVWizardData: Data type at index {} is of type {} and is not an integer.", std::to_string(i), dType.type_name())); - } - - if(dType.is_string()) - { - data.dataTypes.push_back({}); - } - else - { - data.dataTypes.push_back(dType); - } - } - - if(!json.contains(k_DelimitersKey)) - { - return MakeErrorResult(-105, fmt::format("CSVWizardData: Cannot find the Delimiters key \"{}\" in the CSVWizardData json object.", k_DelimitersKey)); - } - - nlohmann::json delimiters = json[k_DelimitersKey]; - for(usize i = 0; i < delimiters.size(); i++) - { - auto delimiterObj = delimiters[i]; - if(!delimiterObj.is_number_integer()) - { - return MakeErrorResult(-106, fmt::format("CSVWizardData: Delimiter at index {} is of type {} and is not an integer.", std::to_string(i), fmt::underlying(delimiterObj.type()))); - } - - data.delimiters.push_back(delimiterObj.get()); - } - - if(!json.contains(k_InputFilePathKey)) - { - return MakeErrorResult(-107, fmt::format("CSVWizardData: Cannot find the 'Input File Path' key \"{}\" in the CSVWizardData json object.", k_InputFilePathKey)); - } - else if(!json[k_InputFilePathKey].is_string()) - { - return MakeErrorResult(-108, fmt::format("CSVWizardData: 'Input File Path' value is of type {} and is not a string.", json[k_InputFilePathKey].type_name())); - } - data.inputFilePath = json[k_InputFilePathKey]; - - if(!json.contains(k_BeginIndexKey)) - { - return MakeErrorResult(-109, fmt::format("CSVWizardData: Cannot find the 'Begin Index' key \"{}\" in the CSVWizardData json object.", k_BeginIndexKey)); - } - else if(!json[k_BeginIndexKey].is_number_integer()) - 
{ - return MakeErrorResult(-110, fmt::format("CSVWizardData: 'Begin Index' value is of type {} and is not an integer.", json[k_BeginIndexKey].type_name())); - } - data.beginIndex = json[k_BeginIndexKey]; - - if(!json.contains(k_NumberOfLinesKey)) - { - return MakeErrorResult(-111, fmt::format("CSVWizardData: Cannot find the 'Number of Lines' key \"{}\" in the CSVWizardData json object.", k_NumberOfLinesKey)); - } - else if(!json[k_NumberOfLinesKey].is_number_integer()) - { - return MakeErrorResult(-112, fmt::format("CSVWizardData: 'Number of Lines' value is of type {} and is not an integer.", json[k_NumberOfLinesKey].type_name())); - } - data.numberOfLines = json[k_NumberOfLinesKey]; - - if(!json.contains(k_HeaderLineKey)) - { - return MakeErrorResult(-113, fmt::format("CSVWizardData: Cannot find the 'Header Line' key \"{}\" in the CSVWizardData json object.", k_HeaderLineKey)); - } - else if(!json[k_HeaderLineKey].is_number_integer()) - { - return MakeErrorResult(-114, fmt::format("CSVWizardData: 'Header Line' value is of type {} and is not an integer.", json[k_HeaderLineKey].type_name())); - } - data.headerLine = json[k_HeaderLineKey]; - - if(!json.contains(k_HeaderModeKey)) - { - return MakeErrorResult(-115, fmt::format("CSVWizardData: Cannot find the 'Header Mode' key \"{}\" in the CSVWizardData json object.", k_HeaderModeKey)); - } - else if(!json[k_HeaderModeKey].is_number_integer()) - { - return MakeErrorResult(-116, fmt::format("CSVWizardData: 'Header Mode' value is of type {} and is not an integer.", json[k_HeaderModeKey].type_name())); - } - data.headerMode = json[k_HeaderModeKey]; - - if(!json.contains(k_TabAsDelimiterKey)) - { - return MakeErrorResult(-117, fmt::format("CSVWizardData: Cannot find the 'Tab As Delimiter' key \"{}\" in the CSVWizardData json object.", k_TabAsDelimiterKey)); - } - else if(!json[k_TabAsDelimiterKey].is_boolean()) - { - return MakeErrorResult(-118, fmt::format("CSVWizardData: 'Tab As Delimiter' value is of type {} and is not 
a boolean.", json[k_TabAsDelimiterKey].type_name())); - } - data.tabAsDelimiter = json[k_TabAsDelimiterKey]; - - if(!json.contains(k_SemicolonAsDelimiterKey)) - { - return MakeErrorResult(-119, fmt::format("CSVWizardData: Cannot find the 'Semicolon As Delimiter' key \"{}\" in the CSVWizardData json object.", k_SemicolonAsDelimiterKey)); - } - else if(!json[k_SemicolonAsDelimiterKey].is_boolean()) - { - return MakeErrorResult(-120, fmt::format("CSVWizardData: 'Semicolon As Delimiter' value is of type {} and is not a boolean.", json[k_SemicolonAsDelimiterKey].type_name())); - } - data.semicolonAsDelimiter = json[k_SemicolonAsDelimiterKey]; - - if(!json.contains(k_SpaceAsDelimiterKey)) - { - return MakeErrorResult(-121, fmt::format("CSVWizardData: Cannot find the 'Space As Delimiter' key \"{}\" in the CSVWizardData json object.", k_SpaceAsDelimiterKey)); - } - else if(!json[k_SpaceAsDelimiterKey].is_boolean()) - { - return MakeErrorResult(-122, fmt::format("CSVWizardData: 'Space As Delimiter' value is of type {} and is not a boolean.", json[k_SpaceAsDelimiterKey].type_name())); - } - data.spaceAsDelimiter = json[k_SpaceAsDelimiterKey]; - - if(!json.contains(k_CommaAsDelimiterKey)) - { - return MakeErrorResult(-123, fmt::format("CSVWizardData: Cannot find the 'Comma As Delimiter' key \"{}\" in the CSVWizardData json object.", k_CommaAsDelimiterKey)); - } - else if(!json[k_CommaAsDelimiterKey].is_boolean()) - { - return MakeErrorResult(-124, fmt::format("CSVWizardData: 'Comma As Delimiter' value is of type {} and is not a boolean.", json[k_CommaAsDelimiterKey].type_name())); - } - data.commaAsDelimiter = json[k_CommaAsDelimiterKey]; - - if(!json.contains(k_ConsecutiveDelimitersKey)) - { - return MakeErrorResult(-125, fmt::format("CSVWizardData: Cannot find the 'Consecutive Delimiters' key \"{}\" in the CSVWizardData json object.", k_ConsecutiveDelimitersKey)); - } - else if(!json[k_ConsecutiveDelimitersKey].is_boolean()) - { - return MakeErrorResult(-126, 
fmt::format("CSVWizardData: 'Consecutive Delimiters' value is of type {} and is not a boolean.", json[k_ConsecutiveDelimitersKey].type_name())); - } - data.consecutiveDelimiters = json[k_ConsecutiveDelimitersKey]; - - return {data}; -} diff --git a/src/complex/Parameters/util/ReadCSVData.cpp b/src/complex/Parameters/util/ReadCSVData.cpp new file mode 100644 index 0000000000..d0568adb22 --- /dev/null +++ b/src/complex/Parameters/util/ReadCSVData.cpp @@ -0,0 +1,241 @@ +/* ============================================================================ + * Copyright (c) 2022-2022 BlueQuartz Software, LLC + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted provided that the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the name of BlueQuartz Software, the US Air Force, nor the names of its + * contributors may be used to endorse or promote products derived from this software + * without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + * DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE + * USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + * + * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ + +#include "ReadCSVData.hpp" + +#include + +using namespace complex; + +namespace +{ +const std::string k_CustomHeadersKey = "Custom Headers"; +const std::string k_DataTypesKey = "Data Types"; +const std::string k_SkippedArrayMaskKey = "Skipped Array Mask"; +const std::string k_TupleDimensionsKey = "Tuple Dimensions"; +const std::string k_InputFilePathKey = "Input File Path"; +const std::string k_StartImportRowKey = "Start Import Row"; +const std::string k_HeaderLineKey = "Header Line"; +const std::string k_HeaderModeKey = "Header Mode"; +const std::string k_Delimiters = "Delimiters"; +const std::string k_ConsecutiveDelimitersKey = "Consecutive Delimiters"; +} // namespace + +// ----------------------------------------------------------------------------- +nlohmann::json ReadCSVData::writeJson() const +{ + nlohmann::json json; + + nlohmann::json dHeaders; + for(const auto& header : customHeaders) + { + dHeaders.push_back(header); + } + json[k_CustomHeadersKey] = dHeaders; + + nlohmann::json dTypes; + for(const auto& dType : dataTypes) + { + dTypes.push_back(dType); + } + json[k_DataTypesKey] = dTypes; + + nlohmann::json tDims; + for(const auto& tDim : tupleDims) + { + tDims.push_back(tDim); + } + json[k_TupleDimensionsKey] = tDims; + + nlohmann::json dSkippedArrays; + for(const auto& skippedArrayVal : skippedArrayMask) + { + 
dSkippedArrays.push_back(skippedArrayVal); + } + json[k_SkippedArrayMaskKey] = dSkippedArrays; + + json[k_InputFilePathKey] = inputFilePath; + json[k_StartImportRowKey] = startImportRow; + json[k_HeaderLineKey] = headersLine; + json[k_HeaderModeKey] = headerMode; + + std::vector stringVec(delimiters.size()); + std::transform(delimiters.begin(), delimiters.end(), stringVec.begin(), [](char c) { return std::string(1, c); }); + json[k_Delimiters] = stringVec; + + json[k_ConsecutiveDelimitersKey] = consecutiveDelimiters; + + return json; +} + +// ----------------------------------------------------------------------------- +Result ReadCSVData::ReadJson(const nlohmann::json& json) +{ + ReadCSVData data; + + if(!json.contains(k_CustomHeadersKey)) + { + return MakeErrorResult(-100, fmt::format("ReadCSVData: Cannot find the Data Headers key \"{}\" in the ReadCSVData json object.", k_CustomHeadersKey)); + } + + nlohmann::json dHeaders = json[k_CustomHeadersKey]; + for(usize i = 0; i < dHeaders.size(); i++) + { + auto header = dHeaders[i]; + if(!header.is_string()) + { + return MakeErrorResult(-101, fmt::format("ReadCSVData: Custom header at index {} is of type {} and is not a string.", std::to_string(i), header.type_name())); + } + + data.customHeaders.push_back(header); + } + + if(!json.contains(k_DataTypesKey)) + { + return MakeErrorResult(-102, fmt::format("ReadCSVData: Cannot find the Data Types key \"{}\" in the ReadCSVData json object.", k_DataTypesKey)); + } + + nlohmann::json dTypes = json[k_DataTypesKey]; + for(usize i = 0; i < dTypes.size(); i++) + { + auto dType = dTypes[i]; + if(!dType.is_number_integer()) + { + return MakeErrorResult(-103, fmt::format("ReadCSVData: Data type at index {} is of type {} and is not an integer.", std::to_string(i), dType.type_name())); + } + + data.dataTypes.push_back(dType); + } + + if(!json.contains(k_TupleDimensionsKey)) + { + return MakeErrorResult(-104, fmt::format("ReadCSVData: Cannot find the Tuple Dimensions key \"{}\" in 
the ReadCSVData json object.", k_TupleDimensionsKey)); + } + + nlohmann::json tDims = json[k_TupleDimensionsKey]; + data.tupleDims.clear(); + for(usize i = 0; i < tDims.size(); i++) + { + auto tDim = tDims[i]; + data.tupleDims.push_back(tDim); + } + + if(!json.contains(k_SkippedArrayMaskKey)) + { + return MakeErrorResult(-105, fmt::format("ReadCSVData: Cannot find the Skipped Arrays key \"{}\" in the ReadCSVData json object.", k_DataTypesKey)); + } + + nlohmann::json dSkippedArrays = json[k_SkippedArrayMaskKey]; + for(usize i = 0; i < dSkippedArrays.size(); i++) + { + auto skippedArrayVal = dSkippedArrays[i]; + if(!skippedArrayVal.is_boolean()) + { + return MakeErrorResult(-106, fmt::format("ReadCSVData: Skipped array value at index {} is of type {} and is not a boolean.", std::to_string(i), skippedArrayVal.type_name())); + } + + data.skippedArrayMask.push_back(skippedArrayVal); + } + + if(!json.contains(k_InputFilePathKey)) + { + return MakeErrorResult(-107, fmt::format("ReadCSVData: Cannot find the 'Input File Path' key \"{}\" in the ReadCSVData json object.", k_InputFilePathKey)); + } + else if(!json[k_InputFilePathKey].is_string()) + { + return MakeErrorResult(-108, fmt::format("ReadCSVData: 'Input File Path' value is of type {} and is not a string.", json[k_InputFilePathKey].type_name())); + } + data.inputFilePath = json[k_InputFilePathKey]; + + if(!json.contains(k_StartImportRowKey)) + { + return MakeErrorResult(-109, fmt::format("ReadCSVData: Cannot find the 'Begin Index' key \"{}\" in the ReadCSVData json object.", k_StartImportRowKey)); + } + else if(!json[k_StartImportRowKey].is_number_integer()) + { + return MakeErrorResult(-110, fmt::format("ReadCSVData: 'Begin Index' value is of type {} and is not an integer.", json[k_StartImportRowKey].type_name())); + } + data.startImportRow = json[k_StartImportRowKey]; + + if(!json.contains(k_HeaderLineKey)) + { + return MakeErrorResult(-113, fmt::format("ReadCSVData: Cannot find the 'Header Line' key \"{}\" in the 
ReadCSVData json object.", k_HeaderLineKey)); + } + else if(!json[k_HeaderLineKey].is_number_integer()) + { + return MakeErrorResult(-114, fmt::format("ReadCSVData: 'Header Line' value is of type {} and is not an integer.", json[k_HeaderLineKey].type_name())); + } + data.headersLine = json[k_HeaderLineKey]; + + if(!json.contains(k_HeaderModeKey)) + { + return MakeErrorResult(-115, fmt::format("ReadCSVData: Cannot find the 'Header Mode' key \"{}\" in the ReadCSVData json object.", k_HeaderModeKey)); + } + else if(!json[k_HeaderModeKey].is_number_integer()) + { + return MakeErrorResult(-116, fmt::format("ReadCSVData: 'Header Mode' value is of type {} and is not an integer.", json[k_HeaderModeKey].type_name())); + } + data.headerMode = json[k_HeaderModeKey]; + + if(!json.contains(k_Delimiters)) + { + return MakeErrorResult(-117, fmt::format("ReadCSVData: Cannot find the 'Delimiters' key \"{}\" in the ReadCSVData json object.", k_Delimiters)); + } + + nlohmann::json dDelimiters = json[k_Delimiters]; + for(usize i = 0; i < dDelimiters.size(); i++) + { + auto dDelimiter = dDelimiters[i]; + if(!dDelimiter.is_string() || dDelimiter.get().size() != 1) + { + return MakeErrorResult(-118, fmt::format("ReadCSVData: Delimiter at index {} is of type {} and is not a string.", std::to_string(i), dDelimiter.type_name())); + } + + std::string delimiter = dDelimiter.get(); + if(delimiter.empty()) + { + return MakeErrorResult(-119, fmt::format("ReadCSVData: Delimiter at index {} is empty.", std::to_string(i))); + } + + data.delimiters.push_back(delimiter[0]); + } + + if(!json.contains(k_ConsecutiveDelimitersKey)) + { + return MakeErrorResult(-125, fmt::format("ReadCSVData: Cannot find the 'Consecutive Delimiters' key \"{}\" in the ReadCSVData json object.", k_ConsecutiveDelimitersKey)); + } + else if(!json[k_ConsecutiveDelimitersKey].is_boolean()) + { + return MakeErrorResult(-126, fmt::format("ReadCSVData: 'Consecutive Delimiters' value is of type {} and is not a boolean.", 
json[k_ConsecutiveDelimitersKey].type_name())); + } + data.consecutiveDelimiters = json[k_ConsecutiveDelimitersKey]; + + return {data}; +} diff --git a/src/complex/Parameters/util/CSVWizardData.hpp b/src/complex/Parameters/util/ReadCSVData.hpp similarity index 77% rename from src/complex/Parameters/util/CSVWizardData.hpp rename to src/complex/Parameters/util/ReadCSVData.hpp index eff7e13cf6..e3e01bf855 100644 --- a/src/complex/Parameters/util/CSVWizardData.hpp +++ b/src/complex/Parameters/util/ReadCSVData.hpp @@ -33,6 +33,7 @@ #include "complex/Common/Result.hpp" #include "complex/Common/StringLiteral.hpp" #include "complex/Common/Types.hpp" +#include "complex/Parameters/DynamicTableParameter.hpp" #include "complex/complex_export.hpp" #include @@ -41,35 +42,28 @@ namespace complex { -struct COMPLEX_EXPORT CSVWizardData +struct COMPLEX_EXPORT ReadCSVData { public: enum class HeaderMode { LINE, - CUSTOM, - DEFAULTS + CUSTOM }; // Json Reader and Writer nlohmann::json writeJson() const; - static Result ReadJson(const nlohmann::json& json); + static Result ReadJson(const nlohmann::json& json); std::string inputFilePath; - std::vector dataHeaders; - usize beginIndex = 1; - int64 numberOfLines = -1; - std::vector> dataTypes; - std::vector delimiters; - usize headerLine = 1; - HeaderMode headerMode = HeaderMode::LINE; - bool tabAsDelimiter = false; - bool semicolonAsDelimiter = false; - bool commaAsDelimiter = false; - bool spaceAsDelimiter = false; + std::vector customHeaders; + usize startImportRow = 1; + std::vector dataTypes; + std::vector skippedArrayMask; + usize headersLine = 1; + HeaderMode headerMode = HeaderMode::CUSTOM; + std::vector tupleDims = {1}; + std::vector delimiters = {}; bool consecutiveDelimiters = false; - - static inline constexpr usize k_TotalPreviewLines = 50; - static inline constexpr StringLiteral k_SkipDataTypeString = "Skip"; }; } // namespace complex diff --git a/src/complex/Utilities/DataArrayUtilities.hpp 
b/src/complex/Utilities/DataArrayUtilities.hpp index eef286b19b..402d9f0ea4 100644 --- a/src/complex/Utilities/DataArrayUtilities.hpp +++ b/src/complex/Utilities/DataArrayUtilities.hpp @@ -130,10 +130,28 @@ struct ConvertTo { static Result convert(const std::string& input) { - if(input == "TRUE" || input == "true" || input == "1" || input == "True") + if(input == "TRUE" || input == "true" || input == "True") { return {true}; } + + if(input == "FALSE" || input == "false" || input == "False") + { + return {false}; + } + + Result intResult = ConvertTo::convert(input); + if(intResult.valid()) + { + return {intResult.value() != 0}; + } + + Result floatResult = ConvertTo::convert(input); + if(floatResult.valid()) + { + return {floatResult.value() != 0.0}; + } + return {false}; } }; diff --git a/src/complex/Utilities/FileUtilities.hpp b/src/complex/Utilities/FileUtilities.hpp new file mode 100644 index 0000000000..41979a5c0f --- /dev/null +++ b/src/complex/Utilities/FileUtilities.hpp @@ -0,0 +1,125 @@ +/* ============================================================================ + * Copyright (c) 2020 BlueQuartz Software, LLC + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted provided that the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the names of any of the BlueQuartz Software contributors + * may be used to endorse or promote products derived from this software without + * specific prior written permission. 
+ * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE + * USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + * + * + * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ +#pragma once + +#include +#include +#include +#include + +#include "complex/Common/Result.hpp" + +namespace fs = std::filesystem; + +namespace complex +{ +namespace FileUtilities +{ +Result<> ValidateCSVFile(const std::string& filePath) +{ + constexpr int64_t bufferSize = 2048; + + auto absPath = fs::absolute(filePath); + + if(!fs::exists({absPath})) + { + return MakeErrorResult(-300, fmt::format("File does not exist: {}", absPath.string())); + } + + // Obtain the file size + const size_t fileSize = fs::file_size(absPath); + + // Open the file + std::ifstream in(absPath.c_str(), std::ios_base::binary); + if(!in.is_open()) + { + return MakeErrorResult(-301, fmt::format("Could not open file for reading: {}", absPath.string())); + } + + size_t actualSize = bufferSize; + if(fileSize <= bufferSize) + { + actualSize = fileSize; + } + + // Allocate the buffer + std::vector buffer(actualSize, 0); + + // Copy the file contents into the buffer + try + { + in.read(buffer.data(), actualSize); + } catch(const std::exception& e) + { + return MakeErrorResult(-302, fmt::format("There was an error reading 
the data from file: {}. Exception: {}", absPath.string(), e.what())); + } + + // Check the buffer for invalid characters, tab characters, new-line characters, and carriage return characters + bool hasNewLines = false; + bool hasCarriageReturns = false; + bool hasTabs = false; + // If the first line of the file is > 2048 then this will fail! (MJ) + for(size_t i = 0; i < actualSize; i++) + { + const char currentChar = buffer[i]; + + if(currentChar < 32 && currentChar != 9 && currentChar != 10 && currentChar != 13) + { + // This is an unprintable character + return MakeErrorResult(-303, fmt::format("Unprintable characters have been detected in file: {}. Please import a different file.", absPath.string())); + } + if(currentChar == 9) + { + hasTabs = true; + } + else if(currentChar == 10) + { + hasNewLines = true; + } + else if(currentChar == 13) + { + hasCarriageReturns = true; + } + } + + if(!hasNewLines && !hasCarriageReturns && !hasTabs) + { + // This might be a binary file + return MakeErrorResult(-304, fmt::format("The file \"{}\" might be a binary file, because line-feed, tab, or carriage return characters have not been detected. Using this file may crash the " + "program or cause unexpected results. 
Please import a different file.", + absPath.string())); + } + + return {}; +} +} // namespace FileUtilities +} // namespace complex diff --git a/src/complex/Utilities/FilterUtilities.cpp b/src/complex/Utilities/FilterUtilities.cpp index 28655846c9..59e2168f2d 100644 --- a/src/complex/Utilities/FilterUtilities.cpp +++ b/src/complex/Utilities/FilterUtilities.cpp @@ -23,5 +23,4 @@ Result<> CreateOutputDirectories(const fs::path& outputPath) } return {}; } - } // namespace complex diff --git a/src/complex/Utilities/FilterUtilities.hpp b/src/complex/Utilities/FilterUtilities.hpp index a7e3196f6f..b2dc1bd9c8 100644 --- a/src/complex/Utilities/FilterUtilities.hpp +++ b/src/complex/Utilities/FilterUtilities.hpp @@ -189,4 +189,10 @@ auto ExecuteNeighborFunction(FuncT&& func, DataType dataType, ArgsT&&... args) */ COMPLEX_EXPORT Result<> CreateOutputDirectories(const fs::path& outputPath); +/** + * @brief Creates a delimiters vector from the given delimiters booleans + * @return + */ +COMPLEX_EXPORT std::vector CreateDelimitersVector(bool tabAsDelimiter, bool semicolonAsDelimiter, bool commaAsDelimiter, bool spaceAsDelimiter); + } // namespace complex diff --git a/wrapping/python/docs/generate_sphinx_docs.cpp b/wrapping/python/docs/generate_sphinx_docs.cpp index 73566567cc..597311c48d 100644 --- a/wrapping/python/docs/generate_sphinx_docs.cpp +++ b/wrapping/python/docs/generate_sphinx_docs.cpp @@ -55,7 +55,7 @@ void GenerateParameterList() ADD_PARAMETER_TRAIT(complex.DataGroupCreationParameter, "bff2d4ac-04a6-5251-b188-4f83f7865074") ADD_PARAMETER_TRAIT(complex.DataPathSelectionParameter, "cd12d081-fbf0-46c4-8f4a-15e2e06e98b8") ADD_PARAMETER_TRAIT(complex.CalculatorParameter, "ba2d4937-dbec-5536-8c5c-c0a406e80f77") - ADD_PARAMETER_TRAIT(complex.ImportCSVDataParameter, "4f6d6a33-48da-427a-8b17-61e07d1d5b45") + ADD_PARAMETER_TRAIT(complex.ReadCSVFileParameter, "4f6d6a33-48da-427a-8b17-61e07d1d5b45") ADD_PARAMETER_TRAIT(complex.Int8Parameter, 
"cae73834-68f8-4235-b010-8bea87d8ff7a") ADD_PARAMETER_TRAIT(complex.UInt8Parameter, "6c3efeff-ce8f-47c0-83d1-262f2b2dd6cc") ADD_PARAMETER_TRAIT(complex.Int16Parameter, "44ae56e8-e6e7-4e4d-8128-dd3dc2c6696e") diff --git a/wrapping/python/docs/source/API.rst b/wrapping/python/docs/source/API.rst index 133b116d1c..aa008175e1 100644 --- a/wrapping/python/docs/source/API.rst +++ b/wrapping/python/docs/source/API.rst @@ -383,71 +383,65 @@ General Parameters This parameter represents the :ref:`DataPath` to a valid :ref:`complex.Geometry() ` -.. _ImportCSVDataParameter: -.. py:class:: ImportCSVDataParameter +.. _ReadCSVFileParameter: +.. py:class:: ReadCSVFileParameter - This parameter is used for the :ref:`complex.ImportCSVDataFilter() ` and holds + This parameter is used for the :ref:`complex.ReadCSVFileFilter() ` and holds the information to import a file formatted as table data where each column of data is a single array. + The file can be comma, space, tab or semicolon separated. - + The file optionally can have a line of headers. The user can specify what line the headers are on + + The file optionally can have a line of headers. The user can specify what line number the header is located on. + The import can start at a user specified line number but will continue to the end of the file. - The primary python object that will hold the information to pass to the filter is the CSVWizardData class described below. + The primary python object that will hold the information to pass to the filter is the ReadCSVData class described below. - :ivar ValueType: CSVWizardData + :ivar ValueType: ReadCSVData - .. py:class:: ImportCSVDataParameter.CSVWizardData + .. py:class:: ReadCSVFileParameter.ReadCSVData - The CSVWizardData class holds all the necessary information to import a CSV formatted file into DREAM3D-NX. There are + The ReadCSVData class holds all the necessary information to import a CSV formatted file into DREAM3D-NX.
There are a number of member variables that need to be set correctly before the filter will execute correctly. - :ivar input_file_path: "PathLike" The path to the input file on the file syatem. - :ivar begin_index: Int What line number does the data start on. 1 Based numbering scheme. - :ivar comma_as_delimiter: Bool Are the values comma separated - :ivar semicolon_as_delimiter: Bool Are the values semicolon separated - :ivar space_as_delimiter: Bool Are the values space separated - :ivar tab_as_delimiter: Bool Are the values tab separated - :ivar consecutive_delimiters: Bool Should consectutive delimiters be counted as a single delimiter. Bool - :ivar data_headers: List[string]. If the file does not have headers, this is a list of string values, 1 per column of data, that will also become the names of ecah of the created :ref:`DataArray` - :ivar data_types: List[cx.DataType]. The DataType, one per column, that indicates the kind of native numerical values (int, float... ) that will be used in the created :ref:`DataArray` - :ivar delimiters: List[string]. The actual delimiter to use. If you specified a comma above, then [","] would be used. The list should have a single value. - :ivar header_line: Int. The line number of the headers - :ivar header_mode: 'cx.CSVWizardData.HeaderMode.'. Can be one of 'cx.CSVWizardData.HeaderMode.Line' or 'cx.CSVWizardData.HeaderMode.Custom' - :ivar number_of_lines: Int. Total lines in the file. + :ivar input_file_path: "PathLike". The path to the input file on the file system. + :ivar start_import_row: Int. What line number does the data start on. 1 Based numbering scheme. + :ivar delimiters: List[string]. List of delimiters that will be used to separate the lines of the file into columns. + :ivar consecutive_delimiters: Bool. Should consecutive delimiters be counted as a single delimiter. + :ivar custom_headers: List[string]. 
If the file does not have headers, this is a list of string values, 1 per column of data, that will also become the names of the created :ref:`DataArray`. + :ivar data_types: List[cx.DataType]. The DataType, one per column, that indicates the kind of native numerical values (int, float... ) that will be used in the created :ref:`DataArray`. + :ivar skipped_array_mask: List[bool]. Booleans, one per column, that indicate whether or not to skip importing each created :ref:`DataArray `. + :ivar tuple_dims: List[int]. The tuple dimensions for the created :ref:`DataArrays `. + :ivar headers_line: Int. The line number of the headers. + :ivar header_mode: 'cx.ReadCSVData.HeaderMode.'. Can be one of 'cx.ReadCSVData.HeaderMode.Line' or 'cx.ReadCSVData.HeaderMode.Custom'. .. code:: python - import_csv_wizard_data = cx.CSVWizardData() - import_csv_wizard_data.input_file_path = "/tmp/test_csv_data.csv" - import_csv_wizard_data.begin_index = 2 - import_csv_wizard_data.comma_as_delimiter = True - import_csv_wizard_data.semicolon_as_delimiter = False - import_csv_wizard_data.space_as_delimiter = False - import_csv_wizard_data.tab_as_delimiter = False - import_csv_wizard_data.consecutive_delimiters = False - import_csv_wizard_data.data_headers = [] - import_csv_wizard_data.data_types = [cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.int32 ] - import_csv_wizard_data.delimiters = [","] - import_csv_wizard_data.header_line = 1 - import_csv_wizard_data.header_mode = cx.CSVWizardData.HeaderMode.Line - import_csv_wizard_data.number_of_lines = 37990 - - result = cx.ImportCSVDataFilter.execute(data_structure=data_structure, - # This will store the imported arrays into a newly generated DataGroup - created_data_group=cx.DataPath(["Imported Data"]), - # We are not using this parameter but it still needs a value - selected_data_group=cx.DataPath(), - # The dimensions of the tuples. 
Can be 1-N dimensions - tuple_dimensions=[[37989]], - # Use an existing DataGroup or AttributeMatrix. If an AttributemMatrix is used, the total number of tuples must match - use_existing_group=False, - # The CSVWizardData object with all member variables set. - wizard_data=import_csv_wizard_data # The CSVWizardData object with all member variables set. - ) + data_structure = cx.DataStructure() + # Example File has 7 columns to import + read_csv_data = cx.ReadCSVData() + read_csv_data.input_file_path = "/tmp/test_csv_data.csv" + read_csv_data.start_import_row = 2 + read_csv_data.delimiters = [','] + read_csv_data.custom_headers = [] + read_csv_data.column_data_types = [cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.int32 ] + read_csv_data.skipped_array_mask = [False,False,False,False,False,False,False ] + read_csv_data.tuple_dims = [37989] + read_csv_data.headers_line = 1 + read_csv_data.header_mode = cx.ReadCSVData.HeaderMode.Line + + # This will store the imported arrays into a newly generated DataGroup + result = cx.ReadCSVFileFilter.execute(data_structure=data_structure, + # This will store the imported arrays into a newly generated DataGroup + created_data_group=cx.DataPath(["Imported Data"]), + # We are not using this parameter but it still needs a value + selected_data_group=cx.DataPath(), + # Use an existing DataGroup or AttributeMatrix. If an AttributeMatrix is used, the total number of tuples must match + use_existing_group=False, + # The ReadCSVData object with all member variables set. + read_csv_data=read_csv_data # The ReadCSVData object with all member variables set. + ) ..
_ImportHDF5DatasetParameter: diff --git a/wrapping/python/examples/import_csv.py b/wrapping/python/examples/import_csv.py deleted file mode 100644 index 02aa2c4557..0000000000 --- a/wrapping/python/examples/import_csv.py +++ /dev/null @@ -1,47 +0,0 @@ -import complex as cx -import itkimageprocessing as cxitk -import orientationanalysis as cxor - -import numpy as np - -# Create the DataStructure object -data_structure = cx.DataStructure() - -import_csv_wizard_data = cx.CSVWizardData() -import_csv_wizard_data.input_file_path = "test_csv_data.csv" -import_csv_wizard_data.begin_index = 2 - -import_csv_wizard_data.comma_as_delimiter = True -import_csv_wizard_data.semicolon_as_delimiter = False -import_csv_wizard_data.space_as_delimiter = False -import_csv_wizard_data.tab_as_delimiter = False -import_csv_wizard_data.consecutive_delimiters = False - -import_csv_wizard_data.data_headers = [] -import_csv_wizard_data.data_types = [cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.int32 ] -import_csv_wizard_data.delimiters = [","] - -import_csv_wizard_data.header_line = 1 -import_csv_wizard_data.header_mode = cx.CSVWizardData.HeaderMode.Line - -import_csv_wizard_data.number_of_lines = 37990 - - -# This will store the imported arrays into a newly generated DataGroup -result = cx.ImportCSVDataFilter.execute(data_structure=data_structure, - # This will store the imported arrays into a newly generated DataGroup - created_data_group=cx.DataPath(["Imported Data"]), - # We are not using this parameter but it still needs a value - selected_data_group=cx.DataPath(), - # The dimensions of the tuples. Can be 1-N dimensions - tuple_dimensions=[[37989]], - # Use an existing DataGroup or AttributeMatrix. If an AttributemMatrix is used, the total number of tuples must match - use_existing_group=False, - # The CSVWizardData object with all member variables set. 
- wizard_data=import_csv_wizard_data # The CSVWizardData object with all member variables set. - ) -if len(result.errors) != 0: - print('Errors: {}', result.errors) - print('Warnings: {}', result.warnings) -else: - print("No errors running the ImportCSVDataFilter filter") diff --git a/wrapping/python/examples/read_csv_file.py b/wrapping/python/examples/read_csv_file.py new file mode 100644 index 0000000000..37a484e0b4 --- /dev/null +++ b/wrapping/python/examples/read_csv_file.py @@ -0,0 +1,36 @@ +import complex as cx +import itkimageprocessing as cxitk +import orientationanalysis as cxor + +import numpy as np + +# Create the DataStructure object +data_structure = cx.DataStructure() +# This file has 7 columns to import +read_csv_data = cx.ReadCSVData() +read_csv_data.input_file_path = "wrapping/python/examples/test_csv_data.csv" +read_csv_data.start_import_row = 2 +read_csv_data.delimiters = [','] +read_csv_data.custom_headers = [] +read_csv_data.column_data_types = [cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.int32 ] +read_csv_data.skipped_array_mask = [False,False,False,False,False,False,False ] +read_csv_data.tuple_dims = [37989] +read_csv_data.headers_line = 1 +read_csv_data.header_mode = cx.ReadCSVData.HeaderMode.Line + +# This will store the imported arrays into a newly generated DataGroup +result = cx.ReadCSVFileFilter.execute(data_structure=data_structure, + # This will store the imported arrays into a newly generated DataGroup + created_data_group=cx.DataPath(["Imported Data"]), + # We are not using this parameter but it still needs a value + selected_data_group=cx.DataPath(), + # Use an existing DataGroup or AttributeMatrix. If an AttributeMatrix is used, the total number of tuples must match + use_existing_group=False, + # The ReadCSVData object with all member variables set. + read_csv_data=read_csv_data # The ReadCSVData object with all member variables set.
+ ) +if len(result.errors) != 0: + print('Errors: {}', result.errors) + print('Warnings: {}', result.warnings) +else: + print("No errors running the ReadCSVFileFilter filter")