From 1a3042de60d25fb72d03281d7cc36fd2969dbc45 Mon Sep 17 00:00:00 2001 From: Joey Kleingers Date: Mon, 9 Oct 2023 10:47:32 -0400 Subject: [PATCH] Rename ImportCSVData to ImportTextData. Signed-off-by: Joey Kleingers --- CMakeLists.txt | 6 +- src/Plugins/ComplexCore/CMakeLists.txt | 2 +- .../{Import_CSV.png => Import_Text.png} | Bin .../{Import_CSV_1.png => Import_Text_1.png} | Bin .../{Import_CSV_10.png => Import_Text_10.png} | Bin .../{Import_CSV_2.png => Import_Text_2.png} | Bin .../{Import_CSV_3.png => Import_Text_3.png} | Bin .../{Import_CSV_4.png => Import_Text_4.png} | Bin .../{Import_CSV_5.png => Import_Text_5.png} | Bin .../{Import_CSV_6.png => Import_Text_6.png} | Bin .../{Import_CSV_7.png => Import_Text_7.png} | Bin .../{Import_CSV_8.png => Import_Text_8.png} | Bin .../{Import_CSV_9.png => Import_Text_9.png} | Bin ...VDataFilter.md => ImportTextDataFilter.md} | 38 ++--- .../pipelines/Import_ASCII.d3dpipeline | 2 +- .../ResampleRectGridToImageGeom.d3dpipeline | 2 +- .../ComplexCoreLegacyUUIDMapping.hpp | 4 +- ...ataFilter.cpp => ImportTextDataFilter.cpp} | 146 +++++++++--------- ...ataFilter.hpp => ImportTextDataFilter.hpp} | 20 +-- src/Plugins/ComplexCore/test/CMakeLists.txt | 2 +- ...CSVDataTest.cpp => ImportTextDataTest.cpp} | 112 +++++++------- .../ComplexCore/test/ImportTextTest.cpp | 4 +- .../ComplexCore/wrapping/python/complexpy.cpp | 50 +++--- .../test/ExportGBCDGMTFileTest.cpp | 34 ++-- .../test/ExportGBCDTriangleDataTest.cpp | 14 +- .../test/OrientationAnalysisTestUtils.hpp | 4 +- ...ameter.cpp => ImportTextDataParameter.cpp} | 30 ++-- ...ameter.hpp => ImportTextDataParameter.hpp} | 22 +-- ...VImporterData.cpp => TextImporterData.cpp} | 62 ++++---- ...VImporterData.hpp => TextImporterData.hpp} | 4 +- wrapping/python/docs/generate_sphinx_docs.cpp | 2 +- wrapping/python/docs/source/API.rst | 50 +++--- wrapping/python/examples/import_csv.py | 43 ------ wrapping/python/examples/import_text.py | 43 ++++++ 34 files changed, 350 insertions(+), 346 deletions(-) rename src/Plugins/ComplexCore/docs/Images/{Import_CSV.png => Import_Text.png} (100%) rename src/Plugins/ComplexCore/docs/Images/{Import_CSV_1.png => Import_Text_1.png} (100%) rename src/Plugins/ComplexCore/docs/Images/{Import_CSV_10.png => Import_Text_10.png} (100%) rename src/Plugins/ComplexCore/docs/Images/{Import_CSV_2.png => Import_Text_2.png} (100%) rename src/Plugins/ComplexCore/docs/Images/{Import_CSV_3.png => Import_Text_3.png} (100%) rename src/Plugins/ComplexCore/docs/Images/{Import_CSV_4.png => Import_Text_4.png} (100%) rename src/Plugins/ComplexCore/docs/Images/{Import_CSV_5.png => Import_Text_5.png} (100%) rename src/Plugins/ComplexCore/docs/Images/{Import_CSV_6.png => Import_Text_6.png} (100%) rename src/Plugins/ComplexCore/docs/Images/{Import_CSV_7.png => Import_Text_7.png} (100%) rename src/Plugins/ComplexCore/docs/Images/{Import_CSV_8.png => Import_Text_8.png} (100%) rename src/Plugins/ComplexCore/docs/Images/{Import_CSV_9.png => Import_Text_9.png} (100%) rename src/Plugins/ComplexCore/docs/{ImportCSVDataFilter.md => ImportTextDataFilter.md} (74%) rename src/Plugins/ComplexCore/src/ComplexCore/Filters/{ImportCSVDataFilter.cpp => ImportTextDataFilter.cpp} (79%) rename src/Plugins/ComplexCore/src/ComplexCore/Filters/{ImportCSVDataFilter.hpp => ImportTextDataFilter.hpp} (85%) rename src/Plugins/ComplexCore/test/{ImportCSVDataTest.cpp => ImportTextDataTest.cpp} (72%) rename src/complex/Parameters/{ImportCSVDataParameter.cpp => ImportTextDataParameter.cpp} (70%) rename 
src/complex/Parameters/{ImportCSVDataParameter.hpp => ImportTextDataParameter.hpp} (79%) rename src/complex/Parameters/util/{CSVImporterData.cpp => TextImporterData.cpp} (56%) rename src/complex/Parameters/util/{CSVImporterData.hpp => TextImporterData.hpp} (95%) delete mode 100644 wrapping/python/examples/import_csv.py create mode 100644 wrapping/python/examples/import_text.py diff --git a/CMakeLists.txt b/CMakeLists.txt index 42dd1af306..d3aba8c524 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -259,7 +259,7 @@ set(CoreParameters MultiPathSelectionParameter NumberParameter NumericTypeParameter - ImportCSVDataParameter + ImportTextDataParameter ImportHDF5DatasetParameter StringParameter VectorParameter @@ -431,7 +431,7 @@ set(COMPLEX_HDRS ${COMPLEX_SOURCE_DIR}/Filter/Actions/CreateGridMontageAction.hpp ${COMPLEX_SOURCE_DIR}/Parameters/util/DynamicTableInfo.hpp - ${COMPLEX_SOURCE_DIR}/Parameters/util/CSVImporterData.hpp + ${COMPLEX_SOURCE_DIR}/Parameters/util/TextImporterData.hpp ${COMPLEX_SOURCE_DIR}/Pipeline/AbstractPipelineNode.hpp ${COMPLEX_SOURCE_DIR}/Pipeline/Pipeline.hpp @@ -630,7 +630,7 @@ set(COMPLEX_SRCS ${COMPLEX_SOURCE_DIR}/Filter/Actions/CreateAttributeMatrixAction.cpp ${COMPLEX_SOURCE_DIR}/Filter/Actions/CreateGridMontageAction.cpp - ${COMPLEX_SOURCE_DIR}/Parameters/util/CSVImporterData.cpp + ${COMPLEX_SOURCE_DIR}/Parameters/util/TextImporterData.cpp ${COMPLEX_SOURCE_DIR}/Parameters/util/DynamicTableInfo.cpp ${COMPLEX_SOURCE_DIR}/Pipeline/AbstractPipelineNode.cpp diff --git a/src/Plugins/ComplexCore/CMakeLists.txt b/src/Plugins/ComplexCore/CMakeLists.txt index f3fa19f425..7a471047b2 100644 --- a/src/Plugins/ComplexCore/CMakeLists.txt +++ b/src/Plugins/ComplexCore/CMakeLists.txt @@ -77,7 +77,7 @@ set(FilterList ImageContouringFilter IdentifySample ImportBinaryCTNorthstarFilter - ImportCSVDataFilter + ImportTextDataFilter ImportDeformKeyFileV12Filter ImportDREAM3DFilter ImportHDF5Dataset diff --git a/src/Plugins/ComplexCore/docs/Images/Import_CSV.png b/src/Plugins/ComplexCore/docs/Images/Import_Text.png similarity index 100% rename from src/Plugins/ComplexCore/docs/Images/Import_CSV.png rename to src/Plugins/ComplexCore/docs/Images/Import_Text.png diff --git a/src/Plugins/ComplexCore/docs/Images/Import_CSV_1.png b/src/Plugins/ComplexCore/docs/Images/Import_Text_1.png similarity index 100% rename from src/Plugins/ComplexCore/docs/Images/Import_CSV_1.png rename to src/Plugins/ComplexCore/docs/Images/Import_Text_1.png diff --git a/src/Plugins/ComplexCore/docs/Images/Import_CSV_10.png b/src/Plugins/ComplexCore/docs/Images/Import_Text_10.png similarity index 100% rename from src/Plugins/ComplexCore/docs/Images/Import_CSV_10.png rename to src/Plugins/ComplexCore/docs/Images/Import_Text_10.png diff --git a/src/Plugins/ComplexCore/docs/Images/Import_CSV_2.png b/src/Plugins/ComplexCore/docs/Images/Import_Text_2.png similarity index 100% rename from src/Plugins/ComplexCore/docs/Images/Import_CSV_2.png rename to src/Plugins/ComplexCore/docs/Images/Import_Text_2.png diff --git a/src/Plugins/ComplexCore/docs/Images/Import_CSV_3.png b/src/Plugins/ComplexCore/docs/Images/Import_Text_3.png similarity index 100% rename from src/Plugins/ComplexCore/docs/Images/Import_CSV_3.png rename to src/Plugins/ComplexCore/docs/Images/Import_Text_3.png diff --git a/src/Plugins/ComplexCore/docs/Images/Import_CSV_4.png b/src/Plugins/ComplexCore/docs/Images/Import_Text_4.png similarity index 100% rename from src/Plugins/ComplexCore/docs/Images/Import_CSV_4.png rename to 
src/Plugins/ComplexCore/docs/Images/Import_Text_4.png diff --git a/src/Plugins/ComplexCore/docs/Images/Import_CSV_5.png b/src/Plugins/ComplexCore/docs/Images/Import_Text_5.png similarity index 100% rename from src/Plugins/ComplexCore/docs/Images/Import_CSV_5.png rename to src/Plugins/ComplexCore/docs/Images/Import_Text_5.png diff --git a/src/Plugins/ComplexCore/docs/Images/Import_CSV_6.png b/src/Plugins/ComplexCore/docs/Images/Import_Text_6.png similarity index 100% rename from src/Plugins/ComplexCore/docs/Images/Import_CSV_6.png rename to src/Plugins/ComplexCore/docs/Images/Import_Text_6.png diff --git a/src/Plugins/ComplexCore/docs/Images/Import_CSV_7.png b/src/Plugins/ComplexCore/docs/Images/Import_Text_7.png similarity index 100% rename from src/Plugins/ComplexCore/docs/Images/Import_CSV_7.png rename to src/Plugins/ComplexCore/docs/Images/Import_Text_7.png diff --git a/src/Plugins/ComplexCore/docs/Images/Import_CSV_8.png b/src/Plugins/ComplexCore/docs/Images/Import_Text_8.png similarity index 100% rename from src/Plugins/ComplexCore/docs/Images/Import_CSV_8.png rename to src/Plugins/ComplexCore/docs/Images/Import_Text_8.png diff --git a/src/Plugins/ComplexCore/docs/Images/Import_CSV_9.png b/src/Plugins/ComplexCore/docs/Images/Import_Text_9.png similarity index 100% rename from src/Plugins/ComplexCore/docs/Images/Import_CSV_9.png rename to src/Plugins/ComplexCore/docs/Images/Import_Text_9.png diff --git a/src/Plugins/ComplexCore/docs/ImportCSVDataFilter.md b/src/Plugins/ComplexCore/docs/ImportTextDataFilter.md similarity index 74% rename from src/Plugins/ComplexCore/docs/ImportCSVDataFilter.md rename to src/Plugins/ComplexCore/docs/ImportTextDataFilter.md index b6d3f116aa..ab582d5ea9 100644 --- a/src/Plugins/ComplexCore/docs/ImportCSVDataFilter.md +++ b/src/Plugins/ComplexCore/docs/ImportTextDataFilter.md @@ -1,4 +1,4 @@ -# Import CSV Data +# Import Text Data ## Group (Subgroup) ## @@ -6,59 +6,59 @@ IO (Input) ## Description ## -This **Filter** reads CSV data from any text-based file and imports the data into DREAM3D-NX-style arrays. The user specifies which file to import, how the data is formatted, what to call each array, and what type each array should be. +This **Filter** reads text data from any text-based file and imports the data into DREAM3D-NX-style arrays. The user specifies which file to import, how the data is formatted, what to call each array, and what type each array should be. -*Note:* This **Filter** is intended to read data that is column-oriented, such that each created DREAM3D-NX array corresponds to a column of data in the CSV file. Therefore, this **Filter** will only import scalar arrays. If multiple columns are in fact different components of the same array, then the columns may be imported as separate arrays and then combined in the correct order using the Combine Attribute Arrays **Filter**. +*Note:* This **Filter** is intended to read data that is column-oriented, such that each created DREAM3D-NX array corresponds to a column of data in the text file. Therefore, this **Filter** will only import scalar arrays. If multiple columns are in fact different components of the same array, then the columns may be imported as separate arrays and then combined in the correct order using the Combine Attribute Arrays **Filter**. ### Filling Out The Inputs ### -The user first selects the **Input CSV File** path, which then enables the rest of the interface. +The user first selects the **Input Text File** path, which then enables the rest of the interface. 
-![Input CSV File Field](Images/Import_CSV_1.png) +![Input Text File Field](Images/Import_Text_1.png) -If the chosen **Input CSV File** already has headers inside the file, the user can select the **Input File Has Headers** checkbox. This +If the chosen **Input Text File** already has headers inside the file, the user can select the **Input File Has Headers** checkbox. This enables the **Headers Line Number** spin box where the user can select which line of the file contains the headers. *NOTE*: The interface only allows importing data starting at the line after the chosen **Headers Line Number**. So, in the example below, the **Headers Line Number** is set to 1, so **Start Import Line Number** defaults to 2 and has a range of 2-297 (this particular input file has 297 total lines). The max range of **Headers Line Number** is, of course, set to 296 so that at least 1 line can be imported. -![Input CSV File Field](Images/Import_CSV_2.png) +![Input Text File Field](Images/Import_Text_2.png) The user can choose how the data is delimited: comma (,), tab, semicolon (;) or space ( ). The user may also elect to ignore consecutive delimiters, which treats consecutive delimiters as one delimiter. -![Input CSV File Field](Images/Import_CSV_3.png) +![Input Text File Field](Images/Import_Text_3.png) The user can select the number of preview lines available by changing the **Number of Preview Lines** spin box. The range in the example is set to 1-296 because the import is currently starting at row 2 (from **Start Import Line Number** spin box). -![Input CSV File Field](Images/Import_CSV_4.png) +![Input Text File Field](Images/Import_Text_4.png) The user can then set the data format for each column. Selecting one or more columns will enable the **Column Data Type** combo box, where you can choose a data type or decide to skip importing specific columns as well. -![Input CSV File Field](Images/Import_CSV_5.png) -![Input CSV File Field](Images/Import_CSV_6.png) +![Input Text File Field](Images/Import_Text_5.png) +![Input Text File Field](Images/Import_Text_6.png) If the **Input File Has Headers** checkbox is OFF, then it is also possible to double-click the headers in the Preview Table to edit them. These values will be used as the name of the **Data Array** in DREAM3D-NX. *NOTE:* Editing table headers is only available when the **Input File Has Headers** checkbox is OFF. If the **Input File Has Headers** checkbox is ON, then the headers will be read from the **Headers Line Number** in the data file, and the table headers will not be editable. -![Input CSV File Field](Images/Import_CSV_7.png) +![Input Text File Field](Images/Import_Text_7.png) The user can select the tuple dimensions that will be applied to the imported arrays. -![Input CSV File Field](Images/Import_CSV_8.png) +![Input Text File Field](Images/Import_Text_8.png) The imported arrays can be stored in either an existing attribute matrix or a new attribute matrix can be created. 
-![Input CSV File Field](Images/Import_CSV_9.png) +![Input Text File Field](Images/Import_Text_9.png) Afterwards, you end up with a data structure that looks like this: -![Input CSV File Field](Images/Import_CSV_10.png) +![Input Text File Field](Images/Import_Text_10.png) ## Parameters ## | Name | Type | Description | |----------------------------------------------------------------|------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| Importer Data Object | CSVImporterData | The object that holds all data relevant to importing the data, such as input file path, custom headers, start import line number, data types for all the imported arrays, headers line number, header mode, imported array tuple dimensions, delimiters, etc. | +| Importer Data Object | TextImporterData | The object that holds all data relevant to importing the data, such as input file path, custom headers, start import line number, data types for all the imported arrays, headers line number, header mode, imported array tuple dimensions, delimiters, etc. | | Use Existing Attribute Matrix | bool | Determines whether or not to store the imported data arrays in an existing attribute matrix | | Existing Attribute Matrix (Use Existing Attribute Matrix - ON) | DataPath | The data path to the existing attribute matrix where the imported arrays will be stored | | New Attribute Matrix (Use Existing Attribute Matrix - OFF) | DataPath | The data path to the newly created attribute matrix where the imported arrays will be stored | @@ -75,9 +75,9 @@ Not Applicable ## Created Objects ## -| Kind | Default Name | Type | Component Dimensions | Description | -|------|--------------|------|----------------------|-------------| -| One or more **Element/Feature/Ensemble/etc. Data Arrays** | None | Any | 1 | One or more arrays that are created due to importing CSV data | +| Kind | Default Name | Type | Component Dimensions | Description | +|------|--------------|------|----------------------|----------------------------------------------------------------| +| One or more **Element/Feature/Ensemble/etc. 
Data Arrays** | None | Any | 1 | One or more arrays that are created due to importing text data | ## Example Pipelines ## diff --git a/src/Plugins/ComplexCore/pipelines/Import_ASCII.d3dpipeline b/src/Plugins/ComplexCore/pipelines/Import_ASCII.d3dpipeline index 357ad1baae..e6a49c3731 100644 --- a/src/Plugins/ComplexCore/pipelines/Import_ASCII.d3dpipeline +++ b/src/Plugins/ComplexCore/pipelines/Import_ASCII.d3dpipeline @@ -84,7 +84,7 @@ }, "comments": "", "filter": { - "name": "complex::ImportCSVDataFilter", + "name": "complex::ImportTextDataFilter", "uuid": "373be1f8-31cf-49f6-aa5d-e356f4f3f261" }, "isDisabled": false diff --git a/src/Plugins/ComplexCore/pipelines/ResampleRectGridToImageGeom.d3dpipeline b/src/Plugins/ComplexCore/pipelines/ResampleRectGridToImageGeom.d3dpipeline index b4f329e3c9..1f27974a01 100644 --- a/src/Plugins/ComplexCore/pipelines/ResampleRectGridToImageGeom.d3dpipeline +++ b/src/Plugins/ComplexCore/pipelines/ResampleRectGridToImageGeom.d3dpipeline @@ -41,7 +41,7 @@ }, "comments": "", "filter": { - "name": "complex::ImportCSVDataFilter", + "name": "complex::ImportTextDataFilter", "uuid": "373be1f8-31cf-49f6-aa5d-e356f4f3f261" }, "isDisabled": false diff --git a/src/Plugins/ComplexCore/src/ComplexCore/ComplexCoreLegacyUUIDMapping.hpp b/src/Plugins/ComplexCore/src/ComplexCore/ComplexCoreLegacyUUIDMapping.hpp index ccb6b0bfca..d6b8422d6d 100644 --- a/src/Plugins/ComplexCore/src/ComplexCore/ComplexCoreLegacyUUIDMapping.hpp +++ b/src/Plugins/ComplexCore/src/ComplexCore/ComplexCoreLegacyUUIDMapping.hpp @@ -55,7 +55,7 @@ #include "ComplexCore/Filters/GenerateColorTableFilter.hpp" #include "ComplexCore/Filters/IdentifySample.hpp" #include "ComplexCore/Filters/ImportBinaryCTNorthstarFilter.hpp" -#include "ComplexCore/Filters/ImportCSVDataFilter.hpp" +#include "ComplexCore/Filters/ImportTextDataFilter.hpp" #include "ComplexCore/Filters/ImportDeformKeyFileV12Filter.hpp" #include "ComplexCore/Filters/ImportDREAM3DFilter.hpp" #include "ComplexCore/Filters/ImportHDF5Dataset.hpp" @@ -166,7 +166,7 @@ namespace complex {complex::Uuid::FromString("0d0a6535-6565-51c5-a3fc-fbc00008606d").value(), complex::FilterTraits::uuid}, // GenerateColorTable {complex::Uuid::FromString("0e8c0818-a3fb-57d4-a5c8-7cb8ae54a40a").value(), complex::FilterTraits::uuid}, // IdentifySample {complex::Uuid::FromString("f2259481-5011-5f22-9fcb-c92fb6f8be10").value(), complex::FilterTraits::uuid}, // ImportBinaryCTNorthstarFilter - {complex::Uuid::FromString("bdb978bc-96bf-5498-972c-b509c38b8d50").value(), complex::FilterTraits::uuid}, // ReadASCIIData + {complex::Uuid::FromString("bdb978bc-96bf-5498-972c-b509c38b8d50").value(), complex::FilterTraits::uuid}, // ReadASCIIData {complex::Uuid::FromString("043cbde5-3878-5718-958f-ae75714df0df").value(), complex::FilterTraits::uuid}, // DataContainerReader {complex::Uuid::FromString("9e98c3b0-5707-5a3b-b8b5-23ef83b02896").value(), complex::FilterTraits::uuid}, // ImportHDF5Dataset {complex::Uuid::FromString("a7007472-29e5-5d0a-89a6-1aed11b603f8").value(), complex::FilterTraits::uuid}, // ImportAsciDataArray diff --git a/src/Plugins/ComplexCore/src/ComplexCore/Filters/ImportCSVDataFilter.cpp b/src/Plugins/ComplexCore/src/ComplexCore/Filters/ImportTextDataFilter.cpp similarity index 79% rename from src/Plugins/ComplexCore/src/ComplexCore/Filters/ImportCSVDataFilter.cpp rename to src/Plugins/ComplexCore/src/ComplexCore/Filters/ImportTextDataFilter.cpp index 3d489817cc..e84ad37c0a 100644 --- a/src/Plugins/ComplexCore/src/ComplexCore/Filters/ImportCSVDataFilter.cpp +++ 
b/src/Plugins/ComplexCore/src/ComplexCore/Filters/ImportTextDataFilter.cpp @@ -1,4 +1,4 @@ -#include "ImportCSVDataFilter.hpp" +#include "ImportTextDataFilter.hpp" #include "ComplexCore/utils/CSVDataParser.hpp" @@ -14,7 +14,7 @@ #include "complex/Parameters/BoolParameter.hpp" #include "complex/Parameters/DataGroupCreationParameter.hpp" #include "complex/Parameters/DynamicTableParameter.hpp" -#include "complex/Parameters/ImportCSVDataParameter.hpp" +#include "complex/Parameters/ImportTextDataParameter.hpp" #include "complex/Utilities/FileUtilities.hpp" #include "complex/Utilities/FilterUtilities.hpp" #include "complex/Utilities/StringUtilities.hpp" @@ -32,7 +32,7 @@ namespace fs = std::filesystem; namespace { -struct ImportCSVDataFilterCache +struct ImportTextDataFilterCache { std::string FilePath; usize TotalLines = 0; @@ -41,7 +41,7 @@ struct ImportCSVDataFilterCache }; std::atomic_int32_t s_InstanceId = 0; -std::map s_HeaderCache; +std::map s_HeaderCache; enum class IssueCodes { @@ -289,7 +289,7 @@ std::string tupleDimsToString(const std::vector& tupleDims) } //------------------------------------------------------------------------------ -IFilter::PreflightResult readHeaders(const std::string& inputFilePath, usize headersLineNum, ImportCSVDataFilterCache& headerCache, bool useTab, bool useSemicolon, bool useSpace, bool useComma, +IFilter::PreflightResult readHeaders(const std::string& inputFilePath, usize headersLineNum, ImportTextDataFilterCache& headerCache, bool useTab, bool useSemicolon, bool useSpace, bool useComma, bool useConsecutive) { std::fstream in(inputFilePath.c_str(), std::ios_base::in); @@ -314,56 +314,56 @@ IFilter::PreflightResult readHeaders(const std::string& inputFilePath, usize hea namespace complex { //------------------------------------------------------------------------------ -ImportCSVDataFilter::ImportCSVDataFilter() +ImportTextDataFilter::ImportTextDataFilter() : m_InstanceId(s_InstanceId.fetch_add(1)) { s_HeaderCache[m_InstanceId] = {}; } // ----------------------------------------------------------------------------- -ImportCSVDataFilter::~ImportCSVDataFilter() noexcept +ImportTextDataFilter::~ImportTextDataFilter() noexcept { s_HeaderCache.erase(m_InstanceId); } // ----------------------------------------------------------------------------- -std::string ImportCSVDataFilter::name() const +std::string ImportTextDataFilter::name() const { - return FilterTraits::name.str(); + return FilterTraits::name.str(); } //------------------------------------------------------------------------------ -std::string ImportCSVDataFilter::className() const +std::string ImportTextDataFilter::className() const { - return FilterTraits::className; + return FilterTraits::className; } //------------------------------------------------------------------------------ -Uuid ImportCSVDataFilter::uuid() const +Uuid ImportTextDataFilter::uuid() const { - return FilterTraits::uuid; + return FilterTraits::uuid; } //------------------------------------------------------------------------------ -std::string ImportCSVDataFilter::humanName() const +std::string ImportTextDataFilter::humanName() const { return "Import CSV Data"; } //------------------------------------------------------------------------------ -std::vector ImportCSVDataFilter::defaultTags() const +std::vector ImportTextDataFilter::defaultTags() const { return {className(), "IO", "Input", "Read", "Import", "ASCII", "ascii", "CSV", "csv", "Column"}; } 
//------------------------------------------------------------------------------ -Parameters ImportCSVDataFilter::parameters() const +Parameters ImportTextDataFilter::parameters() const { Parameters params; params.insertSeparator(Parameters::Separator{"Input Parameters"}); - params.insert(std::make_unique(k_CSVImporterData_Key, "CSV Importer Data", "Holds all relevant csv file data collected from the custom interface", CSVImporterData())); + params.insert(std::make_unique(k_TextImporterData_Key, "CSV Importer Data", "Holds all relevant csv file data collected from the custom interface", TextImporterData())); DynamicTableInfo tableInfo; tableInfo.setColsInfo(DynamicTableInfo::DynamicVectorInfo(1, "Value {}")); @@ -384,22 +384,22 @@ Parameters ImportCSVDataFilter::parameters() const } //------------------------------------------------------------------------------ -IFilter::UniquePointer ImportCSVDataFilter::clone() const +IFilter::UniquePointer ImportTextDataFilter::clone() const { - return std::make_unique(); + return std::make_unique(); } //------------------------------------------------------------------------------ -IFilter::PreflightResult ImportCSVDataFilter::preflightImpl(const DataStructure& dataStructure, const Arguments& filterArgs, const MessageHandler& messageHandler, - const std::atomic_bool& shouldCancel) const +IFilter::PreflightResult ImportTextDataFilter::preflightImpl(const DataStructure& dataStructure, const Arguments& filterArgs, const MessageHandler& messageHandler, + const std::atomic_bool& shouldCancel) const { - CSVImporterData csvImporterData = filterArgs.value(k_CSVImporterData_Key); + TextImporterData textImporterData = filterArgs.value(k_TextImporterData_Key); bool useExistingAM = filterArgs.value(k_UseExistingGroup_Key); DataPath selectedAM = filterArgs.value(k_SelectedDataGroup_Key); DataPath createdDataAM = filterArgs.value(k_CreatedDataGroup_Key); - std::string inputFilePath = csvImporterData.inputFilePath; - CSVImporterData::HeaderMode headerMode = csvImporterData.headerMode; + std::string inputFilePath = textImporterData.inputFilePath; + TextImporterData::HeaderMode headerMode = textImporterData.headerMode; complex::Result resultOutputActions; @@ -416,8 +416,8 @@ IFilter::PreflightResult ImportCSVDataFilter::preflightImpl(const DataStructure& } StringVector headers; - std::vector delimiters = CreateDelimitersVector(csvImporterData.tabAsDelimiter, csvImporterData.semicolonAsDelimiter, csvImporterData.commaAsDelimiter, csvImporterData.spaceAsDelimiter); - if(csvImporterData.inputFilePath != s_HeaderCache[s_InstanceId].FilePath) + std::vector delimiters = CreateDelimitersVector(textImporterData.tabAsDelimiter, textImporterData.semicolonAsDelimiter, textImporterData.commaAsDelimiter, textImporterData.spaceAsDelimiter); + if(textImporterData.inputFilePath != s_HeaderCache[s_InstanceId].FilePath) { std::fstream in(inputFilePath.c_str(), std::ios_base::in); if(!in.is_open()) @@ -425,7 +425,7 @@ IFilter::PreflightResult ImportCSVDataFilter::preflightImpl(const DataStructure& return {MakeErrorResult(to_underlying(IssueCodes::FILE_NOT_OPEN), fmt::format("Could not open file for reading: {}", inputFilePath)), {}}; } - s_HeaderCache[s_InstanceId].FilePath = csvImporterData.inputFilePath; + s_HeaderCache[s_InstanceId].FilePath = textImporterData.inputFilePath; usize lineCount = 0; while(!in.eof()) @@ -434,69 +434,71 @@ IFilter::PreflightResult ImportCSVDataFilter::preflightImpl(const DataStructure& std::getline(in, line); lineCount++; - if(headerMode == 
CSVImporterData::HeaderMode::LINE && lineCount == csvImporterData.headersLine) + if(headerMode == TextImporterData::HeaderMode::LINE && lineCount == textImporterData.headersLine) { s_HeaderCache[s_InstanceId].Headers = line; - s_HeaderCache[s_InstanceId].HeadersLine = csvImporterData.headersLine; + s_HeaderCache[s_InstanceId].HeadersLine = textImporterData.headersLine; } } - headers = StringUtilities::split(s_HeaderCache[s_InstanceId].Headers, delimiters, csvImporterData.consecutiveDelimiters); + headers = StringUtilities::split(s_HeaderCache[s_InstanceId].Headers, delimiters, textImporterData.consecutiveDelimiters); s_HeaderCache[s_InstanceId].TotalLines = lineCount; } - else if(headerMode == CSVImporterData::HeaderMode::LINE) + else if(headerMode == TextImporterData::HeaderMode::LINE) { - if(csvImporterData.headersLine != s_HeaderCache[s_InstanceId].HeadersLine) + if(textImporterData.headersLine != s_HeaderCache[s_InstanceId].HeadersLine) { - IFilter::PreflightResult result = readHeaders(csvImporterData.inputFilePath, csvImporterData.headersLine, s_HeaderCache[s_InstanceId], csvImporterData.tabAsDelimiter, - csvImporterData.semicolonAsDelimiter, csvImporterData.spaceAsDelimiter, csvImporterData.commaAsDelimiter, csvImporterData.consecutiveDelimiters); + IFilter::PreflightResult result = + readHeaders(textImporterData.inputFilePath, textImporterData.headersLine, s_HeaderCache[s_InstanceId], textImporterData.tabAsDelimiter, textImporterData.semicolonAsDelimiter, + textImporterData.spaceAsDelimiter, textImporterData.commaAsDelimiter, textImporterData.consecutiveDelimiters); if(result.outputActions.invalid()) { return result; } } - headers = StringUtilities::split(s_HeaderCache[s_InstanceId].Headers, delimiters, csvImporterData.consecutiveDelimiters); + headers = StringUtilities::split(s_HeaderCache[s_InstanceId].Headers, delimiters, textImporterData.consecutiveDelimiters); } - if(headerMode == CSVImporterData::HeaderMode::CUSTOM) + if(headerMode == TextImporterData::HeaderMode::CUSTOM) { - headers = csvImporterData.customHeaders; + headers = textImporterData.customHeaders; } usize totalLines = s_HeaderCache[s_InstanceId].TotalLines; // Check that we have a valid start import row - if(csvImporterData.startImportRow == 0) + if(textImporterData.startImportRow == 0) { std::string errMsg = "'Start import at row' value is out of range. The 'Start import at row' value cannot be set to line #0."; return {MakeErrorResult(to_underlying(IssueCodes::START_IMPORT_ROW_OUT_OF_RANGE), errMsg), {}}; } - if(csvImporterData.startImportRow > totalLines) + if(textImporterData.startImportRow > totalLines) { - std::string errMsg = fmt::format("'Start import at row' value ({}) is larger than the total number of lines in the file ({}).", csvImporterData.startImportRow, totalLines); + std::string errMsg = fmt::format("'Start import at row' value ({}) is larger than the total number of lines in the file ({}).", textImporterData.startImportRow, totalLines); return {MakeErrorResult(to_underlying(IssueCodes::START_IMPORT_ROW_OUT_OF_RANGE), errMsg), {}}; } // Check that we have a valid header line number - if(headerMode == CSVImporterData::HeaderMode::LINE && csvImporterData.headersLine == 0) + if(headerMode == TextImporterData::HeaderMode::LINE && textImporterData.headersLine == 0) { std::string errMsg = "The header line number is out of range. 
The header line number cannot be set to line #0."; return {MakeErrorResult(to_underlying(IssueCodes::HEADER_LINE_OUT_OF_RANGE), errMsg), {}}; } - if(headerMode == CSVImporterData::HeaderMode::LINE && csvImporterData.headersLine > totalLines) + if(headerMode == TextImporterData::HeaderMode::LINE && textImporterData.headersLine > totalLines) { - std::string errMsg = fmt::format("The header line number is out of range. There are {} lines in the file and the header line number is set to line #{}.", totalLines, csvImporterData.headersLine); + std::string errMsg = + fmt::format("The header line number is out of range. There are {} lines in the file and the header line number is set to line #{}.", totalLines, textImporterData.headersLine); return {MakeErrorResult(to_underlying(IssueCodes::HEADER_LINE_OUT_OF_RANGE), errMsg), {}}; } - if(headerMode == CSVImporterData::HeaderMode::LINE && csvImporterData.headersLine > csvImporterData.startImportRow) + if(headerMode == TextImporterData::HeaderMode::LINE && textImporterData.headersLine > textImporterData.startImportRow) { std::string errMsg = fmt::format( "The header line number is out of range. The start import row is set to line #{} and the header line number is set to line #{}. The header line number must be in the range 1-{}.", - csvImporterData.startImportRow, csvImporterData.headersLine, csvImporterData.startImportRow - 1); + textImporterData.startImportRow, textImporterData.headersLine, textImporterData.startImportRow - 1); return {MakeErrorResult(to_underlying(IssueCodes::HEADER_LINE_OUT_OF_RANGE), errMsg), {}}; } @@ -508,20 +510,20 @@ IFilter::PreflightResult ImportCSVDataFilter::preflightImpl(const DataStructure& return {MakeErrorResult(to_underlying(IssueCodes::EMPTY_HEADERS), errMsg), {}}; } - if(csvImporterData.dataTypes.size() != headers.size()) + if(textImporterData.dataTypes.size() != headers.size()) { std::string errMsg = fmt::format("The number of data types ({}) does not match the number of imported array headers ({}). The number of data types must match the number of imported array headers.", - csvImporterData.dataTypes.size(), headers.size()); + textImporterData.dataTypes.size(), headers.size()); return {MakeErrorResult(to_underlying(IssueCodes::INCORRECT_DATATYPE_COUNT), errMsg), {}}; } - if(csvImporterData.skippedArrayMask.size() != headers.size()) + if(textImporterData.skippedArrayMask.size() != headers.size()) { std::string errMsg = fmt::format( "The number of booleans in the skipped array mask ({}) does not match the number of imported array headers ({}). 
The number of booleans in the skipped array mask must match the number " "of imported array headers.", - csvImporterData.skippedArrayMask.size(), headers.size()); + textImporterData.skippedArrayMask.size(), headers.size()); return {MakeErrorResult(to_underlying(IssueCodes::INCORRECT_MASK_COUNT), errMsg), {}}; } @@ -553,17 +555,17 @@ IFilter::PreflightResult ImportCSVDataFilter::preflightImpl(const DataStructure& } // Check that we have a valid tuple count - usize totalImportedLines = totalLines - csvImporterData.startImportRow + 1; - usize tupleTotal = std::accumulate(csvImporterData.tupleDims.begin(), csvImporterData.tupleDims.end(), static_cast(1), std::multiplies()); + usize totalImportedLines = totalLines - textImporterData.startImportRow + 1; + usize tupleTotal = std::accumulate(textImporterData.tupleDims.begin(), textImporterData.tupleDims.end(), static_cast(1), std::multiplies()); if(tupleTotal == 0) { - std::string tupleDimsStr = tupleDimsToString(csvImporterData.tupleDims); + std::string tupleDimsStr = tupleDimsToString(textImporterData.tupleDims); std::string errMsg = fmt::format("Error: The current tuple dimensions ({}) has 0 total tuples. At least 1 tuple is required.", tupleDimsStr, tupleTotal, totalImportedLines); return {MakeErrorResult(to_underlying(IssueCodes::INCORRECT_TUPLES), errMsg), {}}; } else if(tupleTotal > totalImportedLines) { - std::string tupleDimsStr = tupleDimsToString(csvImporterData.tupleDims); + std::string tupleDimsStr = tupleDimsToString(textImporterData.tupleDims); std::string errMsg = fmt::format("Error: The current tuple dimensions ({}) has {} total tuples, but this is larger than the total number of available lines to import ({}).", tupleDimsStr, tupleTotal, totalImportedLines); return {MakeErrorResult(to_underlying(IssueCodes::INCORRECT_TUPLES), errMsg), {}}; @@ -588,17 +590,17 @@ IFilter::PreflightResult ImportCSVDataFilter::preflightImpl(const DataStructure& return {std::move(result)}; } groupPath = createdDataAM; - resultOutputActions.value().appendAction(std::make_unique(createdDataAM, csvImporterData.tupleDims)); + resultOutputActions.value().appendAction(std::make_unique(createdDataAM, textImporterData.tupleDims)); } // Create the arrays - std::vector tupleDims(csvImporterData.tupleDims.size()); - std::transform(csvImporterData.tupleDims.begin(), csvImporterData.tupleDims.end(), tupleDims.begin(), [](float64 d) { return static_cast(d); }); + std::vector tupleDims(textImporterData.tupleDims.size()); + std::transform(textImporterData.tupleDims.begin(), textImporterData.tupleDims.end(), tupleDims.begin(), [](float64 d) { return static_cast(d); }); if(useExistingAM) { const AttributeMatrix& am = dataStructure.getDataRefAs(groupPath); tupleDims = am.getShape(); - std::string tupleDimsStr = tupleDimsToString(csvImporterData.tupleDims); + std::string tupleDimsStr = tupleDimsToString(textImporterData.tupleDims); std::string tupleDimsStr2 = tupleDimsToString(tupleDims); std::string msg = fmt::format("The Array Tuple Dimensions ({}) will be ignored and the Existing Attribute Matrix tuple dimensions ({}) will be used instead.", tupleDimsStr, tupleDimsStr2); resultOutputActions.warnings().push_back(Warning{to_underlying(IssueCodes::IGNORED_TUPLE_DIMS), msg}); @@ -606,13 +608,13 @@ IFilter::PreflightResult ImportCSVDataFilter::preflightImpl(const DataStructure& for(usize i = 0; i < headers.size(); i++) { - if(csvImporterData.skippedArrayMask[i]) + if(textImporterData.skippedArrayMask[i]) { // The user decided to skip importing this array continue; } - 
DataType dataType = csvImporterData.dataTypes[i]; + DataType dataType = textImporterData.dataTypes[i]; std::string name = headers[i]; DataPath arrayPath = groupPath; @@ -624,25 +626,25 @@ IFilter::PreflightResult ImportCSVDataFilter::preflightImpl(const DataStructure& } //------------------------------------------------------------------------------ -Result<> ImportCSVDataFilter::executeImpl(DataStructure& dataStructure, const Arguments& filterArgs, const PipelineFilter* pipelineNode, const MessageHandler& messageHandler, - const std::atomic_bool& shouldCancel) const +Result<> ImportTextDataFilter::executeImpl(DataStructure& dataStructure, const Arguments& filterArgs, const PipelineFilter* pipelineNode, const MessageHandler& messageHandler, + const std::atomic_bool& shouldCancel) const { - CSVImporterData csvImporterData = filterArgs.value(k_CSVImporterData_Key); + TextImporterData textImporterData = filterArgs.value(k_TextImporterData_Key); bool useExistingGroup = filterArgs.value(k_UseExistingGroup_Key); DataPath selectedDataGroup = filterArgs.value(k_SelectedDataGroup_Key); DataPath createdDataGroup = filterArgs.value(k_CreatedDataGroup_Key); - std::string inputFilePath = csvImporterData.inputFilePath; - std::vector delimiters = CreateDelimitersVector(csvImporterData.tabAsDelimiter, csvImporterData.semicolonAsDelimiter, csvImporterData.commaAsDelimiter, csvImporterData.spaceAsDelimiter); - StringVector headers = StringUtilities::split(s_HeaderCache[s_InstanceId].Headers, delimiters, csvImporterData.consecutiveDelimiters); - DataTypeVector dataTypes = csvImporterData.dataTypes; - std::vector skippedArrays = csvImporterData.skippedArrayMask; - bool consecutiveDelimiters = csvImporterData.consecutiveDelimiters; - usize startImportRow = csvImporterData.startImportRow; + std::string inputFilePath = textImporterData.inputFilePath; + std::vector delimiters = CreateDelimitersVector(textImporterData.tabAsDelimiter, textImporterData.semicolonAsDelimiter, textImporterData.commaAsDelimiter, textImporterData.spaceAsDelimiter); + StringVector headers = StringUtilities::split(s_HeaderCache[s_InstanceId].Headers, delimiters, textImporterData.consecutiveDelimiters); + DataTypeVector dataTypes = textImporterData.dataTypes; + std::vector skippedArrays = textImporterData.skippedArrayMask; + bool consecutiveDelimiters = textImporterData.consecutiveDelimiters; + usize startImportRow = textImporterData.startImportRow; - if(csvImporterData.headerMode == CSVImporterData::HeaderMode::CUSTOM) + if(textImporterData.headerMode == TextImporterData::HeaderMode::CUSTOM) { - headers = csvImporterData.customHeaders; + headers = textImporterData.customHeaders; } DataPath groupPath = createdDataGroup; @@ -670,7 +672,7 @@ Result<> ImportCSVDataFilter::executeImpl(DataStructure& dataStructure, const Ar } float32 threshold = 0.0f; - usize numTuples = std::accumulate(csvImporterData.tupleDims.cbegin(), csvImporterData.tupleDims.cend(), static_cast(1), std::multiplies<>()); + usize numTuples = std::accumulate(textImporterData.tupleDims.cbegin(), textImporterData.tupleDims.cend(), static_cast(1), std::multiplies<>()); if(useExistingGroup) { const AttributeMatrix& am = dataStructure.getDataRefAs(groupPath); diff --git a/src/Plugins/ComplexCore/src/ComplexCore/Filters/ImportCSVDataFilter.hpp b/src/Plugins/ComplexCore/src/ComplexCore/Filters/ImportTextDataFilter.hpp similarity index 85% rename from src/Plugins/ComplexCore/src/ComplexCore/Filters/ImportCSVDataFilter.hpp rename to 
src/Plugins/ComplexCore/src/ComplexCore/Filters/ImportTextDataFilter.hpp index 8127e24398..cc3a3654ba 100644 --- a/src/Plugins/ComplexCore/src/ComplexCore/Filters/ImportCSVDataFilter.hpp +++ b/src/Plugins/ComplexCore/src/ComplexCore/Filters/ImportTextDataFilter.hpp @@ -11,7 +11,7 @@ class AbstractDataParser; namespace complex { /** - * @class ImportCSVDataFilter + * @class ImportTextDataFilter * @brief This filter reads CSV data from any text-based file and imports the data into complex-style arrays. * The user uses the parameter user interface to specify which file to import, how the data is formatted, what to call * each array, and what type each array should be. @@ -21,20 +21,20 @@ namespace complex * If multiple columns are in fact different components of the same array, then the columns may be imported as * separate arrays and then combined in the correct order using the Combine Attribute Arrays filter. */ -class COMPLEXCORE_EXPORT ImportCSVDataFilter : public IFilter +class COMPLEXCORE_EXPORT ImportTextDataFilter : public IFilter { public: - ImportCSVDataFilter(); - ~ImportCSVDataFilter() noexcept override; + ImportTextDataFilter(); + ~ImportTextDataFilter() noexcept override; - ImportCSVDataFilter(const ImportCSVDataFilter&) = delete; - ImportCSVDataFilter(ImportCSVDataFilter&&) noexcept = delete; + ImportTextDataFilter(const ImportTextDataFilter&) = delete; + ImportTextDataFilter(ImportTextDataFilter&&) noexcept = delete; - ImportCSVDataFilter& operator=(const ImportCSVDataFilter&) = delete; - ImportCSVDataFilter& operator=(ImportCSVDataFilter&&) noexcept = delete; + ImportTextDataFilter& operator=(const ImportTextDataFilter&) = delete; + ImportTextDataFilter& operator=(ImportTextDataFilter&&) noexcept = delete; // Parameter Keys - static inline constexpr StringLiteral k_CSVImporterData_Key = "csv_importer_data"; + static inline constexpr StringLiteral k_TextImporterData_Key = "text_importer_data"; static inline constexpr StringLiteral k_UseExistingGroup_Key = "use_existing_group"; static inline constexpr StringLiteral k_SelectedDataGroup_Key = "selected_data_group"; static inline constexpr StringLiteral k_CreatedDataGroup_Key = "created_data_group"; @@ -109,4 +109,4 @@ class COMPLEXCORE_EXPORT ImportCSVDataFilter : public IFilter }; } // namespace complex -COMPLEX_DEF_FILTER_TRAITS(complex, ImportCSVDataFilter, "373be1f8-31cf-49f6-aa5d-e356f4f3f261"); +COMPLEX_DEF_FILTER_TRAITS(complex, ImportTextDataFilter, "373be1f8-31cf-49f6-aa5d-e356f4f3f261"); diff --git a/src/Plugins/ComplexCore/test/CMakeLists.txt b/src/Plugins/ComplexCore/test/CMakeLists.txt index 4693917832..d41da0b0fe 100644 --- a/src/Plugins/ComplexCore/test/CMakeLists.txt +++ b/src/Plugins/ComplexCore/test/CMakeLists.txt @@ -77,7 +77,7 @@ set(${PLUGIN_NAME}UnitTest_SRCS ImageContouringTest.cpp ImageGeomTest.cpp ImportBinaryCTNorthstarTest.cpp - ImportCSVDataTest.cpp + ImportTextDataTest.cpp # ImportDeformKeyFileV12Test.cpp ImportHDF5DatasetTest.cpp diff --git a/src/Plugins/ComplexCore/test/ImportCSVDataTest.cpp b/src/Plugins/ComplexCore/test/ImportTextDataTest.cpp similarity index 72% rename from src/Plugins/ComplexCore/test/ImportCSVDataTest.cpp rename to src/Plugins/ComplexCore/test/ImportTextDataTest.cpp index 0d07bbbdd5..23297695cd 100644 --- a/src/Plugins/ComplexCore/test/ImportCSVDataTest.cpp +++ b/src/Plugins/ComplexCore/test/ImportTextDataTest.cpp @@ -1,11 +1,11 @@ #include "ComplexCore/ComplexCore_test_dirs.hpp" #include "ComplexCore/Filters/CreateDataGroup.hpp" -#include 
"ComplexCore/Filters/ImportCSVDataFilter.hpp" +#include "ComplexCore/Filters/ImportTextDataFilter.hpp" #include "complex/Common/TypesUtility.hpp" #include "complex/DataStructure/DataArray.hpp" #include "complex/Parameters/DynamicTableParameter.hpp" -#include "complex/Parameters/ImportCSVDataParameter.hpp" +#include "complex/Parameters/ImportTextDataParameter.hpp" #include "complex/UnitTest/UnitTestCommon.hpp" #include "complex/Utilities/DataArrayUtilities.hpp" #include "complex/Utilities/StringUtilities.hpp" @@ -19,7 +19,7 @@ using namespace complex; namespace { -const fs::path k_TestInput = fs::path(unit_test::k_BinaryDir.view()) / "ImportCSVDataTest" / "Input.txt"; +const fs::path k_TestInput = fs::path(unit_test::k_BinaryDir.view()) / "ImportTextDataTest" / "Input.txt"; constexpr int32 k_InvalidArgumentErrorCode = -100; constexpr int32 k_OverflowErrorCode = -101; constexpr int32 k_BlankLineErrorCode = -119; @@ -80,13 +80,13 @@ void CreateTestDataFile(const fs::path& inputFilePath, nonstd::span } // ----------------------------------------------------------------------------- -Arguments createArguments(const std::string& inputFilePath, usize startImportRow, CSVImporterData::HeaderMode headerMode, usize headersLine, const std::vector& customHeaders, +Arguments createArguments(const std::string& inputFilePath, usize startImportRow, TextImporterData::HeaderMode headerMode, usize headersLine, const std::vector& customHeaders, const std::vector& dataTypes, const std::vector& skippedArrayMask, const std::vector& tupleDims, nonstd::span values, const std::string& newGroupName) { Arguments args; - CSVImporterData data; + TextImporterData data; data.inputFilePath = inputFilePath; data.customHeaders = customHeaders; data.dataTypes = dataTypes; @@ -97,9 +97,9 @@ Arguments createArguments(const std::string& inputFilePath, usize startImportRow data.tupleDims = tupleDims; data.skippedArrayMask = skippedArrayMask; - args.insertOrAssign(ImportCSVDataFilter::k_CSVImporterData_Key, std::make_any(data)); - args.insertOrAssign(ImportCSVDataFilter::k_UseExistingGroup_Key, std::make_any(false)); - args.insertOrAssign(ImportCSVDataFilter::k_CreatedDataGroup_Key, std::make_any(DataPath({newGroupName}))); + args.insertOrAssign(ImportTextDataFilter::k_TextImporterData_Key, std::make_any(data)); + args.insertOrAssign(ImportTextDataFilter::k_UseExistingGroup_Key, std::make_any(false)); + args.insertOrAssign(ImportTextDataFilter::k_CreatedDataGroup_Key, std::make_any(DataPath({newGroupName}))); return args; } @@ -116,9 +116,9 @@ void TestCase_TestPrimitives(nonstd::span values) std::string arrayName = "Array"; DataPath arrayPath = DataPath({newGroupName, arrayName}); - ImportCSVDataFilter filter; + ImportTextDataFilter filter; DataStructure dataStructure; - Arguments args = createArguments(k_TestInput.string(), 2, CSVImporterData::HeaderMode::LINE, 1, {arrayName}, {GetDataType()}, {false}, {static_cast(values.size())}, values, newGroupName); + Arguments args = createArguments(k_TestInput.string(), 2, TextImporterData::HeaderMode::LINE, 1, {arrayName}, {GetDataType()}, {false}, {static_cast(values.size())}, values, newGroupName); // Create the test input data file CreateTestDataFile(k_TestInput, values, {arrayName}); @@ -158,9 +158,9 @@ void TestCase_TestPrimitives_Error(nonstd::span values, int32 expec std::string arrayName = "Array"; DataPath arrayPath = DataPath({newGroupName, arrayName}); - ImportCSVDataFilter filter; + ImportTextDataFilter filter; DataStructure dataStructure; - Arguments args = 
createArguments(k_TestInput.string(), 2, CSVImporterData::HeaderMode::LINE, 1, {arrayName}, {GetDataType()}, {false}, {static_cast(values.size())}, values, newGroupName); + Arguments args = createArguments(k_TestInput.string(), 2, TextImporterData::HeaderMode::LINE, 1, {arrayName}, {GetDataType()}, {false}, {static_cast(values.size())}, values, newGroupName); // Create the test input data file fs::create_directories(k_TestInput.parent_path()); @@ -178,12 +178,12 @@ void TestCase_TestPrimitives_Error(nonstd::span values, int32 expec } // ----------------------------------------------------------------------------- -void TestCase_TestImporterData_Error(const std::string& inputFilePath, usize startImportRow, CSVImporterData::HeaderMode headerMode, usize headersLine, const std::vector& headers, +void TestCase_TestImporterData_Error(const std::string& inputFilePath, usize startImportRow, TextImporterData::HeaderMode headerMode, usize headersLine, const std::vector& headers, const std::vector& dataTypes, const std::vector& skippedArrayMask, const std::vector& tupleDims, nonstd::span values, int32 expectedErrorCode) { std::string newGroupName = "New Group"; - ImportCSVDataFilter filter; + ImportTextDataFilter filter; DataStructure dataStructure; Arguments args = createArguments(inputFilePath, startImportRow, headerMode, headersLine, headers, dataTypes, skippedArrayMask, tupleDims, values, newGroupName); @@ -194,7 +194,7 @@ void TestCase_TestImporterData_Error(const std::string& inputFilePath, usize sta REQUIRE(executeResult.result.errors()[0].code == expectedErrorCode); } -TEST_CASE("ComplexCore::ImportCSVDataFilter (Case 1): Valid filter execution") +TEST_CASE("ComplexCore::ImportTextDataFilter (Case 1): Valid filter execution") { // Create the parent directory path fs::create_directories(k_TestInput.parent_path()); @@ -230,17 +230,17 @@ TEST_CASE("ComplexCore::ImportCSVDataFilter (Case 1): Valid filter execution") TestCase_TestPrimitives(v); } -TEST_CASE("ComplexCore::ImportCSVDataFilter (Case 2): Valid filter execution - Skipped Array") +TEST_CASE("ComplexCore::ImportTextDataFilter (Case 2): Valid filter execution - Skipped Array") { std::string newGroupName = "New Group"; std::string arrayName = "Array"; DataPath arrayPath = DataPath({newGroupName, arrayName}); - ImportCSVDataFilter filter; + ImportTextDataFilter filter; DataStructure dataStructure; std::vector values = {"0"}; - Arguments args = createArguments(k_TestInput.string(), 2, CSVImporterData::HeaderMode::LINE, 1, {arrayName}, {DataType::int8}, {true}, {static_cast(values.size())}, values, newGroupName); + Arguments args = createArguments(k_TestInput.string(), 2, TextImporterData::HeaderMode::LINE, 1, {arrayName}, {DataType::int8}, {true}, {static_cast(values.size())}, values, newGroupName); // Create the test input data file CreateTestDataFile(k_TestInput, values, {arrayName}); @@ -258,7 +258,7 @@ TEST_CASE("ComplexCore::ImportCSVDataFilter (Case 2): Valid filter execution - S REQUIRE(array == nullptr); } -TEST_CASE("ComplexCore::ImportCSVDataFilter (Case 3): Invalid filter execution - Out of Bounds") +TEST_CASE("ComplexCore::ImportTextDataFilter (Case 3): Invalid filter execution - Out of Bounds") { // Create the parent directory path fs::create_directories(k_TestInput.parent_path()); @@ -334,7 +334,7 @@ TEST_CASE("ComplexCore::ImportCSVDataFilter (Case 3): Invalid filter execution - TestCase_TestPrimitives_Error(v, k_OverflowErrorCode); } -TEST_CASE("ComplexCore::ImportCSVDataFilter (Case 4): Invalid filter execution - Invalid 
arguments") +TEST_CASE("ComplexCore::ImportTextDataFilter (Case 4): Invalid filter execution - Invalid arguments") { // Create the parent directory path fs::create_directories(k_TestInput.parent_path()); @@ -376,7 +376,7 @@ TEST_CASE("ComplexCore::ImportCSVDataFilter (Case 4): Invalid filter execution - TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); } -TEST_CASE("ComplexCore::ImportCSVDataFilter (Case 5): Invalid filter execution - Invalid CSVImporterData values") +TEST_CASE("ComplexCore::ImportTextDataFilter (Case 5): Invalid filter execution - Invalid TextImporterData values") { std::vector v = {std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max())}; fs::create_directories(k_TestInput.parent_path()); @@ -384,51 +384,51 @@ TEST_CASE("ComplexCore::ImportCSVDataFilter (Case 5): Invalid filter execution - std::vector tupleDims = {static_cast(v.size())}; // Empty input file path - TestCase_TestImporterData_Error("", 2, CSVImporterData::HeaderMode::LINE, 1, {}, {DataType::int8}, {false}, tupleDims, v, k_EmptyFile); + TestCase_TestImporterData_Error("", 2, TextImporterData::HeaderMode::LINE, 1, {}, {DataType::int8}, {false}, tupleDims, v, k_EmptyFile); // Input file does not exist fs::path tmp_file = fs::temp_directory_path() / "ThisFileDoesNotExist.txt"; - TestCase_TestImporterData_Error(tmp_file.string(), 2, CSVImporterData::HeaderMode::LINE, 1, {}, {DataType::int8}, {false}, tupleDims, v, k_FileDoesNotExist); + TestCase_TestImporterData_Error(tmp_file.string(), 2, TextImporterData::HeaderMode::LINE, 1, {}, {DataType::int8}, {false}, tupleDims, v, k_FileDoesNotExist); // Start Import Row Out-of-Range - TestCase_TestImporterData_Error(k_TestInput.string(), 0, CSVImporterData::HeaderMode::LINE, 1, {}, {DataType::int8}, {false}, tupleDims, v, k_StartImportRowOutOfRange); - TestCase_TestImporterData_Error(k_TestInput.string(), 500, CSVImporterData::HeaderMode::LINE, 1, {}, {DataType::int8}, {false}, tupleDims, v, k_StartImportRowOutOfRange); + TestCase_TestImporterData_Error(k_TestInput.string(), 0, TextImporterData::HeaderMode::LINE, 1, {}, {DataType::int8}, {false}, tupleDims, v, k_StartImportRowOutOfRange); + TestCase_TestImporterData_Error(k_TestInput.string(), 500, TextImporterData::HeaderMode::LINE, 1, {}, {DataType::int8}, {false}, tupleDims, v, k_StartImportRowOutOfRange); // Header Line Number Out-of-Range - TestCase_TestImporterData_Error(k_TestInput.string(), 2, CSVImporterData::HeaderMode::LINE, 0, {}, {DataType::int8}, {false}, tupleDims, v, k_HeaderLineOutOfRange); - TestCase_TestImporterData_Error(k_TestInput.string(), 2, CSVImporterData::HeaderMode::LINE, 600, {}, {DataType::int8}, {false}, tupleDims, v, k_HeaderLineOutOfRange); - TestCase_TestImporterData_Error(k_TestInput.string(), 2, CSVImporterData::HeaderMode::LINE, 3, {}, {DataType::int8}, {false}, tupleDims, v, k_HeaderLineOutOfRange); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, TextImporterData::HeaderMode::LINE, 0, {}, {DataType::int8}, {false}, tupleDims, v, k_HeaderLineOutOfRange); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, TextImporterData::HeaderMode::LINE, 600, {}, {DataType::int8}, {false}, tupleDims, v, k_HeaderLineOutOfRange); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, TextImporterData::HeaderMode::LINE, 3, {}, {DataType::int8}, {false}, tupleDims, v, k_HeaderLineOutOfRange); // Empty array headers tmp_file = fs::temp_directory_path() / "BlankLines.txt"; v = {std::to_string(std::numeric_limits::min()), "", 
std::to_string(std::numeric_limits::max())}; CreateTestDataFile(tmp_file, v, {"Array"}); - TestCase_TestImporterData_Error(tmp_file.string(), 4, CSVImporterData::HeaderMode::LINE, 3, {}, {DataType::int8}, {false}, {static_cast(v.size())}, v, k_EmptyHeaders); - TestCase_TestImporterData_Error(k_TestInput.string(), 2, CSVImporterData::HeaderMode::CUSTOM, 1, {}, {DataType::int8}, {false}, {static_cast(v.size())}, v, k_EmptyHeaders); + TestCase_TestImporterData_Error(tmp_file.string(), 4, TextImporterData::HeaderMode::LINE, 3, {}, {DataType::int8}, {false}, {static_cast(v.size())}, v, k_EmptyHeaders); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, TextImporterData::HeaderMode::CUSTOM, 1, {}, {DataType::int8}, {false}, {static_cast(v.size())}, v, k_EmptyHeaders); fs::remove(tmp_file); v = {std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max())}; // Incorrect Data Type Count - TestCase_TestImporterData_Error(k_TestInput.string(), 2, CSVImporterData::HeaderMode::LINE, 1, {}, {}, {false}, tupleDims, v, k_IncorrectDataTypeCount); - TestCase_TestImporterData_Error(k_TestInput.string(), 2, CSVImporterData::HeaderMode::LINE, 1, {}, {DataType::int8, DataType::int32}, {false}, tupleDims, v, k_IncorrectDataTypeCount); - TestCase_TestImporterData_Error(k_TestInput.string(), 2, CSVImporterData::HeaderMode::CUSTOM, 1, {"Custom Array"}, {}, {false}, tupleDims, v, k_IncorrectDataTypeCount); - TestCase_TestImporterData_Error(k_TestInput.string(), 2, CSVImporterData::HeaderMode::CUSTOM, 1, {"Custom Array"}, {DataType::int8, DataType::int32}, {false}, tupleDims, v, + TestCase_TestImporterData_Error(k_TestInput.string(), 2, TextImporterData::HeaderMode::LINE, 1, {}, {}, {false}, tupleDims, v, k_IncorrectDataTypeCount); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, TextImporterData::HeaderMode::LINE, 1, {}, {DataType::int8, DataType::int32}, {false}, tupleDims, v, k_IncorrectDataTypeCount); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, TextImporterData::HeaderMode::CUSTOM, 1, {"Custom Array"}, {}, {false}, tupleDims, v, k_IncorrectDataTypeCount); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, TextImporterData::HeaderMode::CUSTOM, 1, {"Custom Array"}, {DataType::int8, DataType::int32}, {false}, tupleDims, v, k_IncorrectDataTypeCount); // Incorrect Skipped Array Mask Count - TestCase_TestImporterData_Error(k_TestInput.string(), 2, CSVImporterData::HeaderMode::LINE, 1, {}, {DataType::int8}, {}, tupleDims, v, k_IncorrectMaskCount); - TestCase_TestImporterData_Error(k_TestInput.string(), 2, CSVImporterData::HeaderMode::LINE, 1, {}, {DataType::int8}, {false, false}, tupleDims, v, k_IncorrectMaskCount); - TestCase_TestImporterData_Error(k_TestInput.string(), 2, CSVImporterData::HeaderMode::CUSTOM, 1, {"Custom Array"}, {DataType::int8}, {}, tupleDims, v, k_IncorrectMaskCount); - TestCase_TestImporterData_Error(k_TestInput.string(), 2, CSVImporterData::HeaderMode::CUSTOM, 1, {"Custom Array"}, {DataType::int8}, {false, false}, tupleDims, v, k_IncorrectMaskCount); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, TextImporterData::HeaderMode::LINE, 1, {}, {DataType::int8}, {}, tupleDims, v, k_IncorrectMaskCount); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, TextImporterData::HeaderMode::LINE, 1, {}, {DataType::int8}, {false, false}, tupleDims, v, k_IncorrectMaskCount); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, TextImporterData::HeaderMode::CUSTOM, 1, {"Custom Array"}, {DataType::int8}, {}, tupleDims, v, 
k_IncorrectMaskCount); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, TextImporterData::HeaderMode::CUSTOM, 1, {"Custom Array"}, {DataType::int8}, {false, false}, tupleDims, v, k_IncorrectMaskCount); // Empty Header Names - TestCase_TestImporterData_Error(k_TestInput.string(), 2, CSVImporterData::HeaderMode::CUSTOM, 1, {""}, {DataType::int8}, {false}, tupleDims, v, k_EmptyNames); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, TextImporterData::HeaderMode::CUSTOM, 1, {""}, {DataType::int8}, {false}, tupleDims, v, k_EmptyNames); // Duplicate Header Names tmp_file = fs::temp_directory_path() / "DuplicateHeaders.txt"; std::vector duplicateHeaders = {"Custom Array", "Custom Array"}; CreateTestDataFile(tmp_file, v, duplicateHeaders); - TestCase_TestImporterData_Error(tmp_file.string(), 2, CSVImporterData::HeaderMode::CUSTOM, 1, duplicateHeaders, {DataType::int8, DataType::int8}, {false, false}, tupleDims, v, k_DuplicateNames); + TestCase_TestImporterData_Error(tmp_file.string(), 2, TextImporterData::HeaderMode::CUSTOM, 1, duplicateHeaders, {DataType::int8, DataType::int8}, {false, false}, tupleDims, v, k_DuplicateNames); fs::remove(tmp_file); // Illegal Header Names @@ -436,42 +436,42 @@ TEST_CASE("ComplexCore::ImportCSVDataFilter (Case 5): Invalid filter execution - std::vector illegalHeaders = {"Illegal/Header"}; CreateTestDataFile(tmp_file, v, illegalHeaders); - TestCase_TestImporterData_Error(tmp_file.string(), 2, CSVImporterData::HeaderMode::LINE, 1, {}, {DataType::int8}, {false}, tupleDims, v, k_IllegalNames); - TestCase_TestImporterData_Error(tmp_file.string(), 2, CSVImporterData::HeaderMode::CUSTOM, 1, illegalHeaders, {DataType::int8}, {false}, tupleDims, v, k_IllegalNames); + TestCase_TestImporterData_Error(tmp_file.string(), 2, TextImporterData::HeaderMode::LINE, 1, {}, {DataType::int8}, {false}, tupleDims, v, k_IllegalNames); + TestCase_TestImporterData_Error(tmp_file.string(), 2, TextImporterData::HeaderMode::CUSTOM, 1, illegalHeaders, {DataType::int8}, {false}, tupleDims, v, k_IllegalNames); illegalHeaders = {"Illegal\\Header"}; CreateTestDataFile(tmp_file, v, illegalHeaders); - TestCase_TestImporterData_Error(tmp_file.string(), 2, CSVImporterData::HeaderMode::LINE, 1, {}, {DataType::int8}, {false}, tupleDims, v, k_IllegalNames); - TestCase_TestImporterData_Error(tmp_file.string(), 2, CSVImporterData::HeaderMode::CUSTOM, 1, illegalHeaders, {DataType::int8}, {false}, tupleDims, v, k_IllegalNames); + TestCase_TestImporterData_Error(tmp_file.string(), 2, TextImporterData::HeaderMode::LINE, 1, {}, {DataType::int8}, {false}, tupleDims, v, k_IllegalNames); + TestCase_TestImporterData_Error(tmp_file.string(), 2, TextImporterData::HeaderMode::CUSTOM, 1, illegalHeaders, {DataType::int8}, {false}, tupleDims, v, k_IllegalNames); illegalHeaders = {"Illegal&Header"}; CreateTestDataFile(tmp_file, v, illegalHeaders); - TestCase_TestImporterData_Error(tmp_file.string(), 2, CSVImporterData::HeaderMode::LINE, 1, {}, {DataType::int8}, {false}, tupleDims, v, k_IllegalNames); - TestCase_TestImporterData_Error(tmp_file.string(), 2, CSVImporterData::HeaderMode::CUSTOM, 1, illegalHeaders, {DataType::int8}, {false}, tupleDims, v, k_IllegalNames); + TestCase_TestImporterData_Error(tmp_file.string(), 2, TextImporterData::HeaderMode::LINE, 1, {}, {DataType::int8}, {false}, tupleDims, v, k_IllegalNames); + TestCase_TestImporterData_Error(tmp_file.string(), 2, TextImporterData::HeaderMode::CUSTOM, 1, illegalHeaders, {DataType::int8}, {false}, tupleDims, v, k_IllegalNames); illegalHeaders 
= {"Illegal:Header"}; CreateTestDataFile(tmp_file, v, illegalHeaders); - TestCase_TestImporterData_Error(tmp_file.string(), 2, CSVImporterData::HeaderMode::LINE, 1, {}, {DataType::int8}, {false}, tupleDims, v, k_IllegalNames); - TestCase_TestImporterData_Error(tmp_file.string(), 2, CSVImporterData::HeaderMode::CUSTOM, 1, illegalHeaders, {DataType::int8}, {false}, tupleDims, v, k_IllegalNames); + TestCase_TestImporterData_Error(tmp_file.string(), 2, TextImporterData::HeaderMode::LINE, 1, {}, {DataType::int8}, {false}, tupleDims, v, k_IllegalNames); + TestCase_TestImporterData_Error(tmp_file.string(), 2, TextImporterData::HeaderMode::CUSTOM, 1, illegalHeaders, {DataType::int8}, {false}, tupleDims, v, k_IllegalNames); fs::remove(tmp_file); // Incorrect Tuple Dimensions - TestCase_TestImporterData_Error(k_TestInput.string(), 2, CSVImporterData::HeaderMode::CUSTOM, 1, {"Custom Array"}, {DataType::int8}, {false}, {0}, v, k_IncorrectTuples); - TestCase_TestImporterData_Error(k_TestInput.string(), 2, CSVImporterData::HeaderMode::CUSTOM, 1, {"Custom Array"}, {DataType::int8}, {false}, {30}, v, k_IncorrectTuples); - TestCase_TestImporterData_Error(k_TestInput.string(), 2, CSVImporterData::HeaderMode::CUSTOM, 1, {"Custom Array"}, {DataType::int8}, {false}, {30, 2}, v, k_IncorrectTuples); - TestCase_TestImporterData_Error(k_TestInput.string(), 2, CSVImporterData::HeaderMode::CUSTOM, 1, {"Custom Array"}, {DataType::int8}, {false}, {30, 5, 7}, v, k_IncorrectTuples); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, TextImporterData::HeaderMode::CUSTOM, 1, {"Custom Array"}, {DataType::int8}, {false}, {0}, v, k_IncorrectTuples); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, TextImporterData::HeaderMode::CUSTOM, 1, {"Custom Array"}, {DataType::int8}, {false}, {30}, v, k_IncorrectTuples); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, TextImporterData::HeaderMode::CUSTOM, 1, {"Custom Array"}, {DataType::int8}, {false}, {30, 2}, v, k_IncorrectTuples); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, TextImporterData::HeaderMode::CUSTOM, 1, {"Custom Array"}, {DataType::int8}, {false}, {30, 5, 7}, v, k_IncorrectTuples); // Invalid Data Type - TestCase_TestImporterData_Error(k_TestInput.string(), 2, CSVImporterData::HeaderMode::LINE, 1, {}, {DataType::boolean}, {false}, tupleDims, v, k_InvalidArrayType); - TestCase_TestImporterData_Error(k_TestInput.string(), 2, CSVImporterData::HeaderMode::CUSTOM, 1, {"Custom Array"}, {DataType::boolean}, {false}, tupleDims, v, k_InvalidArrayType); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, TextImporterData::HeaderMode::LINE, 1, {}, {DataType::boolean}, {false}, tupleDims, v, k_InvalidArrayType); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, TextImporterData::HeaderMode::CUSTOM, 1, {"Custom Array"}, {DataType::boolean}, {false}, tupleDims, v, k_InvalidArrayType); // Inconsistent Columns - TestCase_TestImporterData_Error(k_TestInput.string(), 2, CSVImporterData::HeaderMode::CUSTOM, 1, {"Custom Array", "Custom Array2"}, {DataType::int8, DataType::int8}, {false, false}, tupleDims, v, + TestCase_TestImporterData_Error(k_TestInput.string(), 2, TextImporterData::HeaderMode::CUSTOM, 1, {"Custom Array", "Custom Array2"}, {DataType::int8, DataType::int8}, {false, false}, tupleDims, v, k_InconsistentCols); } -TEST_CASE("ComplexCore::ImportCSVDataFilter (Case 6): Invalid filter execution - Blank Lines") +TEST_CASE("ComplexCore::ImportTextDataFilter (Case 6): Invalid filter execution - Blank Lines") { // Create the 
parent directory path fs::create_directories(k_TestInput.parent_path()); diff --git a/src/Plugins/ComplexCore/test/ImportTextTest.cpp b/src/Plugins/ComplexCore/test/ImportTextTest.cpp index ba165bde40..08d5e38c99 100644 --- a/src/Plugins/ComplexCore/test/ImportTextTest.cpp +++ b/src/Plugins/ComplexCore/test/ImportTextTest.cpp @@ -1,11 +1,11 @@ #include "ComplexCore/ComplexCore_test_dirs.hpp" #include "ComplexCore/Filters/ImportTextFilter.hpp" -#include "ComplexCore/Filters/ImportCSVDataFilter.hpp" +#include "ComplexCore/Filters/ImportTextDataFilter.hpp" #include "complex/Common/TypesUtility.hpp" #include "complex/DataStructure/DataArray.hpp" #include "complex/Parameters/DynamicTableParameter.hpp" -#include "complex/Parameters/ImportCSVDataParameter.hpp" +#include "complex/Parameters/ImportTextDataParameter.hpp" #include "complex/UnitTest/UnitTestCommon.hpp" #include "complex/Utilities/DataArrayUtilities.hpp" #include "complex/Utilities/StringUtilities.hpp" diff --git a/src/Plugins/ComplexCore/wrapping/python/complexpy.cpp b/src/Plugins/ComplexCore/wrapping/python/complexpy.cpp index 9c973d51da..b43e15a886 100644 --- a/src/Plugins/ComplexCore/wrapping/python/complexpy.cpp +++ b/src/Plugins/ComplexCore/wrapping/python/complexpy.cpp @@ -68,8 +68,8 @@ #include #include #include -#include #include +#include #include #include #include @@ -428,27 +428,27 @@ PYBIND11_MODULE(complex, mod) arrayThresholdSet.def_property("thresholds", &ArrayThresholdSet::getArrayThresholds, &ArrayThresholdSet::setArrayThresholds); arrayThresholdSet.def("__repr__", [](const ArrayThresholdSet& self) { return "ArrayThresholdSet()"; }); - py::class_ csvImporterData(mod, "CSVImporterData"); - - py::enum_ csvHeaderMode(csvImporterData, "HeaderMode"); - csvHeaderMode.value("Line", CSVImporterData::HeaderMode::LINE); - csvHeaderMode.value("Custom", CSVImporterData::HeaderMode::CUSTOM); - - csvImporterData.def(py::init<>()); - csvImporterData.def_readwrite("input_file_path", &CSVImporterData::inputFilePath); - csvImporterData.def_readwrite("custom_headers", &CSVImporterData::customHeaders); - csvImporterData.def_readwrite("start_import_row", &CSVImporterData::startImportRow); - csvImporterData.def_readwrite("data_types", &CSVImporterData::dataTypes); - csvImporterData.def_readwrite("skipped_array_mask", &CSVImporterData::skippedArrayMask); - csvImporterData.def_readwrite("headers_line", &CSVImporterData::headersLine); - csvImporterData.def_readwrite("header_mode", &CSVImporterData::headerMode); - csvImporterData.def_readwrite("tuple_dims", &CSVImporterData::tupleDims); - csvImporterData.def_readwrite("tab_as_delimiter", &CSVImporterData::tabAsDelimiter); - csvImporterData.def_readwrite("semicolon_as_delimiter", &CSVImporterData::semicolonAsDelimiter); - csvImporterData.def_readwrite("comma_as_delimiter", &CSVImporterData::commaAsDelimiter); - csvImporterData.def_readwrite("space_as_delimiter", &CSVImporterData::spaceAsDelimiter); - csvImporterData.def_readwrite("consecutive_delimiters", &CSVImporterData::consecutiveDelimiters); - csvImporterData.def("__repr__", [](const CSVImporterData& self) { return "CSVImporterData()"; }); + py::class_ textImporterData(mod, "TextImporterData"); + + py::enum_ csvHeaderMode(textImporterData, "HeaderMode"); + csvHeaderMode.value("Line", TextImporterData::HeaderMode::LINE); + csvHeaderMode.value("Custom", TextImporterData::HeaderMode::CUSTOM); + + textImporterData.def(py::init<>()); + textImporterData.def_readwrite("input_file_path", &TextImporterData::inputFilePath); + 
textImporterData.def_readwrite("custom_headers", &TextImporterData::customHeaders); + textImporterData.def_readwrite("start_import_row", &TextImporterData::startImportRow); + textImporterData.def_readwrite("data_types", &TextImporterData::dataTypes); + textImporterData.def_readwrite("skipped_array_mask", &TextImporterData::skippedArrayMask); + textImporterData.def_readwrite("headers_line", &TextImporterData::headersLine); + textImporterData.def_readwrite("header_mode", &TextImporterData::headerMode); + textImporterData.def_readwrite("tuple_dims", &TextImporterData::tupleDims); + textImporterData.def_readwrite("tab_as_delimiter", &TextImporterData::tabAsDelimiter); + textImporterData.def_readwrite("semicolon_as_delimiter", &TextImporterData::semicolonAsDelimiter); + textImporterData.def_readwrite("comma_as_delimiter", &TextImporterData::commaAsDelimiter); + textImporterData.def_readwrite("space_as_delimiter", &TextImporterData::spaceAsDelimiter); + textImporterData.def_readwrite("consecutive_delimiters", &TextImporterData::consecutiveDelimiters); + textImporterData.def("__repr__", [](const TextImporterData& self) { return "TextImporterData()"; }); py::class_> abstractPlugin(mod, "AbstractPlugin"); py::class_> pythonPlugin(mod, "PythonPlugin"); @@ -715,7 +715,7 @@ PYBIND11_MODULE(complex, mod) auto generateColorTableParameter = COMPLEX_PY_BIND_PARAMETER(mod, GenerateColorTableParameter); auto generatedFileListParameter = COMPLEX_PY_BIND_PARAMETER(mod, GeneratedFileListParameter); auto geometrySelectionParameter = COMPLEX_PY_BIND_PARAMETER(mod, GeometrySelectionParameter); - auto importCSVDataParameter = COMPLEX_PY_BIND_PARAMETER(mod, ImportCSVDataParameter); + auto importTextDataParameter = COMPLEX_PY_BIND_PARAMETER(mod, ImportTextDataParameter); auto importHDF5DatasetParameter = COMPLEX_PY_BIND_PARAMETER(mod, ImportHDF5DatasetParameter); auto multiArraySelectionParameter = COMPLEX_PY_BIND_PARAMETER(mod, MultiArraySelectionParameter); auto multiPathSelectionParameter = COMPLEX_PY_BIND_PARAMETER(mod, MultiPathSelectionParameter); @@ -875,7 +875,7 @@ PYBIND11_MODULE(complex, mod) geometrySelectionParameter.def(py::init(), "name"_a, "human_name"_a, "help_text"_a, "default_value"_a, "allowed_types"_a); - BindParameterConstructor(importCSVDataParameter); + BindParameterConstructor(importTextDataParameter); BindParameterConstructor(importHDF5DatasetParameter); @@ -1056,7 +1056,7 @@ PYBIND11_MODULE(complex, mod) internals->addConversion(); internals->addConversion(); internals->addConversion(); - internals->addConversion(); + internals->addConversion(); internals->addConversion(); internals->addConversion(); internals->addConversion(); diff --git a/src/Plugins/OrientationAnalysis/test/ExportGBCDGMTFileTest.cpp b/src/Plugins/OrientationAnalysis/test/ExportGBCDGMTFileTest.cpp index 9333e64e31..7864d7389b 100644 --- a/src/Plugins/OrientationAnalysis/test/ExportGBCDGMTFileTest.cpp +++ b/src/Plugins/OrientationAnalysis/test/ExportGBCDGMTFileTest.cpp @@ -11,7 +11,7 @@ #include "complex/Parameters/GeometrySelectionParameter.hpp" #include "complex/Parameters/NumberParameter.hpp" #include "complex/Parameters/VectorParameter.hpp" -#include "complex/Parameters/util/CSVImporterData.hpp" +#include "complex/Parameters/util/TextImporterData.hpp" #include "complex/UnitTest/UnitTestCommon.hpp" #include @@ -24,7 +24,7 @@ namespace { inline constexpr StringLiteral k_FaceEnsembleDataPath("FaceEnsembleData [NX]"); -inline constexpr StringLiteral k_CSVImporterData_Key = "csv_importer_data"; +inline constexpr 
StringLiteral k_TextImporterData_Key = "csv_importer_data"; inline constexpr StringLiteral k_TupleDims_Key = "tuple_dimensions"; inline constexpr StringLiteral k_UseExistingGroup_Key = "use_existing_group"; inline constexpr StringLiteral k_SelectedDataGroup_Key = "selected_data_group"; @@ -94,12 +94,12 @@ TEST_CASE("OrientationAnalysis::ExportGBCDGMTFileFilter", "[OrientationAnalysis] } // Compare the Output Pole Figure - auto importDataFilter = filterList->createFilter(k_ImportCSVDataFilterHandle); + auto importDataFilter = filterList->createFilter(k_ImportTextDataFilterHandle); REQUIRE(nullptr != importDataFilter); { Arguments args; - CSVImporterData data; + TextImporterData data; data.inputFilePath = fmt::format("{}/6_6_Small_IN100_GBCD/small_in100_sigma_3_1.dat", unit_test::k_TestFilesDir); data.customHeaders = {k_ExemplarGMT1, k_ExemplarGMT2, k_ExemplarGMT3}; data.dataTypes = {DataType::float32, DataType::float32, DataType::float32}; @@ -108,7 +108,7 @@ TEST_CASE("OrientationAnalysis::ExportGBCDGMTFileFilter", "[OrientationAnalysis] data.spaceAsDelimiter = true; data.tupleDims = {3751}; - args.insertOrAssign(k_CSVImporterData_Key, std::make_any(data)); + args.insertOrAssign(k_TextImporterData_Key, std::make_any(data)); args.insertOrAssign(k_UseExistingGroup_Key, std::make_any(false)); args.insertOrAssign(k_CreatedDataGroup_Key, std::make_any(gmtGroupPath)); args.insertOrAssign(k_SelectedDataGroup_Key, std::make_any(faceEnsemblePath)); @@ -119,7 +119,7 @@ TEST_CASE("OrientationAnalysis::ExportGBCDGMTFileFilter", "[OrientationAnalysis] { Arguments args; - CSVImporterData data; + TextImporterData data; data.inputFilePath = outputFile.string(); data.customHeaders = {k_GMT1, k_GMT2, k_GMT3}; data.dataTypes = {DataType::float32, DataType::float32, DataType::float32}; @@ -128,7 +128,7 @@ TEST_CASE("OrientationAnalysis::ExportGBCDGMTFileFilter", "[OrientationAnalysis] data.spaceAsDelimiter = true; data.tupleDims = {3751}; - args.insertOrAssign(k_CSVImporterData_Key, std::make_any(data)); + args.insertOrAssign(k_TextImporterData_Key, std::make_any(data)); args.insertOrAssign(k_UseExistingGroup_Key, std::make_any(true)); args.insertOrAssign(k_CreatedDataGroup_Key, std::make_any(gmtGroupPath)); args.insertOrAssign(k_SelectedDataGroup_Key, std::make_any(gmtGroupPath)); @@ -180,12 +180,12 @@ TEST_CASE("OrientationAnalysis::ExportGBCDGMTFileFilter", "[OrientationAnalysis] } // Compare the Output Pole Figure - auto importDataFilter = filterList->createFilter(k_ImportCSVDataFilterHandle); + auto importDataFilter = filterList->createFilter(k_ImportTextDataFilterHandle); REQUIRE(nullptr != importDataFilter); { Arguments args; - CSVImporterData data; + TextImporterData data; data.inputFilePath = fmt::format("{}/6_6_Small_IN100_GBCD/small_in100_sigma_9_1.dat", unit_test::k_TestFilesDir); data.customHeaders = {k_ExemplarGMT1, k_ExemplarGMT2, k_ExemplarGMT3}; data.dataTypes = {DataType::float32, DataType::float32, DataType::float32}; @@ -194,7 +194,7 @@ TEST_CASE("OrientationAnalysis::ExportGBCDGMTFileFilter", "[OrientationAnalysis] data.spaceAsDelimiter = true; data.tupleDims = {3751}; - args.insertOrAssign(k_CSVImporterData_Key, std::make_any(data)); + args.insertOrAssign(k_TextImporterData_Key, std::make_any(data)); args.insertOrAssign(k_UseExistingGroup_Key, std::make_any(false)); args.insertOrAssign(k_CreatedDataGroup_Key, std::make_any(gmtGroupPath)); args.insertOrAssign(k_SelectedDataGroup_Key, std::make_any(faceEnsemblePath)); @@ -205,7 +205,7 @@ 
TEST_CASE("OrientationAnalysis::ExportGBCDGMTFileFilter", "[OrientationAnalysis] { Arguments args; - CSVImporterData data; + TextImporterData data; data.inputFilePath = outputFile.string(); data.customHeaders = {k_GMT1, k_GMT2, k_GMT3}; data.dataTypes = {DataType::float32, DataType::float32, DataType::float32}; @@ -214,7 +214,7 @@ TEST_CASE("OrientationAnalysis::ExportGBCDGMTFileFilter", "[OrientationAnalysis] data.spaceAsDelimiter = true; data.tupleDims = {3751}; - args.insertOrAssign(k_CSVImporterData_Key, std::make_any(data)); + args.insertOrAssign(k_TextImporterData_Key, std::make_any(data)); args.insertOrAssign(k_UseExistingGroup_Key, std::make_any(true)); args.insertOrAssign(k_CreatedDataGroup_Key, std::make_any(gmtGroupPath)); args.insertOrAssign(k_SelectedDataGroup_Key, std::make_any(gmtGroupPath)); @@ -266,12 +266,12 @@ TEST_CASE("OrientationAnalysis::ExportGBCDGMTFileFilter", "[OrientationAnalysis] } // Compare the Output Pole Figure - auto importDataFilter = filterList->createFilter(k_ImportCSVDataFilterHandle); + auto importDataFilter = filterList->createFilter(k_ImportTextDataFilterHandle); REQUIRE(nullptr != importDataFilter); { Arguments args; - CSVImporterData data; + TextImporterData data; data.inputFilePath = fmt::format("{}/6_6_Small_IN100_GBCD/small_in100_sigma_11_1.dat", unit_test::k_TestFilesDir); data.customHeaders = {k_ExemplarGMT1, k_ExemplarGMT2, k_ExemplarGMT3}; data.dataTypes = {DataType::float32, DataType::float32, DataType::float32}; @@ -280,7 +280,7 @@ TEST_CASE("OrientationAnalysis::ExportGBCDGMTFileFilter", "[OrientationAnalysis] data.spaceAsDelimiter = true; data.tupleDims = {3751}; - args.insertOrAssign(k_CSVImporterData_Key, std::make_any(data)); + args.insertOrAssign(k_TextImporterData_Key, std::make_any(data)); args.insertOrAssign(k_UseExistingGroup_Key, std::make_any(false)); args.insertOrAssign(k_CreatedDataGroup_Key, std::make_any(gmtGroupPath)); args.insertOrAssign(k_SelectedDataGroup_Key, std::make_any(faceEnsemblePath)); @@ -291,7 +291,7 @@ TEST_CASE("OrientationAnalysis::ExportGBCDGMTFileFilter", "[OrientationAnalysis] { Arguments args; - CSVImporterData data; + TextImporterData data; data.inputFilePath = outputFile.string(); data.customHeaders = {k_GMT1, k_GMT2, k_GMT3}; data.dataTypes = {DataType::float32, DataType::float32, DataType::float32}; @@ -300,7 +300,7 @@ TEST_CASE("OrientationAnalysis::ExportGBCDGMTFileFilter", "[OrientationAnalysis] data.spaceAsDelimiter = true; data.tupleDims = {3751}; - args.insertOrAssign(k_CSVImporterData_Key, std::make_any(data)); + args.insertOrAssign(k_TextImporterData_Key, std::make_any(data)); args.insertOrAssign(k_UseExistingGroup_Key, std::make_any(true)); args.insertOrAssign(k_CreatedDataGroup_Key, std::make_any(gmtGroupPath)); args.insertOrAssign(k_SelectedDataGroup_Key, std::make_any(gmtGroupPath)); diff --git a/src/Plugins/OrientationAnalysis/test/ExportGBCDTriangleDataTest.cpp b/src/Plugins/OrientationAnalysis/test/ExportGBCDTriangleDataTest.cpp index f4dcacde5c..f8616de8c9 100644 --- a/src/Plugins/OrientationAnalysis/test/ExportGBCDTriangleDataTest.cpp +++ b/src/Plugins/OrientationAnalysis/test/ExportGBCDTriangleDataTest.cpp @@ -6,7 +6,7 @@ #include "complex/Parameters/DynamicTableParameter.hpp" #include "complex/Parameters/FileSystemPathParameter.hpp" -#include "complex/Parameters/util/CSVImporterData.hpp" +#include "complex/Parameters/util/TextImporterData.hpp" #include "complex/UnitTest/UnitTestCommon.hpp" #include @@ -17,7 +17,7 @@ using namespace complex::UnitTest; namespace { -inline constexpr 
StringLiteral k_CSVImporterData_Key = "csv_importer_data"; +inline constexpr StringLiteral k_TextImporterData_Key = "csv_importer_data"; inline constexpr StringLiteral k_TupleDims_Key = "tuple_dimensions"; inline constexpr StringLiteral k_UseExistingGroup_Key = "use_existing_group"; inline constexpr StringLiteral k_SelectedDataGroup_Key = "selected_data_group"; @@ -84,13 +84,13 @@ TEST_CASE("OrientationAnalysis::ExportGBCDTriangleDataFilter: Valid filter execu } // Compare the Output triangles files - auto importDataFilter = filterList->createFilter(k_ImportCSVDataFilterHandle); + auto importDataFilter = filterList->createFilter(k_ImportTextDataFilterHandle); REQUIRE(nullptr != importDataFilter); // read in exemplar { Arguments args; - CSVImporterData data; + TextImporterData data; data.inputFilePath = fmt::format("{}/6_6_Small_IN100_GBCD/6_6_Small_IN100_GBCD_Triangles.ph", unit_test::k_TestFilesDir); data.customHeaders = {k_Phi1Right, k_PhiRight, k_Phi2Right, k_Phi1Left, k_PhiLeft, k_Phi2Left, k_TriangleNormal0, k_TriangleNormal1, k_TriangleNormal2, k_SurfaceArea}; data.dataTypes = {DataType::float32, DataType::float32, DataType::float32, DataType::float32, DataType::float32, @@ -100,7 +100,7 @@ TEST_CASE("OrientationAnalysis::ExportGBCDTriangleDataFilter: Valid filter execu data.spaceAsDelimiter = true; data.tupleDims = {636474}; - args.insertOrAssign(k_CSVImporterData_Key, std::make_any(data)); + args.insertOrAssign(k_TextImporterData_Key, std::make_any(data)); args.insertOrAssign(k_UseExistingGroup_Key, std::make_any(false)); args.insertOrAssign(k_CreatedDataGroup_Key, std::make_any(exemplarResultsGroupPath)); args.insertOrAssign(k_SelectedDataGroup_Key, std::make_any(DataPath{})); @@ -112,7 +112,7 @@ TEST_CASE("OrientationAnalysis::ExportGBCDTriangleDataFilter: Valid filter execu // read in generated { Arguments args; - CSVImporterData data; + TextImporterData data; data.inputFilePath = outputFile.string(); data.customHeaders = {k_Phi1Right, k_PhiRight, k_Phi2Right, k_Phi1Left, k_PhiLeft, k_Phi2Left, k_TriangleNormal0, k_TriangleNormal1, k_TriangleNormal2, k_SurfaceArea}; data.dataTypes = {DataType::float32, DataType::float32, DataType::float32, DataType::float32, DataType::float32, @@ -122,7 +122,7 @@ TEST_CASE("OrientationAnalysis::ExportGBCDTriangleDataFilter: Valid filter execu data.spaceAsDelimiter = true; data.tupleDims = {636474}; - args.insertOrAssign(k_CSVImporterData_Key, std::make_any(data)); + args.insertOrAssign(k_TextImporterData_Key, std::make_any(data)); args.insertOrAssign(k_UseExistingGroup_Key, std::make_any(false)); args.insertOrAssign(k_CreatedDataGroup_Key, std::make_any(generatedResultsGroupPath)); args.insertOrAssign(k_SelectedDataGroup_Key, std::make_any(generatedResultsGroupPath)); diff --git a/src/Plugins/OrientationAnalysis/test/OrientationAnalysisTestUtils.hpp b/src/Plugins/OrientationAnalysis/test/OrientationAnalysisTestUtils.hpp index d32c59e6bb..b2559e6bd6 100644 --- a/src/Plugins/OrientationAnalysis/test/OrientationAnalysisTestUtils.hpp +++ b/src/Plugins/OrientationAnalysis/test/OrientationAnalysisTestUtils.hpp @@ -60,8 +60,8 @@ const FilterHandle k_RemoveMinimumSizeFeaturesFilterHandle(k_RemoveMinimumSizeFe // Make sure we can instantiate the CalculateFeatureSizesFilter const Uuid k_CalculateFeatureSizesFilterId = *Uuid::FromString("c666ee17-ca58-4969-80d0-819986c72485"); const FilterHandle k_CalculateFeatureSizesFilterHandle(k_CalculateFeatureSizesFilterId, k_ComplexCorePluginId); -const Uuid k_ImportCSVDataFilterId = 
*Uuid::FromString("373be1f8-31cf-49f6-aa5d-e356f4f3f261"); -const FilterHandle k_ImportCSVDataFilterHandle(k_ImportCSVDataFilterId, k_ComplexCorePluginId); +const Uuid k_ImportTextDataFilterId = *Uuid::FromString("373be1f8-31cf-49f6-aa5d-e356f4f3f261"); +const FilterHandle k_ImportTextDataFilterHandle(k_ImportTextDataFilterId, k_ComplexCorePluginId); const Uuid k_OrientationAnalysisPluginId = *Uuid::FromString("c09cf01b-014e-5adb-84eb-ea76fc79eeb1"); // Make sure we can instantiate the Convert Orientations diff --git a/src/complex/Parameters/ImportCSVDataParameter.cpp b/src/complex/Parameters/ImportTextDataParameter.cpp similarity index 70% rename from src/complex/Parameters/ImportCSVDataParameter.cpp rename to src/complex/Parameters/ImportTextDataParameter.cpp index ace5a8e442..01b0512efd 100644 --- a/src/complex/Parameters/ImportCSVDataParameter.cpp +++ b/src/complex/Parameters/ImportTextDataParameter.cpp @@ -28,59 +28,59 @@ * * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ -#include "ImportCSVDataParameter.hpp" +#include "ImportTextDataParameter.hpp" namespace complex { // ----------------------------------------------------------------------------- -ImportCSVDataParameter::ImportCSVDataParameter(const std::string& name, const std::string& humanName, const std::string& helpText, const ValueType& defaultValue) +ImportTextDataParameter::ImportTextDataParameter(const std::string& name, const std::string& humanName, const std::string& helpText, const ValueType& defaultValue) : ValueParameter(name, humanName, helpText) , m_DefaultValue(defaultValue) { } // ----------------------------------------------------------------------------- -Uuid ImportCSVDataParameter::uuid() const +Uuid ImportTextDataParameter::uuid() const { - return ParameterTraits::uuid; + return ParameterTraits::uuid; } // ----------------------------------------------------------------------------- -IParameter::AcceptedTypes ImportCSVDataParameter::acceptedTypes() const +IParameter::AcceptedTypes ImportTextDataParameter::acceptedTypes() const { return {typeid(ValueType)}; } // ----------------------------------------------------------------------------- -nlohmann::json ImportCSVDataParameter::toJson(const std::any& value) const +nlohmann::json ImportTextDataParameter::toJson(const std::any& value) const { - const auto& CSVImporterData = GetAnyRef(value); - nlohmann::json json = CSVImporterData.writeJson(); + const auto& TextImporterData = GetAnyRef(value); + nlohmann::json json = TextImporterData.writeJson(); return json; } // ----------------------------------------------------------------------------- -Result ImportCSVDataParameter::fromJson(const nlohmann::json& json) const +Result ImportTextDataParameter::fromJson(const nlohmann::json& json) const { - return {ConvertResultTo(CSVImporterData::ReadJson(json))}; + return {ConvertResultTo(TextImporterData::ReadJson(json))}; } // ----------------------------------------------------------------------------- -IParameter::UniquePointer ImportCSVDataParameter::clone() const +IParameter::UniquePointer ImportTextDataParameter::clone() const { - return std::make_unique(name(), humanName(), helpText(), m_DefaultValue); + return std::make_unique(name(), humanName(), helpText(), m_DefaultValue); } // ----------------------------------------------------------------------------- -std::any ImportCSVDataParameter::defaultValue() const +std::any ImportTextDataParameter::defaultValue() const { return m_DefaultValue; } // 
----------------------------------------------------------------------------- -Result<> ImportCSVDataParameter::validate(const std::any& value) const +Result<> ImportTextDataParameter::validate(const std::any& value) const { - [[maybe_unused]] auto data = std::any_cast(value); + [[maybe_unused]] auto data = std::any_cast(value); return {}; } } // namespace complex diff --git a/src/complex/Parameters/ImportCSVDataParameter.hpp b/src/complex/Parameters/ImportTextDataParameter.hpp similarity index 79% rename from src/complex/Parameters/ImportCSVDataParameter.hpp rename to src/complex/Parameters/ImportTextDataParameter.hpp index b60cea6a23..92f239a951 100644 --- a/src/complex/Parameters/ImportCSVDataParameter.hpp +++ b/src/complex/Parameters/ImportTextDataParameter.hpp @@ -32,25 +32,25 @@ #include "complex/Filter/ParameterTraits.hpp" #include "complex/Filter/ValueParameter.hpp" -#include "complex/Parameters/util/CSVImporterData.hpp" +#include "complex/Parameters/util/TextImporterData.hpp" #include "complex/complex_export.hpp" namespace complex { -class COMPLEX_EXPORT ImportCSVDataParameter : public ValueParameter +class COMPLEX_EXPORT ImportTextDataParameter : public ValueParameter { public: - using ValueType = CSVImporterData; + using ValueType = TextImporterData; - ImportCSVDataParameter() = delete; - ImportCSVDataParameter(const std::string& name, const std::string& humanName, const std::string& helpText, const ValueType& defaultValue); - ~ImportCSVDataParameter() override = default; + ImportTextDataParameter() = delete; + ImportTextDataParameter(const std::string& name, const std::string& humanName, const std::string& helpText, const ValueType& defaultValue); + ~ImportTextDataParameter() override = default; - ImportCSVDataParameter(const ImportCSVDataParameter&) = delete; - ImportCSVDataParameter(ImportCSVDataParameter&&) noexcept = delete; + ImportTextDataParameter(const ImportTextDataParameter&) = delete; + ImportTextDataParameter(ImportTextDataParameter&&) noexcept = delete; - ImportCSVDataParameter& operator=(const ImportCSVDataParameter&) = delete; - ImportCSVDataParameter& operator=(ImportCSVDataParameter&&) noexcept = delete; + ImportTextDataParameter& operator=(const ImportTextDataParameter&) = delete; + ImportTextDataParameter& operator=(ImportTextDataParameter&&) noexcept = delete; /** * @brief Returns the parameter's uuid. 
@@ -103,4 +103,4 @@ class COMPLEX_EXPORT ImportCSVDataParameter : public ValueParameter }; } // namespace complex -COMPLEX_DEF_PARAMETER_TRAITS(complex::ImportCSVDataParameter, "4f6d6a33-48da-427a-8b17-61e07d1d5b45"); +COMPLEX_DEF_PARAMETER_TRAITS(complex::ImportTextDataParameter, "4f6d6a33-48da-427a-8b17-61e07d1d5b45"); diff --git a/src/complex/Parameters/util/CSVImporterData.cpp b/src/complex/Parameters/util/TextImporterData.cpp similarity index 56% rename from src/complex/Parameters/util/CSVImporterData.cpp rename to src/complex/Parameters/util/TextImporterData.cpp index 4ee28738b6..4e9599f6fd 100644 --- a/src/complex/Parameters/util/CSVImporterData.cpp +++ b/src/complex/Parameters/util/TextImporterData.cpp @@ -28,7 +28,7 @@ * * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ -#include "CSVImporterData.hpp" +#include "TextImporterData.hpp" #include @@ -52,7 +52,7 @@ const std::string k_ConsecutiveDelimitersKey = "Consecutive Delimiters"; } // namespace // ----------------------------------------------------------------------------- -nlohmann::json CSVImporterData::writeJson() const +nlohmann::json TextImporterData::writeJson() const { nlohmann::json json; @@ -98,13 +98,13 @@ nlohmann::json CSVImporterData::writeJson() const } // ----------------------------------------------------------------------------- -Result CSVImporterData::ReadJson(const nlohmann::json& json) +Result TextImporterData::ReadJson(const nlohmann::json& json) { - CSVImporterData data; + TextImporterData data; if(!json.contains(k_CustomHeadersKey)) { - return MakeErrorResult(-100, fmt::format("CSVImporterData: Cannot find the Data Headers key \"{}\" in the CSVImporterData json object.", k_CustomHeadersKey)); + return MakeErrorResult(-100, fmt::format("TextImporterData: Cannot find the Data Headers key \"{}\" in the TextImporterData json object.", k_CustomHeadersKey)); } nlohmann::json dHeaders = json[k_CustomHeadersKey]; @@ -113,7 +113,7 @@ Result CSVImporterData::ReadJson(const nlohmann::json& json) auto header = dHeaders[i]; if(!header.is_string()) { - return MakeErrorResult(-101, fmt::format("CSVImporterData: Custom header at index {} is of type {} and is not a string.", std::to_string(i), header.type_name())); + return MakeErrorResult(-101, fmt::format("TextImporterData: Custom header at index {} is of type {} and is not a string.", std::to_string(i), header.type_name())); } data.customHeaders.push_back(header); @@ -121,7 +121,7 @@ Result CSVImporterData::ReadJson(const nlohmann::json& json) if(!json.contains(k_DataTypesKey)) { - return MakeErrorResult(-102, fmt::format("CSVImporterData: Cannot find the Data Types key \"{}\" in the CSVImporterData json object.", k_DataTypesKey)); + return MakeErrorResult(-102, fmt::format("TextImporterData: Cannot find the Data Types key \"{}\" in the TextImporterData json object.", k_DataTypesKey)); } nlohmann::json dTypes = json[k_DataTypesKey]; @@ -130,7 +130,7 @@ Result CSVImporterData::ReadJson(const nlohmann::json& json) auto dType = dTypes[i]; if(!dType.is_number_integer()) { - return MakeErrorResult(-103, fmt::format("CSVImporterData: Data type at index {} is of type {} and is not an integer.", std::to_string(i), dType.type_name())); + return MakeErrorResult(-103, fmt::format("TextImporterData: Data type at index {} is of type {} and is not an integer.", std::to_string(i), dType.type_name())); } data.dataTypes.push_back(dType); @@ -138,7 +138,7 @@ Result CSVImporterData::ReadJson(const nlohmann::json& json) 
if(!json.contains(k_TupleDimensionsKey)) { - return MakeErrorResult(-104, fmt::format("CSVImporterData: Cannot find the Tuple Dimensions key \"{}\" in the CSVImporterData json object.", k_TupleDimensionsKey)); + return MakeErrorResult(-104, fmt::format("TextImporterData: Cannot find the Tuple Dimensions key \"{}\" in the TextImporterData json object.", k_TupleDimensionsKey)); } nlohmann::json tDims = json[k_TupleDimensionsKey]; @@ -151,7 +151,7 @@ Result CSVImporterData::ReadJson(const nlohmann::json& json) if(!json.contains(k_SkippedArrayMaskKey)) { - return MakeErrorResult(-105, fmt::format("CSVImporterData: Cannot find the Skipped Arrays key \"{}\" in the CSVImporterData json object.", k_DataTypesKey)); + return MakeErrorResult(-105, fmt::format("TextImporterData: Cannot find the Skipped Arrays key \"{}\" in the TextImporterData json object.", k_DataTypesKey)); } nlohmann::json dSkippedArrays = json[k_SkippedArrayMaskKey]; @@ -160,8 +160,8 @@ Result CSVImporterData::ReadJson(const nlohmann::json& json) auto skippedArrayVal = dSkippedArrays[i]; if(!skippedArrayVal.is_boolean()) { - return MakeErrorResult(-106, - fmt::format("CSVImporterData: Skipped array value at index {} is of type {} and is not a boolean.", std::to_string(i), skippedArrayVal.type_name())); + return MakeErrorResult(-106, + fmt::format("TextImporterData: Skipped array value at index {} is of type {} and is not a boolean.", std::to_string(i), skippedArrayVal.type_name())); } data.skippedArrayMask.push_back(skippedArrayVal); @@ -169,91 +169,93 @@ Result CSVImporterData::ReadJson(const nlohmann::json& json) if(!json.contains(k_InputFilePathKey)) { - return MakeErrorResult(-107, fmt::format("CSVImporterData: Cannot find the 'Input File Path' key \"{}\" in the CSVImporterData json object.", k_InputFilePathKey)); + return MakeErrorResult(-107, fmt::format("TextImporterData: Cannot find the 'Input File Path' key \"{}\" in the TextImporterData json object.", k_InputFilePathKey)); } else if(!json[k_InputFilePathKey].is_string()) { - return MakeErrorResult(-108, fmt::format("CSVImporterData: 'Input File Path' value is of type {} and is not a string.", json[k_InputFilePathKey].type_name())); + return MakeErrorResult(-108, fmt::format("TextImporterData: 'Input File Path' value is of type {} and is not a string.", json[k_InputFilePathKey].type_name())); } data.inputFilePath = json[k_InputFilePathKey]; if(!json.contains(k_StartImportRowKey)) { - return MakeErrorResult(-109, fmt::format("CSVImporterData: Cannot find the 'Begin Index' key \"{}\" in the CSVImporterData json object.", k_StartImportRowKey)); + return MakeErrorResult(-109, fmt::format("TextImporterData: Cannot find the 'Begin Index' key \"{}\" in the TextImporterData json object.", k_StartImportRowKey)); } else if(!json[k_StartImportRowKey].is_number_integer()) { - return MakeErrorResult(-110, fmt::format("CSVImporterData: 'Begin Index' value is of type {} and is not an integer.", json[k_StartImportRowKey].type_name())); + return MakeErrorResult(-110, fmt::format("TextImporterData: 'Begin Index' value is of type {} and is not an integer.", json[k_StartImportRowKey].type_name())); } data.startImportRow = json[k_StartImportRowKey]; if(!json.contains(k_HeaderLineKey)) { - return MakeErrorResult(-113, fmt::format("CSVImporterData: Cannot find the 'Header Line' key \"{}\" in the CSVImporterData json object.", k_HeaderLineKey)); + return MakeErrorResult(-113, fmt::format("TextImporterData: Cannot find the 'Header Line' key \"{}\" in the TextImporterData json object.", 
k_HeaderLineKey)); } else if(!json[k_HeaderLineKey].is_number_integer()) { - return MakeErrorResult(-114, fmt::format("CSVImporterData: 'Header Line' value is of type {} and is not an integer.", json[k_HeaderLineKey].type_name())); + return MakeErrorResult(-114, fmt::format("TextImporterData: 'Header Line' value is of type {} and is not an integer.", json[k_HeaderLineKey].type_name())); } data.headersLine = json[k_HeaderLineKey]; if(!json.contains(k_HeaderModeKey)) { - return MakeErrorResult(-115, fmt::format("CSVImporterData: Cannot find the 'Header Mode' key \"{}\" in the CSVImporterData json object.", k_HeaderModeKey)); + return MakeErrorResult(-115, fmt::format("TextImporterData: Cannot find the 'Header Mode' key \"{}\" in the TextImporterData json object.", k_HeaderModeKey)); } else if(!json[k_HeaderModeKey].is_number_integer()) { - return MakeErrorResult(-116, fmt::format("CSVImporterData: 'Header Mode' value is of type {} and is not an integer.", json[k_HeaderModeKey].type_name())); + return MakeErrorResult(-116, fmt::format("TextImporterData: 'Header Mode' value is of type {} and is not an integer.", json[k_HeaderModeKey].type_name())); } data.headerMode = json[k_HeaderModeKey]; if(!json.contains(k_TabAsDelimiterKey)) { - return MakeErrorResult(-117, fmt::format("CSVImporterData: Cannot find the 'Tab As Delimiter' key \"{}\" in the CSVImporterData json object.", k_TabAsDelimiterKey)); + return MakeErrorResult(-117, fmt::format("TextImporterData: Cannot find the 'Tab As Delimiter' key \"{}\" in the TextImporterData json object.", k_TabAsDelimiterKey)); } else if(!json[k_TabAsDelimiterKey].is_boolean()) { - return MakeErrorResult(-118, fmt::format("CSVImporterData: 'Tab As Delimiter' value is of type {} and is not a boolean.", json[k_TabAsDelimiterKey].type_name())); + return MakeErrorResult(-118, fmt::format("TextImporterData: 'Tab As Delimiter' value is of type {} and is not a boolean.", json[k_TabAsDelimiterKey].type_name())); } data.tabAsDelimiter = json[k_TabAsDelimiterKey]; if(!json.contains(k_SemicolonAsDelimiterKey)) { - return MakeErrorResult(-119, fmt::format("CSVImporterData: Cannot find the 'Semicolon As Delimiter' key \"{}\" in the CSVImporterData json object.", k_SemicolonAsDelimiterKey)); + return MakeErrorResult(-119, + fmt::format("TextImporterData: Cannot find the 'Semicolon As Delimiter' key \"{}\" in the TextImporterData json object.", k_SemicolonAsDelimiterKey)); } else if(!json[k_SemicolonAsDelimiterKey].is_boolean()) { - return MakeErrorResult(-120, fmt::format("CSVImporterData: 'Semicolon As Delimiter' value is of type {} and is not a boolean.", json[k_SemicolonAsDelimiterKey].type_name())); + return MakeErrorResult(-120, fmt::format("TextImporterData: 'Semicolon As Delimiter' value is of type {} and is not a boolean.", json[k_SemicolonAsDelimiterKey].type_name())); } data.semicolonAsDelimiter = json[k_SemicolonAsDelimiterKey]; if(!json.contains(k_SpaceAsDelimiterKey)) { - return MakeErrorResult(-121, fmt::format("CSVImporterData: Cannot find the 'Space As Delimiter' key \"{}\" in the CSVImporterData json object.", k_SpaceAsDelimiterKey)); + return MakeErrorResult(-121, fmt::format("TextImporterData: Cannot find the 'Space As Delimiter' key \"{}\" in the TextImporterData json object.", k_SpaceAsDelimiterKey)); } else if(!json[k_SpaceAsDelimiterKey].is_boolean()) { - return MakeErrorResult(-122, fmt::format("CSVImporterData: 'Space As Delimiter' value is of type {} and is not a boolean.", json[k_SpaceAsDelimiterKey].type_name())); + return MakeErrorResult(-122, 
fmt::format("TextImporterData: 'Space As Delimiter' value is of type {} and is not a boolean.", json[k_SpaceAsDelimiterKey].type_name())); } data.spaceAsDelimiter = json[k_SpaceAsDelimiterKey]; if(!json.contains(k_CommaAsDelimiterKey)) { - return MakeErrorResult(-123, fmt::format("CSVImporterData: Cannot find the 'Comma As Delimiter' key \"{}\" in the CSVImporterData json object.", k_CommaAsDelimiterKey)); + return MakeErrorResult(-123, fmt::format("TextImporterData: Cannot find the 'Comma As Delimiter' key \"{}\" in the TextImporterData json object.", k_CommaAsDelimiterKey)); } else if(!json[k_CommaAsDelimiterKey].is_boolean()) { - return MakeErrorResult(-124, fmt::format("CSVImporterData: 'Comma As Delimiter' value is of type {} and is not a boolean.", json[k_CommaAsDelimiterKey].type_name())); + return MakeErrorResult(-124, fmt::format("TextImporterData: 'Comma As Delimiter' value is of type {} and is not a boolean.", json[k_CommaAsDelimiterKey].type_name())); } data.commaAsDelimiter = json[k_CommaAsDelimiterKey]; if(!json.contains(k_ConsecutiveDelimitersKey)) { - return MakeErrorResult(-125, fmt::format("CSVImporterData: Cannot find the 'Consecutive Delimiters' key \"{}\" in the CSVImporterData json object.", k_ConsecutiveDelimitersKey)); + return MakeErrorResult(-125, + fmt::format("TextImporterData: Cannot find the 'Consecutive Delimiters' key \"{}\" in the TextImporterData json object.", k_ConsecutiveDelimitersKey)); } else if(!json[k_ConsecutiveDelimitersKey].is_boolean()) { - return MakeErrorResult(-126, fmt::format("CSVImporterData: 'Consecutive Delimiters' value is of type {} and is not a boolean.", json[k_ConsecutiveDelimitersKey].type_name())); + return MakeErrorResult(-126, fmt::format("TextImporterData: 'Consecutive Delimiters' value is of type {} and is not a boolean.", json[k_ConsecutiveDelimitersKey].type_name())); } data.consecutiveDelimiters = json[k_ConsecutiveDelimitersKey]; diff --git a/src/complex/Parameters/util/CSVImporterData.hpp b/src/complex/Parameters/util/TextImporterData.hpp similarity index 95% rename from src/complex/Parameters/util/CSVImporterData.hpp rename to src/complex/Parameters/util/TextImporterData.hpp index 7a12832971..095b8ed4a5 100644 --- a/src/complex/Parameters/util/CSVImporterData.hpp +++ b/src/complex/Parameters/util/TextImporterData.hpp @@ -42,7 +42,7 @@ namespace complex { -struct COMPLEX_EXPORT CSVImporterData +struct COMPLEX_EXPORT TextImporterData { public: enum class HeaderMode @@ -53,7 +53,7 @@ struct COMPLEX_EXPORT CSVImporterData // Json Reader and Writer nlohmann::json writeJson() const; - static Result ReadJson(const nlohmann::json& json); + static Result ReadJson(const nlohmann::json& json); std::string inputFilePath; std::vector customHeaders; diff --git a/wrapping/python/docs/generate_sphinx_docs.cpp b/wrapping/python/docs/generate_sphinx_docs.cpp index ef09619678..f241953b89 100644 --- a/wrapping/python/docs/generate_sphinx_docs.cpp +++ b/wrapping/python/docs/generate_sphinx_docs.cpp @@ -55,7 +55,7 @@ void GenerateParameterList() ADD_PARAMETER_TRAIT(complex.DataGroupCreationParameter, "bff2d4ac-04a6-5251-b188-4f83f7865074") ADD_PARAMETER_TRAIT(complex.DataPathSelectionParameter, "cd12d081-fbf0-46c4-8f4a-15e2e06e98b8") ADD_PARAMETER_TRAIT(complex.CalculatorParameter, "ba2d4937-dbec-5536-8c5c-c0a406e80f77") - ADD_PARAMETER_TRAIT(complex.ImportCSVDataParameter, "4f6d6a33-48da-427a-8b17-61e07d1d5b45") + ADD_PARAMETER_TRAIT(complex.ImportTextDataParameter, "4f6d6a33-48da-427a-8b17-61e07d1d5b45") 
ADD_PARAMETER_TRAIT(complex.Int8Parameter, "cae73834-68f8-4235-b010-8bea87d8ff7a") ADD_PARAMETER_TRAIT(complex.UInt8Parameter, "6c3efeff-ce8f-47c0-83d1-262f2b2dd6cc") ADD_PARAMETER_TRAIT(complex.Int16Parameter, "44ae56e8-e6e7-4e4d-8128-dd3dc2c6696e") diff --git a/wrapping/python/docs/source/API.rst b/wrapping/python/docs/source/API.rst index 840a8bbaf0..6b788bd305 100644 --- a/wrapping/python/docs/source/API.rst +++ b/wrapping/python/docs/source/API.rst @@ -363,10 +363,10 @@ Parameters This parameter represents the DataPath_ to a valid :ref:`complex.Geometry() ` -.. _ImportCSVDataParameter: -.. py:class:: ImportCSVDataParameter +.. _ImportTextDataParameter: +.. py:class:: ImportTextDataParameter - This parameter is used for the :ref:`complex.ImportCSVDataFilter() ` and holds + This parameter is used for the :ref:`complex.ImportTextDataFilter() ` and holds the information to import a file formatted as table data where each column of data is a single array. @@ -374,13 +374,13 @@ Parameters + The file optionally can have a line of headers. The user can specify what line the headers are on + The import can start at a user specified line number but will continue to the end of the file. - The primary python object that will hold the information to pass to the filter is the CSVImporterData class described below. + The primary python object that will hold the information to pass to the filter is the TextImporterData class described below. - :ivar ValueType: CSVImporterData + :ivar ValueType: TextImporterData - .. py:class:: ImportCSVDataParameter.CSVImporterData + .. py:class:: ImportTextDataParameter.TextImporterData - The CSVImporterData class holds all the necessary information to import a CSV formatted file into DREAM3D-NX. There are + The TextImporterData class holds all the necessary information to import a CSV formatted file into DREAM3D-NX. There are a number of member variables that need to be set correctly before the filter will execute correctly. @@ -396,41 +396,41 @@ Parameters :ivar skipped_array_mask: List[bool]. Booleans, one per column, that indicate whether or not to skip importing each created :ref:`DataArray `. :ivar tuple_dims: List[int]. The tuple dimensions for the created :ref:`DataArrays `. :ivar headers_line: Int. The line number of the headers. - :ivar header_mode: 'cx.CSVImporterData.HeaderMode.'. Can be one of 'cx.CSVImporterData.HeaderMode.Line' or 'cx.CSVImporterData.HeaderMode.Custom'. + :ivar header_mode: 'cx.TextImporterData.HeaderMode.'. Can be one of 'cx.TextImporterData.HeaderMode.Line' or 'cx.TextImporterData.HeaderMode.Custom'. .. 
code:: python data_structure = cx.DataStructure() - csv_importer_data = cx.CSVImporterData() - csv_importer_data.input_file_path = "/tmp/test_csv_data.csv" - csv_importer_data.start_import_row = 2 + text_importer_data = cx.TextImporterData() + text_importer_data.input_file_path = "/tmp/test_csv_data.csv" + text_importer_data.start_import_row = 2 - csv_importer_data.comma_as_delimiter = True - csv_importer_data.semicolon_as_delimiter = False - csv_importer_data.space_as_delimiter = False - csv_importer_data.tab_as_delimiter = False - csv_importer_data.consecutive_delimiters = False + text_importer_data.comma_as_delimiter = True + text_importer_data.semicolon_as_delimiter = False + text_importer_data.space_as_delimiter = False + text_importer_data.tab_as_delimiter = False + text_importer_data.consecutive_delimiters = False - csv_importer_data.custom_headers = [] - csv_importer_data.data_types = [cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.int32 ] - csv_importer_data.skipped_array_mask = [False,False,False,False,False,False,False ] - csv_importer_data.tuple_dims = [37989] + text_importer_data.custom_headers = [] + text_importer_data.data_types = [cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.int32 ] + text_importer_data.skipped_array_mask = [False,False,False,False,False,False,False ] + text_importer_data.tuple_dims = [37989] - csv_importer_data.headers_line = 1 - csv_importer_data.header_mode = cx.CSVImporterData.HeaderMode.Line + text_importer_data.headers_line = 1 + text_importer_data.header_mode = cx.TextImporterData.HeaderMode.Line # This will store the imported arrays into a newly generated DataGroup - result = cx.ImportCSVDataFilter.execute(data_structure=data_structure, + result = cx.ImportTextDataFilter.execute(data_structure=data_structure, # This will store the imported arrays into a newly generated DataGroup created_data_group=cx.DataPath(["Imported Data"]), # We are not using this parameter but it still needs a value selected_data_group=cx.DataPath(), # Use an existing DataGroup or AttributeMatrix. If an AttributemMatrix is used, the total number of tuples must match use_existing_group=False, - # The CSVImporterData object with all member variables set. - csv_importer_data=csv_importer_data # The CSVImporterData object with all member variables set. + # The TextImporterData object with all member variables set. + text_importer_data=text_importer_data # The TextImporterData object with all member variables set. 
) diff --git a/wrapping/python/examples/import_csv.py b/wrapping/python/examples/import_csv.py deleted file mode 100644 index 3ce603978b..0000000000 --- a/wrapping/python/examples/import_csv.py +++ /dev/null @@ -1,43 +0,0 @@ -import complex as cx -import itkimageprocessing as cxitk -import orientationanalysis as cxor - -import numpy as np - -# Create the DataStructure object -data_structure = cx.DataStructure() - -csv_importer_data = cx.CSVImporterData() -csv_importer_data.input_file_path = "wrapping/python/examples/test_csv_data.csv" -csv_importer_data.start_import_row = 2 - -csv_importer_data.comma_as_delimiter = True -csv_importer_data.semicolon_as_delimiter = False -csv_importer_data.space_as_delimiter = False -csv_importer_data.tab_as_delimiter = False -csv_importer_data.consecutive_delimiters = False - -csv_importer_data.custom_headers = [] -csv_importer_data.data_types = [cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.int32 ] -csv_importer_data.skipped_array_mask = [False,False,False,False,False,False,False ] -csv_importer_data.tuple_dims = [37989] - -csv_importer_data.headers_line = 1 -csv_importer_data.header_mode = cx.CSVImporterData.HeaderMode.Line - -# This will store the imported arrays into a newly generated DataGroup -result = cx.ImportCSVDataFilter.execute(data_structure=data_structure, - # This will store the imported arrays into a newly generated DataGroup - created_data_group=cx.DataPath(["Imported Data"]), - # We are not using this parameter but it still needs a value - selected_data_group=cx.DataPath(), - # Use an existing DataGroup or AttributeMatrix. If an AttributemMatrix is used, the total number of tuples must match - use_existing_group=False, - # The CSVImporterData object with all member variables set. - csv_importer_data=csv_importer_data # The CSVImporterData object with all member variables set. 
-                                        )
-if len(result.errors) != 0:
-    print('Errors: {}', result.errors)
-    print('Warnings: {}', result.warnings)
-else:
-    print("No errors running the ImportCSVDataFilter filter")
diff --git a/wrapping/python/examples/import_text.py b/wrapping/python/examples/import_text.py
new file mode 100644
index 0000000000..a1c9fd4f0e
--- /dev/null
+++ b/wrapping/python/examples/import_text.py
@@ -0,0 +1,43 @@
+import complex as cx
+import itkimageprocessing as cxitk
+import orientationanalysis as cxor
+
+import numpy as np
+
+# Create the DataStructure object
+data_structure = cx.DataStructure()
+
+text_importer_data = cx.TextImporterData()
+text_importer_data.input_file_path = "wrapping/python/examples/test_csv_data.csv"
+text_importer_data.start_import_row = 2
+
+text_importer_data.comma_as_delimiter = True
+text_importer_data.semicolon_as_delimiter = False
+text_importer_data.space_as_delimiter = False
+text_importer_data.tab_as_delimiter = False
+text_importer_data.consecutive_delimiters = False
+
+text_importer_data.custom_headers = []
+text_importer_data.data_types = [cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.int32 ]
+text_importer_data.skipped_array_mask = [False,False,False,False,False,False,False ]
+text_importer_data.tuple_dims = [37989]
+
+text_importer_data.headers_line = 1
+text_importer_data.header_mode = cx.TextImporterData.HeaderMode.Line
+
+# This will store the imported arrays into a newly generated DataGroup
+result = cx.ImportTextDataFilter.execute(data_structure=data_structure,
+                                         # This will store the imported arrays into a newly generated DataGroup
+                                         created_data_group=cx.DataPath(["Imported Data"]),
+                                         # We are not using this parameter but it still needs a value
+                                         selected_data_group=cx.DataPath(),
+                                         # Use an existing DataGroup or AttributeMatrix. If an AttributeMatrix is used, the total number of tuples must match
+                                         use_existing_group=False,
+                                         # The TextImporterData object with all member variables set.
+                                         text_importer_data=text_importer_data # The TextImporterData object with all member variables set.
+                                         )
+if len(result.errors) != 0:
+    print(f'Errors: {result.errors}')
+    print(f'Warnings: {result.warnings}')
+else:
+    print("No errors running the ImportTextDataFilter filter")
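
For downstream Python scripts written against the old API, the rename is mechanical: the filter UUID (373be1f8-31cf-49f6-aa5d-e356f4f3f261) and parameter UUID (4f6d6a33-48da-427a-8b17-61e07d1d5b45) are unchanged by this patch, and only the Python-facing names move from CSV* to Text*, including the keyword argument (csv_importer_data becomes text_importer_data). The sketch below is a minimal, hedged migration example based on the renames shown above; the input path, data types, and tuple dimensions are placeholders rather than values taken from the repository.

# Hedged migration sketch (placeholder values, not repository data):
# old CSV* names before this patch vs. Text* names after it.
import complex as cx

data_structure = cx.DataStructure()

# Before this patch a script would have used:
#   importer = cx.CSVImporterData()
#   cx.ImportCSVDataFilter.execute(..., csv_importer_data=importer)
# After this patch the same call becomes:
importer = cx.TextImporterData()
importer.input_file_path = "/path/to/table_data.csv"   # placeholder path
importer.start_import_row = 2
importer.comma_as_delimiter = True
importer.header_mode = cx.TextImporterData.HeaderMode.Line
importer.headers_line = 1
importer.custom_headers = []                            # unused in Line header mode
importer.data_types = [cx.DataType.float32]            # placeholder column layout
importer.skipped_array_mask = [False]                   # one entry per column
importer.tuple_dims = [100]                             # placeholder tuple count

result = cx.ImportTextDataFilter.execute(data_structure=data_structure,
                                         created_data_group=cx.DataPath(["Imported Data"]),
                                         selected_data_group=cx.DataPath(),
                                         use_existing_group=False,
                                         text_importer_data=importer)
if len(result.errors) != 0:
    print(f"Errors: {result.errors}")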