diff --git a/CMakeLists.txt b/CMakeLists.txt index d3aba8c524..dbccf30416 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -259,7 +259,7 @@ set(CoreParameters MultiPathSelectionParameter NumberParameter NumericTypeParameter - ImportTextDataParameter + ReadCSVFileParameter ImportHDF5DatasetParameter StringParameter VectorParameter @@ -431,7 +431,7 @@ set(COMPLEX_HDRS ${COMPLEX_SOURCE_DIR}/Filter/Actions/CreateGridMontageAction.hpp ${COMPLEX_SOURCE_DIR}/Parameters/util/DynamicTableInfo.hpp - ${COMPLEX_SOURCE_DIR}/Parameters/util/TextImporterData.hpp + ${COMPLEX_SOURCE_DIR}/Parameters/util/ReadCSVData.hpp ${COMPLEX_SOURCE_DIR}/Pipeline/AbstractPipelineNode.hpp ${COMPLEX_SOURCE_DIR}/Pipeline/Pipeline.hpp @@ -630,7 +630,7 @@ set(COMPLEX_SRCS ${COMPLEX_SOURCE_DIR}/Filter/Actions/CreateAttributeMatrixAction.cpp ${COMPLEX_SOURCE_DIR}/Filter/Actions/CreateGridMontageAction.cpp - ${COMPLEX_SOURCE_DIR}/Parameters/util/TextImporterData.cpp + ${COMPLEX_SOURCE_DIR}/Parameters/util/ReadCSVData.cpp ${COMPLEX_SOURCE_DIR}/Parameters/util/DynamicTableInfo.cpp ${COMPLEX_SOURCE_DIR}/Pipeline/AbstractPipelineNode.cpp diff --git a/src/Plugins/ComplexCore/CMakeLists.txt b/src/Plugins/ComplexCore/CMakeLists.txt index 7a471047b2..3c0cd37958 100644 --- a/src/Plugins/ComplexCore/CMakeLists.txt +++ b/src/Plugins/ComplexCore/CMakeLists.txt @@ -77,7 +77,7 @@ set(FilterList ImageContouringFilter IdentifySample ImportBinaryCTNorthstarFilter - ImportTextDataFilter + ReadCSVFileFilter ImportDeformKeyFileV12Filter ImportDREAM3DFilter ImportHDF5Dataset diff --git a/src/Plugins/ComplexCore/docs/ImportTextDataFilter.md b/src/Plugins/ComplexCore/docs/ReadCSVFileFilter.md similarity index 95% rename from src/Plugins/ComplexCore/docs/ImportTextDataFilter.md rename to src/Plugins/ComplexCore/docs/ReadCSVFileFilter.md index ab582d5ea9..fc1914fbff 100644 --- a/src/Plugins/ComplexCore/docs/ImportTextDataFilter.md +++ b/src/Plugins/ComplexCore/docs/ReadCSVFileFilter.md @@ -1,4 +1,4 @@ -# Import Text Data +# 
Read CSV File ## Group (Subgroup) ## @@ -58,7 +58,7 @@ Afterwards, you end up with a data structure that looks like this: | Name | Type | Description | |----------------------------------------------------------------|------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| Importer Data Object | TextImporterData | The object that holds all data relevant to importing the data, such as input file path, custom headers, start import line number, data types for all the imported arrays, headers line number, header mode, imported array tuple dimensions, delimiters, etc. | +| Importer Data Object | ReadCSVData | The object that holds all data relevant to importing the data, such as input file path, custom headers, start import line number, data types for all the imported arrays, headers line number, header mode, imported array tuple dimensions, delimiters, etc. 
| | Use Existing Attribute Matrix | bool | Determines whether or not to store the imported data arrays in an existing attribute matrix | | Existing Attribute Matrix (Use Existing Attribute Matrix - ON) | DataPath | The data path to the existing attribute matrix where the imported arrays will be stored | | New Attribute Matrix (Use Existing Attribute Matrix - OFF) | DataPath | The data path to the newly created attribute matrix where the imported arrays will be stored | diff --git a/src/Plugins/ComplexCore/pipelines/Import_ASCII.d3dpipeline b/src/Plugins/ComplexCore/pipelines/Import_ASCII.d3dpipeline index e6a49c3731..c3e30aa95c 100644 --- a/src/Plugins/ComplexCore/pipelines/Import_ASCII.d3dpipeline +++ b/src/Plugins/ComplexCore/pipelines/Import_ASCII.d3dpipeline @@ -84,7 +84,7 @@ }, "comments": "", "filter": { - "name": "complex::ImportTextDataFilter", + "name": "complex::ReadCSVFileFilter", "uuid": "373be1f8-31cf-49f6-aa5d-e356f4f3f261" }, "isDisabled": false diff --git a/src/Plugins/ComplexCore/pipelines/ResampleRectGridToImageGeom.d3dpipeline b/src/Plugins/ComplexCore/pipelines/ResampleRectGridToImageGeom.d3dpipeline index 1f27974a01..b4a13d4d94 100644 --- a/src/Plugins/ComplexCore/pipelines/ResampleRectGridToImageGeom.d3dpipeline +++ b/src/Plugins/ComplexCore/pipelines/ResampleRectGridToImageGeom.d3dpipeline @@ -41,7 +41,7 @@ }, "comments": "", "filter": { - "name": "complex::ImportTextDataFilter", + "name": "complex::ReadCSVFileFilter", "uuid": "373be1f8-31cf-49f6-aa5d-e356f4f3f261" }, "isDisabled": false diff --git a/src/Plugins/ComplexCore/src/ComplexCore/ComplexCoreLegacyUUIDMapping.hpp b/src/Plugins/ComplexCore/src/ComplexCore/ComplexCoreLegacyUUIDMapping.hpp index d6b8422d6d..93c024bcd8 100644 --- a/src/Plugins/ComplexCore/src/ComplexCore/ComplexCoreLegacyUUIDMapping.hpp +++ b/src/Plugins/ComplexCore/src/ComplexCore/ComplexCoreLegacyUUIDMapping.hpp @@ -55,7 +55,7 @@ #include "ComplexCore/Filters/GenerateColorTableFilter.hpp" #include 
"ComplexCore/Filters/IdentifySample.hpp" #include "ComplexCore/Filters/ImportBinaryCTNorthstarFilter.hpp" -#include "ComplexCore/Filters/ImportTextDataFilter.hpp" +#include "ComplexCore/Filters/ReadCSVFileFilter.hpp" #include "ComplexCore/Filters/ImportDeformKeyFileV12Filter.hpp" #include "ComplexCore/Filters/ImportDREAM3DFilter.hpp" #include "ComplexCore/Filters/ImportHDF5Dataset.hpp" @@ -166,7 +166,7 @@ namespace complex {complex::Uuid::FromString("0d0a6535-6565-51c5-a3fc-fbc00008606d").value(), complex::FilterTraits::uuid}, // GenerateColorTable {complex::Uuid::FromString("0e8c0818-a3fb-57d4-a5c8-7cb8ae54a40a").value(), complex::FilterTraits::uuid}, // IdentifySample {complex::Uuid::FromString("f2259481-5011-5f22-9fcb-c92fb6f8be10").value(), complex::FilterTraits::uuid}, // ImportBinaryCTNorthstarFilter - {complex::Uuid::FromString("bdb978bc-96bf-5498-972c-b509c38b8d50").value(), complex::FilterTraits::uuid}, // ReadASCIIData + {complex::Uuid::FromString("bdb978bc-96bf-5498-972c-b509c38b8d50").value(), complex::FilterTraits::uuid}, // ReadASCIIData {complex::Uuid::FromString("043cbde5-3878-5718-958f-ae75714df0df").value(), complex::FilterTraits::uuid}, // DataContainerReader {complex::Uuid::FromString("9e98c3b0-5707-5a3b-b8b5-23ef83b02896").value(), complex::FilterTraits::uuid}, // ImportHDF5Dataset {complex::Uuid::FromString("a7007472-29e5-5d0a-89a6-1aed11b603f8").value(), complex::FilterTraits::uuid}, // ImportAsciDataArray diff --git a/src/Plugins/ComplexCore/src/ComplexCore/Filters/ImportTextDataFilter.cpp b/src/Plugins/ComplexCore/src/ComplexCore/Filters/ReadCSVFileFilter.cpp similarity index 80% rename from src/Plugins/ComplexCore/src/ComplexCore/Filters/ImportTextDataFilter.cpp rename to src/Plugins/ComplexCore/src/ComplexCore/Filters/ReadCSVFileFilter.cpp index 8d47771724..de3bb7eb86 100644 --- a/src/Plugins/ComplexCore/src/ComplexCore/Filters/ImportTextDataFilter.cpp +++ b/src/Plugins/ComplexCore/src/ComplexCore/Filters/ReadCSVFileFilter.cpp @@ -1,4 
+1,4 @@ -#include "ImportTextDataFilter.hpp" +#include "ReadCSVFileFilter.hpp" #include "ComplexCore/utils/CSVDataParser.hpp" @@ -14,7 +14,7 @@ #include "complex/Parameters/BoolParameter.hpp" #include "complex/Parameters/DataGroupCreationParameter.hpp" #include "complex/Parameters/DynamicTableParameter.hpp" -#include "complex/Parameters/ImportTextDataParameter.hpp" +#include "complex/Parameters/ReadCSVFileParameter.hpp" #include "complex/Utilities/FileUtilities.hpp" #include "complex/Utilities/FilterUtilities.hpp" #include "complex/Utilities/StringUtilities.hpp" @@ -32,7 +32,7 @@ namespace fs = std::filesystem; namespace { -struct ImportTextDataFilterCache +struct ReadCSVFileFilterCache { std::string FilePath; usize TotalLines = 0; @@ -41,7 +41,7 @@ struct ImportTextDataFilterCache }; std::atomic_int32_t s_InstanceId = 0; -std::map s_HeaderCache; +std::map s_HeaderCache; enum class IssueCodes { @@ -288,7 +288,7 @@ std::string tupleDimsToString(const std::vector& tupleDims) } //------------------------------------------------------------------------------ -IFilter::PreflightResult readHeaders(const std::string& inputFilePath, usize headersLineNum, ImportTextDataFilterCache& headerCache) +IFilter::PreflightResult readHeaders(const std::string& inputFilePath, usize headersLineNum, ReadCSVFileFilterCache& headerCache) { std::fstream in(inputFilePath.c_str(), std::ios_base::in); if(!in.is_open()) @@ -312,56 +312,56 @@ IFilter::PreflightResult readHeaders(const std::string& inputFilePath, usize hea namespace complex { //------------------------------------------------------------------------------ -ImportTextDataFilter::ImportTextDataFilter() +ReadCSVFileFilter::ReadCSVFileFilter() : m_InstanceId(s_InstanceId.fetch_add(1)) { s_HeaderCache[m_InstanceId] = {}; } // ----------------------------------------------------------------------------- -ImportTextDataFilter::~ImportTextDataFilter() noexcept +ReadCSVFileFilter::~ReadCSVFileFilter() noexcept { 
s_HeaderCache.erase(m_InstanceId); } // ----------------------------------------------------------------------------- -std::string ImportTextDataFilter::name() const +std::string ReadCSVFileFilter::name() const { - return FilterTraits::name.str(); + return FilterTraits::name.str(); } //------------------------------------------------------------------------------ -std::string ImportTextDataFilter::className() const +std::string ReadCSVFileFilter::className() const { - return FilterTraits::className; + return FilterTraits::className; } //------------------------------------------------------------------------------ -Uuid ImportTextDataFilter::uuid() const +Uuid ReadCSVFileFilter::uuid() const { - return FilterTraits::uuid; + return FilterTraits::uuid; } //------------------------------------------------------------------------------ -std::string ImportTextDataFilter::humanName() const +std::string ReadCSVFileFilter::humanName() const { - return "Import CSV Data"; + return "Read CSV File"; } //------------------------------------------------------------------------------ -std::vector ImportTextDataFilter::defaultTags() const +std::vector ReadCSVFileFilter::defaultTags() const { - return {className(), "IO", "Input", "Read", "Import", "ASCII", "ascii", "CSV", "csv", "Column"}; + return {className(), "IO", "Input", "Read", "Import", "ASCII", "ascii", "CSV", "csv", "Column", "column", "delimited", "Delimited", "text", "Text"}; } //------------------------------------------------------------------------------ -Parameters ImportTextDataFilter::parameters() const +Parameters ReadCSVFileFilter::parameters() const { Parameters params; params.insertSeparator(Parameters::Separator{"Input Parameters"}); - params.insert(std::make_unique(k_TextImporterData_Key, "CSV Importer Data", "Holds all relevant csv file data collected from the custom interface", TextImporterData())); + params.insert(std::make_unique(k_ReadCSVData_Key, "CSV Importer Data", "Holds all relevant csv file data 
collected from the custom interface", ReadCSVData())); DynamicTableInfo tableInfo; tableInfo.setColsInfo(DynamicTableInfo::DynamicVectorInfo(1, "Value {}")); @@ -385,22 +385,22 @@ Parameters ImportTextDataFilter::parameters() const } //------------------------------------------------------------------------------ -IFilter::UniquePointer ImportTextDataFilter::clone() const +IFilter::UniquePointer ReadCSVFileFilter::clone() const { - return std::make_unique(); + return std::make_unique(); } //------------------------------------------------------------------------------ -IFilter::PreflightResult ImportTextDataFilter::preflightImpl(const DataStructure& dataStructure, const Arguments& filterArgs, const MessageHandler& messageHandler, - const std::atomic_bool& shouldCancel) const +IFilter::PreflightResult ReadCSVFileFilter::preflightImpl(const DataStructure& dataStructure, const Arguments& filterArgs, const MessageHandler& messageHandler, + const std::atomic_bool& shouldCancel) const { - TextImporterData textImporterData = filterArgs.value(k_TextImporterData_Key); + ReadCSVData readCSVData = filterArgs.value(k_ReadCSVData_Key); bool useExistingAM = filterArgs.value(k_UseExistingGroup_Key); DataPath selectedAM = filterArgs.value(k_SelectedDataGroup_Key); DataPath createdDataAM = filterArgs.value(k_CreatedDataGroup_Key); - std::string inputFilePath = textImporterData.inputFilePath; - TextImporterData::HeaderMode headerMode = textImporterData.headerMode; + std::string inputFilePath = readCSVData.inputFilePath; + ReadCSVData::HeaderMode headerMode = readCSVData.headerMode; complex::Result resultOutputActions; @@ -417,7 +417,7 @@ IFilter::PreflightResult ImportTextDataFilter::preflightImpl(const DataStructure } StringVector headers; - if(textImporterData.inputFilePath != s_HeaderCache[s_InstanceId].FilePath) + if(readCSVData.inputFilePath != s_HeaderCache[s_InstanceId].FilePath) { std::fstream in(inputFilePath.c_str(), std::ios_base::in); if(!in.is_open()) @@ -425,7 +425,7 
@@ IFilter::PreflightResult ImportTextDataFilter::preflightImpl(const DataStructure return {MakeErrorResult(to_underlying(IssueCodes::FILE_NOT_OPEN), fmt::format("Could not open file for reading: {}", inputFilePath)), {}}; } - s_HeaderCache[s_InstanceId].FilePath = textImporterData.inputFilePath; + s_HeaderCache[s_InstanceId].FilePath = readCSVData.inputFilePath; usize lineCount = 0; while(!in.eof()) @@ -434,69 +434,68 @@ IFilter::PreflightResult ImportTextDataFilter::preflightImpl(const DataStructure std::getline(in, line); lineCount++; - if(headerMode == TextImporterData::HeaderMode::LINE && lineCount == textImporterData.headersLine) + if(headerMode == ReadCSVData::HeaderMode::LINE && lineCount == readCSVData.headersLine) { s_HeaderCache[s_InstanceId].Headers = line; - s_HeaderCache[s_InstanceId].HeadersLine = textImporterData.headersLine; + s_HeaderCache[s_InstanceId].HeadersLine = readCSVData.headersLine; } } - headers = StringUtilities::split(s_HeaderCache[s_InstanceId].Headers, textImporterData.delimiters, textImporterData.consecutiveDelimiters); + headers = StringUtilities::split(s_HeaderCache[s_InstanceId].Headers, readCSVData.delimiters, readCSVData.consecutiveDelimiters); s_HeaderCache[s_InstanceId].TotalLines = lineCount; } - else if(headerMode == TextImporterData::HeaderMode::LINE) + else if(headerMode == ReadCSVData::HeaderMode::LINE) { - if(textImporterData.headersLine != s_HeaderCache[s_InstanceId].HeadersLine) + if(readCSVData.headersLine != s_HeaderCache[s_InstanceId].HeadersLine) { - IFilter::PreflightResult result = readHeaders(textImporterData.inputFilePath, textImporterData.headersLine, s_HeaderCache[s_InstanceId]); + IFilter::PreflightResult result = readHeaders(readCSVData.inputFilePath, readCSVData.headersLine, s_HeaderCache[s_InstanceId]); if(result.outputActions.invalid()) { return result; } } - headers = StringUtilities::split(s_HeaderCache[s_InstanceId].Headers, textImporterData.delimiters, textImporterData.consecutiveDelimiters); + 
headers = StringUtilities::split(s_HeaderCache[s_InstanceId].Headers, readCSVData.delimiters, readCSVData.consecutiveDelimiters); } - if(headerMode == TextImporterData::HeaderMode::CUSTOM) + if(headerMode == ReadCSVData::HeaderMode::CUSTOM) { - headers = textImporterData.customHeaders; + headers = readCSVData.customHeaders; } usize totalLines = s_HeaderCache[s_InstanceId].TotalLines; // Check that we have a valid start import row - if(textImporterData.startImportRow == 0) + if(readCSVData.startImportRow == 0) { std::string errMsg = "'Start import at row' value is out of range. The 'Start import at row' value cannot be set to line #0."; return {MakeErrorResult(to_underlying(IssueCodes::START_IMPORT_ROW_OUT_OF_RANGE), errMsg), {}}; } - if(textImporterData.startImportRow > totalLines) + if(readCSVData.startImportRow > totalLines) { - std::string errMsg = fmt::format("'Start import at row' value ({}) is larger than the total number of lines in the file ({}).", textImporterData.startImportRow, totalLines); + std::string errMsg = fmt::format("'Start import at row' value ({}) is larger than the total number of lines in the file ({}).", readCSVData.startImportRow, totalLines); return {MakeErrorResult(to_underlying(IssueCodes::START_IMPORT_ROW_OUT_OF_RANGE), errMsg), {}}; } // Check that we have a valid header line number - if(headerMode == TextImporterData::HeaderMode::LINE && textImporterData.headersLine == 0) + if(headerMode == ReadCSVData::HeaderMode::LINE && readCSVData.headersLine == 0) { std::string errMsg = "The header line number is out of range. The header line number cannot be set to line #0."; return {MakeErrorResult(to_underlying(IssueCodes::HEADER_LINE_OUT_OF_RANGE), errMsg), {}}; } - if(headerMode == TextImporterData::HeaderMode::LINE && textImporterData.headersLine > totalLines) + if(headerMode == ReadCSVData::HeaderMode::LINE && readCSVData.headersLine > totalLines) { - std::string errMsg = - fmt::format("The header line number is out of range. 
There are {} lines in the file and the header line number is set to line #{}.", totalLines, textImporterData.headersLine); + std::string errMsg = fmt::format("The header line number is out of range. There are {} lines in the file and the header line number is set to line #{}.", totalLines, readCSVData.headersLine); return {MakeErrorResult(to_underlying(IssueCodes::HEADER_LINE_OUT_OF_RANGE), errMsg), {}}; } - if(headerMode == TextImporterData::HeaderMode::LINE && textImporterData.headersLine > textImporterData.startImportRow) + if(headerMode == ReadCSVData::HeaderMode::LINE && readCSVData.headersLine > readCSVData.startImportRow) { std::string errMsg = fmt::format( "The header line number is out of range. The start import row is set to line #{} and the header line number is set to line #{}. The header line number must be in the range 1-{}.", - textImporterData.startImportRow, textImporterData.headersLine, textImporterData.startImportRow - 1); + readCSVData.startImportRow, readCSVData.headersLine, readCSVData.startImportRow - 1); return {MakeErrorResult(to_underlying(IssueCodes::HEADER_LINE_OUT_OF_RANGE), errMsg), {}}; } @@ -508,20 +507,20 @@ IFilter::PreflightResult ImportTextDataFilter::preflightImpl(const DataStructure return {MakeErrorResult(to_underlying(IssueCodes::EMPTY_HEADERS), errMsg), {}}; } - if(textImporterData.dataTypes.size() != headers.size()) + if(readCSVData.dataTypes.size() != headers.size()) { std::string errMsg = fmt::format("The number of data types ({}) does not match the number of imported array headers ({}). 
The number of data types must match the number of imported array headers.", - textImporterData.dataTypes.size(), headers.size()); + readCSVData.dataTypes.size(), headers.size()); return {MakeErrorResult(to_underlying(IssueCodes::INCORRECT_DATATYPE_COUNT), errMsg), {}}; } - if(textImporterData.skippedArrayMask.size() != headers.size()) + if(readCSVData.skippedArrayMask.size() != headers.size()) { std::string errMsg = fmt::format( "The number of booleans in the skipped array mask ({}) does not match the number of imported array headers ({}). The number of booleans in the skipped array mask must match the number " "of imported array headers.", - textImporterData.skippedArrayMask.size(), headers.size()); + readCSVData.skippedArrayMask.size(), headers.size()); return {MakeErrorResult(to_underlying(IssueCodes::INCORRECT_MASK_COUNT), errMsg), {}}; } @@ -553,17 +552,17 @@ IFilter::PreflightResult ImportTextDataFilter::preflightImpl(const DataStructure } // Check that we have a valid tuple count - usize totalImportedLines = totalLines - textImporterData.startImportRow + 1; - usize tupleTotal = std::accumulate(textImporterData.tupleDims.begin(), textImporterData.tupleDims.end(), static_cast(1), std::multiplies()); + usize totalImportedLines = totalLines - readCSVData.startImportRow + 1; + usize tupleTotal = std::accumulate(readCSVData.tupleDims.begin(), readCSVData.tupleDims.end(), static_cast(1), std::multiplies()); if(tupleTotal == 0) { - std::string tupleDimsStr = tupleDimsToString(textImporterData.tupleDims); + std::string tupleDimsStr = tupleDimsToString(readCSVData.tupleDims); std::string errMsg = fmt::format("Error: The current tuple dimensions ({}) has 0 total tuples. 
At least 1 tuple is required.", tupleDimsStr, tupleTotal, totalImportedLines); return {MakeErrorResult(to_underlying(IssueCodes::INCORRECT_TUPLES), errMsg), {}}; } else if(tupleTotal > totalImportedLines) { - std::string tupleDimsStr = tupleDimsToString(textImporterData.tupleDims); + std::string tupleDimsStr = tupleDimsToString(readCSVData.tupleDims); std::string errMsg = fmt::format("Error: The current tuple dimensions ({}) has {} total tuples, but this is larger than the total number of available lines to import ({}).", tupleDimsStr, tupleTotal, totalImportedLines); return {MakeErrorResult(to_underlying(IssueCodes::INCORRECT_TUPLES), errMsg), {}}; @@ -588,17 +587,17 @@ IFilter::PreflightResult ImportTextDataFilter::preflightImpl(const DataStructure return {std::move(result)}; } groupPath = createdDataAM; - resultOutputActions.value().appendAction(std::make_unique(createdDataAM, textImporterData.tupleDims)); + resultOutputActions.value().appendAction(std::make_unique(createdDataAM, readCSVData.tupleDims)); } // Create the arrays - std::vector tupleDims(textImporterData.tupleDims.size()); - std::transform(textImporterData.tupleDims.begin(), textImporterData.tupleDims.end(), tupleDims.begin(), [](float64 d) { return static_cast(d); }); + std::vector tupleDims(readCSVData.tupleDims.size()); + std::transform(readCSVData.tupleDims.begin(), readCSVData.tupleDims.end(), tupleDims.begin(), [](float64 d) { return static_cast(d); }); if(useExistingAM) { const AttributeMatrix& am = dataStructure.getDataRefAs(groupPath); tupleDims = am.getShape(); - std::string tupleDimsStr = tupleDimsToString(textImporterData.tupleDims); + std::string tupleDimsStr = tupleDimsToString(readCSVData.tupleDims); std::string tupleDimsStr2 = tupleDimsToString(tupleDims); std::string msg = fmt::format("The Array Tuple Dimensions ({}) will be ignored and the Existing Attribute Matrix tuple dimensions ({}) will be used instead.", tupleDimsStr, tupleDimsStr2); 
resultOutputActions.warnings().push_back(Warning{to_underlying(IssueCodes::IGNORED_TUPLE_DIMS), msg}); @@ -606,13 +605,13 @@ IFilter::PreflightResult ImportTextDataFilter::preflightImpl(const DataStructure for(usize i = 0; i < headers.size(); i++) { - if(textImporterData.skippedArrayMask[i]) + if(readCSVData.skippedArrayMask[i]) { // The user decided to skip importing this array continue; } - DataType dataType = textImporterData.dataTypes[i]; + DataType dataType = readCSVData.dataTypes[i]; std::string name = headers[i]; DataPath arrayPath = groupPath; @@ -624,24 +623,24 @@ IFilter::PreflightResult ImportTextDataFilter::preflightImpl(const DataStructure } //------------------------------------------------------------------------------ -Result<> ImportTextDataFilter::executeImpl(DataStructure& dataStructure, const Arguments& filterArgs, const PipelineFilter* pipelineNode, const MessageHandler& messageHandler, - const std::atomic_bool& shouldCancel) const +Result<> ReadCSVFileFilter::executeImpl(DataStructure& dataStructure, const Arguments& filterArgs, const PipelineFilter* pipelineNode, const MessageHandler& messageHandler, + const std::atomic_bool& shouldCancel) const { - TextImporterData textImporterData = filterArgs.value(k_TextImporterData_Key); + ReadCSVData readCSVData = filterArgs.value(k_ReadCSVData_Key); bool useExistingGroup = filterArgs.value(k_UseExistingGroup_Key); DataPath selectedDataGroup = filterArgs.value(k_SelectedDataGroup_Key); DataPath createdDataGroup = filterArgs.value(k_CreatedDataGroup_Key); - std::string inputFilePath = textImporterData.inputFilePath; - StringVector headers = StringUtilities::split(s_HeaderCache[s_InstanceId].Headers, textImporterData.delimiters, textImporterData.consecutiveDelimiters); - DataTypeVector dataTypes = textImporterData.dataTypes; - std::vector skippedArrays = textImporterData.skippedArrayMask; - bool consecutiveDelimiters = textImporterData.consecutiveDelimiters; - usize startImportRow = 
textImporterData.startImportRow; + std::string inputFilePath = readCSVData.inputFilePath; + StringVector headers = StringUtilities::split(s_HeaderCache[s_InstanceId].Headers, readCSVData.delimiters, readCSVData.consecutiveDelimiters); + DataTypeVector dataTypes = readCSVData.dataTypes; + std::vector skippedArrays = readCSVData.skippedArrayMask; + bool consecutiveDelimiters = readCSVData.consecutiveDelimiters; + usize startImportRow = readCSVData.startImportRow; - if(textImporterData.headerMode == TextImporterData::HeaderMode::CUSTOM) + if(readCSVData.headerMode == ReadCSVData::HeaderMode::CUSTOM) { - headers = textImporterData.customHeaders; + headers = readCSVData.customHeaders; } DataPath groupPath = createdDataGroup; @@ -669,7 +668,7 @@ Result<> ImportTextDataFilter::executeImpl(DataStructure& dataStructure, const A } float32 threshold = 0.0f; - usize numTuples = std::accumulate(textImporterData.tupleDims.cbegin(), textImporterData.tupleDims.cend(), static_cast(1), std::multiplies<>()); + usize numTuples = std::accumulate(readCSVData.tupleDims.cbegin(), readCSVData.tupleDims.cend(), static_cast(1), std::multiplies<>()); if(useExistingGroup) { const AttributeMatrix& am = dataStructure.getDataRefAs(groupPath); @@ -683,7 +682,7 @@ Result<> ImportTextDataFilter::executeImpl(DataStructure& dataStructure, const A return {}; } - Result<> parsingResult = parseLine(in, parsersResult.value(), headers, textImporterData.delimiters, consecutiveDelimiters, lineNum, startImportRow); + Result<> parsingResult = parseLine(in, parsersResult.value(), headers, readCSVData.delimiters, consecutiveDelimiters, lineNum, startImportRow); if(parsingResult.invalid()) { return std::move(parsingResult); diff --git a/src/Plugins/ComplexCore/src/ComplexCore/Filters/ImportTextDataFilter.hpp b/src/Plugins/ComplexCore/src/ComplexCore/Filters/ReadCSVFileFilter.hpp similarity index 85% rename from src/Plugins/ComplexCore/src/ComplexCore/Filters/ImportTextDataFilter.hpp rename to 
src/Plugins/ComplexCore/src/ComplexCore/Filters/ReadCSVFileFilter.hpp index cc3a3654ba..74ea8d8459 100644 --- a/src/Plugins/ComplexCore/src/ComplexCore/Filters/ImportTextDataFilter.hpp +++ b/src/Plugins/ComplexCore/src/ComplexCore/Filters/ReadCSVFileFilter.hpp @@ -11,7 +11,7 @@ class AbstractDataParser; namespace complex { /** - * @class ImportTextDataFilter + * @class ReadCSVFileFilter * @brief This filter reads CSV data from any text-based file and imports the data into complex-style arrays. * The user uses the parameter user interface to specify which file to import, how the data is formatted, what to call * each array, and what type each array should be. @@ -21,20 +21,20 @@ namespace complex * If multiple columns are in fact different components of the same array, then the columns may be imported as * separate arrays and then combined in the correct order using the Combine Attribute Arrays filter. */ -class COMPLEXCORE_EXPORT ImportTextDataFilter : public IFilter +class COMPLEXCORE_EXPORT ReadCSVFileFilter : public IFilter { public: - ImportTextDataFilter(); - ~ImportTextDataFilter() noexcept override; + ReadCSVFileFilter(); + ~ReadCSVFileFilter() noexcept override; - ImportTextDataFilter(const ImportTextDataFilter&) = delete; - ImportTextDataFilter(ImportTextDataFilter&&) noexcept = delete; + ReadCSVFileFilter(const ReadCSVFileFilter&) = delete; + ReadCSVFileFilter(ReadCSVFileFilter&&) noexcept = delete; - ImportTextDataFilter& operator=(const ImportTextDataFilter&) = delete; - ImportTextDataFilter& operator=(ImportTextDataFilter&&) noexcept = delete; + ReadCSVFileFilter& operator=(const ReadCSVFileFilter&) = delete; + ReadCSVFileFilter& operator=(ReadCSVFileFilter&&) noexcept = delete; // Parameter Keys - static inline constexpr StringLiteral k_TextImporterData_Key = "text_importer_data"; + static inline constexpr StringLiteral k_ReadCSVData_Key = "read_csv_data"; static inline constexpr StringLiteral k_UseExistingGroup_Key = "use_existing_group"; static 
inline constexpr StringLiteral k_SelectedDataGroup_Key = "selected_data_group"; static inline constexpr StringLiteral k_CreatedDataGroup_Key = "created_data_group"; @@ -109,4 +109,4 @@ class COMPLEXCORE_EXPORT ImportTextDataFilter : public IFilter }; } // namespace complex -COMPLEX_DEF_FILTER_TRAITS(complex, ImportTextDataFilter, "373be1f8-31cf-49f6-aa5d-e356f4f3f261"); +COMPLEX_DEF_FILTER_TRAITS(complex, ReadCSVFileFilter, "373be1f8-31cf-49f6-aa5d-e356f4f3f261"); diff --git a/src/Plugins/ComplexCore/test/CMakeLists.txt b/src/Plugins/ComplexCore/test/CMakeLists.txt index d41da0b0fe..d58d222ed7 100644 --- a/src/Plugins/ComplexCore/test/CMakeLists.txt +++ b/src/Plugins/ComplexCore/test/CMakeLists.txt @@ -77,7 +77,7 @@ set(${PLUGIN_NAME}UnitTest_SRCS ImageContouringTest.cpp ImageGeomTest.cpp ImportBinaryCTNorthstarTest.cpp - ImportTextDataTest.cpp + ReadCSVFileTest.cpp # ImportDeformKeyFileV12Test.cpp ImportHDF5DatasetTest.cpp diff --git a/src/Plugins/ComplexCore/test/ImportTextTest.cpp b/src/Plugins/ComplexCore/test/ImportTextTest.cpp index 08d5e38c99..d2ed09ef2e 100644 --- a/src/Plugins/ComplexCore/test/ImportTextTest.cpp +++ b/src/Plugins/ComplexCore/test/ImportTextTest.cpp @@ -1,11 +1,11 @@ #include "ComplexCore/ComplexCore_test_dirs.hpp" #include "ComplexCore/Filters/ImportTextFilter.hpp" -#include "ComplexCore/Filters/ImportTextDataFilter.hpp" +#include "ComplexCore/Filters/ReadCSVFileFilter.hpp" #include "complex/Common/TypesUtility.hpp" #include "complex/DataStructure/DataArray.hpp" #include "complex/Parameters/DynamicTableParameter.hpp" -#include "complex/Parameters/ImportTextDataParameter.hpp" +#include "complex/Parameters/ReadCSVFileParameter.hpp" #include "complex/UnitTest/UnitTestCommon.hpp" #include "complex/Utilities/DataArrayUtilities.hpp" #include "complex/Utilities/StringUtilities.hpp" diff --git a/src/Plugins/ComplexCore/test/ImportTextDataTest.cpp b/src/Plugins/ComplexCore/test/ReadCSVFileTest.cpp similarity index 73% rename from 
src/Plugins/ComplexCore/test/ImportTextDataTest.cpp rename to src/Plugins/ComplexCore/test/ReadCSVFileTest.cpp index 6b296fc57d..e2ee5022f2 100644 --- a/src/Plugins/ComplexCore/test/ImportTextDataTest.cpp +++ b/src/Plugins/ComplexCore/test/ReadCSVFileTest.cpp @@ -1,11 +1,11 @@ #include "ComplexCore/ComplexCore_test_dirs.hpp" #include "ComplexCore/Filters/CreateDataGroup.hpp" -#include "ComplexCore/Filters/ImportTextDataFilter.hpp" +#include "ComplexCore/Filters/ReadCSVFileFilter.hpp" #include "complex/Common/TypesUtility.hpp" #include "complex/DataStructure/DataArray.hpp" #include "complex/Parameters/DynamicTableParameter.hpp" -#include "complex/Parameters/ImportTextDataParameter.hpp" +#include "complex/Parameters/ReadCSVFileParameter.hpp" #include "complex/UnitTest/UnitTestCommon.hpp" #include "complex/Utilities/DataArrayUtilities.hpp" #include "complex/Utilities/StringUtilities.hpp" @@ -19,7 +19,7 @@ using namespace complex; namespace { -const fs::path k_TestInput = fs::path(unit_test::k_BinaryDir.view()) / "ImportTextDataTest" / "Input.txt"; +const fs::path k_TestInput = fs::path(unit_test::k_BinaryDir.view()) / "ReadCSVFileTest" / "Input.txt"; constexpr int32 k_InvalidArgumentErrorCode = -100; constexpr int32 k_OverflowErrorCode = -101; constexpr int32 k_BlankLineErrorCode = -119; @@ -80,13 +80,13 @@ void CreateTestDataFile(const fs::path& inputFilePath, nonstd::span } // ----------------------------------------------------------------------------- -Arguments createArguments(const std::string& inputFilePath, usize startImportRow, TextImporterData::HeaderMode headerMode, usize headersLine, const std::vector& delimiters, +Arguments createArguments(const std::string& inputFilePath, usize startImportRow, ReadCSVData::HeaderMode headerMode, usize headersLine, const std::vector& delimiters, const std::vector& customHeaders, const std::vector& dataTypes, const std::vector& skippedArrayMask, const std::vector& tupleDims, nonstd::span values, const std::string& 
newGroupName) { Arguments args; - TextImporterData data; + ReadCSVData data; data.inputFilePath = inputFilePath; data.customHeaders = customHeaders; data.dataTypes = dataTypes; @@ -97,9 +97,9 @@ Arguments createArguments(const std::string& inputFilePath, usize startImportRow data.tupleDims = tupleDims; data.skippedArrayMask = skippedArrayMask; - args.insertOrAssign(ImportTextDataFilter::k_TextImporterData_Key, std::make_any(data)); - args.insertOrAssign(ImportTextDataFilter::k_UseExistingGroup_Key, std::make_any(false)); - args.insertOrAssign(ImportTextDataFilter::k_CreatedDataGroup_Key, std::make_any(DataPath({newGroupName}))); + args.insertOrAssign(ReadCSVFileFilter::k_ReadCSVData_Key, std::make_any(data)); + args.insertOrAssign(ReadCSVFileFilter::k_UseExistingGroup_Key, std::make_any(false)); + args.insertOrAssign(ReadCSVFileFilter::k_CreatedDataGroup_Key, std::make_any(DataPath({newGroupName}))); return args; } @@ -116,10 +116,10 @@ void TestCase_TestPrimitives(nonstd::span values) std::string arrayName = "Array"; DataPath arrayPath = DataPath({newGroupName, arrayName}); - ImportTextDataFilter filter; + ReadCSVFileFilter filter; DataStructure dataStructure; Arguments args = - createArguments(k_TestInput.string(), 2, TextImporterData::HeaderMode::LINE, 1, {','}, {arrayName}, {GetDataType()}, {false}, {static_cast(values.size())}, values, newGroupName); + createArguments(k_TestInput.string(), 2, ReadCSVData::HeaderMode::LINE, 1, {','}, {arrayName}, {GetDataType()}, {false}, {static_cast(values.size())}, values, newGroupName); // Create the test input data file CreateTestDataFile(k_TestInput, values, {arrayName}); @@ -159,10 +159,10 @@ void TestCase_TestPrimitives_Error(nonstd::span values, int32 expec std::string arrayName = "Array"; DataPath arrayPath = DataPath({newGroupName, arrayName}); - ImportTextDataFilter filter; + ReadCSVFileFilter filter; DataStructure dataStructure; Arguments args = - createArguments(k_TestInput.string(), 2, 
TextImporterData::HeaderMode::LINE, 1, {','}, {arrayName}, {GetDataType()}, {false}, {static_cast(values.size())}, values, newGroupName); + createArguments(k_TestInput.string(), 2, ReadCSVData::HeaderMode::LINE, 1, {','}, {arrayName}, {GetDataType()}, {false}, {static_cast(values.size())}, values, newGroupName); // Create the test input data file fs::create_directories(k_TestInput.parent_path()); @@ -180,12 +180,12 @@ void TestCase_TestPrimitives_Error(nonstd::span values, int32 expec } // ----------------------------------------------------------------------------- -void TestCase_TestImporterData_Error(const std::string& inputFilePath, usize startImportRow, TextImporterData::HeaderMode headerMode, usize headersLine, const std::vector& delimiters, +void TestCase_TestImporterData_Error(const std::string& inputFilePath, usize startImportRow, ReadCSVData::HeaderMode headerMode, usize headersLine, const std::vector& delimiters, const std::vector& headers, const std::vector& dataTypes, const std::vector& skippedArrayMask, const std::vector& tupleDims, nonstd::span values, int32 expectedErrorCode) { std::string newGroupName = "New Group"; - ImportTextDataFilter filter; + ReadCSVFileFilter filter; DataStructure dataStructure; Arguments args = createArguments(inputFilePath, startImportRow, headerMode, headersLine, delimiters, headers, dataTypes, skippedArrayMask, tupleDims, values, newGroupName); @@ -196,7 +196,7 @@ void TestCase_TestImporterData_Error(const std::string& inputFilePath, usize sta REQUIRE(executeResult.result.errors()[0].code == expectedErrorCode); } -TEST_CASE("ComplexCore::ImportTextDataFilter (Case 1): Valid filter execution") +TEST_CASE("ComplexCore::ReadCSVFileFilter (Case 1): Valid filter execution") { // Create the parent directory path fs::create_directories(k_TestInput.parent_path()); @@ -235,18 +235,17 @@ TEST_CASE("ComplexCore::ImportTextDataFilter (Case 1): Valid filter execution") TestCase_TestPrimitives(v); } 
-TEST_CASE("ComplexCore::ImportTextDataFilter (Case 2): Valid filter execution - Skipped Array") +TEST_CASE("ComplexCore::ReadCSVFileFilter (Case 2): Valid filter execution - Skipped Array") { std::string newGroupName = "New Group"; std::string arrayName = "Array"; DataPath arrayPath = DataPath({newGroupName, arrayName}); - ImportTextDataFilter filter; + ReadCSVFileFilter filter; DataStructure dataStructure; std::vector values = {"0"}; - Arguments args = - createArguments(k_TestInput.string(), 2, TextImporterData::HeaderMode::LINE, 1, {','}, {arrayName}, {DataType::int8}, {true}, {static_cast(values.size())}, values, newGroupName); + Arguments args = createArguments(k_TestInput.string(), 2, ReadCSVData::HeaderMode::LINE, 1, {','}, {arrayName}, {DataType::int8}, {true}, {static_cast(values.size())}, values, newGroupName); // Create the test input data file CreateTestDataFile(k_TestInput, values, {arrayName}); @@ -264,7 +263,7 @@ TEST_CASE("ComplexCore::ImportTextDataFilter (Case 2): Valid filter execution - REQUIRE(array == nullptr); } -TEST_CASE("ComplexCore::ImportTextDataFilter (Case 3): Invalid filter execution - Out of Bounds") +TEST_CASE("ComplexCore::ReadCSVFileFilter (Case 3): Invalid filter execution - Out of Bounds") { // Create the parent directory path fs::create_directories(k_TestInput.parent_path()); @@ -340,7 +339,7 @@ TEST_CASE("ComplexCore::ImportTextDataFilter (Case 3): Invalid filter execution TestCase_TestPrimitives_Error(v, k_OverflowErrorCode); } -TEST_CASE("ComplexCore::ImportTextDataFilter (Case 4): Invalid filter execution - Invalid arguments") +TEST_CASE("ComplexCore::ReadCSVFileFilter (Case 4): Invalid filter execution - Invalid arguments") { // Create the parent directory path fs::create_directories(k_TestInput.parent_path()); @@ -382,7 +381,7 @@ TEST_CASE("ComplexCore::ImportTextDataFilter (Case 4): Invalid filter execution TestCase_TestPrimitives_Error(v, k_InvalidArgumentErrorCode); } -TEST_CASE("ComplexCore::ImportTextDataFilter (Case 
5): Invalid filter execution - Invalid TextImporterData values") +TEST_CASE("ComplexCore::ReadCSVFileFilter (Case 5): Invalid filter execution - Invalid ReadCSVData values") { std::vector v = {std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max())}; fs::create_directories(k_TestInput.parent_path()); @@ -390,52 +389,51 @@ TEST_CASE("ComplexCore::ImportTextDataFilter (Case 5): Invalid filter execution std::vector tupleDims = {static_cast(v.size())}; // Empty input file path - TestCase_TestImporterData_Error("", 2, TextImporterData::HeaderMode::LINE, 1, {','}, {}, {DataType::int8}, {false}, tupleDims, v, k_EmptyFile); + TestCase_TestImporterData_Error("", 2, ReadCSVData::HeaderMode::LINE, 1, {','}, {}, {DataType::int8}, {false}, tupleDims, v, k_EmptyFile); // Input file does not exist fs::path tmp_file = fs::temp_directory_path() / "ThisFileDoesNotExist.txt"; - TestCase_TestImporterData_Error(tmp_file.string(), 2, TextImporterData::HeaderMode::LINE, 1, {','}, {}, {DataType::int8}, {false}, tupleDims, v, k_FileDoesNotExist); + TestCase_TestImporterData_Error(tmp_file.string(), 2, ReadCSVData::HeaderMode::LINE, 1, {','}, {}, {DataType::int8}, {false}, tupleDims, v, k_FileDoesNotExist); // Start Import Row Out-of-Range - TestCase_TestImporterData_Error(k_TestInput.string(), 0, TextImporterData::HeaderMode::LINE, 1, {','}, {}, {DataType::int8}, {false}, tupleDims, v, k_StartImportRowOutOfRange); - TestCase_TestImporterData_Error(k_TestInput.string(), 500, TextImporterData::HeaderMode::LINE, 1, {','}, {}, {DataType::int8}, {false}, tupleDims, v, k_StartImportRowOutOfRange); + TestCase_TestImporterData_Error(k_TestInput.string(), 0, ReadCSVData::HeaderMode::LINE, 1, {','}, {}, {DataType::int8}, {false}, tupleDims, v, k_StartImportRowOutOfRange); + TestCase_TestImporterData_Error(k_TestInput.string(), 500, ReadCSVData::HeaderMode::LINE, 1, {','}, {}, {DataType::int8}, {false}, tupleDims, v, k_StartImportRowOutOfRange); // Header Line Number 
Out-of-Range - TestCase_TestImporterData_Error(k_TestInput.string(), 2, TextImporterData::HeaderMode::LINE, 0, {','}, {}, {DataType::int8}, {false}, tupleDims, v, k_HeaderLineOutOfRange); - TestCase_TestImporterData_Error(k_TestInput.string(), 2, TextImporterData::HeaderMode::LINE, 600, {','}, {}, {DataType::int8}, {false}, tupleDims, v, k_HeaderLineOutOfRange); - TestCase_TestImporterData_Error(k_TestInput.string(), 2, TextImporterData::HeaderMode::LINE, 3, {','}, {}, {DataType::int8}, {false}, tupleDims, v, k_HeaderLineOutOfRange); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, ReadCSVData::HeaderMode::LINE, 0, {','}, {}, {DataType::int8}, {false}, tupleDims, v, k_HeaderLineOutOfRange); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, ReadCSVData::HeaderMode::LINE, 600, {','}, {}, {DataType::int8}, {false}, tupleDims, v, k_HeaderLineOutOfRange); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, ReadCSVData::HeaderMode::LINE, 3, {','}, {}, {DataType::int8}, {false}, tupleDims, v, k_HeaderLineOutOfRange); // Empty array headers tmp_file = fs::temp_directory_path() / "BlankLines.txt"; v = {std::to_string(std::numeric_limits::min()), "", std::to_string(std::numeric_limits::max())}; CreateTestDataFile(tmp_file, v, {"Array"}); - TestCase_TestImporterData_Error(tmp_file.string(), 4, TextImporterData::HeaderMode::LINE, 3, {','}, {}, {DataType::int8}, {false}, {static_cast(v.size())}, v, k_EmptyHeaders); - TestCase_TestImporterData_Error(k_TestInput.string(), 2, TextImporterData::HeaderMode::CUSTOM, 1, {','}, {}, {DataType::int8}, {false}, {static_cast(v.size())}, v, k_EmptyHeaders); + TestCase_TestImporterData_Error(tmp_file.string(), 4, ReadCSVData::HeaderMode::LINE, 3, {','}, {}, {DataType::int8}, {false}, {static_cast(v.size())}, v, k_EmptyHeaders); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, ReadCSVData::HeaderMode::CUSTOM, 1, {','}, {}, {DataType::int8}, {false}, {static_cast(v.size())}, v, k_EmptyHeaders); 
fs::remove(tmp_file); v = {std::to_string(std::numeric_limits::min()), std::to_string(std::numeric_limits::max())}; // Incorrect Data Type Count - TestCase_TestImporterData_Error(k_TestInput.string(), 2, TextImporterData::HeaderMode::LINE, 1, {','}, {}, {}, {false}, tupleDims, v, k_IncorrectDataTypeCount); - TestCase_TestImporterData_Error(k_TestInput.string(), 2, TextImporterData::HeaderMode::LINE, 1, {','}, {}, {DataType::int8, DataType::int32}, {false}, tupleDims, v, k_IncorrectDataTypeCount); - TestCase_TestImporterData_Error(k_TestInput.string(), 2, TextImporterData::HeaderMode::CUSTOM, 1, {','}, {"Custom Array"}, {}, {false}, tupleDims, v, k_IncorrectDataTypeCount); - TestCase_TestImporterData_Error(k_TestInput.string(), 2, TextImporterData::HeaderMode::CUSTOM, 1, {','}, {"Custom Array"}, {DataType::int8, DataType::int32}, {false}, tupleDims, v, + TestCase_TestImporterData_Error(k_TestInput.string(), 2, ReadCSVData::HeaderMode::LINE, 1, {','}, {}, {}, {false}, tupleDims, v, k_IncorrectDataTypeCount); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, ReadCSVData::HeaderMode::LINE, 1, {','}, {}, {DataType::int8, DataType::int32}, {false}, tupleDims, v, k_IncorrectDataTypeCount); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, ReadCSVData::HeaderMode::CUSTOM, 1, {','}, {"Custom Array"}, {}, {false}, tupleDims, v, k_IncorrectDataTypeCount); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, ReadCSVData::HeaderMode::CUSTOM, 1, {','}, {"Custom Array"}, {DataType::int8, DataType::int32}, {false}, tupleDims, v, k_IncorrectDataTypeCount); // Incorrect Skipped Array Mask Count - TestCase_TestImporterData_Error(k_TestInput.string(), 2, TextImporterData::HeaderMode::LINE, 1, {','}, {}, {DataType::int8}, {}, tupleDims, v, k_IncorrectMaskCount); - TestCase_TestImporterData_Error(k_TestInput.string(), 2, TextImporterData::HeaderMode::LINE, 1, {','}, {}, {DataType::int8}, {false, false}, tupleDims, v, k_IncorrectMaskCount); - 
TestCase_TestImporterData_Error(k_TestInput.string(), 2, TextImporterData::HeaderMode::CUSTOM, 1, {','}, {"Custom Array"}, {DataType::int8}, {}, tupleDims, v, k_IncorrectMaskCount); - TestCase_TestImporterData_Error(k_TestInput.string(), 2, TextImporterData::HeaderMode::CUSTOM, 1, {','}, {"Custom Array"}, {DataType::int8}, {false, false}, tupleDims, v, k_IncorrectMaskCount); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, ReadCSVData::HeaderMode::LINE, 1, {','}, {}, {DataType::int8}, {}, tupleDims, v, k_IncorrectMaskCount); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, ReadCSVData::HeaderMode::LINE, 1, {','}, {}, {DataType::int8}, {false, false}, tupleDims, v, k_IncorrectMaskCount); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, ReadCSVData::HeaderMode::CUSTOM, 1, {','}, {"Custom Array"}, {DataType::int8}, {}, tupleDims, v, k_IncorrectMaskCount); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, ReadCSVData::HeaderMode::CUSTOM, 1, {','}, {"Custom Array"}, {DataType::int8}, {false, false}, tupleDims, v, k_IncorrectMaskCount); // Empty Header Names - TestCase_TestImporterData_Error(k_TestInput.string(), 2, TextImporterData::HeaderMode::CUSTOM, 1, {','}, {""}, {DataType::int8}, {false}, tupleDims, v, k_EmptyNames); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, ReadCSVData::HeaderMode::CUSTOM, 1, {','}, {""}, {DataType::int8}, {false}, tupleDims, v, k_EmptyNames); // Duplicate Header Names tmp_file = fs::temp_directory_path() / "DuplicateHeaders.txt"; std::vector duplicateHeaders = {"Custom Array", "Custom Array"}; CreateTestDataFile(tmp_file, v, duplicateHeaders); - TestCase_TestImporterData_Error(tmp_file.string(), 2, TextImporterData::HeaderMode::CUSTOM, 1, {','}, duplicateHeaders, {DataType::int8, DataType::int8}, {false, false}, tupleDims, v, - k_DuplicateNames); + TestCase_TestImporterData_Error(tmp_file.string(), 2, ReadCSVData::HeaderMode::CUSTOM, 1, {','}, duplicateHeaders, {DataType::int8, 
DataType::int8}, {false, false}, tupleDims, v, k_DuplicateNames); fs::remove(tmp_file); // Illegal Header Names @@ -443,38 +441,38 @@ TEST_CASE("ComplexCore::ImportTextDataFilter (Case 5): Invalid filter execution std::vector illegalHeaders = {"Illegal/Header"}; CreateTestDataFile(tmp_file, v, illegalHeaders); - TestCase_TestImporterData_Error(tmp_file.string(), 2, TextImporterData::HeaderMode::LINE, 1, {','}, {}, {DataType::int8}, {false}, tupleDims, v, k_IllegalNames); - TestCase_TestImporterData_Error(tmp_file.string(), 2, TextImporterData::HeaderMode::CUSTOM, 1, {','}, illegalHeaders, {DataType::int8}, {false}, tupleDims, v, k_IllegalNames); + TestCase_TestImporterData_Error(tmp_file.string(), 2, ReadCSVData::HeaderMode::LINE, 1, {','}, {}, {DataType::int8}, {false}, tupleDims, v, k_IllegalNames); + TestCase_TestImporterData_Error(tmp_file.string(), 2, ReadCSVData::HeaderMode::CUSTOM, 1, {','}, illegalHeaders, {DataType::int8}, {false}, tupleDims, v, k_IllegalNames); illegalHeaders = {"Illegal\\Header"}; CreateTestDataFile(tmp_file, v, illegalHeaders); - TestCase_TestImporterData_Error(tmp_file.string(), 2, TextImporterData::HeaderMode::LINE, 1, {','}, {}, {DataType::int8}, {false}, tupleDims, v, k_IllegalNames); - TestCase_TestImporterData_Error(tmp_file.string(), 2, TextImporterData::HeaderMode::CUSTOM, 1, {','}, illegalHeaders, {DataType::int8}, {false}, tupleDims, v, k_IllegalNames); + TestCase_TestImporterData_Error(tmp_file.string(), 2, ReadCSVData::HeaderMode::LINE, 1, {','}, {}, {DataType::int8}, {false}, tupleDims, v, k_IllegalNames); + TestCase_TestImporterData_Error(tmp_file.string(), 2, ReadCSVData::HeaderMode::CUSTOM, 1, {','}, illegalHeaders, {DataType::int8}, {false}, tupleDims, v, k_IllegalNames); illegalHeaders = {"Illegal&Header"}; CreateTestDataFile(tmp_file, v, illegalHeaders); - TestCase_TestImporterData_Error(tmp_file.string(), 2, TextImporterData::HeaderMode::LINE, 1, {','}, {}, {DataType::int8}, {false}, tupleDims, v, k_IllegalNames); - 
TestCase_TestImporterData_Error(tmp_file.string(), 2, TextImporterData::HeaderMode::CUSTOM, 1, {','}, illegalHeaders, {DataType::int8}, {false}, tupleDims, v, k_IllegalNames); + TestCase_TestImporterData_Error(tmp_file.string(), 2, ReadCSVData::HeaderMode::LINE, 1, {','}, {}, {DataType::int8}, {false}, tupleDims, v, k_IllegalNames); + TestCase_TestImporterData_Error(tmp_file.string(), 2, ReadCSVData::HeaderMode::CUSTOM, 1, {','}, illegalHeaders, {DataType::int8}, {false}, tupleDims, v, k_IllegalNames); illegalHeaders = {"Illegal:Header"}; CreateTestDataFile(tmp_file, v, illegalHeaders); - TestCase_TestImporterData_Error(tmp_file.string(), 2, TextImporterData::HeaderMode::LINE, 1, {','}, {}, {DataType::int8}, {false}, tupleDims, v, k_IllegalNames); - TestCase_TestImporterData_Error(tmp_file.string(), 2, TextImporterData::HeaderMode::CUSTOM, 1, {','}, illegalHeaders, {DataType::int8}, {false}, tupleDims, v, k_IllegalNames); + TestCase_TestImporterData_Error(tmp_file.string(), 2, ReadCSVData::HeaderMode::LINE, 1, {','}, {}, {DataType::int8}, {false}, tupleDims, v, k_IllegalNames); + TestCase_TestImporterData_Error(tmp_file.string(), 2, ReadCSVData::HeaderMode::CUSTOM, 1, {','}, illegalHeaders, {DataType::int8}, {false}, tupleDims, v, k_IllegalNames); fs::remove(tmp_file); // Incorrect Tuple Dimensions - TestCase_TestImporterData_Error(k_TestInput.string(), 2, TextImporterData::HeaderMode::CUSTOM, 1, {','}, {"Custom Array"}, {DataType::int8}, {false}, {0}, v, k_IncorrectTuples); - TestCase_TestImporterData_Error(k_TestInput.string(), 2, TextImporterData::HeaderMode::CUSTOM, 1, {','}, {"Custom Array"}, {DataType::int8}, {false}, {30}, v, k_IncorrectTuples); - TestCase_TestImporterData_Error(k_TestInput.string(), 2, TextImporterData::HeaderMode::CUSTOM, 1, {','}, {"Custom Array"}, {DataType::int8}, {false}, {30, 2}, v, k_IncorrectTuples); - TestCase_TestImporterData_Error(k_TestInput.string(), 2, TextImporterData::HeaderMode::CUSTOM, 1, {','}, {"Custom Array"}, 
{DataType::int8}, {false}, {30, 5, 7}, v, k_IncorrectTuples); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, ReadCSVData::HeaderMode::CUSTOM, 1, {','}, {"Custom Array"}, {DataType::int8}, {false}, {0}, v, k_IncorrectTuples); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, ReadCSVData::HeaderMode::CUSTOM, 1, {','}, {"Custom Array"}, {DataType::int8}, {false}, {30}, v, k_IncorrectTuples); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, ReadCSVData::HeaderMode::CUSTOM, 1, {','}, {"Custom Array"}, {DataType::int8}, {false}, {30, 2}, v, k_IncorrectTuples); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, ReadCSVData::HeaderMode::CUSTOM, 1, {','}, {"Custom Array"}, {DataType::int8}, {false}, {30, 5, 7}, v, k_IncorrectTuples); // Inconsistent Columns - TestCase_TestImporterData_Error(k_TestInput.string(), 2, TextImporterData::HeaderMode::CUSTOM, 1, {','}, {"Custom Array", "Custom Array2"}, {DataType::int8, DataType::int8}, {false, false}, - tupleDims, v, k_InconsistentCols); + TestCase_TestImporterData_Error(k_TestInput.string(), 2, ReadCSVData::HeaderMode::CUSTOM, 1, {','}, {"Custom Array", "Custom Array2"}, {DataType::int8, DataType::int8}, {false, false}, tupleDims, v, + k_InconsistentCols); } -TEST_CASE("ComplexCore::ImportTextDataFilter (Case 6): Invalid filter execution - Blank Lines") +TEST_CASE("ComplexCore::ReadCSVFileFilter (Case 6): Invalid filter execution - Blank Lines") { // Create the parent directory path fs::create_directories(k_TestInput.parent_path()); diff --git a/src/Plugins/ComplexCore/wrapping/python/complexpy.cpp b/src/Plugins/ComplexCore/wrapping/python/complexpy.cpp index 65c7dd8a16..e617f80e1a 100644 --- a/src/Plugins/ComplexCore/wrapping/python/complexpy.cpp +++ b/src/Plugins/ComplexCore/wrapping/python/complexpy.cpp @@ -69,12 +69,12 @@ #include #include #include -#include #include #include #include #include #include +#include #include #include #include @@ -428,24 +428,24 @@ PYBIND11_MODULE(complex, 
mod) arrayThresholdSet.def_property("thresholds", &ArrayThresholdSet::getArrayThresholds, &ArrayThresholdSet::setArrayThresholds); arrayThresholdSet.def("__repr__", [](const ArrayThresholdSet& self) { return "ArrayThresholdSet()"; }); - py::class_ textImporterData(mod, "TextImporterData"); - - py::enum_ csvHeaderMode(textImporterData, "HeaderMode"); - csvHeaderMode.value("Line", TextImporterData::HeaderMode::LINE); - csvHeaderMode.value("Custom", TextImporterData::HeaderMode::CUSTOM); - - textImporterData.def(py::init<>()); - textImporterData.def_readwrite("input_file_path", &TextImporterData::inputFilePath); - textImporterData.def_readwrite("custom_headers", &TextImporterData::customHeaders); - textImporterData.def_readwrite("start_import_row", &TextImporterData::startImportRow); - textImporterData.def_readwrite("data_types", &TextImporterData::dataTypes); - textImporterData.def_readwrite("skipped_array_mask", &TextImporterData::skippedArrayMask); - textImporterData.def_readwrite("headers_line", &TextImporterData::headersLine); - textImporterData.def_readwrite("header_mode", &TextImporterData::headerMode); - textImporterData.def_readwrite("tuple_dims", &TextImporterData::tupleDims); - textImporterData.def_readwrite("delimiters", &TextImporterData::delimiters); - textImporterData.def_readwrite("consecutive_delimiters", &TextImporterData::consecutiveDelimiters); - textImporterData.def("__repr__", [](const TextImporterData& self) { return "TextImporterData()"; }); + py::class_ readCSVData(mod, "ReadCSVData"); + + py::enum_ csvHeaderMode(readCSVData, "HeaderMode"); + csvHeaderMode.value("Line", ReadCSVData::HeaderMode::LINE); + csvHeaderMode.value("Custom", ReadCSVData::HeaderMode::CUSTOM); + + readCSVData.def(py::init<>()); + readCSVData.def_readwrite("input_file_path", &ReadCSVData::inputFilePath); + readCSVData.def_readwrite("custom_headers", &ReadCSVData::customHeaders); + readCSVData.def_readwrite("start_import_row", &ReadCSVData::startImportRow); + 
readCSVData.def_readwrite("data_types", &ReadCSVData::dataTypes); + readCSVData.def_readwrite("skipped_array_mask", &ReadCSVData::skippedArrayMask); + readCSVData.def_readwrite("headers_line", &ReadCSVData::headersLine); + readCSVData.def_readwrite("header_mode", &ReadCSVData::headerMode); + readCSVData.def_readwrite("tuple_dims", &ReadCSVData::tupleDims); + readCSVData.def_readwrite("delimiters", &ReadCSVData::delimiters); + readCSVData.def_readwrite("consecutive_delimiters", &ReadCSVData::consecutiveDelimiters); + readCSVData.def("__repr__", [](const ReadCSVData& self) { return "ReadCSVData()"; }); py::class_> abstractPlugin(mod, "AbstractPlugin"); py::class_> pythonPlugin(mod, "PythonPlugin"); @@ -712,7 +712,7 @@ PYBIND11_MODULE(complex, mod) auto generateColorTableParameter = COMPLEX_PY_BIND_PARAMETER(mod, GenerateColorTableParameter); auto generatedFileListParameter = COMPLEX_PY_BIND_PARAMETER(mod, GeneratedFileListParameter); auto geometrySelectionParameter = COMPLEX_PY_BIND_PARAMETER(mod, GeometrySelectionParameter); - auto importTextDataParameter = COMPLEX_PY_BIND_PARAMETER(mod, ImportTextDataParameter); + auto importTextDataParameter = COMPLEX_PY_BIND_PARAMETER(mod, ReadCSVFileParameter); auto importHDF5DatasetParameter = COMPLEX_PY_BIND_PARAMETER(mod, ImportHDF5DatasetParameter); auto multiArraySelectionParameter = COMPLEX_PY_BIND_PARAMETER(mod, MultiArraySelectionParameter); auto multiPathSelectionParameter = COMPLEX_PY_BIND_PARAMETER(mod, MultiPathSelectionParameter); @@ -1053,7 +1053,7 @@ PYBIND11_MODULE(complex, mod) internals->addConversion(); internals->addConversion(); internals->addConversion(); - internals->addConversion(); + internals->addConversion(); internals->addConversion(); internals->addConversion(); internals->addConversion(); diff --git a/src/Plugins/OrientationAnalysis/test/ExportGBCDGMTFileTest.cpp b/src/Plugins/OrientationAnalysis/test/ExportGBCDGMTFileTest.cpp index e9b3603a99..26798af9b3 100644 --- 
a/src/Plugins/OrientationAnalysis/test/ExportGBCDGMTFileTest.cpp +++ b/src/Plugins/OrientationAnalysis/test/ExportGBCDGMTFileTest.cpp @@ -11,7 +11,7 @@ #include "complex/Parameters/GeometrySelectionParameter.hpp" #include "complex/Parameters/NumberParameter.hpp" #include "complex/Parameters/VectorParameter.hpp" -#include "complex/Parameters/util/TextImporterData.hpp" +#include "complex/Parameters/util/ReadCSVData.hpp" #include "complex/UnitTest/UnitTestCommon.hpp" #include @@ -24,7 +24,7 @@ namespace { inline constexpr StringLiteral k_FaceEnsembleDataPath("FaceEnsembleData [NX]"); -inline constexpr StringLiteral k_TextImporterData_Key = "text_importer_data"; +inline constexpr StringLiteral k_ReadCSVData_Key = "read_csv_data"; inline constexpr StringLiteral k_TupleDims_Key = "tuple_dimensions"; inline constexpr StringLiteral k_UseExistingGroup_Key = "use_existing_group"; inline constexpr StringLiteral k_SelectedDataGroup_Key = "selected_data_group"; @@ -94,12 +94,12 @@ TEST_CASE("OrientationAnalysis::ExportGBCDGMTFileFilter", "[OrientationAnalysis] } // Compare the Output Pole Figure - auto importDataFilter = filterList->createFilter(k_ImportTextDataFilterHandle); + auto importDataFilter = filterList->createFilter(k_ReadCSVFileFilterHandle); REQUIRE(nullptr != importDataFilter); { Arguments args; - TextImporterData data; + ReadCSVData data; data.inputFilePath = fmt::format("{}/6_6_Small_IN100_GBCD/small_in100_sigma_3_1.dat", unit_test::k_TestFilesDir); data.customHeaders = {k_ExemplarGMT1, k_ExemplarGMT2, k_ExemplarGMT3}; data.dataTypes = {DataType::float32, DataType::float32, DataType::float32}; @@ -108,7 +108,7 @@ TEST_CASE("OrientationAnalysis::ExportGBCDGMTFileFilter", "[OrientationAnalysis] data.delimiters = {' '}; data.tupleDims = {3751}; - args.insertOrAssign(k_TextImporterData_Key, std::make_any(data)); + args.insertOrAssign(k_ReadCSVData_Key, std::make_any(data)); args.insertOrAssign(k_UseExistingGroup_Key, std::make_any(false)); 
args.insertOrAssign(k_CreatedDataGroup_Key, std::make_any(gmtGroupPath)); args.insertOrAssign(k_SelectedDataGroup_Key, std::make_any(faceEnsemblePath)); @@ -119,7 +119,7 @@ TEST_CASE("OrientationAnalysis::ExportGBCDGMTFileFilter", "[OrientationAnalysis] { Arguments args; - TextImporterData data; + ReadCSVData data; data.inputFilePath = outputFile.string(); data.customHeaders = {k_GMT1, k_GMT2, k_GMT3}; data.dataTypes = {DataType::float32, DataType::float32, DataType::float32}; @@ -128,7 +128,7 @@ TEST_CASE("OrientationAnalysis::ExportGBCDGMTFileFilter", "[OrientationAnalysis] data.delimiters = {' '}; data.tupleDims = {3751}; - args.insertOrAssign(k_TextImporterData_Key, std::make_any(data)); + args.insertOrAssign(k_ReadCSVData_Key, std::make_any(data)); args.insertOrAssign(k_UseExistingGroup_Key, std::make_any(true)); args.insertOrAssign(k_CreatedDataGroup_Key, std::make_any(gmtGroupPath)); args.insertOrAssign(k_SelectedDataGroup_Key, std::make_any(gmtGroupPath)); @@ -180,12 +180,12 @@ TEST_CASE("OrientationAnalysis::ExportGBCDGMTFileFilter", "[OrientationAnalysis] } // Compare the Output Pole Figure - auto importDataFilter = filterList->createFilter(k_ImportTextDataFilterHandle); + auto importDataFilter = filterList->createFilter(k_ReadCSVFileFilterHandle); REQUIRE(nullptr != importDataFilter); { Arguments args; - TextImporterData data; + ReadCSVData data; data.inputFilePath = fmt::format("{}/6_6_Small_IN100_GBCD/small_in100_sigma_9_1.dat", unit_test::k_TestFilesDir); data.customHeaders = {k_ExemplarGMT1, k_ExemplarGMT2, k_ExemplarGMT3}; data.dataTypes = {DataType::float32, DataType::float32, DataType::float32}; @@ -194,7 +194,7 @@ TEST_CASE("OrientationAnalysis::ExportGBCDGMTFileFilter", "[OrientationAnalysis] data.delimiters = {' '}; data.tupleDims = {3751}; - args.insertOrAssign(k_TextImporterData_Key, std::make_any(data)); + args.insertOrAssign(k_ReadCSVData_Key, std::make_any(data)); args.insertOrAssign(k_UseExistingGroup_Key, std::make_any(false)); 
args.insertOrAssign(k_CreatedDataGroup_Key, std::make_any(gmtGroupPath)); args.insertOrAssign(k_SelectedDataGroup_Key, std::make_any(faceEnsemblePath)); @@ -205,7 +205,7 @@ TEST_CASE("OrientationAnalysis::ExportGBCDGMTFileFilter", "[OrientationAnalysis] { Arguments args; - TextImporterData data; + ReadCSVData data; data.inputFilePath = outputFile.string(); data.customHeaders = {k_GMT1, k_GMT2, k_GMT3}; data.dataTypes = {DataType::float32, DataType::float32, DataType::float32}; @@ -214,7 +214,7 @@ TEST_CASE("OrientationAnalysis::ExportGBCDGMTFileFilter", "[OrientationAnalysis] data.delimiters = {' '}; data.tupleDims = {3751}; - args.insertOrAssign(k_TextImporterData_Key, std::make_any(data)); + args.insertOrAssign(k_ReadCSVData_Key, std::make_any(data)); args.insertOrAssign(k_UseExistingGroup_Key, std::make_any(true)); args.insertOrAssign(k_CreatedDataGroup_Key, std::make_any(gmtGroupPath)); args.insertOrAssign(k_SelectedDataGroup_Key, std::make_any(gmtGroupPath)); @@ -266,12 +266,12 @@ TEST_CASE("OrientationAnalysis::ExportGBCDGMTFileFilter", "[OrientationAnalysis] } // Compare the Output Pole Figure - auto importDataFilter = filterList->createFilter(k_ImportTextDataFilterHandle); + auto importDataFilter = filterList->createFilter(k_ReadCSVFileFilterHandle); REQUIRE(nullptr != importDataFilter); { Arguments args; - TextImporterData data; + ReadCSVData data; data.inputFilePath = fmt::format("{}/6_6_Small_IN100_GBCD/small_in100_sigma_11_1.dat", unit_test::k_TestFilesDir); data.customHeaders = {k_ExemplarGMT1, k_ExemplarGMT2, k_ExemplarGMT3}; data.dataTypes = {DataType::float32, DataType::float32, DataType::float32}; @@ -280,7 +280,7 @@ TEST_CASE("OrientationAnalysis::ExportGBCDGMTFileFilter", "[OrientationAnalysis] data.delimiters = {' '}; data.tupleDims = {3751}; - args.insertOrAssign(k_TextImporterData_Key, std::make_any(data)); + args.insertOrAssign(k_ReadCSVData_Key, std::make_any(data)); args.insertOrAssign(k_UseExistingGroup_Key, std::make_any(false)); 
args.insertOrAssign(k_CreatedDataGroup_Key, std::make_any(gmtGroupPath)); args.insertOrAssign(k_SelectedDataGroup_Key, std::make_any(faceEnsemblePath)); @@ -291,7 +291,7 @@ TEST_CASE("OrientationAnalysis::ExportGBCDGMTFileFilter", "[OrientationAnalysis] { Arguments args; - TextImporterData data; + ReadCSVData data; data.inputFilePath = outputFile.string(); data.customHeaders = {k_GMT1, k_GMT2, k_GMT3}; data.dataTypes = {DataType::float32, DataType::float32, DataType::float32}; @@ -300,7 +300,7 @@ TEST_CASE("OrientationAnalysis::ExportGBCDGMTFileFilter", "[OrientationAnalysis] data.delimiters = {' '}; data.tupleDims = {3751}; - args.insertOrAssign(k_TextImporterData_Key, std::make_any(data)); + args.insertOrAssign(k_ReadCSVData_Key, std::make_any(data)); args.insertOrAssign(k_UseExistingGroup_Key, std::make_any(true)); args.insertOrAssign(k_CreatedDataGroup_Key, std::make_any(gmtGroupPath)); args.insertOrAssign(k_SelectedDataGroup_Key, std::make_any(gmtGroupPath)); diff --git a/src/Plugins/OrientationAnalysis/test/ExportGBCDTriangleDataTest.cpp b/src/Plugins/OrientationAnalysis/test/ExportGBCDTriangleDataTest.cpp index c286e652d8..a5c274507b 100644 --- a/src/Plugins/OrientationAnalysis/test/ExportGBCDTriangleDataTest.cpp +++ b/src/Plugins/OrientationAnalysis/test/ExportGBCDTriangleDataTest.cpp @@ -6,7 +6,7 @@ #include "complex/Parameters/DynamicTableParameter.hpp" #include "complex/Parameters/FileSystemPathParameter.hpp" -#include "complex/Parameters/util/TextImporterData.hpp" +#include "complex/Parameters/util/ReadCSVData.hpp" #include "complex/UnitTest/UnitTestCommon.hpp" #include @@ -17,7 +17,7 @@ using namespace complex::UnitTest; namespace { -inline constexpr StringLiteral k_TextImporterData_Key = "text_importer_data"; +inline constexpr StringLiteral k_ReadCSVData_Key = "read_csv_data"; inline constexpr StringLiteral k_TupleDims_Key = "tuple_dimensions"; inline constexpr StringLiteral k_UseExistingGroup_Key = "use_existing_group"; inline constexpr StringLiteral 
k_SelectedDataGroup_Key = "selected_data_group"; @@ -84,13 +84,13 @@ TEST_CASE("OrientationAnalysis::ExportGBCDTriangleDataFilter: Valid filter execu } // Compare the Output triangles files - auto importDataFilter = filterList->createFilter(k_ImportTextDataFilterHandle); + auto importDataFilter = filterList->createFilter(k_ReadCSVFileFilterHandle); REQUIRE(nullptr != importDataFilter); // read in exemplar { Arguments args; - TextImporterData data; + ReadCSVData data; data.inputFilePath = fmt::format("{}/6_6_Small_IN100_GBCD/6_6_Small_IN100_GBCD_Triangles.ph", unit_test::k_TestFilesDir); data.customHeaders = {k_Phi1Right, k_PhiRight, k_Phi2Right, k_Phi1Left, k_PhiLeft, k_Phi2Left, k_TriangleNormal0, k_TriangleNormal1, k_TriangleNormal2, k_SurfaceArea}; data.dataTypes = {DataType::float32, DataType::float32, DataType::float32, DataType::float32, DataType::float32, @@ -100,7 +100,7 @@ TEST_CASE("OrientationAnalysis::ExportGBCDTriangleDataFilter: Valid filter execu data.delimiters = {' '}; data.tupleDims = {636474}; - args.insertOrAssign(k_TextImporterData_Key, std::make_any(data)); + args.insertOrAssign(k_ReadCSVData_Key, std::make_any(data)); args.insertOrAssign(k_UseExistingGroup_Key, std::make_any(false)); args.insertOrAssign(k_CreatedDataGroup_Key, std::make_any(exemplarResultsGroupPath)); args.insertOrAssign(k_SelectedDataGroup_Key, std::make_any(DataPath{})); @@ -112,7 +112,7 @@ TEST_CASE("OrientationAnalysis::ExportGBCDTriangleDataFilter: Valid filter execu // read in generated { Arguments args; - TextImporterData data; + ReadCSVData data; data.inputFilePath = outputFile.string(); data.customHeaders = {k_Phi1Right, k_PhiRight, k_Phi2Right, k_Phi1Left, k_PhiLeft, k_Phi2Left, k_TriangleNormal0, k_TriangleNormal1, k_TriangleNormal2, k_SurfaceArea}; data.dataTypes = {DataType::float32, DataType::float32, DataType::float32, DataType::float32, DataType::float32, @@ -122,7 +122,7 @@ TEST_CASE("OrientationAnalysis::ExportGBCDTriangleDataFilter: Valid filter execu 
data.delimiters = {' '}; data.tupleDims = {636474}; - args.insertOrAssign(k_TextImporterData_Key, std::make_any(data)); + args.insertOrAssign(k_ReadCSVData_Key, std::make_any(data)); args.insertOrAssign(k_UseExistingGroup_Key, std::make_any(false)); args.insertOrAssign(k_CreatedDataGroup_Key, std::make_any(generatedResultsGroupPath)); args.insertOrAssign(k_SelectedDataGroup_Key, std::make_any(generatedResultsGroupPath)); diff --git a/src/Plugins/OrientationAnalysis/test/OrientationAnalysisTestUtils.hpp b/src/Plugins/OrientationAnalysis/test/OrientationAnalysisTestUtils.hpp index b2559e6bd6..ee63a440cd 100644 --- a/src/Plugins/OrientationAnalysis/test/OrientationAnalysisTestUtils.hpp +++ b/src/Plugins/OrientationAnalysis/test/OrientationAnalysisTestUtils.hpp @@ -60,8 +60,8 @@ const FilterHandle k_RemoveMinimumSizeFeaturesFilterHandle(k_RemoveMinimumSizeFe // Make sure we can instantiate the CalculateFeatureSizesFilter const Uuid k_CalculateFeatureSizesFilterId = *Uuid::FromString("c666ee17-ca58-4969-80d0-819986c72485"); const FilterHandle k_CalculateFeatureSizesFilterHandle(k_CalculateFeatureSizesFilterId, k_ComplexCorePluginId); -const Uuid k_ImportTextDataFilterId = *Uuid::FromString("373be1f8-31cf-49f6-aa5d-e356f4f3f261"); -const FilterHandle k_ImportTextDataFilterHandle(k_ImportTextDataFilterId, k_ComplexCorePluginId); +const Uuid k_ReadCSVFileFilterId = *Uuid::FromString("373be1f8-31cf-49f6-aa5d-e356f4f3f261"); +const FilterHandle k_ReadCSVFileFilterHandle(k_ReadCSVFileFilterId, k_ComplexCorePluginId); const Uuid k_OrientationAnalysisPluginId = *Uuid::FromString("c09cf01b-014e-5adb-84eb-ea76fc79eeb1"); // Make sure we can instantiate the Convert Orientations diff --git a/src/complex/Parameters/ImportTextDataParameter.cpp b/src/complex/Parameters/ReadCSVFileParameter.cpp similarity index 70% rename from src/complex/Parameters/ImportTextDataParameter.cpp rename to src/complex/Parameters/ReadCSVFileParameter.cpp index 01b0512efd..2373226b7e 100644 --- 
a/src/complex/Parameters/ImportTextDataParameter.cpp +++ b/src/complex/Parameters/ReadCSVFileParameter.cpp @@ -28,59 +28,59 @@ * * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ -#include "ImportTextDataParameter.hpp" +#include "ReadCSVFileParameter.hpp" namespace complex { // ----------------------------------------------------------------------------- -ImportTextDataParameter::ImportTextDataParameter(const std::string& name, const std::string& humanName, const std::string& helpText, const ValueType& defaultValue) +ReadCSVFileParameter::ReadCSVFileParameter(const std::string& name, const std::string& humanName, const std::string& helpText, const ValueType& defaultValue) : ValueParameter(name, humanName, helpText) , m_DefaultValue(defaultValue) { } // ----------------------------------------------------------------------------- -Uuid ImportTextDataParameter::uuid() const +Uuid ReadCSVFileParameter::uuid() const { - return ParameterTraits::uuid; + return ParameterTraits::uuid; } // ----------------------------------------------------------------------------- -IParameter::AcceptedTypes ImportTextDataParameter::acceptedTypes() const +IParameter::AcceptedTypes ReadCSVFileParameter::acceptedTypes() const { return {typeid(ValueType)}; } // ----------------------------------------------------------------------------- -nlohmann::json ImportTextDataParameter::toJson(const std::any& value) const +nlohmann::json ReadCSVFileParameter::toJson(const std::any& value) const { - const auto& TextImporterData = GetAnyRef(value); - nlohmann::json json = TextImporterData.writeJson(); + const auto& ReadCSVData = GetAnyRef(value); + nlohmann::json json = ReadCSVData.writeJson(); return json; } // ----------------------------------------------------------------------------- -Result ImportTextDataParameter::fromJson(const nlohmann::json& json) const +Result ReadCSVFileParameter::fromJson(const nlohmann::json& json) const { - return 
{ConvertResultTo(TextImporterData::ReadJson(json))}; + return {ConvertResultTo(ReadCSVData::ReadJson(json))}; } // ----------------------------------------------------------------------------- -IParameter::UniquePointer ImportTextDataParameter::clone() const +IParameter::UniquePointer ReadCSVFileParameter::clone() const { - return std::make_unique(name(), humanName(), helpText(), m_DefaultValue); + return std::make_unique(name(), humanName(), helpText(), m_DefaultValue); } // ----------------------------------------------------------------------------- -std::any ImportTextDataParameter::defaultValue() const +std::any ReadCSVFileParameter::defaultValue() const { return m_DefaultValue; } // ----------------------------------------------------------------------------- -Result<> ImportTextDataParameter::validate(const std::any& value) const +Result<> ReadCSVFileParameter::validate(const std::any& value) const { - [[maybe_unused]] auto data = std::any_cast(value); + [[maybe_unused]] auto data = std::any_cast(value); return {}; } } // namespace complex diff --git a/src/complex/Parameters/ImportTextDataParameter.hpp b/src/complex/Parameters/ReadCSVFileParameter.hpp similarity index 79% rename from src/complex/Parameters/ImportTextDataParameter.hpp rename to src/complex/Parameters/ReadCSVFileParameter.hpp index 92f239a951..6c98ece45d 100644 --- a/src/complex/Parameters/ImportTextDataParameter.hpp +++ b/src/complex/Parameters/ReadCSVFileParameter.hpp @@ -32,25 +32,25 @@ #include "complex/Filter/ParameterTraits.hpp" #include "complex/Filter/ValueParameter.hpp" -#include "complex/Parameters/util/TextImporterData.hpp" +#include "complex/Parameters/util/ReadCSVData.hpp" #include "complex/complex_export.hpp" namespace complex { -class COMPLEX_EXPORT ImportTextDataParameter : public ValueParameter +class COMPLEX_EXPORT ReadCSVFileParameter : public ValueParameter { public: - using ValueType = TextImporterData; + using ValueType = ReadCSVData; - ImportTextDataParameter() = delete; 
- ImportTextDataParameter(const std::string& name, const std::string& humanName, const std::string& helpText, const ValueType& defaultValue); - ~ImportTextDataParameter() override = default; + ReadCSVFileParameter() = delete; + ReadCSVFileParameter(const std::string& name, const std::string& humanName, const std::string& helpText, const ValueType& defaultValue); + ~ReadCSVFileParameter() override = default; - ImportTextDataParameter(const ImportTextDataParameter&) = delete; - ImportTextDataParameter(ImportTextDataParameter&&) noexcept = delete; + ReadCSVFileParameter(const ReadCSVFileParameter&) = delete; + ReadCSVFileParameter(ReadCSVFileParameter&&) noexcept = delete; - ImportTextDataParameter& operator=(const ImportTextDataParameter&) = delete; - ImportTextDataParameter& operator=(ImportTextDataParameter&&) noexcept = delete; + ReadCSVFileParameter& operator=(const ReadCSVFileParameter&) = delete; + ReadCSVFileParameter& operator=(ReadCSVFileParameter&&) noexcept = delete; /** * @brief Returns the parameter's uuid. 
@@ -103,4 +103,4 @@ class COMPLEX_EXPORT ImportTextDataParameter : public ValueParameter }; } // namespace complex -COMPLEX_DEF_PARAMETER_TRAITS(complex::ImportTextDataParameter, "4f6d6a33-48da-427a-8b17-61e07d1d5b45"); +COMPLEX_DEF_PARAMETER_TRAITS(complex::ReadCSVFileParameter, "4f6d6a33-48da-427a-8b17-61e07d1d5b45"); diff --git a/src/complex/Parameters/util/TextImporterData.cpp b/src/complex/Parameters/util/ReadCSVData.cpp similarity index 60% rename from src/complex/Parameters/util/TextImporterData.cpp rename to src/complex/Parameters/util/ReadCSVData.cpp index d79b4bba37..910d5160bc 100644 --- a/src/complex/Parameters/util/TextImporterData.cpp +++ b/src/complex/Parameters/util/ReadCSVData.cpp @@ -28,7 +28,7 @@ * * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ -#include "TextImporterData.hpp" +#include "ReadCSVData.hpp" #include @@ -49,7 +49,7 @@ const std::string k_ConsecutiveDelimitersKey = "Consecutive Delimiters"; } // namespace // ----------------------------------------------------------------------------- -nlohmann::json TextImporterData::writeJson() const +nlohmann::json ReadCSVData::writeJson() const { nlohmann::json json; @@ -92,13 +92,13 @@ nlohmann::json TextImporterData::writeJson() const } // ----------------------------------------------------------------------------- -Result TextImporterData::ReadJson(const nlohmann::json& json) +Result ReadCSVData::ReadJson(const nlohmann::json& json) { - TextImporterData data; + ReadCSVData data; if(!json.contains(k_CustomHeadersKey)) { - return MakeErrorResult(-100, fmt::format("TextImporterData: Cannot find the Data Headers key \"{}\" in the TextImporterData json object.", k_CustomHeadersKey)); + return MakeErrorResult(-100, fmt::format("ReadCSVData: Cannot find the Data Headers key \"{}\" in the ReadCSVData json object.", k_CustomHeadersKey)); } nlohmann::json dHeaders = json[k_CustomHeadersKey]; @@ -107,7 +107,7 @@ Result TextImporterData::ReadJson(const nlohmann::json& 
json) auto header = dHeaders[i]; if(!header.is_string()) { - return MakeErrorResult(-101, fmt::format("TextImporterData: Custom header at index {} is of type {} and is not a string.", std::to_string(i), header.type_name())); + return MakeErrorResult(-101, fmt::format("ReadCSVData: Custom header at index {} is of type {} and is not a string.", std::to_string(i), header.type_name())); } data.customHeaders.push_back(header); @@ -115,7 +115,7 @@ Result TextImporterData::ReadJson(const nlohmann::json& json) if(!json.contains(k_DataTypesKey)) { - return MakeErrorResult(-102, fmt::format("TextImporterData: Cannot find the Data Types key \"{}\" in the TextImporterData json object.", k_DataTypesKey)); + return MakeErrorResult(-102, fmt::format("ReadCSVData: Cannot find the Data Types key \"{}\" in the ReadCSVData json object.", k_DataTypesKey)); } nlohmann::json dTypes = json[k_DataTypesKey]; @@ -124,7 +124,7 @@ Result TextImporterData::ReadJson(const nlohmann::json& json) auto dType = dTypes[i]; if(!dType.is_number_integer()) { - return MakeErrorResult(-103, fmt::format("TextImporterData: Data type at index {} is of type {} and is not an integer.", std::to_string(i), dType.type_name())); + return MakeErrorResult(-103, fmt::format("ReadCSVData: Data type at index {} is of type {} and is not an integer.", std::to_string(i), dType.type_name())); } data.dataTypes.push_back(dType); @@ -132,7 +132,7 @@ Result TextImporterData::ReadJson(const nlohmann::json& json) if(!json.contains(k_TupleDimensionsKey)) { - return MakeErrorResult(-104, fmt::format("TextImporterData: Cannot find the Tuple Dimensions key \"{}\" in the TextImporterData json object.", k_TupleDimensionsKey)); + return MakeErrorResult(-104, fmt::format("ReadCSVData: Cannot find the Tuple Dimensions key \"{}\" in the ReadCSVData json object.", k_TupleDimensionsKey)); } nlohmann::json tDims = json[k_TupleDimensionsKey]; @@ -145,7 +145,7 @@ Result TextImporterData::ReadJson(const nlohmann::json& json) 
if(!json.contains(k_SkippedArrayMaskKey)) { - return MakeErrorResult(-105, fmt::format("TextImporterData: Cannot find the Skipped Arrays key \"{}\" in the TextImporterData json object.", k_DataTypesKey)); + return MakeErrorResult(-105, fmt::format("ReadCSVData: Cannot find the Skipped Arrays key \"{}\" in the ReadCSVData json object.", k_DataTypesKey)); } nlohmann::json dSkippedArrays = json[k_SkippedArrayMaskKey]; @@ -154,8 +154,7 @@ Result TextImporterData::ReadJson(const nlohmann::json& json) auto skippedArrayVal = dSkippedArrays[i]; if(!skippedArrayVal.is_boolean()) { - return MakeErrorResult(-106, - fmt::format("TextImporterData: Skipped array value at index {} is of type {} and is not a boolean.", std::to_string(i), skippedArrayVal.type_name())); + return MakeErrorResult(-106, fmt::format("ReadCSVData: Skipped array value at index {} is of type {} and is not a boolean.", std::to_string(i), skippedArrayVal.type_name())); } data.skippedArrayMask.push_back(skippedArrayVal); @@ -163,47 +162,47 @@ Result TextImporterData::ReadJson(const nlohmann::json& json) if(!json.contains(k_InputFilePathKey)) { - return MakeErrorResult(-107, fmt::format("TextImporterData: Cannot find the 'Input File Path' key \"{}\" in the TextImporterData json object.", k_InputFilePathKey)); + return MakeErrorResult(-107, fmt::format("ReadCSVData: Cannot find the 'Input File Path' key \"{}\" in the ReadCSVData json object.", k_InputFilePathKey)); } else if(!json[k_InputFilePathKey].is_string()) { - return MakeErrorResult(-108, fmt::format("TextImporterData: 'Input File Path' value is of type {} and is not a string.", json[k_InputFilePathKey].type_name())); + return MakeErrorResult(-108, fmt::format("ReadCSVData: 'Input File Path' value is of type {} and is not a string.", json[k_InputFilePathKey].type_name())); } data.inputFilePath = json[k_InputFilePathKey]; if(!json.contains(k_StartImportRowKey)) { - return MakeErrorResult(-109, fmt::format("TextImporterData: Cannot find the 'Begin Index' 
key \"{}\" in the TextImporterData json object.", k_StartImportRowKey)); + return MakeErrorResult(-109, fmt::format("ReadCSVData: Cannot find the 'Begin Index' key \"{}\" in the ReadCSVData json object.", k_StartImportRowKey)); } else if(!json[k_StartImportRowKey].is_number_integer()) { - return MakeErrorResult(-110, fmt::format("TextImporterData: 'Begin Index' value is of type {} and is not an integer.", json[k_StartImportRowKey].type_name())); + return MakeErrorResult(-110, fmt::format("ReadCSVData: 'Begin Index' value is of type {} and is not an integer.", json[k_StartImportRowKey].type_name())); } data.startImportRow = json[k_StartImportRowKey]; if(!json.contains(k_HeaderLineKey)) { - return MakeErrorResult(-113, fmt::format("TextImporterData: Cannot find the 'Header Line' key \"{}\" in the TextImporterData json object.", k_HeaderLineKey)); + return MakeErrorResult(-113, fmt::format("ReadCSVData: Cannot find the 'Header Line' key \"{}\" in the ReadCSVData json object.", k_HeaderLineKey)); } else if(!json[k_HeaderLineKey].is_number_integer()) { - return MakeErrorResult(-114, fmt::format("TextImporterData: 'Header Line' value is of type {} and is not an integer.", json[k_HeaderLineKey].type_name())); + return MakeErrorResult(-114, fmt::format("ReadCSVData: 'Header Line' value is of type {} and is not an integer.", json[k_HeaderLineKey].type_name())); } data.headersLine = json[k_HeaderLineKey]; if(!json.contains(k_HeaderModeKey)) { - return MakeErrorResult(-115, fmt::format("TextImporterData: Cannot find the 'Header Mode' key \"{}\" in the TextImporterData json object.", k_HeaderModeKey)); + return MakeErrorResult(-115, fmt::format("ReadCSVData: Cannot find the 'Header Mode' key \"{}\" in the ReadCSVData json object.", k_HeaderModeKey)); } else if(!json[k_HeaderModeKey].is_number_integer()) { - return MakeErrorResult(-116, fmt::format("TextImporterData: 'Header Mode' value is of type {} and is not an integer.", json[k_HeaderModeKey].type_name())); + return 
MakeErrorResult(-116, fmt::format("ReadCSVData: 'Header Mode' value is of type {} and is not an integer.", json[k_HeaderModeKey].type_name())); } data.headerMode = json[k_HeaderModeKey]; if(!json.contains(k_Delimiters)) { - return MakeErrorResult(-117, fmt::format("TextImporterData: Cannot find the 'Delimiters' key \"{}\" in the TextImporterData json object.", k_Delimiters)); + return MakeErrorResult(-117, fmt::format("ReadCSVData: Cannot find the 'Delimiters' key \"{}\" in the ReadCSVData json object.", k_Delimiters)); } nlohmann::json dDelimiters = json[k_Delimiters]; @@ -212,7 +211,7 @@ Result TextImporterData::ReadJson(const nlohmann::json& json) auto delimiter = dDelimiters[i]; if(!delimiter.is_string() || delimiter.get().size() != 1) { - return MakeErrorResult(-118, fmt::format("TextImporterData: Delimiter at index {} is of type {} and is not a boolean.", std::to_string(i), delimiter.type_name())); + return MakeErrorResult(-118, fmt::format("ReadCSVData: Delimiter at index {} is of type {} and is not a boolean.", std::to_string(i), delimiter.type_name())); } data.delimiters.push_back(delimiter.get()[0]); @@ -220,12 +219,11 @@ Result TextImporterData::ReadJson(const nlohmann::json& json) if(!json.contains(k_ConsecutiveDelimitersKey)) { - return MakeErrorResult(-125, - fmt::format("TextImporterData: Cannot find the 'Consecutive Delimiters' key \"{}\" in the TextImporterData json object.", k_ConsecutiveDelimitersKey)); + return MakeErrorResult(-125, fmt::format("ReadCSVData: Cannot find the 'Consecutive Delimiters' key \"{}\" in the ReadCSVData json object.", k_ConsecutiveDelimitersKey)); } else if(!json[k_ConsecutiveDelimitersKey].is_boolean()) { - return MakeErrorResult(-126, fmt::format("TextImporterData: 'Consecutive Delimiters' value is of type {} and is not a boolean.", json[k_ConsecutiveDelimitersKey].type_name())); + return MakeErrorResult(-126, fmt::format("ReadCSVData: 'Consecutive Delimiters' value is of type {} and is not a boolean.", 
json[k_ConsecutiveDelimitersKey].type_name())); } data.consecutiveDelimiters = json[k_ConsecutiveDelimitersKey]; diff --git a/src/complex/Parameters/util/TextImporterData.hpp b/src/complex/Parameters/util/ReadCSVData.hpp similarity index 95% rename from src/complex/Parameters/util/TextImporterData.hpp rename to src/complex/Parameters/util/ReadCSVData.hpp index e9add2d13c..e3e01bf855 100644 --- a/src/complex/Parameters/util/TextImporterData.hpp +++ b/src/complex/Parameters/util/ReadCSVData.hpp @@ -42,7 +42,7 @@ namespace complex { -struct COMPLEX_EXPORT TextImporterData +struct COMPLEX_EXPORT ReadCSVData { public: enum class HeaderMode @@ -53,7 +53,7 @@ struct COMPLEX_EXPORT TextImporterData // Json Reader and Writer nlohmann::json writeJson() const; - static Result ReadJson(const nlohmann::json& json); + static Result ReadJson(const nlohmann::json& json); std::string inputFilePath; std::vector customHeaders; diff --git a/wrapping/python/docs/generate_sphinx_docs.cpp b/wrapping/python/docs/generate_sphinx_docs.cpp index b87664b9ae..597311c48d 100644 --- a/wrapping/python/docs/generate_sphinx_docs.cpp +++ b/wrapping/python/docs/generate_sphinx_docs.cpp @@ -55,7 +55,7 @@ void GenerateParameterList() ADD_PARAMETER_TRAIT(complex.DataGroupCreationParameter, "bff2d4ac-04a6-5251-b188-4f83f7865074") ADD_PARAMETER_TRAIT(complex.DataPathSelectionParameter, "cd12d081-fbf0-46c4-8f4a-15e2e06e98b8") ADD_PARAMETER_TRAIT(complex.CalculatorParameter, "ba2d4937-dbec-5536-8c5c-c0a406e80f77") - ADD_PARAMETER_TRAIT(complex.ImportTextDataParameter, "4f6d6a33-48da-427a-8b17-61e07d1d5b45") + ADD_PARAMETER_TRAIT(complex.ReadCSVFileParameter, "4f6d6a33-48da-427a-8b17-61e07d1d5b45") ADD_PARAMETER_TRAIT(complex.Int8Parameter, "cae73834-68f8-4235-b010-8bea87d8ff7a") ADD_PARAMETER_TRAIT(complex.UInt8Parameter, "6c3efeff-ce8f-47c0-83d1-262f2b2dd6cc") ADD_PARAMETER_TRAIT(complex.Int16Parameter, "44ae56e8-e6e7-4e4d-8128-dd3dc2c6696e") diff --git a/wrapping/python/docs/source/API.rst 
b/wrapping/python/docs/source/API.rst index 3d90eea6c4..d7dab189a8 100644 --- a/wrapping/python/docs/source/API.rst +++ b/wrapping/python/docs/source/API.rst @@ -383,10 +383,10 @@ General Parameters This parameter represents the :ref:`DataPath` to a valid :ref:`complex.Geometry() ` -.. _ImportTextDataParameter: -.. py:class:: ImportTextDataParameter +.. _ReadCSVFileParameter: +.. py:class:: ReadCSVFileParameter - This parameter is used for the :ref:`complex.ImportTextDataFilter() ` and holds + This parameter is used for the :ref:`complex.ReadCSVFileFilter() ` and holds the information to import a file formatted as table data where each column of data is a single array. @@ -394,13 +394,13 @@ General Parameters + The file optionally can have a line of headers. The user can specify what line the headers are on + The import can start at a user specified line number but will continue to the end of the file. - The primary python object that will hold the information to pass to the filter is the TextImporterData class described below. + The primary python object that will hold the information to pass to the filter is the ReadCSVData class described below. - :ivar ValueType: TextImporterData + :ivar ValueType: ReadCSVData - .. py:class:: ImportTextDataParameter.TextImporterData + .. py:class:: ReadCSVFileParameter.ReadCSVData - The TextImporterData class holds all the necessary information to import a CSV formatted file into DREAM3D-NX. There are + The ReadCSVData class holds all the necessary information to import a CSV formatted file into DREAM3D-NX. There are a number of member variables that need to be set correctly before the filter will execute correctly. @@ -413,35 +413,35 @@ General Parameters :ivar skipped_array_mask: List[bool]. Booleans, one per column, that indicate whether or not to skip importing each created :ref:`DataArray `. :ivar tuple_dims: List[int]. The tuple dimensions for the created :ref:`DataArrays `. :ivar headers_line: Int. 
The line number of the headers. - :ivar header_mode: 'cx.TextImporterData.HeaderMode.'. Can be one of 'cx.TextImporterData.HeaderMode.Line' or 'cx.TextImporterData.HeaderMode.Custom'. + :ivar header_mode: 'cx.ReadCSVData.HeaderMode.'. Can be one of 'cx.ReadCSVData.HeaderMode.Line' or 'cx.ReadCSVData.HeaderMode.Custom'. .. code:: python data_structure = cx.DataStructure() - text_importer_data = cx.TextImporterData() - text_importer_data.input_file_path = "/tmp/test_csv_data.csv" - text_importer_data.start_import_row = 2 - text_importer_data.delimiters = [','] - text_importer_data.custom_headers = [] - text_importer_data.data_types = [cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.int32 ] - text_importer_data.skipped_array_mask = [False,False,False,False,False,False,False ] - text_importer_data.tuple_dims = [37989] + read_csv_data = cx.ReadCSVData() + read_csv_data.input_file_path = "/tmp/test_csv_data.csv" + read_csv_data.start_import_row = 2 + read_csv_data.delimiters = [','] + read_csv_data.custom_headers = [] + read_csv_data.data_types = [cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.int32 ] + read_csv_data.skipped_array_mask = [False,False,False,False,False,False,False ] + read_csv_data.tuple_dims = [37989] - text_importer_data.headers_line = 1 - text_importer_data.header_mode = cx.TextImporterData.HeaderMode.Line + read_csv_data.headers_line = 1 + read_csv_data.header_mode = cx.ReadCSVData.HeaderMode.Line # This will store the imported arrays into a newly generated DataGroup - result = cx.ImportTextDataFilter.execute(data_structure=data_structure, + result = cx.ReadCSVFileFilter.execute(data_structure=data_structure, # This will store the imported arrays into a newly generated DataGroup created_data_group=cx.DataPath(["Imported Data"]), # We are not using this parameter but it still needs a 
value selected_data_group=cx.DataPath(), # Use an existing DataGroup or AttributeMatrix. If an AttributemMatrix is used, the total number of tuples must match use_existing_group=False, - # The TextImporterData object with all member variables set. - text_importer_data=text_importer_data # The TextImporterData object with all member variables set. + # The ReadCSVData object with all member variables set. + read_csv_data=read_csv_data # The ReadCSVData object with all member variables set. ) diff --git a/wrapping/python/examples/import_text.py b/wrapping/python/examples/import_text.py deleted file mode 100644 index de5bd0f9bb..0000000000 --- a/wrapping/python/examples/import_text.py +++ /dev/null @@ -1,37 +0,0 @@ -import complex as cx -import itkimageprocessing as cxitk -import orientationanalysis as cxor - -import numpy as np - -# Create the DataStructure object -data_structure = cx.DataStructure() - -text_importer_data = cx.TextImporterData() -text_importer_data.input_file_path = "wrapping/python/examples/test_csv_data.csv" -text_importer_data.start_import_row = 2 -text_importer_data.delimiters = [','] -text_importer_data.custom_headers = [] -text_importer_data.data_types = [cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.int32 ] -text_importer_data.skipped_array_mask = [False,False,False,False,False,False,False ] -text_importer_data.tuple_dims = [37989] - -text_importer_data.headers_line = 1 -text_importer_data.header_mode = cx.TextImporterData.HeaderMode.Line - -# This will store the imported arrays into a newly generated DataGroup -result = cx.ImportTextDataFilter.execute(data_structure=data_structure, - # This will store the imported arrays into a newly generated DataGroup - created_data_group=cx.DataPath(["Imported Data"]), - # We are not using this parameter but it still needs a value - selected_data_group=cx.DataPath(), - # Use an existing DataGroup or AttributeMatrix. 
If an AttributemMatrix is used, the total number of tuples must match - use_existing_group=False, - # The TextImporterData object with all member variables set. - text_importer_data=text_importer_data # The TextImporterData object with all member variables set. - ) -if len(result.errors) != 0: - print('Errors: {}', result.errors) - print('Warnings: {}', result.warnings) -else: - print("No errors running the ImportTextDataFilter filter") diff --git a/wrapping/python/examples/read_csv_file.py b/wrapping/python/examples/read_csv_file.py new file mode 100644 index 0000000000..1ff21c7337 --- /dev/null +++ b/wrapping/python/examples/read_csv_file.py @@ -0,0 +1,37 @@ +import complex as cx +import itkimageprocessing as cxitk +import orientationanalysis as cxor + +import numpy as np + +# Create the DataStructure object +data_structure = cx.DataStructure() + +read_csv_data = cx.ReadCSVData() +read_csv_data.input_file_path = "wrapping/python/examples/test_csv_data.csv" +read_csv_data.start_import_row = 2 +read_csv_data.delimiters = [','] +read_csv_data.custom_headers = [] +read_csv_data.data_types = [cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.float32,cx.DataType.int32 ] +read_csv_data.skipped_array_mask = [False,False,False,False,False,False,False ] +read_csv_data.tuple_dims = [37989] + +read_csv_data.headers_line = 1 +read_csv_data.header_mode = cx.ReadCSVData.HeaderMode.Line + +# This will store the imported arrays into a newly generated DataGroup +result = cx.ReadCSVFileFilter.execute(data_structure=data_structure, + # This will store the imported arrays into a newly generated DataGroup + created_data_group=cx.DataPath(["Imported Data"]), + # We are not using this parameter but it still needs a value + selected_data_group=cx.DataPath(), + # Use an existing DataGroup or AttributeMatrix. 
If an AttributeMatrix is used, the total number of tuples must match + use_existing_group=False, + # The ReadCSVData object with all member variables set. + read_csv_data=read_csv_data # The ReadCSVData object with all member variables set. ) +if len(result.errors) != 0: + print('Errors: {}', result.errors) + print('Warnings: {}', result.warnings) +else: + print("No errors running the ReadCSVFileFilter filter")