
PR Requested Changes
mmarineBlueQuartz committed Nov 6, 2023
1 parent 2a0adbb commit 5377309
Showing 16 changed files with 123 additions and 36 deletions.
@@ -134,11 +134,11 @@ class ParallelWrapper
   ParallelWrapper& operator=(ParallelWrapper&&) = delete; // Move Assignment Not Implemented

   template <typename T>
-  static void Run(T impl, size_t totalPoints)
+  static void Run(T impl, size_t totalPoints, typename IParallelAlgorithm::AlgorithmArrays algArrays)
   {
     ParallelDataAlgorithm dataAlg;
     dataAlg.setRange(0, totalPoints);
-    dataAlg.setParallelizationEnabled(false);
+    dataAlg.requireArraysInMemory(algArrays);
     dataAlg.execute(impl);
   }

@@ -186,19 +186,23 @@ Result<> ConvertColorToGrayScale::operator()()
   size_t comp = inputColorData.getNumberOfComponents();
   size_t totalPoints = inputColorData.getNumberOfTuples();

+  typename IParallelAlgorithm::AlgorithmArrays algArrays;
+  algArrays.push_back(&inputColorData);
+  algArrays.push_back(&outputGrayData);
+
   switch(convType)
   {
   case ConversionType::Luminosity:
-    ParallelWrapper::Run<LuminosityImpl>(LuminosityImpl(inputColorData, outputGrayData, m_InputValues->ColorWeights, comp), totalPoints);
+    ParallelWrapper::Run<LuminosityImpl>(LuminosityImpl(inputColorData, outputGrayData, m_InputValues->ColorWeights, comp), totalPoints, algArrays);
     break;
   case ConversionType::Average:
-    ParallelWrapper::Run<LuminosityImpl>(LuminosityImpl(inputColorData, outputGrayData, {0.3333f, 0.3333f, 0.3333f}, comp), totalPoints);
+    ParallelWrapper::Run<LuminosityImpl>(LuminosityImpl(inputColorData, outputGrayData, {0.3333f, 0.3333f, 0.3333f}, comp), totalPoints, algArrays);
     break;
   case ConversionType::Lightness:
-    ParallelWrapper::Run<LightnessImpl>(LightnessImpl(inputColorData, outputGrayData, comp), totalPoints);
+    ParallelWrapper::Run<LightnessImpl>(LightnessImpl(inputColorData, outputGrayData, comp), totalPoints, algArrays);
     break;
   case ConversionType::SingleChannel:
-    ParallelWrapper::Run<SingleChannelImpl>(SingleChannelImpl(inputColorData, outputGrayData, comp, m_InputValues->ColorChannel), totalPoints);
+    ParallelWrapper::Run<SingleChannelImpl>(SingleChannelImpl(inputColorData, outputGrayData, comp, m_InputValues->ColorChannel), totalPoints, algArrays);
     break;
   }
 }
@@ -72,9 +72,13 @@ Result<> ConvertData(DataStructure& dataStructure, const ConvertDataInputValues*
   DataArray<T>& inputArray = dataStructure.getDataRefAs<DataArray<T>>(inputValues->SelectedArrayPath);
   AbstractDataStore<T>& inputStore = inputArray.getDataStoreRef();

+  typename IParallelAlgorithm::AlgorithmArrays algArrays;
+  algArrays.push_back(&inputArray);
+  algArrays.push_back(dataStructure.getDataAs<IDataArray>(inputValues->OutputArrayName));
+
   ParallelDataAlgorithm dataAlg;
   dataAlg.setRange(0, inputArray.size());
-  dataAlg.setParallelizationEnabled(false);
+  dataAlg.requireArraysInMemory(algArrays);

   switch(inputValues->ScalarType)
   {
@@ -404,12 +404,8 @@ IFilter::PreflightResult FindArrayStatisticsFilter::preflightImpl(const DataStru
 Result<> FindArrayStatisticsFilter::executeImpl(DataStructure& dataStructure, const Arguments& filterArgs, const PipelineFilter* pipelineNode, const MessageHandler& messageHandler,
                                                 const std::atomic_bool& shouldCancel) const
 {
-  std::cout << "Values: 0" << std::endl;
-
   FindArrayStatisticsInputValues inputValues;
-
-  std::cout << "Values: 1" << std::endl;

   inputValues.FindHistogram = filterArgs.value<bool>(k_FindHistogram_Key);
   inputValues.MinRange = filterArgs.value<float64>(k_MinRange_Key);
   inputValues.MaxRange = filterArgs.value<float64>(k_MaxRange_Key);
@@ -447,8 +443,6 @@ Result<> FindArrayStatisticsFilter::executeImpl(DataStructure& dataStructure, co
   inputValues.StandardizedArrayName = inputValues.SelectedArrayPath.getParent().createChildPath(filterArgs.value<std::string>(k_StandardizedArrayName_Key));
   inputValues.NumUniqueValuesName = inputValues.DestinationAttributeMatrix.createChildPath(filterArgs.value<std::string>(k_NumUniqueValues_Key));

-  std::cout << "Values: 2" << std::endl;
-
   return FindArrayStatistics(dataStructure, messageHandler, shouldCancel, &inputValues)();
 }
 } // namespace complex
@@ -43,23 +43,54 @@ std::vector<size_t> createDimensionVector(const std::string& cDimsStr)
 }

 template <typename T>
-Result<> fillDataArray(DataStructure& dataStructure, const DataPath& dataArrayPath, const complex::HDF5::DatasetReader& datasetReader)
+Result<> fillDataStore(DataArray<T>& dataArray, const DataPath& dataArrayPath, const complex::HDF5::DatasetReader& datasetReader)
 {
-  auto& dataArray = dataStructure.getDataRefAs<DataArray<T>>(dataArrayPath);
-  auto& absDataStore = dataArray.getDataStoreRef();
+  DataStore<T>& dataStore = dataArray.getIDataStoreRefAs<DataStore<T>>();
[CI annotations on this line (line 48 of src/Plugins/ComplexCore/src/ComplexCore/Filters/ImportHDF5Dataset.cpp): the GitHub Actions macos-11 and ubuntu-20.04 (clang++-10) builds fail with "use 'template' keyword to treat 'getIDataStoreRefAs' as a dependent template name"; the ubuntu-20.04 (g++-9, g++-10) builds fail with "expected primary-expression before '>' token" and "expected primary-expression before ')' token".]
+  if(!datasetReader.readIntoSpan<T>(dataStore.createSpan()))
+  {
+    return {MakeErrorResult(-21002, fmt::format("Error reading dataset '{}' with '{}' total elements into data store for data array '{}' with '{}' total elements ('{}' tuples and '{}' components)",
+                                                dataArrayPath.getTargetName(), datasetReader.getNumElements(), dataArrayPath.toString(), dataArray.getSize(), dataArray.getNumberOfTuples(),
+                                                dataArray.getNumberOfComponents()))};
+  }
+
+  return {};
+}
+
+template <typename T>
+Result<> fillOocDataStore(DataArray<T>& dataArray, const DataPath& dataArrayPath, const complex::HDF5::DatasetReader& datasetReader)
+{
+  if(Memory::GetTotalMemory() <= dataArray.getSize() * sizeof(T))
+  {
+    return MakeErrorResult(-21004, fmt::format("Error reading dataset '{}' with '{}' total elements. Not enough memory to import data.", dataArray.getName(), datasetReader.getNumElements()));
+  }
+
+  auto& absDataStore = dataArray.getDataStoreRef();
   std::vector<T> data(absDataStore.getSize());
   nonstd::span<T> span{data.data(), data.size()};
   if(!datasetReader.readIntoSpan<T>(span))
   {
-    return {MakeErrorResult(-21002, fmt::format("Error reading dataset '{}' with '{}' total elements into data store for data array '{}' with '{}' total elements ('{}' tuples and '{}' components)",
+    return {MakeErrorResult(-21003, fmt::format("Error reading dataset '{}' with '{}' total elements into data store for data array '{}' with '{}' total elements ('{}' tuples and '{}' components)",
                                                 dataArrayPath.getTargetName(), datasetReader.getNumElements(), dataArrayPath.toString(), dataArray.getSize(), dataArray.getNumberOfTuples(),
                                                 dataArray.getNumberOfComponents()))};
   }
   std::copy(data.begin(), data.end(), absDataStore.begin());

   return {};
 }
+
+template <typename T>
+Result<> fillDataArray(DataStructure& dataStructure, const DataPath& dataArrayPath, const complex::HDF5::DatasetReader& datasetReader)
+{
+  auto& dataArray = dataStructure.getDataRefAs<DataArray<T>>(dataArrayPath);
+  if(dataArray.getDataFormat().empty())
+  {
+    return fillDataStore(dataArray, dataArrayPath, datasetReader);
+  }
+  else
+  {
+    return fillOocDataStore(dataArray, dataArrayPath, datasetReader);
+  }
+}
 } // namespace

 namespace complex
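The build failures flagged in the annotation above come from calling the member template getIDataStoreRefAs on an object whose type depends on the template parameter T; GCC and Clang require the template disambiguator in that position. A minimal sketch of the fix those compilers are asking for (not part of this commit):

template <typename T>
Result<> fillDataStore(DataArray<T>& dataArray, const DataPath& dataArrayPath, const complex::HDF5::DatasetReader& datasetReader)
{
  // 'dataArray' has the dependent type DataArray<T>, so the call to the member
  // template needs the 'template' keyword for the '<' to parse as a template
  // argument list rather than a less-than operator.
  DataStore<T>& dataStore = dataArray.template getIDataStoreRefAs<DataStore<T>>();
  if(!datasetReader.readIntoSpan<T>(dataStore.createSpan()))
  {
    // error handling as in the hunk above
  }
  return {};
}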
@@ -208,8 +208,16 @@ Result<> FindKernelAvgMisorientations::operator()()
   // set up threadsafe messenger
   m_TotalElements = udims[2] * udims[1] * udims[0];

+  typename IParallelAlgorithm::AlgorithmArrays algArrays;
+  algArrays.push_back(m_DataStructure.getDataAs<IDataArray>(m_InputValues->CellPhasesArrayPath));
+  algArrays.push_back(m_DataStructure.getDataAs<IDataArray>(m_InputValues->CrystalStructuresArrayPath));
+  algArrays.push_back(m_DataStructure.getDataAs<IDataArray>(m_InputValues->FeatureIdsArrayPath));
+  algArrays.push_back(m_DataStructure.getDataAs<IDataArray>(m_InputValues->KernelAverageMisorientationsArrayName));
+  algArrays.push_back(m_DataStructure.getDataAs<IDataArray>(m_InputValues->QuatsArrayPath));
+
   ParallelData3DAlgorithm parallelAlgorithm;
   parallelAlgorithm.setRange(Range3D(0, udims[0], 0, udims[1], 0, udims[2]));
+  parallelAlgorithm.requireArraysInMemory(algArrays);
   parallelAlgorithm.execute(FindKernelAvgMisorientationsImpl(this, m_DataStructure, m_InputValues, m_ShouldCancel));

   return {};
@@ -183,9 +183,17 @@ Result<> GenerateFaceIPFColoring::operator()()
   auto& faceIpfColors = m_DataStructure.getDataRefAs<UInt8Array>(faceIpfColorsArrayPath);
   int64 numTriangles = faceLabels.getNumberOfTuples();

+  typename IParallelAlgorithm::AlgorithmArrays algArrays;
+  algArrays.push_back(&faceLabels);
+  algArrays.push_back(&faceNormals);
+  algArrays.push_back(&eulerAngles);
+  algArrays.push_back(&phases);
+  algArrays.push_back(&crystalStructures);
+  algArrays.push_back(&faceIpfColors);
+
   ParallelDataAlgorithm parallelTask;
   parallelTask.setRange(0, numTriangles);
-  parallelTask.setParallelizationEnabled(false);
+  parallelTask.requireArraysInMemory(algArrays);
   parallelTask.execute(CalculateFaceIPFColorsImpl(faceLabels, phases, faceNormals, eulerAngles, crystalStructures, faceIpfColors));

   return {};
@@ -324,9 +324,14 @@ Result<> GenerateGBCDPoleFigure::operator()()

   m_MessageHandler({IFilter::Message::Type::Info, fmt::format("Generating Intensity Plot for phase {}", m_InputValues->PhaseOfInterest)});

+  typename IParallelAlgorithm::AlgorithmArrays algArrays;
+  algArrays.push_back(&poleFigure);
+  algArrays.push_back(&gbcd);
+
   ParallelData2DAlgorithm dataAlg;
   dataAlg.setRange(0, xPoints, 0, yPoints);
-  dataAlg.setParallelizationEnabled(false);
+  dataAlg.requireArraysInMemory(algArrays);

   dataAlg.execute(
       GenerateGBCDPoleFigureImpl(poleFigure, {xPoints, yPoints}, orientOps, gbcdDeltas, gbcdLimits, gbcdSizes, gbcd, m_InputValues->PhaseOfInterest, m_InputValues->MisorientationRotation));

@@ -159,15 +159,24 @@ Result<> GenerateIPFColors::operator()()

   MatrixMath::Normalize3x1(normRefDir[0], normRefDir[1], normRefDir[2]);

+  typename IParallelAlgorithm::AlgorithmArrays algArrays;
+  algArrays.push_back(&eulers);
+  algArrays.push_back(&phases);
+  algArrays.push_back(&crystalStructures);
+  algArrays.push_back(&ipfColors);
+
   complex::IDataArray* goodVoxelsArray = nullptr;
   if(m_InputValues->useGoodVoxels)
   {
     goodVoxelsArray = m_DataStructure.getDataAs<IDataArray>(m_InputValues->goodVoxelsArrayPath);
+    algArrays.push_back(goodVoxelsArray);
   }

   // Allow data-based parallelization
   ParallelDataAlgorithm dataAlg;
   dataAlg.setRange(0, totalPoints);
-  dataAlg.setParallelizationEnabled(false);
+  dataAlg.requireArraysInMemory(algArrays);

   dataAlg.execute(GenerateIPFColorsImpl(this, normRefDir, eulers, phases, crystalStructures, numPhases, goodVoxelsArray, ipfColors));

   if(m_PhaseWarningCount > 0)
@@ -264,9 +264,21 @@ Result<> GenerateFZQuaternions::executeImpl(DataStructure& dataStructure, const
   std::atomic_int32_t warningCount = 0;
   int32_t numPhases = static_cast<int32_t>(xtalArray.getNumberOfTuples());

+  typename IParallelAlgorithm::AlgorithmArrays algArrays;
+  algArrays.push_back(&phaseArray);
+  algArrays.push_back(&quatArray);
+  algArrays.push_back(&xtalArray);
+  algArrays.push_back(&fzQuatArray);
+
+  if(pUseGoodVoxelsValue)
+  {
+    algArrays.push_back(maskArray);
+  }
+
   // Parallel algorithm to find duplicate nodes
   ParallelDataAlgorithm dataAlg;
   dataAlg.setRange(0ULL, static_cast<size_t>(quatArray.getNumberOfTuples()));
+  dataAlg.requireArraysInMemory(algArrays);

   if(pUseGoodVoxelsValue)
   {
1 change: 1 addition & 0 deletions src/Plugins/OrientationAnalysis/test/CMakeLists.txt
@@ -12,6 +12,7 @@ set(${PLUGIN_NAME}UnitTest_SRCS
   CAxisSegmentFeaturesTest.cpp
   ConvertHexGridToSquareGridTest.cpp
   ConvertOrientationsTest.cpp
+  ConvertQuaternionTest.cpp
   CreateEnsembleInfoTest.cpp
   EBSDSegmentFeaturesFilterTest.cpp
   EbsdToH5EbsdTest.cpp
8 changes: 4 additions & 4 deletions src/complex/Common/Uuid.hpp
@@ -171,10 +171,10 @@ struct COMPLEX_EXPORT Uuid

   static inline Uuid GenerateV4()
   {
-    static std::random_device rd;
-    static std::mt19937 gen(rd());
-    static std::uniform_int_distribution<> dis(0, 15);
-    static std::uniform_int_distribution<> dis2(8, 11);
+    static thread_local std::random_device rd;
+    static thread_local std::mt19937 gen(rd());
+    static thread_local std::uniform_int_distribution<> dis(0, 15);
+    static thread_local std::uniform_int_distribution<> dis2(8, 11);

     std::stringstream ss;
     int i;
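For context, marking the engines thread_local gives each thread its own generator state, so GenerateV4 can be called from concurrent code without racing on a shared std::mt19937. A small hypothetical usage sketch (assumes Uuid lives in the complex namespace, as the include path suggests):

#include <thread>
#include <vector>

#include "complex/Common/Uuid.hpp"

void generateIdsConcurrently()
{
  std::vector<std::thread> workers;
  for(int t = 0; t < 4; t++)
  {
    // Each worker thread uses its own thread_local engine inside GenerateV4,
    // so no external synchronization is needed.
    workers.emplace_back([]() {
      for(int i = 0; i < 1000; i++)
      {
        [[maybe_unused]] complex::Uuid id = complex::Uuid::GenerateV4();
      }
    });
  }
  for(auto& worker : workers)
  {
    worker.join();
  }
}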
2 changes: 1 addition & 1 deletion src/complex/Core/Application.cpp
@@ -283,7 +283,7 @@ void Application::loadPlugin(const std::filesystem::path& path, bool verbose)
     fmt::print("Loading Plugin: {}\n", path.string());
   }
   auto pluginLoader = std::make_shared<PluginLoader>(path);
-  if(!getFilterList()->addPlugin(pluginLoader))
+  if(getFilterList()->addPlugin(pluginLoader).invalid())
   {
     return;
   }
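loadPlugin still bails out silently when addPlugin fails; a hypothetical caller-side sketch of how the richer Result<> could be surfaced instead (assumes the errors()/code/message accessors that complex::Result<> exposes elsewhere in the codebase):

auto result = getFilterList()->addPlugin(pluginLoader);
if(result.invalid())
{
  for(const auto& error : result.errors())
  {
    // Report each error produced by FilterList::addPlugin, e.g. -444 or -445.
    fmt::print("Failed to add plugin '{}': [{}] {}\n", path.string(), error.code, error.message);
  }
  return;
}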
13 changes: 6 additions & 7 deletions src/complex/Filter/FilterList.cpp
@@ -91,27 +91,26 @@ AbstractPlugin* FilterList::getPlugin(const FilterHandle& handle) const
   return nullptr;
 }

-bool FilterList::addPlugin(const std::shared_ptr<IPluginLoader>& loader)
+Result<> FilterList::addPlugin(const std::shared_ptr<IPluginLoader>& loader)
 {
   if(!loader->isLoaded())
   {
-    return false;
+    return MakeErrorResult(-444, "Plugin was not loaded");
   }
   AbstractPlugin* plugin = loader->getPlugin();
   Uuid pluginUuid = plugin->getId();
   if(m_PluginMap.count(pluginUuid) > 0)
   {
-    // throw std::runtime_error(fmt::format("Attempted to add plugin '{}' with uuid '{}', but plugin '{}' already exists with that uuid", plugin->getName(), pluginUuid.str(),
-    // m_PluginMap[pluginUuid]->getPlugin()->getName()));
-    return false;
+    return MakeErrorResult(-445, fmt::format("Attempted to add plugin '{}' with uuid '{}', but plugin '{}' already exists with that uuid", plugin->getName(), pluginUuid.str(),
+                                             m_PluginMap[pluginUuid]->getPlugin()->getName()));
   }
   auto pluginHandles = plugin->getFilterHandles();
   m_FilterHandles.merge(pluginHandles);
   m_PluginMap[pluginUuid] = loader;
-  return true;
+  return {};
 }

-bool FilterList::addPlugin(const std::string& path)
+Result<> FilterList::addPlugin(const std::string& path)
 {
   return addPlugin(std::dynamic_pointer_cast<IPluginLoader>(std::make_shared<PluginLoader>(path)));
 }
4 changes: 2 additions & 2 deletions src/complex/Filter/FilterList.hpp
@@ -127,15 +127,15 @@ class COMPLEX_EXPORT FilterList
    * @param loader
    * @return bool
    */
-  bool addPlugin(const std::shared_ptr<IPluginLoader>& loader);
+  Result<> addPlugin(const std::shared_ptr<IPluginLoader>& loader);

   /**
    * @brief Attempts to add the plugin at the specified filepath. Returns true
    * if the plugin was added. Returns false otherwise.
    * @param path
    * @return bool
    */
-  bool addPlugin(const std::string& path);
+  Result<> addPlugin(const std::string& path);

   /**
    * @brief Removes the plugin with the given uuid.
12 changes: 11 additions & 1 deletion src/complex/Utilities/IParallelAlgorithm.cpp
@@ -26,10 +26,20 @@ void IParallelAlgorithm::setParallelizationEnabled(bool doParallel)
   m_RunParallel = doParallel;
 }
 // -----------------------------------------------------------------------------
-void IParallelAlgorithm::requireArraysInMemory(const std::vector<const IDataArray*>& arrays)
+void IParallelAlgorithm::requireArraysInMemory(const AlgorithmArrays& arrays)
 {
+  if(arrays.size() == 0)
+  {
+    return;
+  }
+
   for(const auto* array : arrays)
   {
+    if(array == nullptr)
+    {
+      continue;
+    }
+
     if(array->getIDataStoreRef().getDataFormat().empty() == false)
     {
       setParallelizationEnabled(false);
4 changes: 3 additions & 1 deletion src/complex/Utilities/IParallelAlgorithm.hpp
@@ -11,6 +11,8 @@ namespace complex
 class COMPLEX_EXPORT IParallelAlgorithm
 {
 public:
+  using AlgorithmArrays = std::vector<const IDataArray*>;
+
   IParallelAlgorithm(const IParallelAlgorithm&) = default;
   IParallelAlgorithm(IParallelAlgorithm&&) noexcept = default;
   IParallelAlgorithm& operator=(const IParallelAlgorithm&) = default;
@@ -28,7 +30,7 @@ class COMPLEX_EXPORT IParallelAlgorithm
    */
   void setParallelizationEnabled(bool doParallel);

-  void requireArraysInMemory(const std::vector<const IDataArray*>& arrays);
+  void requireArraysInMemory(const AlgorithmArrays& arrays);

 protected:
   IParallelAlgorithm();
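Taken together, the filter changes in this commit all follow the same pattern: collect every array the kernel reads or writes into an AlgorithmArrays list, then let requireArraysInMemory decide whether parallel execution stays enabled, instead of unconditionally calling setParallelizationEnabled(false). A condensed sketch with placeholder array and kernel names:

// Hypothetical filter body; 'inputArray', 'outputArray', 'numTuples', and
// 'SomeFilterImpl' stand in for the real names used by each filter above.
IParallelAlgorithm::AlgorithmArrays algArrays;
algArrays.push_back(&inputArray);
algArrays.push_back(&outputArray);

ParallelDataAlgorithm dataAlg;
dataAlg.setRange(0, numTuples);
// Parallelization is disabled only if one of these arrays is backed by an
// out-of-core data store (i.e. its data format string is non-empty).
dataAlg.requireArraysInMemory(algArrays);
dataAlg.execute(SomeFilterImpl(inputArray, outputArray));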
