From 0e323e952b27706c281cc33c5670d4950ca3f80f Mon Sep 17 00:00:00 2001 From: Ryan Aronson Date: Tue, 7 May 2024 16:44:11 -0700 Subject: [PATCH 01/34] Added implementations of mesh statistic check and field assignment check --- .../src/geos/mesh/doctor/checks/add_fields.py | 143 ++++++++++++++++++ .../src/geos/mesh/doctor/checks/mesh_stats.py | 102 +++++++++++++ .../src/geos/mesh/doctor/checks/vtk_utils.py | 44 ++++++ .../src/geos/mesh/doctor/parsing/__init__.py | 2 + .../mesh/doctor/parsing/add_fields_parsing.py | 36 +++++ .../mesh/doctor/parsing/mesh_stats_parsing.py | 40 +++++ geos-mesh/src/geos/mesh/doctor/register.py | 3 +- 7 files changed, 369 insertions(+), 1 deletion(-) create mode 100644 geos-mesh/src/geos/mesh/doctor/checks/add_fields.py create mode 100644 geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py create mode 100644 geos-mesh/src/geos/mesh/doctor/parsing/add_fields_parsing.py create mode 100644 geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py diff --git a/geos-mesh/src/geos/mesh/doctor/checks/add_fields.py b/geos-mesh/src/geos/mesh/doctor/checks/add_fields.py new file mode 100644 index 0000000..9290bfd --- /dev/null +++ b/geos-mesh/src/geos/mesh/doctor/checks/add_fields.py @@ -0,0 +1,143 @@ +import logging +from dataclasses import dataclass +from math import sqrt +from numpy import empty +from numpy.random import rand + +from vtkmodules.util.numpy_support import ( + numpy_to_vtk, + vtk_to_numpy, ) + +from vtkmodules.vtkCommonCore import ( + vtkDoubleArray, ) + +from . 
import vtk_utils + +@dataclass( frozen=True ) +class Options: + support: str + field_name: str + source: str + out_vtk: vtk_utils.VtkOutput + + +@dataclass( frozen=True ) +class Result: + info: bool + +def __analytic_field(mesh, support, name) -> bool: + if support == 'node': + # example function: distance from mesh center + nn = mesh.GetNumberOfPoints() + coords = vtk_to_numpy(mesh.GetPoints().GetData()) + center = (coords.max(axis=0) + coords.min(axis=0))/2 + data_arr = vtkDoubleArray() + data_np = empty(nn) + + for i in range(nn): + val = 0 + pt = mesh.GetPoint(i) + for j in range(len(pt)): + val += (pt[j] - center[j])*(pt[j]-center[j]) + val = sqrt(val) + data_np[i] = val + + data_arr = numpy_to_vtk(data_np) + data_arr.SetName(name) + mesh.GetPointData().AddArray(data_arr) + return True + + elif support == 'cell': + # example function: random field + ne = mesh.GetNumberOfCells() + data_arr = vtkDoubleArray() + data_np = rand(ne, 1) + + data_arr = numpy_to_vtk(data_np) + data_arr.SetName(name) + mesh.GetCellData().AddArray(data_arr) + return True + else: + logging.error('incorrect support option. 
Options are node, cell') + return False + +def __compatible_meshes(dest_mesh, source_mesh) -> bool: + # for now, just check that meshes have same number of elements and same number of nodes + # and require that each cell has same nodes, each node has same coordinate + dest_ne = dest_mesh.GetNumberOfCells() + dest_nn = dest_mesh.GetNumberOfPoints() + source_ne = source_mesh.GetNumberOfCells() + source_nn = source_mesh.GetNumberOfPoints() + + if dest_ne != source_ne: + logging.error('meshes have different number of cells') + return False + if dest_nn != source_nn: + logging.error('meshes have different number of nodes') + return False + + for i in range(dest_nn): + if not ((dest_mesh.GetPoint(i)) == (source_mesh.GetPoint(i))): + logging.error('at least one node is in a different location') + return False + + for i in range(dest_ne): + if not (vtk_to_numpy(dest_mesh.GetCell(i).GetPoints().GetData()) == vtk_to_numpy(source_mesh.GetCell(i).GetPoints().GetData())).all(): + logging.error('at least one cell has different nodes') + return False + + return True + + + + + + +def __transfer_field(mesh, support, field_name, source) -> bool: + from_mesh = vtk_utils.read_mesh( source ) + same_mesh = __compatible_meshes(mesh, from_mesh) + if not same_mesh: + logging.error('meshes are not the same') + return False + + if support == 'cell': + data = from_mesh.GetCellData().GetArray(field_name) + if data == None: + logging.error('Requested field does not exist on source mesh') + return False + else: + mesh.GetCellData().AddArray(data) + elif support == 'node': + data = from_mesh.GetPointData().GetArray(field_name) + if data == None: + logging.error('Requested field does not exist on source mesh') + return False + else: + mesh.GetPointData().AddArray(data) + return False + else: + logging.error('incorrect support option. 
Options are node, cell') + return False + return True + + +def __check( mesh, options: Options ) -> Result: + if options.source == 'function': + succ =__analytic_field(mesh, options.support, options.field_name) + if succ: + vtk_utils.write_mesh( mesh, options.out_vtk ) + elif (options.source[-4:] == '.vtu' or options.source[-4:] == '.vtk'): + succ = __transfer_field(mesh, options.support, options.field_name, options.source) + if succ: + vtk_utils.write_mesh( mesh, options.out_vtk ) + else: + logging.error('incorrect source option. Options are function, *.vtu, *.vtk.') + succ = False + return Result(info=succ) + #TODO: Better exception handle + + + +def check( vtk_input_file: str, options: Options ) -> Result: + mesh = vtk_utils.read_mesh( vtk_input_file ) + return __check( mesh, options ) \ No newline at end of file diff --git a/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py b/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py new file mode 100644 index 0000000..62b8203 --- /dev/null +++ b/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py @@ -0,0 +1,102 @@ +import logging +from dataclasses import dataclass +from vtkmodules.util.numpy_support import ( + numpy_to_vtk, + vtk_to_numpy, ) + +from . 
import vtk_utils + +@dataclass( frozen=True ) +class Options: + info: str + + +@dataclass( frozen=True ) +class Result: + num_elements: int + num_nodes: int + num_cell_types: int + cell_types: list + cell_type_counts: list + scalar_cell_data_names: list + scalar_cell_data_mins: list + scalar_cell_data_maxs: list + tensor_cell_data_names: list + scalar_point_data_names: list + scalar_point_data_mins: list + scalar_point_data_maxs: list + tensor_point_data_names: list + has_point_global_ids: bool + has_cell_global_ids: bool + min_coords: list + max_coords: list + #TODO: compress this, or just print the stuff here and dont pass it + + +def __check( mesh, options: Options ) -> Result: + + ne=mesh.GetNumberOfCells() + nn=mesh.GetNumberOfPoints() + nct = mesh.GetDistinctCellTypesArray().GetSize() + cts = [] + for ct in range(nct): + cts.append(vtk_utils.vtkid_to_string(mesh.GetCellType(ct))) + + ct_counts = [0]*nct + for c in range(ne): + for ct in range(nct): + if vtk_utils.vtkid_to_string(mesh.GetCell(c).GetCellType()) == cts[ct]: + ct_counts[ct] += 1 + break + + cd_scalar_names = [] + cd_scalar_maxs = [] + cd_scalar_mins = [] + ncd = mesh.GetCellData().GetNumberOfArrays() + for cdi in range(ncd): + cd = mesh.GetCellData().GetArray(cdi) + if cd.GetNumberOfComponents() == 1: # assumes scalar cell data for max and min + cd_scalar_names.append(cd.GetName()) + cd_np = vtk_to_numpy(cd) + cd_scalar_maxs.append(cd_np.max()) + cd_scalar_mins.append(cd_np.min()) + + cd_tensor_names = [] + for cdi in range(ncd): + cd = mesh.GetCellData().GetArray(cdi) + if cd.GetNumberOfComponents() != 1: + cd_tensor_names.append(cd.GetName()) + + pd_scalar_names = [] + pd_scalar_maxs = [] + pd_scalar_mins = [] + npd = mesh.GetPointData().GetNumberOfArrays() + for pdi in range(npd): + pd = mesh.GetPointData().GetArray(pdi) + if pd.GetNumberOfComponents() == 1: # assumes scalar point data for max and min + pd_scalar_names.append(pd.GetName()) + pd_np = vtk_to_numpy(pd) + 
pd_scalar_maxs.append(pd_np.max()) + pd_scalar_mins.append(pd_np.min()) + + pd_tensor_names = [] + for pdi in range(npd): + pd = mesh.GetPointData().GetArray(pdi) + if pd.GetNumberOfComponents() != 1: + pd_tensor_names.append(pd.GetName()) + + point_ids = bool(mesh.GetPointData().GetGlobalIds()) + cell_ids = bool(mesh.GetCellData().GetGlobalIds()) + + coords = vtk_to_numpy(mesh.GetPoints().GetData()) + center = (coords.max(axis=0) + coords.min(axis=0))/2 + + + return Result( num_elements=ne, num_nodes=nn, num_cell_types=nct, cell_types=cts, cell_type_counts=ct_counts, + scalar_cell_data_names=cd_scalar_names, scalar_cell_data_mins=cd_scalar_mins, scalar_cell_data_maxs=cd_scalar_maxs, tensor_cell_data_names=cd_tensor_names, + scalar_point_data_names=pd_scalar_names, scalar_point_data_mins=pd_scalar_mins, scalar_point_data_maxs=pd_scalar_maxs, tensor_point_data_names=pd_tensor_names, + has_point_global_ids=point_ids, has_cell_global_ids=cell_ids, min_coords=coords.min(axis=0), max_coords=coords.max(axis=0) ) + +def check( vtk_input_file: str, options: Options ) -> Result: + mesh = vtk_utils.read_mesh( vtk_input_file ) + return __check( mesh, options ) diff --git a/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py b/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py index 9beb375..6830b95 100644 --- a/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py +++ b/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py @@ -21,6 +21,21 @@ vtkXMLUnstructuredGridWriter, ) +from vtkmodules.vtkCommonDataModel import ( + VTK_HEXAGONAL_PRISM, + VTK_HEXAHEDRON, + VTK_PENTAGONAL_PRISM, + VTK_PYRAMID, + VTK_TETRA, + VTK_VOXEL, + VTK_WEDGE, + VTK_TRIANGLE, + VTK_QUAD, + VTK_PIXEL, + VTK_LINE, + VTK_VERTEX, +) + @dataclass( frozen=True ) class VtkOutput: @@ -138,3 +153,32 @@ def write_mesh( mesh: vtkUnstructuredGrid, vtk_output: VtkOutput ) -> int: logging.critical( f"Could not find the appropriate VTK writer for extension \"{file_extension}\". Dying..." 
) sys.exit( 1 ) return 0 if success_code else 2 # the Write member function return 1 in case of success, 0 otherwise. + +def vtkid_to_string(id: int) -> str: + match id: + case 1: # VTK_VERTEX + return 'Vertex' + case 3: #VTK_LINE + return 'Line' + case 5: #VTK_TRIANGLE + return 'Triangle' + case 8: #VTK_PIXEL + return 'Pixel' + case 9: #VTK_QUAD + return 'Quad' + case 10: #VTK_TETRA + return 'Tetra' + case 11: #VTK_VOXEL + return 'Voxel' + case 12: #VTK_HEXAHEDRON + return 'Hex' + case 13: #VTK_WEDGE + return 'Wedge' + case 14: #VTK_PYRAMID + return 'Pyramid' + case 15: #VTK_PENTAGONAL_PRISM + return 'Pentagonal prism' + case 16: #VTK_HEXAGONAL_PRISM + return 'Hexagonal Prism' + case _: + return 'Unknown type' diff --git a/geos-mesh/src/geos/mesh/doctor/parsing/__init__.py b/geos-mesh/src/geos/mesh/doctor/parsing/__init__.py index 679f880..d5c9a57 100644 --- a/geos-mesh/src/geos/mesh/doctor/parsing/__init__.py +++ b/geos-mesh/src/geos/mesh/doctor/parsing/__init__.py @@ -11,6 +11,8 @@ NON_CONFORMAL = "non_conformal" SELF_INTERSECTING_ELEMENTS = "self_intersecting_elements" SUPPORTED_ELEMENTS = "supported_elements" +MESH_STATS = "mesh_stats" +ADD_FIELDS = "add_fields" @dataclass( frozen=True ) diff --git a/geos-mesh/src/geos/mesh/doctor/parsing/add_fields_parsing.py b/geos-mesh/src/geos/mesh/doctor/parsing/add_fields_parsing.py new file mode 100644 index 0000000..d10d37f --- /dev/null +++ b/geos-mesh/src/geos/mesh/doctor/parsing/add_fields_parsing.py @@ -0,0 +1,36 @@ +import logging + +from checks.add_fields import Options, Result + + +from . import vtk_output_parsing, ADD_FIELDS + +__SUPPORT = "support" +__NAME = "name" +__SOURCE = "source" + +def fill_subparser( subparsers ) -> None: + p = subparsers.add_parser( ADD_FIELDS, + help=f"Add cell or point data to a mesh." ) + p.add_argument( '--' + __SUPPORT, + type=str, + required=True, + help=f"[string]: Where to define field (point/cell)." 
) + p.add_argument( '--' + __NAME, + type=str, + required=True, + help=f"[string]: Name of the field to add." ) + p.add_argument( '--' + __SOURCE, + type=str, + required=True, + help=f"[string]: Where field data to add comes from (function, mesh)." ) + vtk_output_parsing.fill_vtk_output_subparser( p ) + +def convert( parsed_options ) -> Options: + """ + """ + return Options( support=parsed_options[__SUPPORT], field_name=parsed_options[__NAME], source=parsed_options[__SOURCE], out_vtk=vtk_output_parsing.convert( parsed_options ) ) + +def display_results( options: Options, result: Result ): + if result.info != True: + logging.error( f"Field addition failed" ) \ No newline at end of file diff --git a/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py b/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py new file mode 100644 index 0000000..5fdb669 --- /dev/null +++ b/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py @@ -0,0 +1,40 @@ +import logging + +from checks.mesh_stats import Options, Result + +from . import MESH_STATS + +def fill_subparser( subparsers ) -> None: + p = subparsers.add_parser( MESH_STATS, + help=f"Outputs basic properties of a mesh." ) + +def convert( parsed_options ) -> Options: + """ + """ + return Options( info="test" ) + +def display_results( options: Options, result: Result ): + logging.info( f"The mesh has {result.num_elements} elements and {result.num_nodes} nodes." 
) + logging.info( f"There are {result.num_cell_types} different types of cell in the mesh:" ) + for i in range(result.num_cell_types): + logging.info( f"\t {result.cell_types[i]} \t ({result.cell_type_counts[i]} cells)" ) + + logging.info( f"The domain is contained in {result.min_coords[0]} <= x <= {result.max_coords[0]}") + logging.info( f" {result.min_coords[1]} <= y <= {result.max_coords[1]}") + logging.info( f" {result.min_coords[2]} <= z <= {result.max_coords[2]}") + + logging.info( f"Does the mesh have global point ids: {result.has_point_global_ids}" ) + logging.info( f"Does the mesh have global cell ids: {result.has_cell_global_ids}" ) + + logging.info( f"There are {len(result.scalar_cell_data_names)} scalar fields on the cells:" ) + for i in range(len(result.scalar_cell_data_names)): + logging.info( f"\t {result.scalar_cell_data_names[i]} \t min = {result.scalar_cell_data_mins[i]} \t max = {result.scalar_cell_data_maxs[i]}" ) + logging.info( f"There are {len(result.tensor_cell_data_names)} vector/tensor fields on the cells:" ) + for i in range(len(result.tensor_cell_data_names)): + logging.info( f"\t {result.tensor_cell_data_names[i]}" ) + logging.info( f"There are {len(result.scalar_point_data_names)} scalar fields on the points:" ) + for i in range(len(result.scalar_point_data_names)): + logging.info( f"\t {result.scalar_point_data_names[i]} \t min = {result.scalar_point_data_mins[i]} \t max = {result.scalar_point_data_maxs[i]}" ) + logging.info( f"There are {len(result.tensor_point_data_names)} vector/tensor fields on the points:" ) + for i in range(len(result.tensor_point_data_names)): + logging.info( f"\t {result.tensor_point_data_names[i]}" ) diff --git a/geos-mesh/src/geos/mesh/doctor/register.py b/geos-mesh/src/geos/mesh/doctor/register.py index 75e8d48..852a8d1 100644 --- a/geos-mesh/src/geos/mesh/doctor/register.py +++ b/geos-mesh/src/geos/mesh/doctor/register.py @@ -55,7 +55,8 @@ def closure_trick( cn: str ): # Register the modules to load here. 
for check_name in ( parsing.COLLOCATES_NODES, parsing.ELEMENT_VOLUMES, parsing.FIX_ELEMENTS_ORDERINGS, parsing.GENERATE_CUBE, parsing.GENERATE_FRACTURES, parsing.GENERATE_GLOBAL_IDS, - parsing.NON_CONFORMAL, parsing.SELF_INTERSECTING_ELEMENTS, parsing.SUPPORTED_ELEMENTS ): + parsing.NON_CONFORMAL, parsing.SELF_INTERSECTING_ELEMENTS, parsing.SUPPORTED_ELEMENTS, + parsing.MESH_STATS, parsing.ADD_FIELDS ): closure_trick( check_name ) loaded_checks: Dict[ str, Callable[ [ str, Any ], Any ] ] = __load_checks() loaded_checks_helpers: Dict[ str, CheckHelper ] = dict() From 420a72c11be7346949d14a7e682e1e691c3a0932 Mon Sep 17 00:00:00 2001 From: alexbenedicto Date: Fri, 12 Jul 2024 10:11:47 -0700 Subject: [PATCH 02/34] Initial commit --- .../src/geos/mesh/doctor/checks/mesh_stats.py | 222 ++++++++++++------ 1 file changed, 146 insertions(+), 76 deletions(-) diff --git a/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py b/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py index 62b8203..80b6ecc 100644 --- a/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py +++ b/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py @@ -1,101 +1,171 @@ import logging +from typing import Union +import numpy as np from dataclasses import dataclass from vtkmodules.util.numpy_support import ( - numpy_to_vtk, vtk_to_numpy, ) - +from vtkmodules.vtkCommonDataModel import vtkUnstructuredGrid from . 
import vtk_utils @dataclass( frozen=True ) class Options: info: str +np_hinting = Union[np.float32, np.float64, np.int32, np.int64] + +@dataclass( frozen=True ) +class MeshComponentData: + componentType: str + scalar_names: list[str] + scalar_min_values: list[np_hinting] + scalar_max_values: list[np_hinting] + tensor_names: list[str] + tensor_min_values: list[np.array[np_hinting]] + tensor_max_values: list[np.array[np_hinting]] @dataclass( frozen=True ) class Result: num_elements: int num_nodes: int num_cell_types: int - cell_types: list - cell_type_counts: list - scalar_cell_data_names: list - scalar_cell_data_mins: list - scalar_cell_data_maxs: list - tensor_cell_data_names: list - scalar_point_data_names: list - scalar_point_data_mins: list - scalar_point_data_maxs: list - tensor_point_data_names: list - has_point_global_ids: bool - has_cell_global_ids: bool - min_coords: list - max_coords: list - #TODO: compress this, or just print the stuff here and dont pass it + cell_types: list[str] + cell_type_counts: list[int] + min_coords: np.array + max_coords: np.array + is_empty_point_global_ids: bool + is_empty_cell_global_ids: bool + cell_data: MeshComponentData + point_data: MeshComponentData + + +def __build_MeshComponentData( + mesh: vtkUnstructuredGrid, componentType: str = "point" + ) -> MeshComponentData: + """Builds a MeshComponentData object for a specific component ("points", "cells") + If the component type chosen is invalid, chooses "points" by default. + + Args: + mesh (vtkUnstructuredGrid): An unstructured grid. + + Returns: + meshCD (MeshComponentData): Object that gathers data regarding a mesh component. 
+ """ + if componentType not in ["points", "cells"]: + componentType = "point" + # raise warning + if componentType == "point": + number_arrays_data: int = mesh.GetPointData().GetNumberOfArrays() + else: + number_arrays_data = mesh.GetCellData().GetNumberOfArrays() + + meshCD: MeshComponentData = MeshComponentData() + for i in range(number_arrays_data): + if componentType == "point": + data_array = mesh.GetPointData().GetArray(i) + else: + data_array = mesh.GetCellData().GetArray(i) + data_array = mesh.GetCellData().GetArray(i) + data_array_name = data_array.GetName() + data_np_array = vtk_to_numpy(data_array) + if data_array.GetNumberOfComponents() == 1: # assumes scalar cell data for max and min + meshCD.scalar_names.append(data_array_name) + meshCD.scalar_max_values.append(data_np_array.max()) + meshCD.scalar_min_values.append(data_np_array.min()) + else: + meshCD.tensor_names.append(data_array_name) + meshCD.tensor_max_values.append(data_np_array.max(axis=0)) + meshCD.tensor_min_values.append(data_np_array.min(axis=0)) + + return meshCD + + +# @dataclass( frozen=True ) +# class Result: +# num_elements: int +# num_nodes: int +# num_cell_types: int +# cell_types: list +# cell_type_counts: list +# scalar_cell_data_names: list +# scalar_cell_data_mins: list +# scalar_cell_data_maxs: list +# tensor_cell_data_names: list +# scalar_point_data_names: list +# scalar_point_data_mins: list +# scalar_point_data_maxs: list +# tensor_point_data_names: list +# is_empty_point_global_ids: bool +# is_empty_cell_global_ids: bool +# min_coords: list +# max_coords: list +# #TODO: compress this, or just print the stuff here and dont pass it def __check( mesh, options: Options ) -> Result: - ne=mesh.GetNumberOfCells() - nn=mesh.GetNumberOfPoints() - nct = mesh.GetDistinctCellTypesArray().GetSize() - cts = [] - for ct in range(nct): - cts.append(vtk_utils.vtkid_to_string(mesh.GetCellType(ct))) - - ct_counts = [0]*nct - for c in range(ne): - for ct in range(nct): - if 
vtk_utils.vtkid_to_string(mesh.GetCell(c).GetCellType()) == cts[ct]: - ct_counts[ct] += 1 + number_elements: int = mesh.GetNumberOfCells() + number_nodes: int = mesh.GetNumberOfPoints() + number_cell_types: int = mesh.GetDistinctCellTypesArray().GetSize() + cell_types: list[str] = [] + for cell_type in range(number_cell_types): + cell_types.append(vtk_utils.vtkid_to_string(mesh.GetCellType(cell_type))) + + cell_type_counts: list[int] = [0]*number_cell_types + for cell in range(number_elements): + for cell_type in range(number_cell_types): + if vtk_utils.vtkid_to_string(mesh.GetCell(cell).GetCellType()) == cell_types[cell_type]: + cell_type_counts[cell_type] += 1 break - cd_scalar_names = [] - cd_scalar_maxs = [] - cd_scalar_mins = [] - ncd = mesh.GetCellData().GetNumberOfArrays() - for cdi in range(ncd): - cd = mesh.GetCellData().GetArray(cdi) - if cd.GetNumberOfComponents() == 1: # assumes scalar cell data for max and min - cd_scalar_names.append(cd.GetName()) - cd_np = vtk_to_numpy(cd) - cd_scalar_maxs.append(cd_np.max()) - cd_scalar_mins.append(cd_np.min()) - - cd_tensor_names = [] - for cdi in range(ncd): - cd = mesh.GetCellData().GetArray(cdi) - if cd.GetNumberOfComponents() != 1: - cd_tensor_names.append(cd.GetName()) - - pd_scalar_names = [] - pd_scalar_maxs = [] - pd_scalar_mins = [] - npd = mesh.GetPointData().GetNumberOfArrays() - for pdi in range(npd): - pd = mesh.GetPointData().GetArray(pdi) - if pd.GetNumberOfComponents() == 1: # assumes scalar point data for max and min - pd_scalar_names.append(pd.GetName()) - pd_np = vtk_to_numpy(pd) - pd_scalar_maxs.append(pd_np.max()) - pd_scalar_mins.append(pd_np.min()) - - pd_tensor_names = [] - for pdi in range(npd): - pd = mesh.GetPointData().GetArray(pdi) - if pd.GetNumberOfComponents() != 1: - pd_tensor_names.append(pd.GetName()) - - point_ids = bool(mesh.GetPointData().GetGlobalIds()) - cell_ids = bool(mesh.GetCellData().GetGlobalIds()) - - coords = vtk_to_numpy(mesh.GetPoints().GetData()) - center = 
(coords.max(axis=0) + coords.min(axis=0))/2 - - - return Result( num_elements=ne, num_nodes=nn, num_cell_types=nct, cell_types=cts, cell_type_counts=ct_counts, - scalar_cell_data_names=cd_scalar_names, scalar_cell_data_mins=cd_scalar_mins, scalar_cell_data_maxs=cd_scalar_maxs, tensor_cell_data_names=cd_tensor_names, - scalar_point_data_names=pd_scalar_names, scalar_point_data_mins=pd_scalar_mins, scalar_point_data_maxs=pd_scalar_maxs, tensor_point_data_names=pd_tensor_names, - has_point_global_ids=point_ids, has_cell_global_ids=cell_ids, min_coords=coords.min(axis=0), max_coords=coords.max(axis=0) ) + cell_data_scalar_names: list[str] = [] + cell_data_scalar_maxs: list[np_hinting] = [] + cell_data_scalar_mins: list[np_hinting] = [] + cell_data_tensor_names: list[str] = [] + cell_data_tensor_maxs: list[np_hinting] = [] + cell_data_tensor_mins: list[np_hinting] = [] + number_cell_data: int = mesh.GetCellData().GetNumberOfArrays() + for i in range(number_cell_data): + cell_data = mesh.GetCellData().GetArray(i) + if cell_data.GetNumberOfComponents() == 1: # assumes scalar cell data for max and min + cell_data_scalar_names.append(cell_data.GetName()) + cell_data_np = vtk_to_numpy(cell_data) + cell_data_scalar_maxs.append(cell_data_np.max()) + cell_data_scalar_mins.append(cell_data_np.min()) + else: + cell_data_tensor_names.append(cell_data.GetName()) + cell_data_np = vtk_to_numpy(cell_data) + max_values = cell_data_np.max(axis=0) + min_values = cell_data_np.min(axis=0) + cell_data_tensor_maxs.append(max_values) + cell_data_tensor_mins.append(min_values) + + point_data_scalar_names: list[str] = [] + point_data_scalar_maxs: list[np_hinting] = [] + point_data_scalar_mins: list[np_hinting] = [] + point_data_tensor_names: list[str] = [] + number_point_data = mesh.GetPointData().GetNumberOfArrays() + for j in range(number_point_data): + point_data = mesh.GetPointData().GetArray(j) + if point_data.GetNumberOfComponents() == 1: # assumes scalar point data for max and min + 
point_data_scalar_names.append(point_data.GetName()) + point_data_np = vtk_to_numpy(point_data) + point_data_scalar_maxs.append(point_data_np.max()) + point_data_scalar_mins.append(point_data_np.min()) + else: + point_data_tensor_names.append(point_data.GetName()) + + point_ids: bool = bool(mesh.GetPointData().GetGlobalIds()) + cell_ids: bool = bool(mesh.GetCellData().GetGlobalIds()) + + coords: np.ndarray = vtk_to_numpy(mesh.GetPoints().GetData()) + min_coords: np.ndarray = coords.min(axis=0) + max_coords: np.ndarray = coords.max(axis=0) + # center = (coords.max(axis=0) + coords.min(axis=0))/2 + + return Result( num_elements=number_elements, num_nodes=number_nodes, num_cell_types=number_cell_types, cell_types=cell_types, cell_type_counts=cell_type_counts, + scalar_cell_data_names=cell_data_scalar_names, scalar_cell_data_mins=cell_data_scalar_mins, scalar_cell_data_maxs=cell_data_scalar_maxs, tensor_cell_data_names=cell_data_tensor_names, + scalar_point_data_names=point_data_scalar_names, scalar_point_data_mins=point_data_scalar_mins, scalar_point_data_maxs=point_data_scalar_maxs, tensor_point_data_names=point_data_tensor_names, + has_point_global_ids=point_ids, has_cell_global_ids=cell_ids, min_coords=min_coords, max_coords=max_coords ) def check( vtk_input_file: str, options: Options ) -> Result: mesh = vtk_utils.read_mesh( vtk_input_file ) From 3e40aaadb1ef762c0a0298fad7cd942f3d19761e Mon Sep 17 00:00:00 2001 From: alexbenedicto Date: Fri, 12 Jul 2024 16:51:21 -0700 Subject: [PATCH 03/34] Updating mesh_stats files from Aronson --- .../src/geos/mesh/doctor/checks/mesh_stats.py | 180 ++++++++---------- .../mesh/doctor/parsing/mesh_stats_parsing.py | 37 ++-- 2 files changed, 102 insertions(+), 115 deletions(-) diff --git a/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py b/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py index 80b6ecc..9424580 100644 --- a/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py +++ 
b/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py @@ -2,9 +2,13 @@ from typing import Union import numpy as np from dataclasses import dataclass + from vtkmodules.util.numpy_support import ( vtk_to_numpy, ) -from vtkmodules.vtkCommonDataModel import vtkUnstructuredGrid + +from vtkmodules.vtkCommonDataModel import ( + vtkUnstructuredGrid, ) + from . import vtk_utils @dataclass( frozen=True ) @@ -25,24 +29,67 @@ class MeshComponentData: @dataclass( frozen=True ) class Result: - num_elements: int - num_nodes: int - num_cell_types: int + number_elements: int + number_nodes: int + number_cell_types: int cell_types: list[str] cell_type_counts: list[int] - min_coords: np.array - max_coords: np.array + min_coords: np.ndarray + max_coords: np.ndarray is_empty_point_global_ids: bool is_empty_cell_global_ids: bool - cell_data: MeshComponentData point_data: MeshComponentData + cell_data: MeshComponentData + +def get_cell_types_and_counts( + mesh: vtkUnstructuredGrid + )-> tuple[int, int, list[str], list[int]]: + """From an unstructured grid, collects the number of cells, + the number of cell types, the list of cell types and the counts + of each cell element. + + Args: + mesh (vtkUnstructuredGrid): An unstructured grid. 
+ + Returns: + tuple[int, int, list[str], list[int]]: In order, + (number_cells, number_cell_types, cell_types, cell_type_counts) + """ + number_cells: int = mesh.GetNumberOfCells() + number_cell_types: int = mesh.GetDistinctCellTypesArray().GetSize() + cell_types: list[str] = [] + for cell_type in range(number_cell_types): + cell_types.append(vtk_utils.vtkid_to_string(mesh.GetCellType(cell_type))) + + cell_type_counts: list[int] = [0]*number_cell_types + for cell in range(number_cells): + for cell_type in range(number_cell_types): + if vtk_utils.vtkid_to_string(mesh.GetCell(cell).GetCellType()) == cell_types[cell_type]: + cell_type_counts[cell_type] += 1 + break + + return (number_cells, number_cell_types, cell_types, cell_type_counts) + +def get_coords_min_max(mesh: vtkUnstructuredGrid) -> tuple[np.ndarray]: + """From an unstructured mesh, returns the coordinates of + the minimum and maximum points. + + Args: + mesh (vtkUnstructuredGrid): An unstructured grid. + Returns: + tuple[np.ndarray]: Min and Max coordinates. + """ + coords: np.ndarray = vtk_to_numpy(mesh.GetPoints().GetData()) + min_coords: np.ndarray = coords.min(axis=0) + max_coords: np.ndarray = coords.max(axis=0) + return (min_coords, max_coords) -def __build_MeshComponentData( +def build_MeshComponentData( mesh: vtkUnstructuredGrid, componentType: str = "point" ) -> MeshComponentData: - """Builds a MeshComponentData object for a specific component ("points", "cells") - If the component type chosen is invalid, chooses "points" by default. + """Builds a MeshComponentData object for a specific component ("point", "cell") + If the component type chosen is invalid, chooses "point" by default. Args: mesh (vtkUnstructuredGrid): An unstructured grid. @@ -50,21 +97,22 @@ def __build_MeshComponentData( Returns: meshCD (MeshComponentData): Object that gathers data regarding a mesh component. 
""" - if componentType not in ["points", "cells"]: + if componentType not in ["point", "cell"]: componentType = "point" - # raise warning + logging.error( f"Invalid component type chosen to build MeshComponentData. Defaulted to point." ) + if componentType == "point": number_arrays_data: int = mesh.GetPointData().GetNumberOfArrays() else: number_arrays_data = mesh.GetCellData().GetNumberOfArrays() meshCD: MeshComponentData = MeshComponentData() + meshCD.componentType = componentType for i in range(number_arrays_data): if componentType == "point": data_array = mesh.GetPointData().GetArray(i) else: data_array = mesh.GetCellData().GetArray(i) - data_array = mesh.GetCellData().GetArray(i) data_array_name = data_array.GetName() data_np_array = vtk_to_numpy(data_array) if data_array.GetNumberOfComponents() == 1: # assumes scalar cell data for max and min @@ -78,94 +126,24 @@ def __build_MeshComponentData( return meshCD - -# @dataclass( frozen=True ) -# class Result: -# num_elements: int -# num_nodes: int -# num_cell_types: int -# cell_types: list -# cell_type_counts: list -# scalar_cell_data_names: list -# scalar_cell_data_mins: list -# scalar_cell_data_maxs: list -# tensor_cell_data_names: list -# scalar_point_data_names: list -# scalar_point_data_mins: list -# scalar_point_data_maxs: list -# tensor_point_data_names: list -# is_empty_point_global_ids: bool -# is_empty_cell_global_ids: bool -# min_coords: list -# max_coords: list -# #TODO: compress this, or just print the stuff here and dont pass it - - -def __check( mesh, options: Options ) -> Result: - - number_elements: int = mesh.GetNumberOfCells() - number_nodes: int = mesh.GetNumberOfPoints() - number_cell_types: int = mesh.GetDistinctCellTypesArray().GetSize() - cell_types: list[str] = [] - for cell_type in range(number_cell_types): - cell_types.append(vtk_utils.vtkid_to_string(mesh.GetCellType(cell_type))) - - cell_type_counts: list[int] = [0]*number_cell_types - for cell in range(number_elements): - for 
cell_type in range(number_cell_types): - if vtk_utils.vtkid_to_string(mesh.GetCell(cell).GetCellType()) == cell_types[cell_type]: - cell_type_counts[cell_type] += 1 - break - - cell_data_scalar_names: list[str] = [] - cell_data_scalar_maxs: list[np_hinting] = [] - cell_data_scalar_mins: list[np_hinting] = [] - cell_data_tensor_names: list[str] = [] - cell_data_tensor_maxs: list[np_hinting] = [] - cell_data_tensor_mins: list[np_hinting] = [] - number_cell_data: int = mesh.GetCellData().GetNumberOfArrays() - for i in range(number_cell_data): - cell_data = mesh.GetCellData().GetArray(i) - if cell_data.GetNumberOfComponents() == 1: # assumes scalar cell data for max and min - cell_data_scalar_names.append(cell_data.GetName()) - cell_data_np = vtk_to_numpy(cell_data) - cell_data_scalar_maxs.append(cell_data_np.max()) - cell_data_scalar_mins.append(cell_data_np.min()) - else: - cell_data_tensor_names.append(cell_data.GetName()) - cell_data_np = vtk_to_numpy(cell_data) - max_values = cell_data_np.max(axis=0) - min_values = cell_data_np.min(axis=0) - cell_data_tensor_maxs.append(max_values) - cell_data_tensor_mins.append(min_values) - - point_data_scalar_names: list[str] = [] - point_data_scalar_maxs: list[np_hinting] = [] - point_data_scalar_mins: list[np_hinting] = [] - point_data_tensor_names: list[str] = [] - number_point_data = mesh.GetPointData().GetNumberOfArrays() - for j in range(number_point_data): - point_data = mesh.GetPointData().GetArray(j) - if point_data.GetNumberOfComponents() == 1: # assumes scalar point data for max and min - point_data_scalar_names.append(point_data.GetName()) - point_data_np = vtk_to_numpy(point_data) - point_data_scalar_maxs.append(point_data_np.max()) - point_data_scalar_mins.append(point_data_np.min()) - else: - point_data_tensor_names.append(point_data.GetName()) - - point_ids: bool = bool(mesh.GetPointData().GetGlobalIds()) - cell_ids: bool = bool(mesh.GetCellData().GetGlobalIds()) - - coords: np.ndarray = 
vtk_to_numpy(mesh.GetPoints().GetData()) - min_coords: np.ndarray = coords.min(axis=0) - max_coords: np.ndarray = coords.max(axis=0) - # center = (coords.max(axis=0) + coords.min(axis=0))/2 - - return Result( num_elements=number_elements, num_nodes=number_nodes, num_cell_types=number_cell_types, cell_types=cell_types, cell_type_counts=cell_type_counts, - scalar_cell_data_names=cell_data_scalar_names, scalar_cell_data_mins=cell_data_scalar_mins, scalar_cell_data_maxs=cell_data_scalar_maxs, tensor_cell_data_names=cell_data_tensor_names, - scalar_point_data_names=point_data_scalar_names, scalar_point_data_mins=point_data_scalar_mins, scalar_point_data_maxs=point_data_scalar_maxs, tensor_point_data_names=point_data_tensor_names, - has_point_global_ids=point_ids, has_cell_global_ids=cell_ids, min_coords=min_coords, max_coords=max_coords ) +def __check( mesh: vtkUnstructuredGrid, options: Options ) -> Result: + + number_points: int = mesh.GetNumberOfPoints() + cells_info = get_cell_types_and_counts(mesh) + number_cells: int = cells_info[0] + number_cell_types: int = cells_info[1] + cell_types: int = cells_info[2] + cell_type_counts: int = cells_info[3] + min_coords, max_coords = get_coords_min_max(mesh) + point_ids: bool = not bool(mesh.GetPointData().GetGlobalIds()) + cell_ids: bool = not bool(mesh.GetCellData().GetGlobalIds()) + point_data: MeshComponentData = build_MeshComponentData(mesh, "point") + cell_data: MeshComponentData = build_MeshComponentData(mesh, "cell") + + return Result( number_points=number_points, number_cells=number_cells, number_cell_types=number_cell_types, + cell_types=cell_types, cell_type_counts=cell_type_counts, min_coords=min_coords, max_coords=max_coords, + is_empty_point_global_ids=point_ids, is_empty_cell_global_ids=cell_ids, + point_data=point_data, cell_data=cell_data) def check( vtk_input_file: str, options: Options ) -> Result: mesh = vtk_utils.read_mesh( vtk_input_file ) diff --git 
a/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py b/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py index 5fdb669..ccea86d 100644 --- a/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py +++ b/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py @@ -23,18 +23,27 @@ def display_results( options: Options, result: Result ): logging.info( f" {result.min_coords[1]} <= y <= {result.max_coords[1]}") logging.info( f" {result.min_coords[2]} <= z <= {result.max_coords[2]}") - logging.info( f"Does the mesh have global point ids: {result.has_point_global_ids}" ) - logging.info( f"Does the mesh have global cell ids: {result.has_cell_global_ids}" ) + logging.info( f"Does the mesh have global point ids: {not result.is_empty_point_global_ids}" ) + logging.info( f"Does the mesh have global cell ids: {not result.is_empty_cell_global_ids}" ) - logging.info( f"There are {len(result.scalar_cell_data_names)} scalar fields on the cells:" ) - for i in range(len(result.scalar_cell_data_names)): - logging.info( f"\t {result.scalar_cell_data_names[i]} \t min = {result.scalar_cell_data_mins[i]} \t max = {result.scalar_cell_data_maxs[i]}" ) - logging.info( f"There are {len(result.tensor_cell_data_names)} vector/tensor fields on the cells:" ) - for i in range(len(result.tensor_cell_data_names)): - logging.info( f"\t {result.tensor_cell_data_names[i]}" ) - logging.info( f"There are {len(result.scalar_point_data_names)} scalar fields on the points:" ) - for i in range(len(result.scalar_point_data_names)): - logging.info( f"\t {result.scalar_point_data_names[i]} \t min = {result.scalar_point_data_mins[i]} \t max = {result.scalar_point_data_maxs[i]}" ) - logging.info( f"There are {len(result.tensor_point_data_names)} vector/tensor fields on the points:" ) - for i in range(len(result.tensor_point_data_names)): - logging.info( f"\t {result.tensor_point_data_names[i]}" ) + logging.info( f"There are {len(result.cell_data.scalar_names)} scalar fields on the 
cells:" ) + for i in range(len(result.cell_data.scalar_names)): + logging.info( f"\t {result.cell_data.scalar_names[i]}" + + f" \t min = {result.cell_data.scalar_min_values[i]}" + + f" \t max = {result.cell_data.scalar_max_values[i]}" ) + logging.info( f"There are {len(result.cell_data.tensor_names)} vector/tensor fields on the cells:" ) + for i in range(len(result.cell_data.tensor_names)): + logging.info( f"\t {result.cell_data.tensor_names[i]}" + + f" \t min = {result.cell_data.tensor_min_values[i]}" + + f" \t max = {result.cell_data.tensor_max_values[i]}" ) + + logging.info( f"There are {len(result.point_data.scalar_names)} scalar fields on the points:" ) + for i in range(len(result.point_data.scalar_names)): + logging.info( f"\t {result.point_data.scalar_names[i]}" + + f" \t min = {result.point_data.scalar_min_values[i]}" + + f" \t max = {result.point_data.scalar_max_values[i]}" ) + logging.info( f"There are {len(result.point_data.tensor_names)} vector/tensor fields on the points:" ) + for i in range(len(result.point_data.tensor_names)): + logging.info( f"\t {result.point_data.tensor_names[i]}" + + f" \t min = {result.point_data.tensor_min_values[i]}" + + f" \t max = {result.point_data.tensor_max_values[i]}" ) From f8d19d5670ce137f5cbf2bb1355ab5cd5e16a8fc Mon Sep 17 00:00:00 2001 From: alexbenedicto Date: Thu, 8 Aug 2024 10:04:21 -0700 Subject: [PATCH 04/34] Mesh stats completed with node sharing number and better parsing format. 
--- .../src/geos/mesh/doctor/checks/mesh_stats.py | 168 ++++++++++++------ .../mesh/doctor/parsing/mesh_stats_parsing.py | 65 ++++--- 2 files changed, 156 insertions(+), 77 deletions(-) diff --git a/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py b/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py index 9424580..1fc1558 100644 --- a/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py +++ b/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py @@ -1,6 +1,6 @@ import logging -from typing import Union import numpy as np +import numpy.typing as npt from dataclasses import dataclass from vtkmodules.util.numpy_support import ( @@ -15,25 +15,26 @@ class Options: info: str -np_hinting = Union[np.float32, np.float64, np.int32, np.int64] +ArrayGeneric = npt.NDArray[np.generic] @dataclass( frozen=True ) class MeshComponentData: componentType: str - scalar_names: list[str] - scalar_min_values: list[np_hinting] - scalar_max_values: list[np_hinting] - tensor_names: list[str] - tensor_min_values: list[np.array[np_hinting]] - tensor_max_values: list[np.array[np_hinting]] + scalar_names: list[ str ] + scalar_min_values: list[ np.generic ] # base class for all scalar types numpy + scalar_max_values: list[ np.generic ] + tensor_names: list[ str ] + tensor_min_values: list[ ArrayGeneric ] + tensor_max_values: list[ ArrayGeneric ] @dataclass( frozen=True ) class Result: - number_elements: int - number_nodes: int + number_cells: int + number_points: int number_cell_types: int - cell_types: list[str] - cell_type_counts: list[int] + cell_types: list[ str ] + cell_type_counts: list[ int ] + sum_number_cells_per_nodes: dict[ int, int ] min_coords: np.ndarray max_coords: np.ndarray is_empty_point_global_ids: bool @@ -41,9 +42,10 @@ class Result: point_data: MeshComponentData cell_data: MeshComponentData + def get_cell_types_and_counts( mesh: vtkUnstructuredGrid - )-> tuple[int, int, list[str], list[int]]: + )-> tuple[ int, int, list[ str ], list[ int ] ]: """From an unstructured grid, 
collects the number of cells, the number of cell types, the list of cell types and the counts of each cell element. @@ -56,21 +58,62 @@ def get_cell_types_and_counts( (number_cells, number_cell_types, cell_types, cell_type_counts) """ number_cells: int = mesh.GetNumberOfCells() - number_cell_types: int = mesh.GetDistinctCellTypesArray().GetSize() - cell_types: list[str] = [] - for cell_type in range(number_cell_types): - cell_types.append(vtk_utils.vtkid_to_string(mesh.GetCellType(cell_type))) - - cell_type_counts: list[int] = [0]*number_cell_types - for cell in range(number_cells): - for cell_type in range(number_cell_types): - if vtk_utils.vtkid_to_string(mesh.GetCell(cell).GetCellType()) == cell_types[cell_type]: - cell_type_counts[cell_type] += 1 + distinct_array_types = mesh.GetDistinctCellTypesArray() + number_cell_types: int = distinct_array_types.GetNumberOfTuples() + # Get the different cell types in the mesh + cell_types: list[ str ] = [] + for cell_type in range( number_cell_types ): + cell_types.append( vtk_utils.vtkid_to_string( distinct_array_types.GetTuple( cell_type )[ 0 ] ) ) + # Counts how many of each type are present + cell_type_counts: list[ int ] = [ 0 ] * number_cell_types + for cell in range( number_cells ): + for cell_type in range( number_cell_types ): + if vtk_utils.vtkid_to_string( mesh.GetCell( cell ).GetCellType() ) == cell_types[ cell_type ]: + cell_type_counts[ cell_type ] += 1 break + return ( number_cells, number_cell_types, cell_types, cell_type_counts ) + + +def get_number_cells_per_nodes( mesh: vtkUnstructuredGrid ) -> dict[ int, int ]: + """Finds for each point_id the number of cells sharing that same node. + + Args: + mesh (vtkUnstructuredGrid): An unstructured grid. 
+ + Returns: + dict[ int, int ]: { point_id0: 8, ..., point_idN: 4 } + """ + number_cells_per_nodes: dict[ int, int ] = {} + for point_id in range( mesh.GetNumberOfPoints() ): + number_cells_per_nodes[ point_id ] = 0 + for cell_id in range( mesh.GetNumberOfCells() ): + cell = mesh.GetCell( cell_id ) + for v in range(cell.GetNumberOfPoints()): + point_id = cell.GetPointId( v ) + number_cells_per_nodes[ point_id ] += 1 + return number_cells_per_nodes + - return (number_cells, number_cell_types, cell_types, cell_type_counts) +def summary_number_cells_per_nodes( + number_cells_per_nodes: dict[ int, int ] ) -> dict[ int, int ]: + """Obtain the number of nodes that have X number of cells. -def get_coords_min_max(mesh: vtkUnstructuredGrid) -> tuple[np.ndarray]: + Args: + number_cells_per_nodes (dict[ int, int ]): { point_id0: 8, ..., point_idN: 4 } + + Returns: + dict[ int, int ]: Number of ce + """ + unique_number_cells = set( [ value for value in number_cells_per_nodes.values() ] ) + summary: dict[ int, int ] = {} + for unique_number in unique_number_cells: + summary[unique_number] = 0 + for number_cells in number_cells_per_nodes.values(): + summary[number_cells] += 1 + return summary + + +def get_coords_min_max( mesh: vtkUnstructuredGrid ) -> tuple[ np.ndarray ]: """From an unstructured mesh, returns the coordinates of the minimum and maximum points. @@ -80,10 +123,11 @@ def get_coords_min_max(mesh: vtkUnstructuredGrid) -> tuple[np.ndarray]: Returns: tuple[np.ndarray]: Min and Max coordinates. 
""" - coords: np.ndarray = vtk_to_numpy(mesh.GetPoints().GetData()) - min_coords: np.ndarray = coords.min(axis=0) - max_coords: np.ndarray = coords.max(axis=0) - return (min_coords, max_coords) + coords: np.ndarray = vtk_to_numpy( mesh.GetPoints().GetData() ) + min_coords: np.ndarray = coords.min( axis=0 ) + max_coords: np.ndarray = coords.max( axis=0 ) + return ( min_coords, max_coords ) + def build_MeshComponentData( mesh: vtkUnstructuredGrid, componentType: str = "point" @@ -97,7 +141,7 @@ def build_MeshComponentData( Returns: meshCD (MeshComponentData): Object that gathers data regarding a mesh component. """ - if componentType not in ["point", "cell"]: + if componentType not in [ "point", "cell" ]: componentType = "point" logging.error( f"Invalid component type chosen to build MeshComponentData. Defaulted to point." ) @@ -106,44 +150,56 @@ def build_MeshComponentData( else: number_arrays_data = mesh.GetCellData().GetNumberOfArrays() - meshCD: MeshComponentData = MeshComponentData() - meshCD.componentType = componentType - for i in range(number_arrays_data): + scalar_names: list[ str ] = [] + scalar_min_values: list[ np.generic ] = [] + scalar_max_values: list[ np.generic ] = [] + tensor_names: list[ str ] = [] + tensor_min_values: list[ ArrayGeneric ] = [] + tensor_max_values: list[ ArrayGeneric ] = [] + for i in range( number_arrays_data ): if componentType == "point": - data_array = mesh.GetPointData().GetArray(i) + data_array = mesh.GetPointData().GetArray( i ) else: - data_array = mesh.GetCellData().GetArray(i) + data_array = mesh.GetCellData().GetArray( i ) data_array_name = data_array.GetName() - data_np_array = vtk_to_numpy(data_array) + data_np_array = vtk_to_numpy( data_array ) if data_array.GetNumberOfComponents() == 1: # assumes scalar cell data for max and min - meshCD.scalar_names.append(data_array_name) - meshCD.scalar_max_values.append(data_np_array.max()) - meshCD.scalar_min_values.append(data_np_array.min()) + scalar_names.append( 
data_array_name ) + scalar_max_values.append( data_np_array.max() ) + scalar_min_values.append( data_np_array.min() ) else: - meshCD.tensor_names.append(data_array_name) - meshCD.tensor_max_values.append(data_np_array.max(axis=0)) - meshCD.tensor_min_values.append(data_np_array.min(axis=0)) + tensor_names.append( data_array_name ) + tensor_max_values.append( data_np_array.max(axis=0) ) + tensor_min_values.append( data_np_array.min(axis=0) ) - return meshCD + return MeshComponentData( componentType=componentType, scalar_names=scalar_names, + scalar_min_values=scalar_min_values, scalar_max_values=scalar_max_values, + tensor_names=tensor_names, tensor_min_values=tensor_min_values, + tensor_max_values=tensor_max_values ) -def __check( mesh: vtkUnstructuredGrid, options: Options ) -> Result: +def __check( mesh: vtkUnstructuredGrid, options: Options ) -> Result: number_points: int = mesh.GetNumberOfPoints() - cells_info = get_cell_types_and_counts(mesh) - number_cells: int = cells_info[0] - number_cell_types: int = cells_info[1] - cell_types: int = cells_info[2] - cell_type_counts: int = cells_info[3] - min_coords, max_coords = get_coords_min_max(mesh) - point_ids: bool = not bool(mesh.GetPointData().GetGlobalIds()) - cell_ids: bool = not bool(mesh.GetCellData().GetGlobalIds()) - point_data: MeshComponentData = build_MeshComponentData(mesh, "point") - cell_data: MeshComponentData = build_MeshComponentData(mesh, "cell") + cells_info = get_cell_types_and_counts( mesh ) + number_cells: int = cells_info[ 0 ] + number_cell_types: int = cells_info[ 1 ] + cell_types: int = cells_info[ 2 ] + cell_type_counts: int = cells_info[ 3 ] + number_cells_per_nodes: dict[ int, int ] = get_number_cells_per_nodes( mesh ) + sum_number_cells_per_nodes: dict[ int, int ] = summary_number_cells_per_nodes( number_cells_per_nodes ) + min_coords, max_coords = get_coords_min_max( mesh ) + point_ids: bool = not bool( mesh.GetPointData().GetGlobalIds() ) + cell_ids: bool = not bool( 
mesh.GetCellData().GetGlobalIds() ) + point_data: MeshComponentData = build_MeshComponentData( mesh, "point" ) + cell_data: MeshComponentData = build_MeshComponentData( mesh, "cell" ) return Result( number_points=number_points, number_cells=number_cells, number_cell_types=number_cell_types, - cell_types=cell_types, cell_type_counts=cell_type_counts, min_coords=min_coords, max_coords=max_coords, + cell_types=cell_types, cell_type_counts=cell_type_counts, + sum_number_cells_per_nodes=sum_number_cells_per_nodes, + min_coords=min_coords, max_coords=max_coords, is_empty_point_global_ids=point_ids, is_empty_cell_global_ids=cell_ids, - point_data=point_data, cell_data=cell_data) + point_data=point_data, cell_data=cell_data ) + def check( vtk_input_file: str, options: Options ) -> Result: mesh = vtk_utils.read_mesh( vtk_input_file ) diff --git a/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py b/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py index ccea86d..616bdc1 100644 --- a/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py +++ b/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py @@ -1,6 +1,7 @@ import logging +from typing import Iterable -from checks.mesh_stats import Options, Result +from geos.mesh.doctor.checks.mesh_stats import Options, Result from . import MESH_STATS @@ -14,36 +15,58 @@ def convert( parsed_options ) -> Options: return Options( info="test" ) def display_results( options: Options, result: Result ): - logging.info( f"The mesh has {result.num_elements} elements and {result.num_nodes} nodes." ) - logging.info( f"There are {result.num_cell_types} different types of cell in the mesh:" ) - for i in range(result.num_cell_types): - logging.info( f"\t {result.cell_types[i]} \t ({result.cell_type_counts[i]} cells)" ) + logging.info( f"The mesh has {result.number_cells} cells and {result.number_points} points." 
) + logging.info( f"There are {result.number_cell_types} different types of cell in the mesh:" ) + for i in range(result.number_cell_types): + logging.info( f"\t{result.cell_types[i]}\t({result.cell_type_counts[i]} cells)" ) + + logging.info( "Number of nodes being shared between exactly N cells:" ) + logging.info( "\tCells\tNodes" ) + for number_cells_per_node, number_of_occurences in result.sum_number_cells_per_nodes.items(): + logging.info( f"\t{number_cells_per_node}\t{number_of_occurences}" ) - logging.info( f"The domain is contained in {result.min_coords[0]} <= x <= {result.max_coords[0]}") - logging.info( f" {result.min_coords[1]} <= y <= {result.max_coords[1]}") - logging.info( f" {result.min_coords[2]} <= z <= {result.max_coords[2]}") + logging.info( "The domain is contained in:" ) + logging.info( f"\t{result.min_coords[ 0 ]} <= x <= {result.max_coords[ 0 ]}" ) + logging.info( f"\t{result.min_coords[ 1 ]} <= y <= {result.max_coords[ 1 ]}" ) + logging.info( f"\t{result.min_coords[ 2 ]} <= z <= {result.max_coords[ 2 ]}" ) logging.info( f"Does the mesh have global point ids: {not result.is_empty_point_global_ids}" ) logging.info( f"Does the mesh have global cell ids: {not result.is_empty_cell_global_ids}" ) + space_size: int = 3 logging.info( f"There are {len(result.cell_data.scalar_names)} scalar fields on the cells:" ) for i in range(len(result.cell_data.scalar_names)): - logging.info( f"\t {result.cell_data.scalar_names[i]}" - + f" \t min = {result.cell_data.scalar_min_values[i]}" - + f" \t max = {result.cell_data.scalar_max_values[i]}" ) + logging.info( f"\t{result.cell_data.scalar_names[i]}" + + harmonious_spacing(result.cell_data.scalar_names, i, space_size) + + f"min = {result.cell_data.scalar_min_values[i]}" + " " * space_size + # + harmonious_spacing(result.cell_data.scalar_min_values, i, space_size) + + f"max = {result.cell_data.scalar_max_values[i]}" ) + logging.info( f"There are {len(result.cell_data.tensor_names)} vector/tensor fields on the 
cells:" ) for i in range(len(result.cell_data.tensor_names)): - logging.info( f"\t {result.cell_data.tensor_names[i]}" - + f" \t min = {result.cell_data.tensor_min_values[i]}" - + f" \t max = {result.cell_data.tensor_max_values[i]}" ) - + logging.info( f"\t{result.cell_data.tensor_names[i]}" + + harmonious_spacing(result.cell_data.tensor_names, i, space_size) + + f"min = {result.cell_data.tensor_min_values[i]}" + " " * space_size + # + harmonious_spacing(result.cell_data.tensor_min_values, i, space_size) + + f"max = {result.cell_data.tensor_max_values[i]}" ) + logging.info( f"There are {len(result.point_data.scalar_names)} scalar fields on the points:" ) for i in range(len(result.point_data.scalar_names)): - logging.info( f"\t {result.point_data.scalar_names[i]}" - + f" \t min = {result.point_data.scalar_min_values[i]}" - + f" \t max = {result.point_data.scalar_max_values[i]}" ) + logging.info( f"\t{result.point_data.scalar_names[i]}" + + harmonious_spacing(result.point_data.scalar_names, i, space_size) + + f"min = {result.point_data.scalar_min_values[i]}" + " " * space_size + # + harmonious_spacing(result.point_data.scalar_min_values, i, space_size) + + f"max = {result.point_data.scalar_max_values[i]}" ) + logging.info( f"There are {len(result.point_data.tensor_names)} vector/tensor fields on the points:" ) for i in range(len(result.point_data.tensor_names)): - logging.info( f"\t {result.point_data.tensor_names[i]}" - + f" \t min = {result.point_data.tensor_min_values[i]}" - + f" \t max = {result.point_data.tensor_max_values[i]}" ) + logging.info( f"\t{result.point_data.tensor_names[i]}" + + harmonious_spacing(result.point_data.tensor_names, i, space_size) + + f"min = {result.point_data.tensor_min_values[i]}" + " " * space_size + # + harmonious_spacing(result.point_data.tensor_min_values, i, space_size) + + f" max = {result.point_data.tensor_max_values[i]}" ) + +def harmonious_spacing( iterable_objs: Iterable[ Iterable ], indexIter: int, space_size: int=3 ) -> 
str: + longest_element: Iterable = max( iterable_objs, key=len ) + ideal_space: int = len( longest_element ) - len( iterable_objs[ indexIter ] ) + space_size + return " " * ideal_space \ No newline at end of file From cde3385d9eeb4ce98aa2d59fe1c096295ca76f0e Mon Sep 17 00:00:00 2001 From: alexbenedicto Date: Tue, 13 Aug 2024 11:47:22 -0500 Subject: [PATCH 05/34] Fields range of values are now being checked by mesh_stats --- .../src/geos/mesh/doctor/checks/mesh_stats.py | 97 +++++++++++++++++-- .../mesh/doctor/parsing/mesh_stats_parsing.py | 88 +++++++++-------- 2 files changed, 140 insertions(+), 45 deletions(-) diff --git a/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py b/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py index 1fc1558..2a9e02d 100644 --- a/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py +++ b/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py @@ -2,6 +2,7 @@ import numpy as np import numpy.typing as npt from dataclasses import dataclass +from enum import Enum from vtkmodules.util.numpy_support import ( vtk_to_numpy, ) @@ -41,6 +42,51 @@ class Result: is_empty_cell_global_ids: bool point_data: MeshComponentData cell_data: MeshComponentData + fields_validity_point_data: dict[ str, dict[ str, bool ] ] + fields_validity_cell_data: dict[ str, dict[ str, bool ] ] + +class MIN_FIELD( float, Enum ): # SI Units + PORO = 0.0 + PERM = 0.0 + FLUIDCOMP = 0.0 + PRESSURE = 0.0 + BHP = 0.0 + TEMPERATURE = 0.0 + DENSITY = 0.0 + COMPRESSIBILITY = 0.0 + VISCOSITY = 0.0 + NTG = 0.0 + BULKMOD = 0.0 + SHEARMOD = 0.0 + +class MAX_FIELD( float, Enum ): # SI Units + PORO = 1.0 + PERM = 1.0 + FLUIDCOMP = 1.0 + PRESSURE = 1.0e9 + BHP = 1.0e9 + TEMPERATURE = 2.0e3 + DENSITY = 2.5e4 + COMPRESSIBILITY = 1.0e-4 + VISCOSITY = 1.0e24 + NTG = 1.0 + BULKMOD = 1.0e12 + SHEARMOD = 1.0e12 + + +def associate_min_max_field_values() -> dict[ str, tuple[ float ] ]: + """Using MIN_FIELD and MAX_FIELD, associate the min and max value reachable for a + property in GEOS to a 
property tag such as poro, perm etc... + + Returns: + dict[ str, tuple[ float ] ]: { poro: (min_value, max_value), perm: (min_value, max_value), ... } + """ + assoc_min_max_field_values: dict[ str, tuple[ float ] ] = {} + for name in MIN_FIELD.__members__: + mini = MIN_FIELD[ name ] + maxi = MAX_FIELD[ name ] + assoc_min_max_field_values[ name.lower() ] = ( mini.value, maxi.value ) + return assoc_min_max_field_values def get_cell_types_and_counts( @@ -88,7 +134,7 @@ def get_number_cells_per_nodes( mesh: vtkUnstructuredGrid ) -> dict[ int, int ]: number_cells_per_nodes[ point_id ] = 0 for cell_id in range( mesh.GetNumberOfCells() ): cell = mesh.GetCell( cell_id ) - for v in range(cell.GetNumberOfPoints()): + for v in range( cell.GetNumberOfPoints() ): point_id = cell.GetPointId( v ) number_cells_per_nodes[ point_id ] += 1 return number_cells_per_nodes @@ -107,9 +153,9 @@ def summary_number_cells_per_nodes( unique_number_cells = set( [ value for value in number_cells_per_nodes.values() ] ) summary: dict[ int, int ] = {} for unique_number in unique_number_cells: - summary[unique_number] = 0 + summary[ unique_number ] = 0 for number_cells in number_cells_per_nodes.values(): - summary[number_cells] += 1 + summary[ number_cells ] += 1 return summary @@ -169,8 +215,8 @@ def build_MeshComponentData( scalar_min_values.append( data_np_array.min() ) else: tensor_names.append( data_array_name ) - tensor_max_values.append( data_np_array.max(axis=0) ) - tensor_min_values.append( data_np_array.min(axis=0) ) + tensor_max_values.append( data_np_array.max( axis=0 ) ) + tensor_min_values.append( data_np_array.min( axis=0 ) ) return MeshComponentData( componentType=componentType, scalar_names=scalar_names, scalar_min_values=scalar_min_values, scalar_max_values=scalar_max_values, @@ -178,6 +224,42 @@ def build_MeshComponentData( tensor_max_values=tensor_max_values ) +def field_values_validity( mcdata: MeshComponentData ) -> dict[ str, tuple [ bool, tuple[ float] ] ]: + """Check that 
for every min and max values found in the scalar and tensor fields, + none of these values is out of bounds. If the value is out of bound, False validity flag + is given to the field, True if no problem. + + Args: + mcdata (MeshComponentData): Object that gathers data regarding a mesh component. + + Returns: + dict[ str, bool ]: {poro: True, perm: False, ...} + """ + field_values_validity: dict[ str, tuple [ bool, tuple[ float] ] ] = {} + assoc_min_max_field: dict[ str, tuple[ float ] ] = associate_min_max_field_values() + logging.info( f"assoc_min_max_field : {assoc_min_max_field}") + # for scalar values + for i in range( len( mcdata.scalar_names ) ): + for field_param, min_max in assoc_min_max_field.items(): + field_values_validity[ mcdata.scalar_names[ i ] ] = ( True, min_max ) + if field_param in mcdata.scalar_names[ i ].lower(): + if mcdata.scalar_min_values[ i ] < min_max[ 0 ] or mcdata.scalar_max_values[ i ] > min_max[ 1 ]: + field_values_validity[ mcdata.scalar_names[ i ] ] = ( False, min_max ) + del assoc_min_max_field[field_param] + break + # for tensor values + for i in range( len( mcdata.tensor_names ) ): + for field_param, min_max in assoc_min_max_field.items(): + field_values_validity[ mcdata.tensor_names[ i ] ] = ( True, min_max ) + if field_param in mcdata.tensor_names[ i ].lower(): + for sub_value_min, sub_value_max in zip( mcdata.tensor_min_values[ i ], mcdata.tensor_max_values[ i ] ): + if sub_value_min < min_max[ 0 ] or sub_value_max > min_max[ 1 ]: + field_values_validity[ mcdata.tensor_names[ i ] ] = ( False, min_max ) + del assoc_min_max_field[field_param] + break + return field_values_validity + + def __check( mesh: vtkUnstructuredGrid, options: Options ) -> Result: number_points: int = mesh.GetNumberOfPoints() cells_info = get_cell_types_and_counts( mesh ) @@ -192,13 +274,16 @@ def __check( mesh: vtkUnstructuredGrid, options: Options ) -> Result: cell_ids: bool = not bool( mesh.GetCellData().GetGlobalIds() ) point_data: MeshComponentData = 
build_MeshComponentData( mesh, "point" ) cell_data: MeshComponentData = build_MeshComponentData( mesh, "cell" ) + fields_validity_point_data: dict[ str, tuple [ bool, tuple[ float] ] ] = field_values_validity( point_data ) + fields_validity_cell_data: dict[ str, tuple [ bool, tuple[ float] ] ] = field_values_validity( cell_data ) return Result( number_points=number_points, number_cells=number_cells, number_cell_types=number_cell_types, cell_types=cell_types, cell_type_counts=cell_type_counts, sum_number_cells_per_nodes=sum_number_cells_per_nodes, min_coords=min_coords, max_coords=max_coords, is_empty_point_global_ids=point_ids, is_empty_cell_global_ids=cell_ids, - point_data=point_data, cell_data=cell_data ) + point_data=point_data, cell_data=cell_data, + fields_validity_point_data=fields_validity_point_data, fields_validity_cell_data=fields_validity_cell_data ) def check( vtk_input_file: str, options: Options ) -> Result: diff --git a/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py b/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py index 616bdc1..7747763 100644 --- a/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py +++ b/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py @@ -15,56 +15,66 @@ def convert( parsed_options ) -> Options: return Options( info="test" ) def display_results( options: Options, result: Result ): - logging.info( f"The mesh has {result.number_cells} cells and {result.number_points} points." ) - logging.info( f"There are {result.number_cell_types} different types of cell in the mesh:" ) + logging.critical( f"The mesh has {result.number_cells} cells and {result.number_points} points." 
) + logging.critical( f"There are {result.number_cell_types} different types of cell in the mesh:" ) for i in range(result.number_cell_types): - logging.info( f"\t{result.cell_types[i]}\t({result.cell_type_counts[i]} cells)" ) + logging.critical( f"\t{result.cell_types[ i ]}\t({result.cell_type_counts[ i ]} cells)" ) - logging.info( "Number of nodes being shared between exactly N cells:" ) - logging.info( "\tCells\tNodes" ) + logging.critical( "Number of nodes being shared between exactly N cells:" ) + logging.critical( "\tCells\tNodes" ) for number_cells_per_node, number_of_occurences in result.sum_number_cells_per_nodes.items(): - logging.info( f"\t{number_cells_per_node}\t{number_of_occurences}" ) + logging.critical( f"\t{number_cells_per_node}\t{number_of_occurences}" ) - logging.info( "The domain is contained in:" ) - logging.info( f"\t{result.min_coords[ 0 ]} <= x <= {result.max_coords[ 0 ]}" ) - logging.info( f"\t{result.min_coords[ 1 ]} <= y <= {result.max_coords[ 1 ]}" ) - logging.info( f"\t{result.min_coords[ 2 ]} <= z <= {result.max_coords[ 2 ]}" ) + logging.critical( "The domain is contained in:" ) + logging.critical( f"\t{result.min_coords[ 0 ]} <= x <= {result.max_coords[ 0 ]}" ) + logging.critical( f"\t{result.min_coords[ 1 ]} <= y <= {result.max_coords[ 1 ]}" ) + logging.critical( f"\t{result.min_coords[ 2 ]} <= z <= {result.max_coords[ 2 ]}" ) - logging.info( f"Does the mesh have global point ids: {not result.is_empty_point_global_ids}" ) - logging.info( f"Does the mesh have global cell ids: {not result.is_empty_cell_global_ids}" ) + logging.critical( f"Does the mesh have global point ids: {not result.is_empty_point_global_ids}" ) + logging.critical( f"Does the mesh have global cell ids: {not result.is_empty_cell_global_ids}" ) space_size: int = 3 - logging.info( f"There are {len(result.cell_data.scalar_names)} scalar fields on the cells:" ) - for i in range(len(result.cell_data.scalar_names)): - logging.info( 
f"\t{result.cell_data.scalar_names[i]}" - + harmonious_spacing(result.cell_data.scalar_names, i, space_size) - + f"min = {result.cell_data.scalar_min_values[i]}" + " " * space_size - # + harmonious_spacing(result.cell_data.scalar_min_values, i, space_size) - + f"max = {result.cell_data.scalar_max_values[i]}" ) + logging.critical( f"There are {len( result.cell_data.scalar_names )} scalar fields on the cells:" ) + for i in range( len( result.cell_data.scalar_names ) ): + logging.critical( f"\t{result.cell_data.scalar_names[i]}" + + harmonious_spacing( result.cell_data.scalar_names, i, space_size ) + + f"min = {result.cell_data.scalar_min_values[ i ]}" + " " * space_size + + f"max = {result.cell_data.scalar_max_values[ i ]}" ) - logging.info( f"There are {len(result.cell_data.tensor_names)} vector/tensor fields on the cells:" ) - for i in range(len(result.cell_data.tensor_names)): - logging.info( f"\t{result.cell_data.tensor_names[i]}" - + harmonious_spacing(result.cell_data.tensor_names, i, space_size) - + f"min = {result.cell_data.tensor_min_values[i]}" + " " * space_size - # + harmonious_spacing(result.cell_data.tensor_min_values, i, space_size) - + f"max = {result.cell_data.tensor_max_values[i]}" ) + logging.critical( f"There are {len( result.cell_data.tensor_names )} vector/tensor fields on the cells:" ) + for i in range(len( result.cell_data.tensor_names )): + logging.critical( f"\t{result.cell_data.tensor_names[ i ]}" + + harmonious_spacing( result.cell_data.tensor_names, i, space_size ) + + f"min = {result.cell_data.tensor_min_values[ i ]}" + " " * space_size + + f"max = {result.cell_data.tensor_max_values[ i ]}" ) - logging.info( f"There are {len(result.point_data.scalar_names)} scalar fields on the points:" ) - for i in range(len(result.point_data.scalar_names)): - logging.info( f"\t{result.point_data.scalar_names[i]}" - + harmonious_spacing(result.point_data.scalar_names, i, space_size) - + f"min = {result.point_data.scalar_min_values[i]}" + " " * 
space_size - # + harmonious_spacing(result.point_data.scalar_min_values, i, space_size) - + f"max = {result.point_data.scalar_max_values[i]}" ) + logging.critical( f"There are {len( result.point_data.scalar_names )} scalar fields on the points:" ) + for i in range(len( result.point_data.scalar_names )): + logging.critical( f"\t{result.point_data.scalar_names[ i ]}" + + harmonious_spacing( result.point_data.scalar_names, i, space_size ) + + f"min = {result.point_data.scalar_min_values[ i ]}" + " " * space_size + + f"max = {result.point_data.scalar_max_values[ i ]}" ) - logging.info( f"There are {len(result.point_data.tensor_names)} vector/tensor fields on the points:" ) + logging.critical( f"There are {len( result.point_data.tensor_names )} vector/tensor fields on the points:" ) for i in range(len(result.point_data.tensor_names)): - logging.info( f"\t{result.point_data.tensor_names[i]}" - + harmonious_spacing(result.point_data.tensor_names, i, space_size) - + f"min = {result.point_data.tensor_min_values[i]}" + " " * space_size - # + harmonious_spacing(result.point_data.tensor_min_values, i, space_size) - + f" max = {result.point_data.tensor_max_values[i]}" ) + logging.critical( f"\t{result.point_data.tensor_names[ i ]}" + + harmonious_spacing( result.point_data.tensor_names, i, space_size ) + + f"min = {result.point_data.tensor_min_values[ i ]}" + " " * space_size + + f"max = {result.point_data.tensor_max_values[ i ]}" ) + + logging.warning( f"Unexpected range of values for vector/tensor fields on the cells :" ) + for field_name, validity_range in result.fields_validity_cell_data.items(): + is_valid: bool = validity_range[ 0 ] + min_max: tuple[ float ] = validity_range[ 1 ] + if not is_valid: + logging.warning( f"{field_name} expected to be between {min_max[ 0 ]} and {min_max[ 1 ]}." 
) + + logging.warning( f"Unexpected range of values for vector/tensor fields on the points :" ) + for field_name, validity_range in result.fields_validity_point_data.items(): + is_valid: bool = validity_range[ 0 ] + min_max: tuple[ float ] = validity_range[ 1 ] + if not is_valid: + logging.warning( f"{field_name} expected to be between {min_max[ 0 ]} and {min_max[ 1 ]}." ) def harmonious_spacing( iterable_objs: Iterable[ Iterable ], indexIter: int, space_size: int=3 ) -> str: longest_element: Iterable = max( iterable_objs, key=len ) From 656da06445e89f137cdae1a17fd1fa8ddac5b619 Mon Sep 17 00:00:00 2001 From: alexbenedicto Date: Wed, 14 Aug 2024 12:17:48 -0500 Subject: [PATCH 06/34] yapf formatting --- .../src/geos/mesh/doctor/checks/add_fields.py | 110 +++++++++--------- .../src/geos/mesh/doctor/checks/mesh_stats.py | 81 +++++++------ .../src/geos/mesh/doctor/checks/vtk_utils.py | 27 ++--- .../mesh/doctor/parsing/add_fields_parsing.py | 26 ++--- .../mesh/doctor/parsing/mesh_stats_parsing.py | 61 +++++----- geos-mesh/src/geos/mesh/doctor/register.py | 2 +- 6 files changed, 160 insertions(+), 147 deletions(-) diff --git a/geos-mesh/src/geos/mesh/doctor/checks/add_fields.py b/geos-mesh/src/geos/mesh/doctor/checks/add_fields.py index 9290bfd..c97379a 100644 --- a/geos-mesh/src/geos/mesh/doctor/checks/add_fields.py +++ b/geos-mesh/src/geos/mesh/doctor/checks/add_fields.py @@ -6,13 +6,15 @@ from vtkmodules.util.numpy_support import ( numpy_to_vtk, - vtk_to_numpy, ) + vtk_to_numpy, +) from vtkmodules.vtkCommonCore import ( vtkDoubleArray, ) from . 
import vtk_utils + @dataclass( frozen=True ) class Options: support: str @@ -25,43 +27,45 @@ class Options: class Result: info: bool -def __analytic_field(mesh, support, name) -> bool: + +def __analytic_field( mesh, support, name ) -> bool: if support == 'node': # example function: distance from mesh center nn = mesh.GetNumberOfPoints() - coords = vtk_to_numpy(mesh.GetPoints().GetData()) - center = (coords.max(axis=0) + coords.min(axis=0))/2 + coords = vtk_to_numpy( mesh.GetPoints().GetData() ) + center = ( coords.max( axis=0 ) + coords.min( axis=0 ) ) / 2 data_arr = vtkDoubleArray() - data_np = empty(nn) + data_np = empty( nn ) - for i in range(nn): + for i in range( nn ): val = 0 - pt = mesh.GetPoint(i) - for j in range(len(pt)): - val += (pt[j] - center[j])*(pt[j]-center[j]) - val = sqrt(val) - data_np[i] = val - - data_arr = numpy_to_vtk(data_np) - data_arr.SetName(name) - mesh.GetPointData().AddArray(data_arr) + pt = mesh.GetPoint( i ) + for j in range( len( pt ) ): + val += ( pt[ j ] - center[ j ] ) * ( pt[ j ] - center[ j ] ) + val = sqrt( val ) + data_np[ i ] = val + + data_arr = numpy_to_vtk( data_np ) + data_arr.SetName( name ) + mesh.GetPointData().AddArray( data_arr ) return True elif support == 'cell': # example function: random field ne = mesh.GetNumberOfCells() data_arr = vtkDoubleArray() - data_np = rand(ne, 1) + data_np = rand( ne, 1 ) - data_arr = numpy_to_vtk(data_np) - data_arr.SetName(name) - mesh.GetCellData().AddArray(data_arr) + data_arr = numpy_to_vtk( data_np ) + data_arr.SetName( name ) + mesh.GetCellData().AddArray( data_arr ) return True else: - logging.error('incorrect support option. Options are node, cell') + logging.error( 'incorrect support option. 
Options are node, cell' ) return False -def __compatible_meshes(dest_mesh, source_mesh) -> bool: + +def __compatible_meshes( dest_mesh, source_mesh ) -> bool: # for now, just check that meshes have same number of elements and same number of nodes # and require that each cell has same nodes, each node has same coordinate dest_ne = dest_mesh.GetNumberOfCells() @@ -70,74 +74,70 @@ def __compatible_meshes(dest_mesh, source_mesh) -> bool: source_nn = source_mesh.GetNumberOfPoints() if dest_ne != source_ne: - logging.error('meshes have different number of cells') + logging.error( 'meshes have different number of cells' ) return False if dest_nn != source_nn: - logging.error('meshes have different number of nodes') + logging.error( 'meshes have different number of nodes' ) return False - - for i in range(dest_nn): - if not ((dest_mesh.GetPoint(i)) == (source_mesh.GetPoint(i))): - logging.error('at least one node is in a different location') - return False - - for i in range(dest_ne): - if not (vtk_to_numpy(dest_mesh.GetCell(i).GetPoints().GetData()) == vtk_to_numpy(source_mesh.GetCell(i).GetPoints().GetData())).all(): - logging.error('at least one cell has different nodes') + + for i in range( dest_nn ): + if not ( ( dest_mesh.GetPoint( i ) ) == ( source_mesh.GetPoint( i ) ) ): + logging.error( 'at least one node is in a different location' ) return False - - return True - - + for i in range( dest_ne ): + if not ( vtk_to_numpy( dest_mesh.GetCell( i ).GetPoints().GetData() ) == vtk_to_numpy( + source_mesh.GetCell( i ).GetPoints().GetData() ) ).all(): + logging.error( 'at least one cell has different nodes' ) + return False + return True -def __transfer_field(mesh, support, field_name, source) -> bool: +def __transfer_field( mesh, support, field_name, source ) -> bool: from_mesh = vtk_utils.read_mesh( source ) - same_mesh = __compatible_meshes(mesh, from_mesh) + same_mesh = __compatible_meshes( mesh, from_mesh ) if not same_mesh: - logging.error('meshes are not the same') + 
logging.error( 'meshes are not the same' ) return False - + if support == 'cell': - data = from_mesh.GetCellData().GetArray(field_name) + data = from_mesh.GetCellData().GetArray( field_name ) if data == None: - logging.error('Requested field does not exist on source mesh') + logging.error( 'Requested field does not exist on source mesh' ) return False else: - mesh.GetCellData().AddArray(data) + mesh.GetCellData().AddArray( data ) elif support == 'node': - data = from_mesh.GetPointData().GetArray(field_name) + data = from_mesh.GetPointData().GetArray( field_name ) if data == None: - logging.error('Requested field does not exist on source mesh') + logging.error( 'Requested field does not exist on source mesh' ) return False else: - mesh.GetPointData().AddArray(data) + mesh.GetPointData().AddArray( data ) return False else: - logging.error('incorrect support option. Options are node, cell') + logging.error( 'incorrect support option. Options are node, cell' ) return False return True def __check( mesh, options: Options ) -> Result: if options.source == 'function': - succ =__analytic_field(mesh, options.support, options.field_name) + succ = __analytic_field( mesh, options.support, options.field_name ) if succ: vtk_utils.write_mesh( mesh, options.out_vtk ) - elif (options.source[-4:] == '.vtu' or options.source[-4:] == '.vtk'): - succ = __transfer_field(mesh, options.support, options.field_name, options.source) + elif ( options.source[ -4: ] == '.vtu' or options.source[ -4: ] == '.vtk' ): + succ = __transfer_field( mesh, options.support, options.field_name, options.source ) if succ: vtk_utils.write_mesh( mesh, options.out_vtk ) - else: - logging.error('incorrect source option. Options are function, *.vtu, *.vtk.') + else: + logging.error( 'incorrect source option. Options are function, *.vtu, *.vtk.' 
) succ = False - return Result(info=succ) + return Result( info=succ ) #TODO: Better exception handle - def check( vtk_input_file: str, options: Options ) -> Result: mesh = vtk_utils.read_mesh( vtk_input_file ) - return __check( mesh, options ) \ No newline at end of file + return __check( mesh, options ) diff --git a/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py b/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py index 2a9e02d..e23206b 100644 --- a/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py +++ b/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py @@ -5,29 +5,33 @@ from enum import Enum from vtkmodules.util.numpy_support import ( - vtk_to_numpy, ) + vtk_to_numpy, ) from vtkmodules.vtkCommonDataModel import ( vtkUnstructuredGrid, ) from . import vtk_utils + @dataclass( frozen=True ) class Options: info: str -ArrayGeneric = npt.NDArray[np.generic] + +ArrayGeneric = npt.NDArray[ np.generic ] + @dataclass( frozen=True ) class MeshComponentData: componentType: str scalar_names: list[ str ] - scalar_min_values: list[ np.generic ] # base class for all scalar types numpy + scalar_min_values: list[ np.generic ] # base class for all scalar types numpy scalar_max_values: list[ np.generic ] tensor_names: list[ str ] tensor_min_values: list[ ArrayGeneric ] tensor_max_values: list[ ArrayGeneric ] + @dataclass( frozen=True ) class Result: number_cells: int @@ -45,7 +49,8 @@ class Result: fields_validity_point_data: dict[ str, dict[ str, bool ] ] fields_validity_cell_data: dict[ str, dict[ str, bool ] ] -class MIN_FIELD( float, Enum ): # SI Units + +class MIN_FIELD( float, Enum ): # SI Units PORO = 0.0 PERM = 0.0 FLUIDCOMP = 0.0 @@ -59,7 +64,8 @@ class MIN_FIELD( float, Enum ): # SI Units BULKMOD = 0.0 SHEARMOD = 0.0 -class MAX_FIELD( float, Enum ): # SI Units + +class MAX_FIELD( float, Enum ): # SI Units PORO = 1.0 PERM = 1.0 FLUIDCOMP = 1.0 @@ -89,9 +95,7 @@ def associate_min_max_field_values() -> dict[ str, tuple[ float ] ]: return assoc_min_max_field_values 
-def get_cell_types_and_counts( - mesh: vtkUnstructuredGrid - )-> tuple[ int, int, list[ str ], list[ int ] ]: +def get_cell_types_and_counts( mesh: vtkUnstructuredGrid ) -> tuple[ int, int, list[ str ], list[ int ] ]: """From an unstructured grid, collects the number of cells, the number of cell types, the list of cell types and the counts of each cell element. @@ -140,8 +144,7 @@ def get_number_cells_per_nodes( mesh: vtkUnstructuredGrid ) -> dict[ int, int ]: return number_cells_per_nodes -def summary_number_cells_per_nodes( - number_cells_per_nodes: dict[ int, int ] ) -> dict[ int, int ]: +def summary_number_cells_per_nodes( number_cells_per_nodes: dict[ int, int ] ) -> dict[ int, int ]: """Obtain the number of nodes that have X number of cells. Args: @@ -157,7 +160,7 @@ def summary_number_cells_per_nodes( for number_cells in number_cells_per_nodes.values(): summary[ number_cells ] += 1 return summary - + def get_coords_min_max( mesh: vtkUnstructuredGrid ) -> tuple[ np.ndarray ]: """From an unstructured mesh, returns the coordinates of @@ -175,9 +178,7 @@ def get_coords_min_max( mesh: vtkUnstructuredGrid ) -> tuple[ np.ndarray ]: return ( min_coords, max_coords ) -def build_MeshComponentData( - mesh: vtkUnstructuredGrid, componentType: str = "point" - ) -> MeshComponentData: +def build_MeshComponentData( mesh: vtkUnstructuredGrid, componentType: str = "point" ) -> MeshComponentData: """Builds a MeshComponentData object for a specific component ("point", "cell") If the component type chosen is invalid, chooses "point" by default. @@ -190,7 +191,7 @@ def build_MeshComponentData( if componentType not in [ "point", "cell" ]: componentType = "point" logging.error( f"Invalid component type chosen to build MeshComponentData. Defaulted to point." 
) - + if componentType == "point": number_arrays_data: int = mesh.GetPointData().GetNumberOfArrays() else: @@ -209,22 +210,25 @@ def build_MeshComponentData( data_array = mesh.GetCellData().GetArray( i ) data_array_name = data_array.GetName() data_np_array = vtk_to_numpy( data_array ) - if data_array.GetNumberOfComponents() == 1: # assumes scalar cell data for max and min + if data_array.GetNumberOfComponents() == 1: # assumes scalar cell data for max and min scalar_names.append( data_array_name ) - scalar_max_values.append( data_np_array.max() ) + scalar_max_values.append( data_np_array.max() ) scalar_min_values.append( data_np_array.min() ) else: tensor_names.append( data_array_name ) tensor_max_values.append( data_np_array.max( axis=0 ) ) tensor_min_values.append( data_np_array.min( axis=0 ) ) - return MeshComponentData( componentType=componentType, scalar_names=scalar_names, - scalar_min_values=scalar_min_values, scalar_max_values=scalar_max_values, - tensor_names=tensor_names, tensor_min_values=tensor_min_values, - tensor_max_values=tensor_max_values ) + return MeshComponentData( componentType=componentType, + scalar_names=scalar_names, + scalar_min_values=scalar_min_values, + scalar_max_values=scalar_max_values, + tensor_names=tensor_names, + tensor_min_values=tensor_min_values, + tensor_max_values=tensor_max_values ) -def field_values_validity( mcdata: MeshComponentData ) -> dict[ str, tuple [ bool, tuple[ float] ] ]: +def field_values_validity( mcdata: MeshComponentData ) -> dict[ str, tuple[ bool, tuple[ float ] ] ]: """Check that for every min and max values found in the scalar and tensor fields, none of these values is out of bounds. If the value is out of bound, False validity flag is given to the field, True if no problem. 
@@ -235,9 +239,9 @@ def field_values_validity( mcdata: MeshComponentData ) -> dict[ str, tuple [ boo Returns: dict[ str, bool ]: {poro: True, perm: False, ...} """ - field_values_validity: dict[ str, tuple [ bool, tuple[ float] ] ] = {} + field_values_validity: dict[ str, tuple[ bool, tuple[ float ] ] ] = {} assoc_min_max_field: dict[ str, tuple[ float ] ] = associate_min_max_field_values() - logging.info( f"assoc_min_max_field : {assoc_min_max_field}") + logging.info( f"assoc_min_max_field : {assoc_min_max_field}" ) # for scalar values for i in range( len( mcdata.scalar_names ) ): for field_param, min_max in assoc_min_max_field.items(): @@ -245,7 +249,7 @@ def field_values_validity( mcdata: MeshComponentData ) -> dict[ str, tuple [ boo if field_param in mcdata.scalar_names[ i ].lower(): if mcdata.scalar_min_values[ i ] < min_max[ 0 ] or mcdata.scalar_max_values[ i ] > min_max[ 1 ]: field_values_validity[ mcdata.scalar_names[ i ] ] = ( False, min_max ) - del assoc_min_max_field[field_param] + del assoc_min_max_field[ field_param ] break # for tensor values for i in range( len( mcdata.tensor_names ) ): @@ -255,7 +259,7 @@ def field_values_validity( mcdata: MeshComponentData ) -> dict[ str, tuple [ boo for sub_value_min, sub_value_max in zip( mcdata.tensor_min_values[ i ], mcdata.tensor_max_values[ i ] ): if sub_value_min < min_max[ 0 ] or sub_value_max > min_max[ 1 ]: field_values_validity[ mcdata.tensor_names[ i ] ] = ( False, min_max ) - del assoc_min_max_field[field_param] + del assoc_min_max_field[ field_param ] break return field_values_validity @@ -274,16 +278,23 @@ def __check( mesh: vtkUnstructuredGrid, options: Options ) -> Result: cell_ids: bool = not bool( mesh.GetCellData().GetGlobalIds() ) point_data: MeshComponentData = build_MeshComponentData( mesh, "point" ) cell_data: MeshComponentData = build_MeshComponentData( mesh, "cell" ) - fields_validity_point_data: dict[ str, tuple [ bool, tuple[ float] ] ] = field_values_validity( point_data ) - 
fields_validity_cell_data: dict[ str, tuple [ bool, tuple[ float] ] ] = field_values_validity( cell_data ) - - return Result( number_points=number_points, number_cells=number_cells, number_cell_types=number_cell_types, - cell_types=cell_types, cell_type_counts=cell_type_counts, + fields_validity_point_data: dict[ str, tuple[ bool, tuple[ float ] ] ] = field_values_validity( point_data ) + fields_validity_cell_data: dict[ str, tuple[ bool, tuple[ float ] ] ] = field_values_validity( cell_data ) + + return Result( number_points=number_points, + number_cells=number_cells, + number_cell_types=number_cell_types, + cell_types=cell_types, + cell_type_counts=cell_type_counts, sum_number_cells_per_nodes=sum_number_cells_per_nodes, - min_coords=min_coords, max_coords=max_coords, - is_empty_point_global_ids=point_ids, is_empty_cell_global_ids=cell_ids, - point_data=point_data, cell_data=cell_data, - fields_validity_point_data=fields_validity_point_data, fields_validity_cell_data=fields_validity_cell_data ) + min_coords=min_coords, + max_coords=max_coords, + is_empty_point_global_ids=point_ids, + is_empty_cell_global_ids=cell_ids, + point_data=point_data, + cell_data=cell_data, + fields_validity_point_data=fields_validity_point_data, + fields_validity_cell_data=fields_validity_cell_data ) def check( vtk_input_file: str, options: Options ) -> Result: diff --git a/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py b/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py index 6830b95..c3af155 100644 --- a/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py +++ b/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py @@ -154,31 +154,32 @@ def write_mesh( mesh: vtkUnstructuredGrid, vtk_output: VtkOutput ) -> int: sys.exit( 1 ) return 0 if success_code else 2 # the Write member function return 1 in case of success, 0 otherwise. 
-def vtkid_to_string(id: int) -> str: + +def vtkid_to_string( id: int ) -> str: match id: - case 1: # VTK_VERTEX + case 1: # VTK_VERTEX return 'Vertex' - case 3: #VTK_LINE + case 3: #VTK_LINE return 'Line' - case 5: #VTK_TRIANGLE + case 5: #VTK_TRIANGLE return 'Triangle' - case 8: #VTK_PIXEL + case 8: #VTK_PIXEL return 'Pixel' - case 9: #VTK_QUAD + case 9: #VTK_QUAD return 'Quad' - case 10: #VTK_TETRA + case 10: #VTK_TETRA return 'Tetra' - case 11: #VTK_VOXEL + case 11: #VTK_VOXEL return 'Voxel' - case 12: #VTK_HEXAHEDRON + case 12: #VTK_HEXAHEDRON return 'Hex' - case 13: #VTK_WEDGE + case 13: #VTK_WEDGE return 'Wedge' - case 14: #VTK_PYRAMID + case 14: #VTK_PYRAMID return 'Pyramid' - case 15: #VTK_PENTAGONAL_PRISM + case 15: #VTK_PENTAGONAL_PRISM return 'Pentagonal prism' - case 16: #VTK_HEXAGONAL_PRISM + case 16: #VTK_HEXAGONAL_PRISM return 'Hexagonal Prism' case _: return 'Unknown type' diff --git a/geos-mesh/src/geos/mesh/doctor/parsing/add_fields_parsing.py b/geos-mesh/src/geos/mesh/doctor/parsing/add_fields_parsing.py index d10d37f..8b50d13 100644 --- a/geos-mesh/src/geos/mesh/doctor/parsing/add_fields_parsing.py +++ b/geos-mesh/src/geos/mesh/doctor/parsing/add_fields_parsing.py @@ -2,35 +2,33 @@ from checks.add_fields import Options, Result - from . import vtk_output_parsing, ADD_FIELDS __SUPPORT = "support" __NAME = "name" __SOURCE = "source" + def fill_subparser( subparsers ) -> None: - p = subparsers.add_parser( ADD_FIELDS, - help=f"Add cell or point data to a mesh." ) - p.add_argument( '--' + __SUPPORT, - type=str, - required=True, - help=f"[string]: Where to define field (point/cell)." ) - p.add_argument( '--' + __NAME, - type=str, - required=True, - help=f"[string]: Name of the field to add." ) + p = subparsers.add_parser( ADD_FIELDS, help=f"Add cell or point data to a mesh." ) + p.add_argument( '--' + __SUPPORT, type=str, required=True, help=f"[string]: Where to define field (point/cell)." 
) + p.add_argument( '--' + __NAME, type=str, required=True, help=f"[string]: Name of the field to add." ) p.add_argument( '--' + __SOURCE, type=str, required=True, help=f"[string]: Where field data to add comes from (function, mesh)." ) vtk_output_parsing.fill_vtk_output_subparser( p ) - + + def convert( parsed_options ) -> Options: """ """ - return Options( support=parsed_options[__SUPPORT], field_name=parsed_options[__NAME], source=parsed_options[__SOURCE], out_vtk=vtk_output_parsing.convert( parsed_options ) ) + return Options( support=parsed_options[ __SUPPORT ], + field_name=parsed_options[ __NAME ], + source=parsed_options[ __SOURCE ], + out_vtk=vtk_output_parsing.convert( parsed_options ) ) + def display_results( options: Options, result: Result ): if result.info != True: - logging.error( f"Field addition failed" ) \ No newline at end of file + logging.error( f"Field addition failed" ) diff --git a/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py b/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py index 7747763..8714dbd 100644 --- a/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py +++ b/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py @@ -5,26 +5,28 @@ from . import MESH_STATS + def fill_subparser( subparsers ) -> None: - p = subparsers.add_parser( MESH_STATS, - help=f"Outputs basic properties of a mesh." ) - + p = subparsers.add_parser( MESH_STATS, help=f"Outputs basic properties of a mesh." ) + + def convert( parsed_options ) -> Options: """ """ return Options( info="test" ) - + + def display_results( options: Options, result: Result ): logging.critical( f"The mesh has {result.number_cells} cells and {result.number_points} points." 
) logging.critical( f"There are {result.number_cell_types} different types of cell in the mesh:" ) - for i in range(result.number_cell_types): + for i in range( result.number_cell_types ): logging.critical( f"\t{result.cell_types[ i ]}\t({result.cell_type_counts[ i ]} cells)" ) logging.critical( "Number of nodes being shared between exactly N cells:" ) logging.critical( "\tCells\tNodes" ) for number_cells_per_node, number_of_occurences in result.sum_number_cells_per_nodes.items(): logging.critical( f"\t{number_cells_per_node}\t{number_of_occurences}" ) - + logging.critical( "The domain is contained in:" ) logging.critical( f"\t{result.min_coords[ 0 ]} <= x <= {result.max_coords[ 0 ]}" ) logging.critical( f"\t{result.min_coords[ 1 ]} <= y <= {result.max_coords[ 1 ]}" ) @@ -36,32 +38,32 @@ def display_results( options: Options, result: Result ): space_size: int = 3 logging.critical( f"There are {len( result.cell_data.scalar_names )} scalar fields on the cells:" ) for i in range( len( result.cell_data.scalar_names ) ): - logging.critical( f"\t{result.cell_data.scalar_names[i]}" - + harmonious_spacing( result.cell_data.scalar_names, i, space_size ) - + f"min = {result.cell_data.scalar_min_values[ i ]}" + " " * space_size - + f"max = {result.cell_data.scalar_max_values[ i ]}" ) + logging.critical( f"\t{result.cell_data.scalar_names[i]}" + + harmonious_spacing( result.cell_data.scalar_names, i, space_size ) + + f"min = {result.cell_data.scalar_min_values[ i ]}" + " " * space_size + + f"max = {result.cell_data.scalar_max_values[ i ]}" ) logging.critical( f"There are {len( result.cell_data.tensor_names )} vector/tensor fields on the cells:" ) - for i in range(len( result.cell_data.tensor_names )): - logging.critical( f"\t{result.cell_data.tensor_names[ i ]}" - + harmonious_spacing( result.cell_data.tensor_names, i, space_size ) - + f"min = {result.cell_data.tensor_min_values[ i ]}" + " " * space_size - + f"max = {result.cell_data.tensor_max_values[ i ]}" ) + for i in 
range( len( result.cell_data.tensor_names ) ): + logging.critical( f"\t{result.cell_data.tensor_names[ i ]}" + + harmonious_spacing( result.cell_data.tensor_names, i, space_size ) + + f"min = {result.cell_data.tensor_min_values[ i ]}" + " " * space_size + + f"max = {result.cell_data.tensor_max_values[ i ]}" ) logging.critical( f"There are {len( result.point_data.scalar_names )} scalar fields on the points:" ) - for i in range(len( result.point_data.scalar_names )): - logging.critical( f"\t{result.point_data.scalar_names[ i ]}" - + harmonious_spacing( result.point_data.scalar_names, i, space_size ) - + f"min = {result.point_data.scalar_min_values[ i ]}" + " " * space_size - + f"max = {result.point_data.scalar_max_values[ i ]}" ) - + for i in range( len( result.point_data.scalar_names ) ): + logging.critical( f"\t{result.point_data.scalar_names[ i ]}" + + harmonious_spacing( result.point_data.scalar_names, i, space_size ) + + f"min = {result.point_data.scalar_min_values[ i ]}" + " " * space_size + + f"max = {result.point_data.scalar_max_values[ i ]}" ) + logging.critical( f"There are {len( result.point_data.tensor_names )} vector/tensor fields on the points:" ) - for i in range(len(result.point_data.tensor_names)): - logging.critical( f"\t{result.point_data.tensor_names[ i ]}" - + harmonious_spacing( result.point_data.tensor_names, i, space_size ) - + f"min = {result.point_data.tensor_min_values[ i ]}" + " " * space_size - + f"max = {result.point_data.tensor_max_values[ i ]}" ) - + for i in range( len( result.point_data.tensor_names ) ): + logging.critical( f"\t{result.point_data.tensor_names[ i ]}" + + harmonious_spacing( result.point_data.tensor_names, i, space_size ) + + f"min = {result.point_data.tensor_min_values[ i ]}" + " " * space_size + + f"max = {result.point_data.tensor_max_values[ i ]}" ) + logging.warning( f"Unexpected range of values for vector/tensor fields on the cells :" ) for field_name, validity_range in result.fields_validity_cell_data.items(): 
is_valid: bool = validity_range[ 0 ] @@ -76,7 +78,8 @@ def display_results( options: Options, result: Result ): if not is_valid: logging.warning( f"{field_name} expected to be between {min_max[ 0 ]} and {min_max[ 1 ]}." ) -def harmonious_spacing( iterable_objs: Iterable[ Iterable ], indexIter: int, space_size: int=3 ) -> str: + +def harmonious_spacing( iterable_objs: Iterable[ Iterable ], indexIter: int, space_size: int = 3 ) -> str: longest_element: Iterable = max( iterable_objs, key=len ) ideal_space: int = len( longest_element ) - len( iterable_objs[ indexIter ] ) + space_size - return " " * ideal_space \ No newline at end of file + return " " * ideal_space diff --git a/geos-mesh/src/geos/mesh/doctor/register.py b/geos-mesh/src/geos/mesh/doctor/register.py index 852a8d1..b6c8da9 100644 --- a/geos-mesh/src/geos/mesh/doctor/register.py +++ b/geos-mesh/src/geos/mesh/doctor/register.py @@ -55,7 +55,7 @@ def closure_trick( cn: str ): # Register the modules to load here. for check_name in ( parsing.COLLOCATES_NODES, parsing.ELEMENT_VOLUMES, parsing.FIX_ELEMENTS_ORDERINGS, parsing.GENERATE_CUBE, parsing.GENERATE_FRACTURES, parsing.GENERATE_GLOBAL_IDS, - parsing.NON_CONFORMAL, parsing.SELF_INTERSECTING_ELEMENTS, parsing.SUPPORTED_ELEMENTS, + parsing.NON_CONFORMAL, parsing.SELF_INTERSECTING_ELEMENTS, parsing.SUPPORTED_ELEMENTS, parsing.MESH_STATS, parsing.ADD_FIELDS ): closure_trick( check_name ) loaded_checks: Dict[ str, Callable[ [ str, Any ], Any ] ] = __load_checks() From 4e3479496504024fc5363dc5a47e1c3418c649dd Mon Sep 17 00:00:00 2001 From: alexbenedicto Date: Thu, 15 Aug 2024 15:42:24 -0500 Subject: [PATCH 07/34] Check for disconnected nodes added --- .../src/geos/mesh/doctor/checks/mesh_stats.py | 56 ++++++++++++++++--- 1 file changed, 49 insertions(+), 7 deletions(-) diff --git a/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py b/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py index e23206b..1927ab6 100644 --- 
a/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py +++ b/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py @@ -151,7 +151,7 @@ def summary_number_cells_per_nodes( number_cells_per_nodes: dict[ int, int ] ) - number_cells_per_nodes (dict[ int, int ]): { point_id0: 8, ..., point_idN: 4 } Returns: - dict[ int, int ]: Number of ce + dict[ int, int ]: Connected to N cells as key, Number of nodes concerned as value """ unique_number_cells = set( [ value for value in number_cells_per_nodes.values() ] ) summary: dict[ int, int ] = {} @@ -237,33 +237,75 @@ def field_values_validity( mcdata: MeshComponentData ) -> dict[ str, tuple[ bool mcdata (MeshComponentData): Object that gathers data regarding a mesh component. Returns: - dict[ str, bool ]: {poro: True, perm: False, ...} + dict[ str, bool ]: {poro: (True, Min_Max_poro), perm: (False, Min_Max_perm), ...} """ field_values_validity: dict[ str, tuple[ bool, tuple[ float ] ] ] = {} assoc_min_max_field: dict[ str, tuple[ float ] ] = associate_min_max_field_values() - logging.info( f"assoc_min_max_field : {assoc_min_max_field}" ) # for scalar values for i in range( len( mcdata.scalar_names ) ): for field_param, min_max in assoc_min_max_field.items(): - field_values_validity[ mcdata.scalar_names[ i ] ] = ( True, min_max ) if field_param in mcdata.scalar_names[ i ].lower(): + field_values_validity[ mcdata.scalar_names[ i ] ] = ( True, min_max ) if mcdata.scalar_min_values[ i ] < min_max[ 0 ] or mcdata.scalar_max_values[ i ] > min_max[ 1 ]: field_values_validity[ mcdata.scalar_names[ i ] ] = ( False, min_max ) - del assoc_min_max_field[ field_param ] break # for tensor values for i in range( len( mcdata.tensor_names ) ): for field_param, min_max in assoc_min_max_field.items(): - field_values_validity[ mcdata.tensor_names[ i ] ] = ( True, min_max ) if field_param in mcdata.tensor_names[ i ].lower(): + field_values_validity[ mcdata.tensor_names[ i ] ] = ( True, min_max ) for sub_value_min, sub_value_max in zip( 
mcdata.tensor_min_values[ i ], mcdata.tensor_max_values[ i ] ): if sub_value_min < min_max[ 0 ] or sub_value_max > min_max[ 1 ]: field_values_validity[ mcdata.tensor_names[ i ] ] = ( False, min_max ) - del assoc_min_max_field[ field_param ] + break break return field_values_validity +def get_disconnected_nodes_id( mesh: vtkUnstructuredGrid ) -> list[ int ]: + """Checks the nodes of the mesh to see if they are disconnected. + If a node does not appear in connectivity graph, we can assume that it is disconnected. + Returns the list of node ids that are disconnected. + + Args: + mesh (vtkUnstructuredGrid): An unstructured grid. + + Returns: + list[ int ]: [nodeId0, nodeId23, ..., nodeIdM] + """ + disconnected_nodes_id: list[ int ] = [] + connectivity = mesh.GetCells().GetConnectivityArray() + connectivity_unique_points: set = set() + for i in range( connectivity.GetNumberOfValues() ): + connectivity_unique_points.add( connectivity.GetValue( i ) ) + for v in range( mesh.GetNumberOfPoints() ): + if v in connectivity_unique_points: + connectivity_unique_points.remove( v ) + else: + disconnected_nodes_id.append( v ) + return disconnected_nodes_id + + +def get_disconnected_nodes_coords( mesh: vtkUnstructuredGrid ) -> dict[ int, tuple[ float ] ]: + """Checks the nodes of the mesh to see if they are disconnected. + If a node does not appear in connectivity graph, we can assume that it is disconnected. + Returns a dict zhere the keys are the node id of disconnected nodes and the values are their coordinates. + + Args: + mesh (vtkUnstructuredGrid): An unstructured grid. 
+ + Returns: + dict[ int, tuple[ float ] ]: {nodeId0: (x0, y0, z0), nodeId23: (x23, y23, z23), ..., nodeIdM: (xM, yM, zM)] + """ + disconnected_nodes_id: list[ int ] = get_disconnected_nodes_id( mesh ) + disconnected_nodes_coords: dict[ int, tuple[ float ] ] = {} + points = mesh.GetPoints() + for node_id in disconnected_nodes_id: + node_coords: tuple[ float ] = points.GetPoint( node_id ) + disconnected_nodes_coords[ node_id ] = node_coords + return disconnected_nodes_coords + + def __check( mesh: vtkUnstructuredGrid, options: Options ) -> Result: number_points: int = mesh.GetNumberOfPoints() cells_info = get_cell_types_and_counts( mesh ) From 50a0ee901ea750e88c5280d7daeabbd29c966491 Mon Sep 17 00:00:00 2001 From: alexbenedicto Date: Thu, 15 Aug 2024 15:42:41 -0500 Subject: [PATCH 08/34] Test file created for mesh stats --- geos-mesh/tests/test_mesh_stats.py | 148 +++++++++++++++++++++++++++++ 1 file changed, 148 insertions(+) create mode 100644 geos-mesh/tests/test_mesh_stats.py diff --git a/geos-mesh/tests/test_mesh_stats.py b/geos-mesh/tests/test_mesh_stats.py new file mode 100644 index 0000000..3b5a69e --- /dev/null +++ b/geos-mesh/tests/test_mesh_stats.py @@ -0,0 +1,148 @@ +import numpy as np + +from geos.mesh.doctor.checks import mesh_stats as ms +from geos.mesh.doctor.checks.generate_cube import Options, FieldInfo, __build +from geos.mesh.doctor.checks.vtk_utils import VtkOutput +from vtkmodules.vtkCommonDataModel import vtkUnstructuredGrid +from vtkmodules.util.numpy_support import numpy_to_vtk + +# First mesh: no anomalies to look for +out: VtkOutput = VtkOutput( "test", False ) +field0: FieldInfo = FieldInfo( "scalar_cells", 1, "CELLS" ) +field1: FieldInfo = FieldInfo( "tensor_cells", 3, "CELLS" ) +field2: FieldInfo = FieldInfo( "scalar_points", 1, "POINTS" ) +field3: FieldInfo = FieldInfo( "tensor_points", 3, "POINTS" ) +options_cube0: Options = Options( vtk_output=out, + generate_cells_global_ids=True, + generate_points_global_ids=True, + xs=np.array( [ 
0.0, 1.0, 2.0 ] ), + ys=np.array( [ 0.0, 1.0, 2.0 ] ), + zs=np.array( [ 0.0, 1.0, 2.0 ] ), + nxs=[ 1, 1 ], + nys=[ 1, 1 ], + nzs=[ 1, 1 ], + fields=[ field0, field1, field2, field3 ] ) +cube0: vtkUnstructuredGrid = __build( options_cube0 ) + +# Second mesh: disconnected nodes are added +cube1: vtkUnstructuredGrid = __build( options_cube0 ) +cube1.GetPoints().InsertNextPoint( ( 3.0, 0.0, 0.0 ) ) +cube1.GetPoints().InsertNextPoint( ( 3.0, 1.0, 0.0 ) ) +cube1.GetPoints().InsertNextPoint( ( 3.0, 2.0, 0.0 ) ) + +# Third mesh: fields with invalid ranges of values are added +field_poro: FieldInfo = FieldInfo( "POROSITY", 1, "CELLS" ) +field_perm: FieldInfo = FieldInfo( "PERMEABILITY", 3, "CELLS" ) +field_density: FieldInfo = FieldInfo( "DENSITY", 1, "CELLS" ) +field_temp: FieldInfo = FieldInfo( "TEMPERATURE", 1, "POINTS" ) +field_pressure: FieldInfo = FieldInfo( "PRESSURE", 3, "POINTS" ) +options_cube2: Options = Options( vtk_output=out, + generate_cells_global_ids=True, + generate_points_global_ids=True, + xs=np.array( [ 0.0, 1.0, 2.0 ] ), + ys=np.array( [ 0.0, 1.0, 2.0 ] ), + zs=np.array( [ 0.0, 1.0, 2.0 ] ), + nxs=[ 1, 1 ], + nys=[ 1, 1 ], + nzs=[ 1, 1 ], + fields=[ field_poro, field_perm, field_density, field_temp, field_pressure ] ) +cube2: vtkUnstructuredGrid = __build( options_cube2 ) +number_cells: int = cube2.GetNumberOfCells() +number_points: int = cube2.GetNumberOfPoints() +array_poro: np.array = np.ones( ( number_cells, field_poro.dimension ), dtype=float ) * ( -1.0 ) +array_perm: np.array = np.ones( ( number_cells, field_perm.dimension ), dtype=float ) * 2.0 +array_density: np.array = np.ones( ( number_cells, field_density.dimension ), dtype=float ) * ( 100000.0 ) +array_temp: np.array = np.ones( ( number_points, field_temp.dimension ), dtype=float ) * ( -1.0 ) +array_pressure: np.array = np.ones( ( number_points, field_pressure.dimension ), dtype=float ) * ( -1.0 ) +vtk_array_poro = numpy_to_vtk( array_poro ) +vtk_array_perm = numpy_to_vtk( array_perm ) 
+vtk_array_density = numpy_to_vtk( array_density ) +vtk_array_temp = numpy_to_vtk( array_temp ) +vtk_array_pressure = numpy_to_vtk( array_pressure ) +vtk_array_poro.SetName( field_poro.name + "_invalid" ) +vtk_array_perm.SetName( field_perm.name + "_invalid" ) +vtk_array_density.SetName( field_density.name + "_invalid" ) +vtk_array_temp.SetName( field_temp.name + "_invalid" ) +vtk_array_pressure.SetName( field_pressure.name + "_invalid" ) +cell_data = cube2.GetCellData() +point_data = cube2.GetPointData() +cell_data.AddArray( vtk_array_poro ) +cell_data.AddArray( vtk_array_perm ) +cell_data.AddArray( vtk_array_density ) +point_data.AddArray( vtk_array_temp ) +point_data.AddArray( vtk_array_pressure ) + + +class TestClass: + + def test_get_cell_types_and_counts( self ): + result: tuple[ int, int, list[ str ], list[ int ] ] = ms.get_cell_types_and_counts( cube0 ) + assert result[ 0 ] == 8 + assert result[ 1 ] == 1 + assert result[ 2 ] == [ "Hex" ] + assert result[ 3 ] == [ 8 ] + + def test_get_number_cells_per_nodes( self ): + result: dict[ int, int ] = ms.get_number_cells_per_nodes( cube0 ) + for node_id in [ 0, 2, 6, 8, 18, 20, 24, 26 ]: + assert result[ node_id ] == 1 + for node_id in [ 1, 3, 5, 7, 9, 11, 15, 17, 19, 21, 23, 25 ]: + assert result[ node_id ] == 2 + for node_id in [ 4, 10, 12, 14, 16, 22 ]: + assert result[ node_id ] == 4 + assert result[ 13 ] == 8 + result2: dict[ int, int ] = ms.summary_number_cells_per_nodes( result ) + assert result2 == { 1: 8, 2: 12, 4: 6, 8: 1 } + + def test_get_coords_min_max( self ): + result: tuple[ np.ndarray ] = ms.get_coords_min_max( cube0 ) + assert np.array_equal( result[ 0 ], np.array( [ 0.0, 0.0, 0.0 ] ) ) + assert np.array_equal( result[ 1 ], np.array( [ 2.0, 2.0, 2.0 ] ) ) + + def test_build_MeshComponentData( self ): + result: ms.MeshComponentData = ms.build_MeshComponentData( cube0, "point" ) + assert result.componentType == "point" + assert result.scalar_names == [ "scalar_points", "GLOBAL_IDS_POINTS" ] + assert 
result.scalar_min_values == [ np.float64( 1.0 ), np.int64( 0 ) ] + assert result.scalar_max_values == [ np.float64( 1.0 ), np.int64( 26 ) ] + assert result.tensor_names == [ "tensor_points" ] + assert np.array_equal( result.tensor_min_values[ 0 ], np.array( [ 1.0, 1.0, 1.0 ] ) ) + assert np.array_equal( result.tensor_max_values[ 0 ], np.array( [ 1.0, 1.0, 1.0 ] ) ) + + result2: ms.MeshComponentData = ms.build_MeshComponentData( cube0, "cell" ) + assert result2.componentType == "cell" + assert result2.scalar_names == [ "scalar_cells", "GLOBAL_IDS_CELLS" ] + assert result2.scalar_min_values == [ np.float64( 1.0 ), np.int64( 0 ) ] + assert result2.scalar_max_values == [ np.float64( 1.0 ), np.int64( 7 ) ] + assert result2.tensor_names == [ "tensor_cells" ] + assert np.array_equal( result2.tensor_min_values[ 0 ], np.array( [ 1.0, 1.0, 1.0 ] ) ) + assert np.array_equal( result2.tensor_max_values[ 0 ], np.array( [ 1.0, 1.0, 1.0 ] ) ) + + result3: ms.MeshComponentData = ms.build_MeshComponentData( cube0, "random" ) + assert result3.componentType == "point" + + def test_get_disconnected_nodes( self ): + result: list[ int ] = ms.get_disconnected_nodes_id( cube1 ) + assert result == [ 27, 28, 29 ] + result2: dict[ int, tuple[ float ] ] = ms.get_disconnected_nodes_coords( cube1 ) + assert result2 == { 27: ( 3.0, 0.0, 0.0 ), 28: ( 3.0, 1.0, 0.0 ), 29: ( 3.0, 2.0, 0.0 ) } + + def test_field_values_validity( self ): + mcd_point: ms.MeshComponentData = ms.build_MeshComponentData( cube2, "point" ) + mcd_cell: ms.MeshComponentData = ms.build_MeshComponentData( cube2, "cell" ) + result_points: dict[ str, tuple[ bool, tuple[ float ] ] ] = ms.field_values_validity( mcd_point ) + result_cells: dict[ str, tuple[ bool, tuple[ float ] ] ] = ms.field_values_validity( mcd_cell ) + assert result_points == { + "TEMPERATURE": ( True, ( ms.MIN_FIELD.TEMPERATURE.value, ms.MAX_FIELD.TEMPERATURE.value ) ), + "PRESSURE": ( True, ( ms.MIN_FIELD.PRESSURE.value, ms.MAX_FIELD.PRESSURE.value ) ), + 
"TEMPERATURE_invalid": ( False, ( ms.MIN_FIELD.TEMPERATURE.value, ms.MAX_FIELD.TEMPERATURE.value ) ), + "PRESSURE_invalid": ( False, ( ms.MIN_FIELD.PRESSURE.value, ms.MAX_FIELD.PRESSURE.value ) ), + } + assert result_cells == { + "PERMEABILITY": ( True, ( ms.MIN_FIELD.PERM.value, ms.MAX_FIELD.PERM.value ) ), + "POROSITY": ( True, ( ms.MIN_FIELD.PORO.value, ms.MAX_FIELD.PORO.value ) ), + "DENSITY": ( True, ( ms.MIN_FIELD.DENSITY.value, ms.MAX_FIELD.DENSITY.value ) ), + "PERMEABILITY_invalid": ( False, ( ms.MIN_FIELD.PERM.value, ms.MAX_FIELD.PERM.value ) ), + "POROSITY_invalid": ( False, ( ms.MIN_FIELD.PORO.value, ms.MAX_FIELD.PORO.value ) ), + "DENSITY_invalid": ( False, ( ms.MIN_FIELD.DENSITY.value, ms.MAX_FIELD.DENSITY.value ) ), + } From be2aa6971950b9a812e0a3736272a2d1f3c41668 Mon Sep 17 00:00:00 2001 From: alexbenedicto Date: Thu, 15 Aug 2024 16:00:45 -0500 Subject: [PATCH 09/34] Disconnected nodes added to parsing --- geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py | 5 ++++- .../src/geos/mesh/doctor/parsing/mesh_stats_parsing.py | 9 +++++++-- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py b/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py index 1927ab6..5c6dbea 100644 --- a/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py +++ b/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py @@ -40,6 +40,7 @@ class Result: cell_types: list[ str ] cell_type_counts: list[ int ] sum_number_cells_per_nodes: dict[ int, int ] + disconnected_nodes: dict[ int, tuple[ float ] ] min_coords: np.ndarray max_coords: np.ndarray is_empty_point_global_ids: bool @@ -295,7 +296,7 @@ def get_disconnected_nodes_coords( mesh: vtkUnstructuredGrid ) -> dict[ int, tup mesh (vtkUnstructuredGrid): An unstructured grid. 
Returns: - dict[ int, tuple[ float ] ]: {nodeId0: (x0, y0, z0), nodeId23: (x23, y23, z23), ..., nodeIdM: (xM, yM, zM)] + dict[ int, tuple[ float ] ]: {nodeId0: (x0, y0, z0), nodeId23: (x23, y23, z23), ..., nodeIdM: (xM, yM, zM)} """ disconnected_nodes_id: list[ int ] = get_disconnected_nodes_id( mesh ) disconnected_nodes_coords: dict[ int, tuple[ float ] ] = {} @@ -315,6 +316,7 @@ def __check( mesh: vtkUnstructuredGrid, options: Options ) -> Result: cell_type_counts: int = cells_info[ 3 ] number_cells_per_nodes: dict[ int, int ] = get_number_cells_per_nodes( mesh ) sum_number_cells_per_nodes: dict[ int, int ] = summary_number_cells_per_nodes( number_cells_per_nodes ) + disconnected_nodes: dict[ int, tuple[ float ] ] = get_disconnected_nodes_coords( mesh ) min_coords, max_coords = get_coords_min_max( mesh ) point_ids: bool = not bool( mesh.GetPointData().GetGlobalIds() ) cell_ids: bool = not bool( mesh.GetCellData().GetGlobalIds() ) @@ -329,6 +331,7 @@ def __check( mesh: vtkUnstructuredGrid, options: Options ) -> Result: cell_types=cell_types, cell_type_counts=cell_type_counts, sum_number_cells_per_nodes=sum_number_cells_per_nodes, + disconnected_nodes=disconnected_nodes, min_coords=min_coords, max_coords=max_coords, is_empty_point_global_ids=point_ids, diff --git a/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py b/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py index 8714dbd..0048521 100644 --- a/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py +++ b/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py @@ -27,6 +27,11 @@ def display_results( options: Options, result: Result ): for number_cells_per_node, number_of_occurences in result.sum_number_cells_per_nodes.items(): logging.critical( f"\t{number_cells_per_node}\t{number_of_occurences}" ) + logging.critical( f"Number of disconnected nodes found in the mesh: {len( result.disconnected_nodes )}" ) + logging.critical( "\tNodeId\tCoordinates" ) + for node_id, coordinates 
in result.disconnected_nodes.items(): + logging.critical( f"\t{node_id}\t{coordinates}" ) + logging.critical( "The domain is contained in:" ) logging.critical( f"\t{result.min_coords[ 0 ]} <= x <= {result.max_coords[ 0 ]}" ) logging.critical( f"\t{result.min_coords[ 1 ]} <= y <= {result.max_coords[ 1 ]}" ) @@ -64,14 +69,14 @@ def display_results( options: Options, result: Result ): f"min = {result.point_data.tensor_min_values[ i ]}" + " " * space_size + f"max = {result.point_data.tensor_max_values[ i ]}" ) - logging.warning( f"Unexpected range of values for vector/tensor fields on the cells :" ) + logging.warning( f"Unexpected range of values for vector/tensor fields on the cells:" ) for field_name, validity_range in result.fields_validity_cell_data.items(): is_valid: bool = validity_range[ 0 ] min_max: tuple[ float ] = validity_range[ 1 ] if not is_valid: logging.warning( f"{field_name} expected to be between {min_max[ 0 ]} and {min_max[ 1 ]}." ) - logging.warning( f"Unexpected range of values for vector/tensor fields on the points :" ) + logging.warning( f"Unexpected range of values for vector/tensor fields on the points:" ) for field_name, validity_range in result.fields_validity_point_data.items(): is_valid: bool = validity_range[ 0 ] min_max: tuple[ float ] = validity_range[ 1 ] From 38bde63b1bccf6dd47b48d5c55f1afd85fb97dc0 Mon Sep 17 00:00:00 2001 From: alexbenedicto Date: Thu, 15 Aug 2024 19:31:06 -0500 Subject: [PATCH 10/34] Check for NaN values added + fields from FieldData now checked --- .../src/geos/mesh/doctor/checks/mesh_stats.py | 54 +++++++++--- .../mesh/doctor/parsing/mesh_stats_parsing.py | 84 +++++++++---------- geos-mesh/tests/test_mesh_stats.py | 19 +++++ 3 files changed, 100 insertions(+), 57 deletions(-) diff --git a/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py b/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py index 5c6dbea..7517b97 100644 --- a/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py +++ 
b/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py @@ -45,10 +45,13 @@ class Result: max_coords: np.ndarray is_empty_point_global_ids: bool is_empty_cell_global_ids: bool + fields_with_NaNs: dict[ str, int ] point_data: MeshComponentData cell_data: MeshComponentData + field_data: MeshComponentData fields_validity_point_data: dict[ str, dict[ str, bool ] ] fields_validity_cell_data: dict[ str, dict[ str, bool ] ] + fields_validity_field_data: dict[ str, dict[ str, bool ] ] class MIN_FIELD( float, Enum ): # SI Units @@ -179,8 +182,32 @@ def get_coords_min_max( mesh: vtkUnstructuredGrid ) -> tuple[ np.ndarray ]: return ( min_coords, max_coords ) +def check_NaN_fields( mesh: vtkUnstructuredGrid ) -> dict[ str, int ]: + """For every array of the mesh belonging to CellData, PointData or FieldData, + checks that no NaN value was found. + If a NaN value is found, the name of the array is output with the number of NaNs encountered. + + Args: + mesh (vtkUnstructuredGrid): An unstructured grid. + + Returns: + dict[ str, int ]: { array_name0: 12, array_name4: 2, ... } + """ + fields_number_of_NaNs: dict[ str, int ] = {} + data_to_use = ( mesh.GetCellData, mesh.GetPointData, mesh.GetFieldData ) + for getDataFunction in data_to_use: + data = getDataFunction() + for i in range( data.GetNumberOfArrays() ): + array = data.GetArray( i ) + array_name: str = data.GetArrayName( i ) + number_nans: int = np.count_nonzero( np.isnan( vtk_to_numpy( array ) ) ) + if number_nans > 0: + fields_number_of_NaNs[ array_name ] = number_nans + return fields_number_of_NaNs + + +def build_MeshComponentData( mesh: vtkUnstructuredGrid, componentType: str = "point" ) -> MeshComponentData: - """Builds a MeshComponentData object for a specific component ("point", "cell") + """Builds a MeshComponentData object for a specific component ("point", "cell", "field") If the component type chosen is invalid, chooses "point" by default.
Args: @@ -189,26 +216,21 @@ def build_MeshComponentData( mesh: vtkUnstructuredGrid, componentType: str = "po Returns: meshCD (MeshComponentData): Object that gathers data regarding a mesh component. """ - if componentType not in [ "point", "cell" ]: + if componentType not in [ "point", "cell", "field" ]: componentType = "point" logging.error( f"Invalid component type chosen to build MeshComponentData. Defaulted to point." ) - if componentType == "point": - number_arrays_data: int = mesh.GetPointData().GetNumberOfArrays() - else: - number_arrays_data = mesh.GetCellData().GetNumberOfArrays() - scalar_names: list[ str ] = [] scalar_min_values: list[ np.generic ] = [] scalar_max_values: list[ np.generic ] = [] tensor_names: list[ str ] = [] tensor_min_values: list[ ArrayGeneric ] = [] tensor_max_values: list[ ArrayGeneric ] = [] - for i in range( number_arrays_data ): - if componentType == "point": - data_array = mesh.GetPointData().GetArray( i ) - else: - data_array = mesh.GetCellData().GetArray( i ) + + data_to_use = { "cell": mesh.GetCellData, "point": mesh.GetPointData, "field": mesh.GetFieldData } + data = data_to_use[ componentType ]() + for i in range( data.GetNumberOfArrays() ): + data_array = data.GetArray( i ) data_array_name = data_array.GetName() data_np_array = vtk_to_numpy( data_array ) if data_array.GetNumberOfComponents() == 1: # assumes scalar cell data for max and min @@ -320,10 +342,13 @@ def __check( mesh: vtkUnstructuredGrid, options: Options ) -> Result: min_coords, max_coords = get_coords_min_max( mesh ) point_ids: bool = not bool( mesh.GetPointData().GetGlobalIds() ) cell_ids: bool = not bool( mesh.GetCellData().GetGlobalIds() ) + fields_with_NaNs: dict[ str, int ] = check_NaN_fields( mesh ) point_data: MeshComponentData = build_MeshComponentData( mesh, "point" ) cell_data: MeshComponentData = build_MeshComponentData( mesh, "cell" ) + field_data: MeshComponentData = build_MeshComponentData( mesh, "field" ) fields_validity_point_data: dict[ str, 
tuple[ bool, tuple[ float ] ] ] = field_values_validity( point_data ) fields_validity_cell_data: dict[ str, tuple[ bool, tuple[ float ] ] ] = field_values_validity( cell_data ) + fields_validity_field_data: dict[ str, tuple[ bool, tuple[ float ] ] ] = field_values_validity( field_data ) return Result( number_points=number_points, number_cells=number_cells, @@ -336,10 +361,13 @@ def __check( mesh: vtkUnstructuredGrid, options: Options ) -> Result: max_coords=max_coords, is_empty_point_global_ids=point_ids, is_empty_cell_global_ids=cell_ids, + fields_with_NaNs=fields_with_NaNs, point_data=point_data, cell_data=cell_data, + field_data=field_data, fields_validity_point_data=fields_validity_point_data, - fields_validity_cell_data=fields_validity_cell_data ) + fields_validity_cell_data=fields_validity_cell_data, + fields_validity_field_data=fields_validity_field_data ) def check( vtk_input_file: str, options: Options ) -> Result: diff --git a/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py b/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py index 0048521..8faf61f 100644 --- a/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py +++ b/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py @@ -28,9 +28,10 @@ def display_results( options: Options, result: Result ): logging.critical( f"\t{number_cells_per_node}\t{number_of_occurences}" ) logging.critical( f"Number of disconnected nodes found in the mesh: {len( result.disconnected_nodes )}" ) - logging.critical( "\tNodeId\tCoordinates" ) - for node_id, coordinates in result.disconnected_nodes.items(): - logging.critical( f"\t{node_id}\t{coordinates}" ) + if len( result.disconnected_nodes ) > 0: + logging.critical( "\tNodeId\tCoordinates" ) + for node_id, coordinates in result.disconnected_nodes.items(): + logging.critical( f"\t{node_id}\t{coordinates}" ) logging.critical( "The domain is contained in:" ) logging.critical( f"\t{result.min_coords[ 0 ]} <= x <= {result.max_coords[ 0 ]}" ) @@ 
-40,48 +41,43 @@ def display_results( options: Options, result: Result ): logging.critical( f"Does the mesh have global point ids: {not result.is_empty_point_global_ids}" ) logging.critical( f"Does the mesh have global cell ids: {not result.is_empty_cell_global_ids}" ) + logging.critical( f"Number of fields data containing NaNs values: {len( result.fields_with_NaNs )}" ) + if len( result.fields_with_NaNs ) > 0: + logging.critical( "\tFieldName\tNumber of NaNs" ) + for field_name, number_NaNs in result.fields_with_NaNs.items(): + logging.critical( f"\t{field_name}\t{number_NaNs}" ) + space_size: int = 3 - logging.critical( f"There are {len( result.cell_data.scalar_names )} scalar fields on the cells:" ) - for i in range( len( result.cell_data.scalar_names ) ): - logging.critical( f"\t{result.cell_data.scalar_names[i]}" + - harmonious_spacing( result.cell_data.scalar_names, i, space_size ) + - f"min = {result.cell_data.scalar_min_values[ i ]}" + " " * space_size + - f"max = {result.cell_data.scalar_max_values[ i ]}" ) - - logging.critical( f"There are {len( result.cell_data.tensor_names )} vector/tensor fields on the cells:" ) - for i in range( len( result.cell_data.tensor_names ) ): - logging.critical( f"\t{result.cell_data.tensor_names[ i ]}" + - harmonious_spacing( result.cell_data.tensor_names, i, space_size ) + - f"min = {result.cell_data.tensor_min_values[ i ]}" + " " * space_size + - f"max = {result.cell_data.tensor_max_values[ i ]}" ) - - logging.critical( f"There are {len( result.point_data.scalar_names )} scalar fields on the points:" ) - for i in range( len( result.point_data.scalar_names ) ): - logging.critical( f"\t{result.point_data.scalar_names[ i ]}" + - harmonious_spacing( result.point_data.scalar_names, i, space_size ) + - f"min = {result.point_data.scalar_min_values[ i ]}" + " " * space_size + - f"max = {result.point_data.scalar_max_values[ i ]}" ) - - logging.critical( f"There are {len( result.point_data.tensor_names )} vector/tensor fields on the 
points:" ) - for i in range( len( result.point_data.tensor_names ) ): - logging.critical( f"\t{result.point_data.tensor_names[ i ]}" + - harmonious_spacing( result.point_data.tensor_names, i, space_size ) + - f"min = {result.point_data.tensor_min_values[ i ]}" + " " * space_size + - f"max = {result.point_data.tensor_max_values[ i ]}" ) - - logging.warning( f"Unexpected range of values for vector/tensor fields on the cells:" ) - for field_name, validity_range in result.fields_validity_cell_data.items(): - is_valid: bool = validity_range[ 0 ] - min_max: tuple[ float ] = validity_range[ 1 ] - if not is_valid: - logging.warning( f"{field_name} expected to be between {min_max[ 0 ]} and {min_max[ 1 ]}." ) - - logging.warning( f"Unexpected range of values for vector/tensor fields on the points:" ) - for field_name, validity_range in result.fields_validity_point_data.items(): - is_valid: bool = validity_range[ 0 ] - min_max: tuple[ float ] = validity_range[ 1 ] - if not is_valid: - logging.warning( f"{field_name} expected to be between {min_max[ 0 ]} and {min_max[ 1 ]}." 
) + data_types: dict[ str, any ] = { + "CellData": result.cell_data, + "PointData": result.point_data, + "FieldData": result.field_data + } + for data_type, data in data_types.items(): + logging.critical( f"There are {len( data.scalar_names )} scalar fields from the {data_type}:" ) + for i in range( len( data.scalar_names ) ): + logging.critical( f"\t{data.scalar_names[i]}" + harmonious_spacing( data.scalar_names, i, space_size ) + + f"min = {data.scalar_min_values[ i ]}" + " " * space_size + + f"max = {data.scalar_max_values[ i ]}" ) + + logging.critical( f"There are {len( data.tensor_names )} vector/tensor fields from the {data_type}:" ) + for i in range( len( data.tensor_names ) ): + logging.critical( f"\t{data.tensor_names[ i ]}" + harmonious_spacing( data.tensor_names, i, space_size ) + + f"min = {data.tensor_min_values[ i ]}" + " " * space_size + + f"max = {data.tensor_max_values[ i ]}" ) + + fields_validity_types: dict[ str, any ] = { + "CellData": result.fields_validity_cell_data, + "PointData": result.fields_validity_point_data, + "FieldData": result.fields_validity_field_data + } + for field_vailidity_type, data in fields_validity_types.items(): + logging.warning( f"Unexpected range of values for vector/tensor fields from the {field_vailidity_type}:" ) + for field_name, validity_range in data.items(): + is_valid: bool = validity_range[ 0 ] + min_max: tuple[ float ] = validity_range[ 1 ] + if not is_valid: + logging.warning( f"{field_name} expected to be between {min_max[ 0 ]} and {min_max[ 1 ]}." 
) def harmonious_spacing( iterable_objs: Iterable[ Iterable ], indexIter: int, space_size: int = 3 ) -> str: diff --git a/geos-mesh/tests/test_mesh_stats.py b/geos-mesh/tests/test_mesh_stats.py index 3b5a69e..657ae8a 100644 --- a/geos-mesh/tests/test_mesh_stats.py +++ b/geos-mesh/tests/test_mesh_stats.py @@ -72,6 +72,21 @@ point_data.AddArray( vtk_array_temp ) point_data.AddArray( vtk_array_pressure ) +# In this mesh, certain fields have NaN values +cube3: vtkUnstructuredGrid = __build( options_cube2 ) +array_poro = array_poro * ( -1 ) +array_temp = array_temp * ( -1 ) +array_poro[ 0 ], array_poro[ -1 ] = np.nan, np.nan +array_temp[ 0 ], array_temp[ -1 ] = np.nan, np.nan +vtk_array_poro = numpy_to_vtk( array_poro ) +vtk_array_temp = numpy_to_vtk( array_temp ) +vtk_array_poro.SetName( field_poro.name + "_invalid" ) +vtk_array_temp.SetName( field_temp.name + "_invalid" ) +cell_data = cube3.GetCellData() +point_data = cube3.GetPointData() +cell_data.AddArray( vtk_array_poro ) +point_data.AddArray( vtk_array_temp ) + class TestClass: @@ -146,3 +161,7 @@ def test_field_values_validity( self ): "POROSITY_invalid": ( False, ( ms.MIN_FIELD.PORO.value, ms.MAX_FIELD.PORO.value ) ), "DENSITY_invalid": ( False, ( ms.MIN_FIELD.DENSITY.value, ms.MAX_FIELD.DENSITY.value ) ), } + + def test_check_NaN_fields( self ): + result: dict[ str, int ] = ms.check_NaN_fields( cube3 ) + assert result == { "POROSITY_invalid": 2, "TEMPERATURE_invalid": 2 } From b4c0f188f3e1e76522baca75f2ce3505e8066fa9 Mon Sep 17 00:00:00 2001 From: alexbenedicto Date: Fri, 16 Aug 2024 18:37:08 -0500 Subject: [PATCH 11/34] Number of cells neighbors + number of disconnected cells added --- .../src/geos/mesh/doctor/checks/mesh_stats.py | 66 ++++++++++++++++++- .../mesh/doctor/parsing/mesh_stats_parsing.py | 37 ++++++++--- geos-mesh/tests/test_mesh_stats.py | 32 ++++++++- 3 files changed, 123 insertions(+), 12 deletions(-) diff --git a/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py 
b/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py index 7517b97..ad56451 100644 --- a/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py +++ b/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py @@ -8,7 +8,8 @@ vtk_to_numpy, ) from vtkmodules.vtkCommonDataModel import ( - vtkUnstructuredGrid, ) + vtkUnstructuredGrid, + vtkCell ) from . import vtk_utils @@ -41,6 +42,7 @@ class Result: cell_type_counts: list[ int ] sum_number_cells_per_nodes: dict[ int, int ] disconnected_nodes: dict[ int, tuple[ float ] ] + cells_neighbors_number: np.array min_coords: np.ndarray max_coords: np.ndarray is_empty_point_global_ids: bool @@ -329,6 +331,66 @@ def get_disconnected_nodes_coords( mesh: vtkUnstructuredGrid ) -> dict[ int, tup return disconnected_nodes_coords +def get_cell_faces_node_ids( cell: vtkCell, sort_ids: bool = False ) -> tuple[ tuple[ int ] ]: + """For any vtkCell given, returns the list of faces node ids. + + Args: + cell (vtkCell): A vtk cell object. + sort_ids (bool, optional): If you want the node ids to be sorted by increasing value, use True. + Defaults to False. + + Returns: + tuple[ tuple[ int ] ]: [ [face0_nodeId0, ..., face0_nodeIdN], ..., [faceN_nodeId0, ..., faceN_nodeIdN] ] + """ + cell_faces_node_ids: list[ tuple[ int ] ] = [] + for f in range( cell.GetNumberOfFaces() ): + face = cell.GetFace( f ) + node_ids: list[ int ] = [] + for i in range( face.GetNumberOfPoints() ): + node_ids.append( face.GetPointId( i ) ) + if sort_ids: + node_ids.sort() + cell_faces_node_ids.append( tuple( node_ids ) ) + return tuple( cell_faces_node_ids ) + + +def get_cells_neighbors_number( mesh: vtkUnstructuredGrid ) -> np.array: + """For every cell of a mesh, returns the number of neighbors that it has.\n + WARNINGS:\n + 1) Will give invalid numbers if "supposedly" neighbor cells faces do not share node ids + because of collocated nodes. 
+ 2) Node ids for each face are sorted to avoid comparison issues, because cell faces node ids + can be read in different order regarding spatial orientation. Therefore, we lose the ordering of + the nodes that construct the face. It should not cause problems unless you have degenerated faces. + + Args: + mesh (vtkUnstructuredGrid): An unstructured grid. + + Returns: + np.array: Every index of this array represents a cell_id of the mesh, the value contained at this index + is the number of neighbors for that cell. + """ + # First we need to get the node ids for all faces of every cell in the mesh. + # The keys are face node ids, values are cell_id of cells that have this face node ids in common + faces_node_ids: dict[ tuple[ int ], list[ int ] ] = {} + for cell_id in range( mesh.GetNumberOfCells() ): + cell_faces_node_ids: tuple[ tuple[ int ] ] = get_cell_faces_node_ids( mesh.GetCell( cell_id ), True ) + for cell_face_node_ids in cell_faces_node_ids: + if cell_face_node_ids not in faces_node_ids: + faces_node_ids[ cell_face_node_ids ] = [ cell_id ] + else: + faces_node_ids[ cell_face_node_ids ].append( cell_id ) + # Now that we know for each face node ids, which cell_ids share it. + # We can identify if a cell is disconnected by checking that one of its face node ids is shared with another cell. 
+ # If a cell_id ends up having no neighbor = cell is disconnected + cells_neighbors_number: np.array = np.zeros( ( mesh.GetNumberOfCells(), 1 ), dtype=int ) + for cell_ids in faces_node_ids.values(): + if len(cell_ids) > 1: # if a face node ids is shared by more than 1 cell = all cells sharing are neighbors + for cell_id in cell_ids: + cells_neighbors_number[ cell_id ] += 1 + return cells_neighbors_number + + def __check( mesh: vtkUnstructuredGrid, options: Options ) -> Result: number_points: int = mesh.GetNumberOfPoints() cells_info = get_cell_types_and_counts( mesh ) @@ -339,6 +401,7 @@ def __check( mesh: vtkUnstructuredGrid, options: Options ) -> Result: number_cells_per_nodes: dict[ int, int ] = get_number_cells_per_nodes( mesh ) sum_number_cells_per_nodes: dict[ int, int ] = summary_number_cells_per_nodes( number_cells_per_nodes ) disconnected_nodes: dict[ int, tuple[ float ] ] = get_disconnected_nodes_coords( mesh ) + cells_neighbors_number: np.array = get_cells_neighbors_number( mesh ) min_coords, max_coords = get_coords_min_max( mesh ) point_ids: bool = not bool( mesh.GetPointData().GetGlobalIds() ) cell_ids: bool = not bool( mesh.GetCellData().GetGlobalIds() ) @@ -357,6 +420,7 @@ def __check( mesh: vtkUnstructuredGrid, options: Options ) -> Result: cell_type_counts=cell_type_counts, sum_number_cells_per_nodes=sum_number_cells_per_nodes, disconnected_nodes=disconnected_nodes, + cells_neighbors_number=cells_neighbors_number, min_coords=min_coords, max_coords=max_coords, is_empty_point_global_ids=point_ids, diff --git a/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py b/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py index 8faf61f..8b09ae1 100644 --- a/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py +++ b/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py @@ -1,5 +1,6 @@ import logging from typing import Iterable +from numpy import unique, where from geos.mesh.doctor.checks.mesh_stats import Options, Result @@ 
-18,20 +19,36 @@ def convert( parsed_options ) -> Options: def display_results( options: Options, result: Result ): logging.critical( f"The mesh has {result.number_cells} cells and {result.number_points} points." ) - logging.critical( f"There are {result.number_cell_types} different types of cell in the mesh:" ) + logging.critical( f"There are {result.number_cell_types} different types of cells in the mesh:" ) for i in range( result.number_cell_types ): - logging.critical( f"\t{result.cell_types[ i ]}\t({result.cell_type_counts[ i ]} cells)" ) - - logging.critical( "Number of nodes being shared between exactly N cells:" ) - logging.critical( "\tCells\tNodes" ) + logging.critical( f"\t{result.cell_types[ i ]}\t\t({result.cell_type_counts[ i ]} cells)" ) + + logging.critical( f"Number of cells that have exactly N neighbors:" ) + unique_numbers_neighbors, counts = unique( result.cells_neighbors_number, return_counts=True ) + logging.critical( "\tNeighbors\tNumber of cells concerned" ) + for number_neighbors, count in zip( unique_numbers_neighbors, counts ): + logging.critical( f"\t{number_neighbors}\t\t{count}" ) + + logging.critical( "Number of nodes being shared by exactly N cells:" ) + logging.critical( "\tCells\t\tNumber of nodes" ) for number_cells_per_node, number_of_occurences in result.sum_number_cells_per_nodes.items(): - logging.critical( f"\t{number_cells_per_node}\t{number_of_occurences}" ) - - logging.critical( f"Number of disconnected nodes found in the mesh: {len( result.disconnected_nodes )}" ) + logging.critical( f"\t{number_cells_per_node}\t\t{number_of_occurences}" ) + + if 0 in unique_numbers_neighbors: # unique_numbers_neighbors sorted in ascending order from minimum positive number + number_cells_disconnected: int = unique_numbers_neighbors[ 0 ] + else: + number_cells_disconnected = 0 + logging.critical( f"Number of disconnected cells in the mesh: {number_cells_disconnected}" ) + if number_cells_disconnected > 0: + logging.info( "\tIndexes of 
disconnected cells" ) + indexes = where(result.cells_neighbors_number == 0) + logging.info( f"{indexes[ 0 ]}" ) + + logging.critical( f"Number of disconnected nodes in the mesh: {len( result.disconnected_nodes )}" ) if len( result.disconnected_nodes ) > 0: - logging.critical( "\tNodeId\tCoordinates" ) + logging.info( "\tNodeId\t\tCoordinates" ) for node_id, coordinates in result.disconnected_nodes.items(): - logging.critical( f"\t{node_id}\t{coordinates}" ) + logging.info( f"\t{node_id}\t\t{coordinates}" ) logging.critical( "The domain is contained in:" ) logging.critical( f"\t{result.min_coords[ 0 ]} <= x <= {result.max_coords[ 0 ]}" ) diff --git a/geos-mesh/tests/test_mesh_stats.py b/geos-mesh/tests/test_mesh_stats.py index 657ae8a..b869e45 100644 --- a/geos-mesh/tests/test_mesh_stats.py +++ b/geos-mesh/tests/test_mesh_stats.py @@ -3,7 +3,7 @@ from geos.mesh.doctor.checks import mesh_stats as ms from geos.mesh.doctor.checks.generate_cube import Options, FieldInfo, __build from geos.mesh.doctor.checks.vtk_utils import VtkOutput -from vtkmodules.vtkCommonDataModel import vtkUnstructuredGrid +from vtkmodules.vtkCommonDataModel import vtkUnstructuredGrid, vtkHexahedron from vtkmodules.util.numpy_support import numpy_to_vtk # First mesh: no anomalies to look for @@ -87,6 +87,27 @@ cell_data.AddArray( vtk_array_poro ) point_data.AddArray( vtk_array_temp ) +# cube4 is a cube with an extra hex cell disconnected added +options_cube4: Options = Options( vtk_output=out, + generate_cells_global_ids=False, + generate_points_global_ids=False, + xs=np.array( [ 0.0, 1.0, 2.0 ] ), + ys=np.array( [ 0.0, 1.0, 2.0 ] ), + zs=np.array( [ 0.0, 1.0, 2.0 ] ), + nxs=[ 1, 1 ], + nys=[ 1, 1 ], + nzs=[ 1, 1 ], + fields=[] ) +cube4: vtkUnstructuredGrid = __build( options_cube4 ) +number_cells_cube4: int = cube4.GetNumberOfCells() +hex = vtkHexahedron() +coords_new_hex = ( (3.0, 0.0, 0.0), (4.0, 0.0, 0.0), (4.0, 1.0, 0.0), (3.0, 1.0, 0.0), + (3.0, 0.0, 1.0), (4.0, 0.0, 1.0), (4.0, 1.0, 1.0), 
(3.0, 1.0, 1.0) ) +for i in range( len( coords_new_hex ) ): + hex.GetPoints().InsertNextPoint( coords_new_hex[ i ] ) + hex.GetPointIds().SetId( i, number_cells_cube4 + i ) +cube4.InsertNextCell( hex.GetCellType(), hex.GetPointIds() ) + class TestClass: @@ -165,3 +186,12 @@ def test_field_values_validity( self ): def test_check_NaN_fields( self ): result: dict[ str, int ] = ms.check_NaN_fields( cube3 ) assert result == { "POROSITY_invalid": 2, "TEMPERATURE_invalid": 2 } + + def test_get_cells_neighbors_number( self ): + result: np.array = ms.get_cells_neighbors_number( cube0 ) + expected: np.array = np.ones( ( 8, 1 ), dtype=int ) * 3 + assert np.array_equal( result, expected ) + result2: np.array = ms.get_cells_neighbors_number( cube4 ) + expected2: np.array = np.ones( ( 9, 1 ), dtype=int ) * 3 + expected2[ 8 ] = 0 + assert np.array_equal( result2, expected2 ) \ No newline at end of file From 6f774ecf96e4776e7ba65b40d394040ad076ccee Mon Sep 17 00:00:00 2001 From: alexbenedicto Date: Fri, 13 Sep 2024 13:29:30 -0500 Subject: [PATCH 12/34] To revert --- .../src/geos/mesh/doctor/checks/mesh_stats.py | 40 +++++----- .../src/geos/mesh/doctor/checks/vtk_utils.py | 46 +++++------- .../mesh/doctor/parsing/mesh_stats_parsing.py | 75 ++++++++++++------- 3 files changed, 87 insertions(+), 74 deletions(-) diff --git a/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py b/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py index ad56451..39ce8e1 100644 --- a/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py +++ b/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py @@ -3,23 +3,21 @@ import numpy.typing as npt from dataclasses import dataclass from enum import Enum - -from vtkmodules.util.numpy_support import ( - vtk_to_numpy, ) - -from vtkmodules.vtkCommonDataModel import ( - vtkUnstructuredGrid, - vtkCell ) - -from . 
import vtk_utils +from typing import TypeAlias +from vtkmodules.util.numpy_support import vtk_to_numpy +from vtkmodules.vtkCommonDataModel import vtkUnstructuredGrid, vtkCell +from geos.mesh.doctor.checks import vtk_utils +""" +TypeAliases for this file +""" +ArrayGeneric: TypeAlias = npt.NDArray[ np.generic ] +FieldValidity: TypeAlias = dict[ str, tuple[ bool, tuple[ float ] ] ] @dataclass( frozen=True ) class Options: - info: str - - -ArrayGeneric = npt.NDArray[ np.generic ] + output_stats_in_file: bool + filepath: str @dataclass( frozen=True ) @@ -51,9 +49,9 @@ class Result: point_data: MeshComponentData cell_data: MeshComponentData field_data: MeshComponentData - fields_validity_point_data: dict[ str, dict[ str, bool ] ] - fields_validity_cell_data: dict[ str, dict[ str, bool ] ] - fields_validity_field_data: dict[ str, dict[ str, bool ] ] + fields_validity_point_data: FieldValidity + fields_validity_cell_data: FieldValidity + fields_validity_field_data: FieldValidity class MIN_FIELD( float, Enum ): # SI Units @@ -253,7 +251,7 @@ def build_MeshComponentData( mesh: vtkUnstructuredGrid, componentType: str = "po tensor_max_values=tensor_max_values ) -def field_values_validity( mcdata: MeshComponentData ) -> dict[ str, tuple[ bool, tuple[ float ] ] ]: +def field_values_validity( mcdata: MeshComponentData ) -> FieldValidity: """Check that for every min and max values found in the scalar and tensor fields, none of these values is out of bounds. If the value is out of bound, False validity flag is given to the field, True if no problem. @@ -262,7 +260,7 @@ def field_values_validity( mcdata: MeshComponentData ) -> dict[ str, tuple[ bool mcdata (MeshComponentData): Object that gathers data regarding a mesh component. 
Returns: - dict[ str, bool ]: {poro: (True, Min_Max_poro), perm: (False, Min_Max_perm), ...} + FieldValidity: {poro: (True, Min_Max_poro), perm: (False, Min_Max_perm), ...} """ field_values_validity: dict[ str, tuple[ bool, tuple[ float ] ] ] = {} assoc_min_max_field: dict[ str, tuple[ float ] ] = associate_min_max_field_values() @@ -409,9 +407,9 @@ def __check( mesh: vtkUnstructuredGrid, options: Options ) -> Result: point_data: MeshComponentData = build_MeshComponentData( mesh, "point" ) cell_data: MeshComponentData = build_MeshComponentData( mesh, "cell" ) field_data: MeshComponentData = build_MeshComponentData( mesh, "field" ) - fields_validity_point_data: dict[ str, tuple[ bool, tuple[ float ] ] ] = field_values_validity( point_data ) - fields_validity_cell_data: dict[ str, tuple[ bool, tuple[ float ] ] ] = field_values_validity( cell_data ) - fields_validity_field_data: dict[ str, tuple[ bool, tuple[ float ] ] ] = field_values_validity( field_data ) + fields_validity_point_data: FieldValidity = field_values_validity( point_data ) + fields_validity_cell_data: FieldValidity = field_values_validity( cell_data ) + fields_validity_field_data: FieldValidity = field_values_validity( field_data ) return Result( number_points=number_points, number_cells=number_cells, diff --git a/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py b/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py index c3af155..acf9a5c 100644 --- a/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py +++ b/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py @@ -156,30 +156,22 @@ def write_mesh( mesh: vtkUnstructuredGrid, vtk_output: VtkOutput ) -> int: def vtkid_to_string( id: int ) -> str: - match id: - case 1: # VTK_VERTEX - return 'Vertex' - case 3: #VTK_LINE - return 'Line' - case 5: #VTK_TRIANGLE - return 'Triangle' - case 8: #VTK_PIXEL - return 'Pixel' - case 9: #VTK_QUAD - return 'Quad' - case 10: #VTK_TETRA - return 'Tetra' - case 11: #VTK_VOXEL - return 'Voxel' - case 12: #VTK_HEXAHEDRON - 
return 'Hex' - case 13: #VTK_WEDGE - return 'Wedge' - case 14: #VTK_PYRAMID - return 'Pyramid' - case 15: #VTK_PENTAGONAL_PRISM - return 'Pentagonal prism' - case 16: #VTK_HEXAGONAL_PRISM - return 'Hexagonal Prism' - case _: - return 'Unknown type' + choices: dict[ int, str ] = { + 1: 'Vertex', + 3: 'Line', + 5: 'Triangle', + 7: 'Polygon', + 8: 'Pixel', + 9: 'Quad', + 10: 'Tetra', + 11: 'Voxel', + 12: 'Hex', + 13: 'Wedge', + 14: 'Pyramid', + 15: 'Pentagonal prism', + 16: 'Hexagonal Prism' + } + if id in choices: + return choices[ id ] + else: + return 'Unknown type' diff --git a/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py b/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py index 8b09ae1..4de4a66 100644 --- a/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py +++ b/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py @@ -1,15 +1,32 @@ import logging +from io import StringIO from typing import Iterable from numpy import unique, where from geos.mesh.doctor.checks.mesh_stats import Options, Result - from . import MESH_STATS +__DISCONNECTED = "disconnected" +__DISCONNECTED_DEFAULT = 0 + +__FIELD_VALUES = "field_values" +__FIELD_VALUES_DEFAULT = 0 + def fill_subparser( subparsers ) -> None: p = subparsers.add_parser( MESH_STATS, help=f"Outputs basic properties of a mesh." ) - + p.add_argument( '--' + __DISCONNECTED, + type=int, + required=False, + metavar=[0, 1], + default=__DISCONNECTED_DEFAULT, + help=f"\tDisplay all disconnected nodes ids and disconnected cell ids." ) + p.add_argument( '--' + __FIELD_VALUES, + type=int, + required=False, + metavar=[0, 1], + default=__FIELD_VALUES_DEFAULT, + help=f"\tDisplay all range of field values that seem not realistic." ) def convert( parsed_options ) -> Options: """ @@ -18,51 +35,54 @@ def convert( parsed_options ) -> Options: def display_results( options: Options, result: Result ): - logging.critical( f"The mesh has {result.number_cells} cells and {result.number_points} points." 
) - logging.critical( f"There are {result.number_cell_types} different types of cells in the mesh:" ) + log_stream = StringIO() + logging.basicConfig(level=logging.INFO, handlers=[logging.StreamHandler(log_stream)]) + + logging.info( f"The mesh has {result.number_cells} cells and {result.number_points} points." ) + logging.info( f"There are {result.number_cell_types} different types of cells in the mesh:" ) for i in range( result.number_cell_types ): - logging.critical( f"\t{result.cell_types[ i ]}\t\t({result.cell_type_counts[ i ]} cells)" ) + logging.info( f"\t{result.cell_types[ i ]}\t\t({result.cell_type_counts[ i ]} cells)" ) - logging.critical( f"Number of cells that have exactly N neighbors:" ) + logging.info( f"Number of cells that have exactly N neighbors:" ) unique_numbers_neighbors, counts = unique( result.cells_neighbors_number, return_counts=True ) - logging.critical( "\tNeighbors\tNumber of cells concerned" ) + logging.info( "\tNeighbors\tNumber of cells concerned" ) for number_neighbors, count in zip( unique_numbers_neighbors, counts ): - logging.critical( f"\t{number_neighbors}\t\t{count}" ) + logging.info( f"\t{number_neighbors}\t\t{count}" ) - logging.critical( "Number of nodes being shared by exactly N cells:" ) - logging.critical( "\tCells\t\tNumber of nodes" ) + logging.info( "Number of nodes being shared by exactly N cells:" ) + logging.info( "\tCells\t\tNumber of nodes" ) for number_cells_per_node, number_of_occurences in result.sum_number_cells_per_nodes.items(): - logging.critical( f"\t{number_cells_per_node}\t\t{number_of_occurences}" ) + logging.info( f"\t{number_cells_per_node}\t\t{number_of_occurences}" ) if 0 in unique_numbers_neighbors: # unique_numbers_neighbors sorted in ascending order from minimum positive number number_cells_disconnected: int = unique_numbers_neighbors[ 0 ] else: number_cells_disconnected = 0 - logging.critical( f"Number of disconnected cells in the mesh: {number_cells_disconnected}" ) + logging.info( f"Number of 
disconnected cells in the mesh: {number_cells_disconnected}" ) if number_cells_disconnected > 0: logging.info( "\tIndexes of disconnected cells" ) indexes = where(result.cells_neighbors_number == 0) logging.info( f"{indexes[ 0 ]}" ) - logging.critical( f"Number of disconnected nodes in the mesh: {len( result.disconnected_nodes )}" ) + logging.info( f"Number of disconnected nodes in the mesh: {len( result.disconnected_nodes )}" ) if len( result.disconnected_nodes ) > 0: logging.info( "\tNodeId\t\tCoordinates" ) for node_id, coordinates in result.disconnected_nodes.items(): logging.info( f"\t{node_id}\t\t{coordinates}" ) - logging.critical( "The domain is contained in:" ) - logging.critical( f"\t{result.min_coords[ 0 ]} <= x <= {result.max_coords[ 0 ]}" ) - logging.critical( f"\t{result.min_coords[ 1 ]} <= y <= {result.max_coords[ 1 ]}" ) - logging.critical( f"\t{result.min_coords[ 2 ]} <= z <= {result.max_coords[ 2 ]}" ) + logging.info( "The domain is contained in:" ) + logging.info( f"\t{result.min_coords[ 0 ]} <= x <= {result.max_coords[ 0 ]}" ) + logging.info( f"\t{result.min_coords[ 1 ]} <= y <= {result.max_coords[ 1 ]}" ) + logging.info( f"\t{result.min_coords[ 2 ]} <= z <= {result.max_coords[ 2 ]}" ) - logging.critical( f"Does the mesh have global point ids: {not result.is_empty_point_global_ids}" ) - logging.critical( f"Does the mesh have global cell ids: {not result.is_empty_cell_global_ids}" ) + logging.info( f"Does the mesh have global point ids: {not result.is_empty_point_global_ids}" ) + logging.info( f"Does the mesh have global cell ids: {not result.is_empty_cell_global_ids}" ) - logging.critical( f"Number of fields data containing NaNs values: {len( result.fields_with_NaNs )}" ) + logging.info( f"Number of fields data containing NaNs values: {len( result.fields_with_NaNs )}" ) if len( result.fields_with_NaNs ) > 0: - logging.critical( "\tFieldName\tNumber of NaNs" ) + logging.info( "\tFieldName\tNumber of NaNs" ) for field_name, number_NaNs in 
result.fields_with_NaNs.items(): - logging.critical( f"\t{field_name}\t{number_NaNs}" ) + logging.info( f"\t{field_name}\t{number_NaNs}" ) space_size: int = 3 data_types: dict[ str, any ] = { @@ -71,15 +91,15 @@ def display_results( options: Options, result: Result ): "FieldData": result.field_data } for data_type, data in data_types.items(): - logging.critical( f"There are {len( data.scalar_names )} scalar fields from the {data_type}:" ) + logging.info( f"There are {len( data.scalar_names )} scalar fields from the {data_type}:" ) for i in range( len( data.scalar_names ) ): - logging.critical( f"\t{data.scalar_names[i]}" + harmonious_spacing( data.scalar_names, i, space_size ) + + logging.info( f"\t{data.scalar_names[i]}" + harmonious_spacing( data.scalar_names, i, space_size ) + f"min = {data.scalar_min_values[ i ]}" + " " * space_size + f"max = {data.scalar_max_values[ i ]}" ) - logging.critical( f"There are {len( data.tensor_names )} vector/tensor fields from the {data_type}:" ) + logging.info( f"There are {len( data.tensor_names )} vector/tensor fields from the {data_type}:" ) for i in range( len( data.tensor_names ) ): - logging.critical( f"\t{data.tensor_names[ i ]}" + harmonious_spacing( data.tensor_names, i, space_size ) + + logging.info( f"\t{data.tensor_names[ i ]}" + harmonious_spacing( data.tensor_names, i, space_size ) + f"min = {data.tensor_min_values[ i ]}" + " " * space_size + f"max = {data.tensor_max_values[ i ]}" ) @@ -96,6 +116,9 @@ def display_results( options: Options, result: Result ): if not is_valid: logging.warning( f"{field_name} expected to be between {min_max[ 0 ]} and {min_max[ 1 ]}." 
) + if options.output_stats_in_file: + pass + def harmonious_spacing( iterable_objs: Iterable[ Iterable ], indexIter: int, space_size: int = 3 ) -> str: longest_element: Iterable = max( iterable_objs, key=len ) From 548ae2a94bedbd372dbf6e198aa8a1c6643113f5 Mon Sep 17 00:00:00 2001 From: alexbenedicto Date: Mon, 16 Sep 2024 09:42:26 -0700 Subject: [PATCH 13/34] To revert --- .../src/geos/mesh/doctor/checks/mesh_stats.py | 50 +++--- .../src/geos/mesh/doctor/checks/vtk_utils.py | 46 +++-- .../mesh/doctor/parsing/mesh_stats_parsing.py | 161 ++++++++++++++---- 3 files changed, 175 insertions(+), 82 deletions(-) diff --git a/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py b/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py index ad56451..4d47cbf 100644 --- a/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py +++ b/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py @@ -3,23 +3,24 @@ import numpy.typing as npt from dataclasses import dataclass from enum import Enum - -from vtkmodules.util.numpy_support import ( - vtk_to_numpy, ) - -from vtkmodules.vtkCommonDataModel import ( - vtkUnstructuredGrid, - vtkCell ) - -from . 
import vtk_utils - - -@dataclass( frozen=True ) +from typing import TypeAlias +from vtkmodules.util.numpy_support import vtk_to_numpy +from vtkmodules.vtkCommonDataModel import vtkUnstructuredGrid, vtkCell +from geos.mesh.doctor.checks import vtk_utils +""" +TypeAliases for this file +""" +ArrayGeneric: TypeAlias = npt.NDArray[ np.generic ] +FieldValidity: TypeAlias = dict[ str, tuple[ bool, tuple[ float ] ] ] + + +@dataclass( frozen=False ) class Options: - info: str - - -ArrayGeneric = npt.NDArray[ np.generic ] + write_stats: int + output_folder: str + input_filepath: str + disconnected: int + field_values: int @dataclass( frozen=True ) @@ -51,9 +52,9 @@ class Result: point_data: MeshComponentData cell_data: MeshComponentData field_data: MeshComponentData - fields_validity_point_data: dict[ str, dict[ str, bool ] ] - fields_validity_cell_data: dict[ str, dict[ str, bool ] ] - fields_validity_field_data: dict[ str, dict[ str, bool ] ] + fields_validity_point_data: FieldValidity + fields_validity_cell_data: FieldValidity + fields_validity_field_data: FieldValidity class MIN_FIELD( float, Enum ): # SI Units @@ -253,7 +254,7 @@ def build_MeshComponentData( mesh: vtkUnstructuredGrid, componentType: str = "po tensor_max_values=tensor_max_values ) -def field_values_validity( mcdata: MeshComponentData ) -> dict[ str, tuple[ bool, tuple[ float ] ] ]: +def field_values_validity( mcdata: MeshComponentData ) -> FieldValidity: """Check that for every min and max values found in the scalar and tensor fields, none of these values is out of bounds. If the value is out of bound, False validity flag is given to the field, True if no problem. @@ -262,7 +263,7 @@ def field_values_validity( mcdata: MeshComponentData ) -> dict[ str, tuple[ bool mcdata (MeshComponentData): Object that gathers data regarding a mesh component. 
Returns: - dict[ str, bool ]: {poro: (True, Min_Max_poro), perm: (False, Min_Max_perm), ...} + FieldValidity: {poro: (True, Min_Max_poro), perm: (False, Min_Max_perm), ...} """ field_values_validity: dict[ str, tuple[ bool, tuple[ float ] ] ] = {} assoc_min_max_field: dict[ str, tuple[ float ] ] = associate_min_max_field_values() @@ -409,9 +410,9 @@ def __check( mesh: vtkUnstructuredGrid, options: Options ) -> Result: point_data: MeshComponentData = build_MeshComponentData( mesh, "point" ) cell_data: MeshComponentData = build_MeshComponentData( mesh, "cell" ) field_data: MeshComponentData = build_MeshComponentData( mesh, "field" ) - fields_validity_point_data: dict[ str, tuple[ bool, tuple[ float ] ] ] = field_values_validity( point_data ) - fields_validity_cell_data: dict[ str, tuple[ bool, tuple[ float ] ] ] = field_values_validity( cell_data ) - fields_validity_field_data: dict[ str, tuple[ bool, tuple[ float ] ] ] = field_values_validity( field_data ) + fields_validity_point_data: FieldValidity = field_values_validity( point_data ) + fields_validity_cell_data: FieldValidity = field_values_validity( cell_data ) + fields_validity_field_data: FieldValidity = field_values_validity( field_data ) return Result( number_points=number_points, number_cells=number_cells, @@ -436,4 +437,5 @@ def __check( mesh: vtkUnstructuredGrid, options: Options ) -> Result: def check( vtk_input_file: str, options: Options ) -> Result: mesh = vtk_utils.read_mesh( vtk_input_file ) + options.input_filepath = vtk_input_file return __check( mesh, options ) diff --git a/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py b/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py index c3af155..acf9a5c 100644 --- a/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py +++ b/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py @@ -156,30 +156,22 @@ def write_mesh( mesh: vtkUnstructuredGrid, vtk_output: VtkOutput ) -> int: def vtkid_to_string( id: int ) -> str: - match id: - case 1: # VTK_VERTEX - return 
'Vertex' - case 3: #VTK_LINE - return 'Line' - case 5: #VTK_TRIANGLE - return 'Triangle' - case 8: #VTK_PIXEL - return 'Pixel' - case 9: #VTK_QUAD - return 'Quad' - case 10: #VTK_TETRA - return 'Tetra' - case 11: #VTK_VOXEL - return 'Voxel' - case 12: #VTK_HEXAHEDRON - return 'Hex' - case 13: #VTK_WEDGE - return 'Wedge' - case 14: #VTK_PYRAMID - return 'Pyramid' - case 15: #VTK_PENTAGONAL_PRISM - return 'Pentagonal prism' - case 16: #VTK_HEXAGONAL_PRISM - return 'Hexagonal Prism' - case _: - return 'Unknown type' + choices: dict[ int, str ] = { + 1: 'Vertex', + 3: 'Line', + 5: 'Triangle', + 7: 'Polygon', + 8: 'Pixel', + 9: 'Quad', + 10: 'Tetra', + 11: 'Voxel', + 12: 'Hex', + 13: 'Wedge', + 14: 'Pyramid', + 15: 'Pentagonal prism', + 16: 'Hexagonal Prism' + } + if id in choices: + return choices[ id ] + else: + return 'Unknown type' diff --git a/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py b/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py index 8b09ae1..7870865 100644 --- a/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py +++ b/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py @@ -1,68 +1,123 @@ import logging +import os +from io import StringIO +from datetime import datetime from typing import Iterable from numpy import unique, where - from geos.mesh.doctor.checks.mesh_stats import Options, Result - from . import MESH_STATS +__WRITE_STATS = "write_stats" +__WRITE_STATS_DEFAULT = 0 + +__OUTPUT = "output" + +__DISCONNECTED = "disconnected" +__DISCONNECTED_DEFAULT = 0 + +__FIELD_VALUES = "field_values" +__FIELD_VALUES_DEFAULT = 0 + + def fill_subparser( subparsers ) -> None: p = subparsers.add_parser( MESH_STATS, help=f"Outputs basic properties of a mesh." ) + p.add_argument( '--' + __WRITE_STATS, + type=int, + required=True, + metavar=[0, 1], + default=__WRITE_STATS_DEFAULT, + help=( f"\t[int]: The stats of the mesh will be printed in a file" + + " to the folder specified in --output." 
) ) + p.add_argument( '--' + __DISCONNECTED, + type=int, + required=False, + metavar=[0, 1], + default=__DISCONNECTED_DEFAULT, + help=f"\t[int]: Display all disconnected nodes ids and disconnected cell ids." ) + p.add_argument( '--' + __FIELD_VALUES, + type=int, + required=False, + metavar=[0, 1], + default=__FIELD_VALUES_DEFAULT, + help=f"\t[int]: Display all range of field values that seem not realistic." ) + + # Check the value of __WRITE_STATS argument to determine if argument __OUTPUT is a required argument + args, unknown = p.parse_known_args() + p.add_argument( '--' + __OUTPUT, + type=str, + required=getattr(args, __WRITE_STATS), + help=f"[string]: The output folder destination where the stats will be written." ) def convert( parsed_options ) -> Options: - """ - """ - return Options( info="test" ) + write_stats = parsed_options[ __WRITE_STATS ] + output_folder = parsed_options[ __OUTPUT ] + disconnected = parsed_options[ __DISCONNECTED ] + field_values = parsed_options[ __FIELD_VALUES ] + # input_filepath will be defined in check function before calling __check + return Options( write_stats=write_stats, + output_folder=output_folder, + input_filepath="", + disconnected=disconnected, + field_values=field_values ) def display_results( options: Options, result: Result ): - logging.critical( f"The mesh has {result.number_cells} cells and {result.number_points} points." ) - logging.critical( f"There are {result.number_cell_types} different types of cells in the mesh:" ) + log_stream = StringIO() + stream_handler = logging.StreamHandler(log_stream) + stream_handler.setLevel(logging.INFO) + + # Get the root logger and add the StreamHandler to it to possibly output the log to an external file + logger = logging.getLogger() + logger.addHandler(stream_handler) + logger.setLevel(logging.INFO) + + logging.info( f"The mesh has {result.number_cells} cells and {result.number_points} points." 
) + logging.info( f"There are {result.number_cell_types} different types of cells in the mesh:" ) for i in range( result.number_cell_types ): - logging.critical( f"\t{result.cell_types[ i ]}\t\t({result.cell_type_counts[ i ]} cells)" ) + logging.info( f"\t{result.cell_types[ i ]}\t\t({result.cell_type_counts[ i ]} cells)" ) - logging.critical( f"Number of cells that have exactly N neighbors:" ) + logging.info( f"Number of cells that have exactly N neighbors:" ) unique_numbers_neighbors, counts = unique( result.cells_neighbors_number, return_counts=True ) - logging.critical( "\tNeighbors\tNumber of cells concerned" ) + logging.info( "\tNeighbors\tNumber of cells concerned" ) for number_neighbors, count in zip( unique_numbers_neighbors, counts ): - logging.critical( f"\t{number_neighbors}\t\t{count}" ) + logging.info( f"\t{number_neighbors}\t\t{count}" ) - logging.critical( "Number of nodes being shared by exactly N cells:" ) - logging.critical( "\tCells\t\tNumber of nodes" ) + logging.info( "Number of nodes being shared by exactly N cells:" ) + logging.info( "\tCells\t\tNumber of nodes" ) for number_cells_per_node, number_of_occurences in result.sum_number_cells_per_nodes.items(): - logging.critical( f"\t{number_cells_per_node}\t\t{number_of_occurences}" ) + logging.info( f"\t{number_cells_per_node}\t\t{number_of_occurences}" ) if 0 in unique_numbers_neighbors: # unique_numbers_neighbors sorted in ascending order from minimum positive number number_cells_disconnected: int = unique_numbers_neighbors[ 0 ] else: number_cells_disconnected = 0 - logging.critical( f"Number of disconnected cells in the mesh: {number_cells_disconnected}" ) + logging.info( f"Number of disconnected cells in the mesh: {number_cells_disconnected}" ) if number_cells_disconnected > 0: logging.info( "\tIndexes of disconnected cells" ) indexes = where(result.cells_neighbors_number == 0) logging.info( f"{indexes[ 0 ]}" ) - logging.critical( f"Number of disconnected nodes in the mesh: {len( 
result.disconnected_nodes )}" ) + logging.info( f"Number of disconnected nodes in the mesh: {len( result.disconnected_nodes )}" ) if len( result.disconnected_nodes ) > 0: logging.info( "\tNodeId\t\tCoordinates" ) for node_id, coordinates in result.disconnected_nodes.items(): logging.info( f"\t{node_id}\t\t{coordinates}" ) - logging.critical( "The domain is contained in:" ) - logging.critical( f"\t{result.min_coords[ 0 ]} <= x <= {result.max_coords[ 0 ]}" ) - logging.critical( f"\t{result.min_coords[ 1 ]} <= y <= {result.max_coords[ 1 ]}" ) - logging.critical( f"\t{result.min_coords[ 2 ]} <= z <= {result.max_coords[ 2 ]}" ) + logging.info( "The domain is contained in:" ) + logging.info( f"\t{result.min_coords[ 0 ]} <= x <= {result.max_coords[ 0 ]}" ) + logging.info( f"\t{result.min_coords[ 1 ]} <= y <= {result.max_coords[ 1 ]}" ) + logging.info( f"\t{result.min_coords[ 2 ]} <= z <= {result.max_coords[ 2 ]}" ) - logging.critical( f"Does the mesh have global point ids: {not result.is_empty_point_global_ids}" ) - logging.critical( f"Does the mesh have global cell ids: {not result.is_empty_cell_global_ids}" ) + logging.info( f"Does the mesh have global point ids: {not result.is_empty_point_global_ids}" ) + logging.info( f"Does the mesh have global cell ids: {not result.is_empty_cell_global_ids}" ) - logging.critical( f"Number of fields data containing NaNs values: {len( result.fields_with_NaNs )}" ) + logging.info( f"Number of fields data containing NaNs values: {len( result.fields_with_NaNs )}" ) if len( result.fields_with_NaNs ) > 0: - logging.critical( "\tFieldName\tNumber of NaNs" ) + logging.info( "\tFieldName\tNumber of NaNs" ) for field_name, number_NaNs in result.fields_with_NaNs.items(): - logging.critical( f"\t{field_name}\t{number_NaNs}" ) + logging.info( f"\t{field_name}\t{number_NaNs}" ) space_size: int = 3 data_types: dict[ str, any ] = { @@ -71,15 +126,15 @@ def display_results( options: Options, result: Result ): "FieldData": result.field_data } for 
data_type, data in data_types.items(): - logging.critical( f"There are {len( data.scalar_names )} scalar fields from the {data_type}:" ) + logging.info( f"There are {len( data.scalar_names )} scalar fields from the {data_type}:" ) for i in range( len( data.scalar_names ) ): - logging.critical( f"\t{data.scalar_names[i]}" + harmonious_spacing( data.scalar_names, i, space_size ) + + logging.info( f"\t{data.scalar_names[i]}" + harmonious_spacing( data.scalar_names, i, space_size ) + f"min = {data.scalar_min_values[ i ]}" + " " * space_size + f"max = {data.scalar_max_values[ i ]}" ) - logging.critical( f"There are {len( data.tensor_names )} vector/tensor fields from the {data_type}:" ) + logging.info( f"There are {len( data.tensor_names )} vector/tensor fields from the {data_type}:" ) for i in range( len( data.tensor_names ) ): - logging.critical( f"\t{data.tensor_names[ i ]}" + harmonious_spacing( data.tensor_names, i, space_size ) + + logging.info( f"\t{data.tensor_names[ i ]}" + harmonious_spacing( data.tensor_names, i, space_size ) + f"min = {data.tensor_min_values[ i ]}" + " " * space_size + f"max = {data.tensor_max_values[ i ]}" ) @@ -89,15 +144,59 @@ def display_results( options: Options, result: Result ): "FieldData": result.fields_validity_field_data } for field_vailidity_type, data in fields_validity_types.items(): - logging.warning( f"Unexpected range of values for vector/tensor fields from the {field_vailidity_type}:" ) + logging.info( f"Unexpected range of values for vector/tensor fields from the {field_vailidity_type}:" ) for field_name, validity_range in data.items(): is_valid: bool = validity_range[ 0 ] min_max: tuple[ float ] = validity_range[ 1 ] if not is_valid: - logging.warning( f"{field_name} expected to be between {min_max[ 0 ]} and {min_max[ 1 ]}." ) + logging.info( f"{field_name} expected to be between {min_max[ 0 ]} and {min_max[ 1 ]}." 
) + + if options.write_stats: + if is_valid_to_write_folder( options.output_folder ): + filepath: str = build_filepath_output_file( options ) + with open( filepath, 'w' ) as file: + file.writelines( log_stream.getvalue() ) def harmonious_spacing( iterable_objs: Iterable[ Iterable ], indexIter: int, space_size: int = 3 ) -> str: longest_element: Iterable = max( iterable_objs, key=len ) ideal_space: int = len( longest_element ) - len( iterable_objs[ indexIter ] ) + space_size return " " * ideal_space + + +def is_valid_to_write_folder( folder_path: str ) -> bool: + """Checks if a folder path is valid to write a file within it. + + Args: + folder_path (str): Path to a folder. + + Returns: + bool: + """ + if not os.path.exists( folder_path ): + return False + if not os.path.isdir( folder_path ): + return False + if not os.access( folder_path, os.W_OK ): + return False + return True + + +def build_filepath_output_file( options: Options ) -> str: + """Knowing the filepath of the mesh on which the stats will be gathered, and the directory path to where the user + wants to save the stats, builds a unique filename. + + Args: + options (Options): Options given by the user. + + Returns: + str: Complete filepath for the creation of the output file. 
+ """ + base_name = os.path.basename( options.input_filepath ) + # Split the base name into a mesh name and extension + mesh_name, _ = os.path.splitext( base_name ) + current_time = datetime.now() + time = current_time.strftime( "%Y%m%d_%H%M%S" ) + filename: str = mesh_name + "_" + time + filepath: str = os.path.join( options.output_folder, filename ) + return filepath From eaab7053f84e980dcdcac0f6d028a204ffe52428 Mon Sep 17 00:00:00 2001 From: alexbenedicto Date: Mon, 14 Oct 2024 17:23:47 -0700 Subject: [PATCH 14/34] Mesh_stats tests done --- .../src/geos/mesh/doctor/checks/mesh_stats.py | 43 ++--- geos-mesh/src/geos/mesh/doctor/mesh_doctor.py | 12 +- .../mesh/doctor/parsing/mesh_stats_parsing.py | 16 +- geos-mesh/tests/test_mesh_stats.py | 153 +++++++++++++++++- 4 files changed, 186 insertions(+), 38 deletions(-) diff --git a/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py b/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py index 4d47cbf..182e5d3 100644 --- a/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py +++ b/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py @@ -118,7 +118,7 @@ def get_cell_types_and_counts( mesh: vtkUnstructuredGrid ) -> tuple[ int, int, l distinct_array_types = mesh.GetDistinctCellTypesArray() number_cell_types: int = distinct_array_types.GetNumberOfTuples() # Get the different cell types in the mesh - cell_types: list[ str ] = [] + cell_types: list[ str ] = list() for cell_type in range( number_cell_types ): cell_types.append( vtk_utils.vtkid_to_string( distinct_array_types.GetTuple( cell_type )[ 0 ] ) ) # Counts how many of each type are present @@ -152,7 +152,7 @@ def get_number_cells_per_nodes( mesh: vtkUnstructuredGrid ) -> dict[ int, int ]: def summary_number_cells_per_nodes( number_cells_per_nodes: dict[ int, int ] ) -> dict[ int, int ]: - """Obtain the number of nodes that have X number of cells. + """Obtain the number of nodes that are a node of X number of cells. 
Args: number_cells_per_nodes (dict[ int, int ]): { point_id0: 8, ..., point_idN: 4 } @@ -223,27 +223,34 @@ def build_MeshComponentData( mesh: vtkUnstructuredGrid, componentType: str = "po componentType = "point" logging.error( f"Invalid component type chosen to build MeshComponentData. Defaulted to point." ) - scalar_names: list[ str ] = [] - scalar_min_values: list[ np.generic ] = [] - scalar_max_values: list[ np.generic ] = [] - tensor_names: list[ str ] = [] - tensor_min_values: list[ ArrayGeneric ] = [] - tensor_max_values: list[ ArrayGeneric ] = [] + scalar_names: list[ str ] = list() + scalar_min_values: list[ float] = list() + scalar_max_values: list[ float] = list() + tensor_names: list[ str ] = list() + tensor_min_values: list[ list[ float ] ] = list() + tensor_max_values: list[ list[ float ] ] = list() data_to_use = { "cell": mesh.GetCellData, "point": mesh.GetPointData, "field": mesh.GetFieldData } data = data_to_use[ componentType ]() for i in range( data.GetNumberOfArrays() ): data_array = data.GetArray( i ) - data_array_name = data_array.GetName() - data_np_array = vtk_to_numpy( data_array ) - if data_array.GetNumberOfComponents() == 1: # assumes scalar cell data for max and min + data_array_name: str = data_array.GetName() + number_components: int = data_array.GetNumberOfComponents() + if number_components == 1: # assumes scalar cell data for max and min scalar_names.append( data_array_name ) - scalar_max_values.append( data_np_array.max() ) - scalar_min_values.append( data_np_array.min() ) + min_value, max_value = data_array.GetRange() + scalar_min_values.append( min_value ) + scalar_max_values.append( max_value ) else: tensor_names.append( data_array_name ) - tensor_max_values.append( data_np_array.max( axis=0 ) ) - tensor_min_values.append( data_np_array.min( axis=0 ) ) + min_values: list[ float ] = list() + max_values: list[ float ] = list() + for component_index in range( number_components ): + min_value, max_value = data_array.GetRange( 
component_index ) + min_values.append( min_value ) + max_values.append( max_value ) + tensor_min_values.append( min_values ) + tensor_max_values.append( max_values ) return MeshComponentData( componentType=componentType, scalar_names=scalar_names, @@ -299,7 +306,7 @@ def get_disconnected_nodes_id( mesh: vtkUnstructuredGrid ) -> list[ int ]: Returns: list[ int ]: [nodeId0, nodeId23, ..., nodeIdM] """ - disconnected_nodes_id: list[ int ] = [] + disconnected_nodes_id: list[ int ] = list() connectivity = mesh.GetCells().GetConnectivityArray() connectivity_unique_points: set = set() for i in range( connectivity.GetNumberOfValues() ): @@ -343,10 +350,10 @@ def get_cell_faces_node_ids( cell: vtkCell, sort_ids: bool = False ) -> tuple[ t Returns: tuple[ tuple[ int ] ]: [ [face0_nodeId0, ..., face0_nodeIdN], ..., [faceN_nodeId0, ..., faceN_nodeIdN] ] """ - cell_faces_node_ids: list[ tuple[ int ] ] = [] + cell_faces_node_ids: list[ tuple[ int ] ] = list() for f in range( cell.GetNumberOfFaces() ): face = cell.GetFace( f ) - node_ids: list[ int ] = [] + node_ids: list[ int ] = list() for i in range( face.GetNumberOfPoints() ): node_ids.append( face.GetPointId( i ) ) if sort_ids: diff --git a/geos-mesh/src/geos/mesh/doctor/mesh_doctor.py b/geos-mesh/src/geos/mesh/doctor/mesh_doctor.py index ea1bfe8..1311145 100644 --- a/geos-mesh/src/geos/mesh/doctor/mesh_doctor.py +++ b/geos-mesh/src/geos/mesh/doctor/mesh_doctor.py @@ -1,17 +1,17 @@ import sys +import logging +from geos.mesh.doctor.parsing import CheckHelper +from geos.mesh.doctor.parsing.cli_parsing import parse_and_set_verbosity +import geos.mesh.doctor.register as register +min_python_version = ( 3, 7 ) try: - min_python_version = ( 3, 7 ) assert sys.version_info >= min_python_version except AssertionError as e: print( f"Please update python to at least version {'.'.join(map(str, min_python_version))}." 
) sys.exit( 1 ) -import logging - -from geos.mesh.doctor.parsing import CheckHelper -from geos.mesh.doctor.parsing.cli_parsing import parse_and_set_verbosity -import geos.mesh.doctor.register as register +MESH_DOCTOR_FILEPATH = __file__ def main(): diff --git a/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py b/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py index 6a46496..75b3362 100644 --- a/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py +++ b/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py @@ -7,13 +7,6 @@ from geos.mesh.doctor.checks.mesh_stats import Options, Result from . import MESH_STATS -__DISCONNECTED = "disconnected" -__DISCONNECTED_DEFAULT = 0 - -__FIELD_VALUES = "field_values" -__FIELD_VALUES_DEFAULT = 0 - - __WRITE_STATS = "write_stats" __WRITE_STATS_DEFAULT = 0 @@ -162,9 +155,6 @@ def display_results( options: Options, result: Result ): with open( filepath, 'w' ) as file: file.writelines( log_stream.getvalue() ) - if options.output_stats_in_file: - pass - def harmonious_spacing( iterable_objs: Iterable[ Iterable ], indexIter: int, space_size: int = 3 ) -> str: longest_element: Iterable = max( iterable_objs, key=len ) @@ -182,10 +172,14 @@ def is_valid_to_write_folder( folder_path: str ) -> bool: bool: """ if not os.path.exists( folder_path ): + logging.error( f"The folder path given '{folder_path}' to write the log in does not exist. No file written." ) return False if not os.path.isdir( folder_path ): + logging.error( f"The path given '{folder_path}' to write the log is not a directory path. No file written." ) return False if not os.access( folder_path, os.W_OK ): + logging.error( f"You do not have writing access to the folder chosen '{folder_path}' to write the log." + + " No file written." 
) return False return True @@ -205,6 +199,6 @@ def build_filepath_output_file( options: Options ) -> str: mesh_name, _ = os.path.splitext( base_name ) current_time = datetime.now() time = current_time.strftime( "%Y%m%d_%H%M%S" ) - filename: str = mesh_name + "_" + time + filename: str = mesh_name + "_stats_" + time + ".txt" filepath: str = os.path.join( options.output_folder, filename ) return filepath diff --git a/geos-mesh/tests/test_mesh_stats.py b/geos-mesh/tests/test_mesh_stats.py index b869e45..f5bf0af 100644 --- a/geos-mesh/tests/test_mesh_stats.py +++ b/geos-mesh/tests/test_mesh_stats.py @@ -1,11 +1,27 @@ +import os +import re +import logging +import subprocess import numpy as np - +from geos.mesh.doctor.mesh_doctor import MESH_DOCTOR_FILEPATH from geos.mesh.doctor.checks import mesh_stats as ms from geos.mesh.doctor.checks.generate_cube import Options, FieldInfo, __build -from geos.mesh.doctor.checks.vtk_utils import VtkOutput +from geos.mesh.doctor.checks.vtk_utils import VtkOutput, write_mesh from vtkmodules.vtkCommonDataModel import vtkUnstructuredGrid, vtkHexahedron from vtkmodules.util.numpy_support import numpy_to_vtk + +""" +For creation of output test meshes +""" +current_file_path: str = __file__ +dir_name: str = os.path.dirname( current_file_path ) +pattern_test: str = "to_check_mesh" +filepath_mesh_for_stats: str = os.path.join( dir_name, pattern_test + ".vtu" ) +test_mesh_for_stats: VtkOutput = VtkOutput( filepath_mesh_for_stats, True ) +""" +Grids for stats tests +""" # First mesh: no anomalies to look for out: VtkOutput = VtkOutput( "test", False ) field0: FieldInfo = FieldInfo( "scalar_cells", 1, "CELLS" ) @@ -109,6 +125,63 @@ cube4.InsertNextCell( hex.GetCellType(), hex.GetPointIds() ) +# Last mesh: test mesh for output and check of execution of mesh_stats +f_poro: FieldInfo = FieldInfo( "POROSITY", 1, "CELLS" ) +f_perm: FieldInfo = FieldInfo( "PERMEABILITY", 3, "CELLS" ) +f_density: FieldInfo = FieldInfo( "DENSITY", 1, "CELLS" ) 
+f_pressure: FieldInfo = FieldInfo( "PRESSURE", 1, "CELLS" ) +f_temp: FieldInfo = FieldInfo( "TEMPERATURE", 1, "POINTS" ) +f_displacement: FieldInfo = FieldInfo( "DISPLACEMENT", 3, "POINTS" ) +options_cube_output: Options = Options( vtk_output=out, + generate_cells_global_ids=True, + generate_points_global_ids=True, + xs=np.array( [ 0.0, 1.0, 2.0, 3.0 ] ), + ys=np.array( [ 0.0, 1.0, 2.0, 3.0 ] ), + zs=np.array( [ 0.0, 1.0, 2.0, 3.0 ] ), + nxs=[ 1, 1, 1 ], + nys=[ 1, 1, 1 ], + nzs=[ 1, 1, 1 ], + fields=[ f_poro, f_perm, f_density, f_pressure, f_temp, f_displacement ] ) +cube_output: vtkUnstructuredGrid = __build( options_cube_output ) +number_cells: int = cube_output.GetNumberOfCells() +number_points: int = cube_output.GetNumberOfPoints() +a_poro: np.array = np.linspace( 0, 1, number_cells ) +a_perm: np.array = np.empty( ( number_cells, f_perm.dimension ) ) +for i in range( f_perm.dimension ): + a_perm[:, i] = np.linspace( 1e-14 * 10**i, 1e-12 * 10**i, number_cells ) +a_density: np.array = np.linspace( 500, 40000, number_cells ) +a_pressure: np.array = np.linspace( 1e5, 1e7, number_cells ) +a_temp: np.array = np.linspace( 1e2, 5e3, number_points ) +a_temp = a_temp.reshape( number_points, 1 ) +a_displacement: np.array = np.empty( ( number_points, f_displacement.dimension ) ) +for i in range( f_displacement.dimension ): + a_displacement[:, i] = np.linspace( 1e-4 * 10**i, 1e-2 * 10**i, number_points ) +for array in [ a_density, a_pressure, a_poro ]: + array = array.reshape( number_cells, 1 ) + +vtk_a_poro = numpy_to_vtk( a_poro ) +vtk_a_perm = numpy_to_vtk( a_perm ) +vtk_a_density = numpy_to_vtk( a_density ) +vtk_a_pressure = numpy_to_vtk( a_pressure ) +vtk_a_temp = numpy_to_vtk( a_temp ) +vtk_a_displacement = numpy_to_vtk( a_displacement ) +vtk_a_poro.SetName( f_poro.name ) +vtk_a_perm.SetName( f_perm.name ) +vtk_a_density.SetName( f_density.name + "_invalid" ) +vtk_a_pressure.SetName( f_pressure.name ) +vtk_a_temp.SetName( f_temp.name + "_invalid" ) 
+vtk_a_displacement.SetName( f_displacement.name ) + +cell_data_output = cube_output.GetCellData() +point_data_output = cube_output.GetPointData() +cell_data_output.AddArray( vtk_a_poro ) +cell_data_output.AddArray( vtk_a_perm ) +cell_data_output.AddArray( vtk_a_density ) +cell_data_output.AddArray( vtk_a_pressure ) +point_data_output.AddArray( vtk_a_temp ) +point_data_output.AddArray( vtk_a_displacement ) + + class TestClass: def test_get_cell_types_and_counts( self ): @@ -194,4 +267,78 @@ def test_get_cells_neighbors_number( self ): result2: np.array = ms.get_cells_neighbors_number( cube4 ) expected2: np.array = np.ones( ( 9, 1 ), dtype=int ) * 3 expected2[ 8 ] = 0 - assert np.array_equal( result2, expected2 ) \ No newline at end of file + assert np.array_equal( result2, expected2 ) + + + def test_mesh_stats_execution( self ): + write_mesh( cube_output, test_mesh_for_stats ) + invalidTest = False + command = [ + "python", MESH_DOCTOR_FILEPATH, "-v", "-i", test_mesh_for_stats.output, "mesh_stats", "--write_stats", + "0", "--output", dir_name, "--disconnected", "0", "--field_values", "0" + ] + try: + result = subprocess.run( command, shell=True, stderr=subprocess.PIPE, universal_newlines=True ) + os.remove( test_mesh_for_stats.output ) + stderr = result.stderr + assert result.returncode == 0 + raw_stderr = r"{}".format( stderr ) + pattern = r"\[.*?\]\[.*?\] (.*)" + matches = re.findall( pattern, raw_stderr ) + no_log = "\n".join( matches ) + mesh_output_stats: str = no_log[ no_log.index( "The mesh has" ): ] + # yapf: disable + expected_stats: str = ( "The mesh has 27 cells and 64 points.\n" + + "There are 1 different types of cells in the mesh:\n" + + "\tHex\t\t(27 cells)\n" + + "Number of cells that have exactly N neighbors:\n" + + "\tNeighbors\tNumber of cells concerned\n" + + "\t3\t\t8\n" + + "\t4\t\t12\n" + + "\t5\t\t6\n" + + "\t6\t\t1\n" + + "Number of nodes being shared by exactly N cells:\n" + + "\tCells\t\tNumber of nodes\n" + + "\t8\t\t8\n" + + 
"\t1\t\t8\n" + + "\t2\t\t24\n" + + "\t4\t\t24\n" + + "Number of disconnected cells in the mesh: 0\n" + + "Number of disconnected nodes in the mesh: 0\n" + + "The domain is contained in:\n" + + "\t0.0 <= x <= 3.0\n" + + "\t0.0 <= y <= 3.0\n" + + "\t0.0 <= z <= 3.0\n" + + "Does the mesh have global point ids: True\n" + + "Does the mesh have global cell ids: True\n" + + "Number of fields data containing NaNs values: 0\n" + + "There are 5 scalar fields from the CellData:\n" + + "\tPOROSITY min = 0.0 max = 1.0\n" + + "\tDENSITY min = 1.0 max = 1.0\n" + + "\tPRESSURE min = 100000.0 max = 10000000.0\n" + + "\tGLOBAL_IDS_CELLS min = 0.0 max = 26.0\n" + + "\tDENSITY_invalid min = 500.0 max = 40000.0\n" + + "There are 1 vector/tensor fields from the CellData:\n" + + "\tPERMEABILITY min = [1e-14, 1e-13, 1e-12] max = [1e-12, 1e-11, 1e-10]\n" + + "There are 3 scalar fields from the PointData:\n" + + "\tTEMPERATURE min = 1.0 max = 1.0\n" + + "\tGLOBAL_IDS_POINTS min = 0.0 max = 63.0\n" + + "\tTEMPERATURE_invalid min = 100.0 max = 5000.0\n" + + "There are 1 vector/tensor fields from the PointData:\n" + + "\tDISPLACEMENT min = [0.0001, 0.001, 0.01] max = [0.01, 0.1, 1.0]\n" + + "There are 0 scalar fields from the FieldData:\n" + + "There are 0 vector/tensor fields from the FieldData:\n" + + "Unexpected range of values for vector/tensor fields from the CellData:\n" + + "DENSITY_invalid expected to be between 0.0 and 25000.0.\n" + + "Unexpected range of values for vector/tensor fields from the PointData:\n" + + "TEMPERATURE_invalid expected to be between 0.0 and 2000.0.\n" + + "Unexpected range of values for vector/tensor fields from the FieldData:" ) + # yapf: enable + assert mesh_output_stats == expected_stats + except Exception as e: + logging.error( "Invalid command input. Test has failed." ) + logging.error( e ) + invalidTest = True + + if invalidTest: + raise ValueError( "test_mesh_stats_execution has failed." 
) \ No newline at end of file From 645f3e4362e2381959a77312d775af6bc997fc02 Mon Sep 17 00:00:00 2001 From: alexbenedicto Date: Sun, 20 Oct 2024 21:30:03 -0700 Subject: [PATCH 15/34] Bug fix where args of mesh_doctor.py execution would be mesh_stats --- geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py b/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py index 75b3362..f65b09e 100644 --- a/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py +++ b/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py @@ -40,12 +40,9 @@ def fill_subparser( subparsers ) -> None: metavar=[0, 1], default=__FIELD_VALUES_DEFAULT, help=f"\t[int]: Display all range of field values that seem not realistic." ) - - # Check the value of __WRITE_STATS argument to determine if argument __OUTPUT is a required argument - args, unknown = p.parse_known_args() p.add_argument( '--' + __OUTPUT, type=str, - required=getattr(args, __WRITE_STATS), + required=False, help=f"[string]: The output folder destination where the stats will be written." 
) def convert( parsed_options ) -> Options: From 404bc227b0e12bd1e7b0ac1dd8d35ecf1d8d2736 Mon Sep 17 00:00:00 2001 From: alexbenedicto Date: Wed, 6 Nov 2024 13:44:55 -0800 Subject: [PATCH 16/34] Change name from "add_fields" to "fields_manipulation" --- .../checks/{add_fields.py => fields_manipulation.py} | 12 +++--------- geos-mesh/src/geos/mesh/doctor/parsing/__init__.py | 2 +- ...lds_parsing.py => fields_manipulation_parsing.py} | 8 +++----- 3 files changed, 7 insertions(+), 15 deletions(-) rename geos-mesh/src/geos/mesh/doctor/checks/{add_fields.py => fields_manipulation.py} (96%) rename geos-mesh/src/geos/mesh/doctor/parsing/{add_fields_parsing.py => fields_manipulation_parsing.py} (80%) diff --git a/geos-mesh/src/geos/mesh/doctor/checks/add_fields.py b/geos-mesh/src/geos/mesh/doctor/checks/fields_manipulation.py similarity index 96% rename from geos-mesh/src/geos/mesh/doctor/checks/add_fields.py rename to geos-mesh/src/geos/mesh/doctor/checks/fields_manipulation.py index c97379a..c73edf1 100644 --- a/geos-mesh/src/geos/mesh/doctor/checks/add_fields.py +++ b/geos-mesh/src/geos/mesh/doctor/checks/fields_manipulation.py @@ -4,15 +4,9 @@ from numpy import empty from numpy.random import rand -from vtkmodules.util.numpy_support import ( - numpy_to_vtk, - vtk_to_numpy, -) - -from vtkmodules.vtkCommonCore import ( - vtkDoubleArray, ) - -from . 
import vtk_utils +from vtkmodules.util.numpy_support import numpy_to_vtk, vtk_to_numpy +from vtkmodules.vtkCommonCore import vtkDoubleArray +from geos.mesh.doctor.checks import vtk_utils @dataclass( frozen=True ) diff --git a/geos-mesh/src/geos/mesh/doctor/parsing/__init__.py b/geos-mesh/src/geos/mesh/doctor/parsing/__init__.py index d5c9a57..df601e8 100644 --- a/geos-mesh/src/geos/mesh/doctor/parsing/__init__.py +++ b/geos-mesh/src/geos/mesh/doctor/parsing/__init__.py @@ -12,7 +12,7 @@ SELF_INTERSECTING_ELEMENTS = "self_intersecting_elements" SUPPORTED_ELEMENTS = "supported_elements" MESH_STATS = "mesh_stats" -ADD_FIELDS = "add_fields" +FIELDS_MANIPULATION = "fields_manipulation" @dataclass( frozen=True ) diff --git a/geos-mesh/src/geos/mesh/doctor/parsing/add_fields_parsing.py b/geos-mesh/src/geos/mesh/doctor/parsing/fields_manipulation_parsing.py similarity index 80% rename from geos-mesh/src/geos/mesh/doctor/parsing/add_fields_parsing.py rename to geos-mesh/src/geos/mesh/doctor/parsing/fields_manipulation_parsing.py index 8b50d13..90a266f 100644 --- a/geos-mesh/src/geos/mesh/doctor/parsing/add_fields_parsing.py +++ b/geos-mesh/src/geos/mesh/doctor/parsing/fields_manipulation_parsing.py @@ -1,8 +1,6 @@ import logging - -from checks.add_fields import Options, Result - -from . import vtk_output_parsing, ADD_FIELDS +from geos.mesh.doctor.checks.fields_manipulation import Options, Result +from geos.mesh.doctor.parsing import vtk_output_parsing, FIELDS_MANIPULATION __SUPPORT = "support" __NAME = "name" @@ -10,7 +8,7 @@ def fill_subparser( subparsers ) -> None: - p = subparsers.add_parser( ADD_FIELDS, help=f"Add cell or point data to a mesh." ) + p = subparsers.add_parser( FIELDS_MANIPULATION, help=f"Add cell or point data to a mesh." ) p.add_argument( '--' + __SUPPORT, type=str, required=True, help=f"[string]: Where to define field (point/cell)." ) p.add_argument( '--' + __NAME, type=str, required=True, help=f"[string]: Name of the field to add." 
) p.add_argument( '--' + __SOURCE, From 06769670d6fcb8932139212e78da5fb18dd797ec Mon Sep 17 00:00:00 2001 From: alexbenedicto Date: Tue, 12 Nov 2024 10:46:29 -0800 Subject: [PATCH 17/34] intermediate commit --- .../mesh/doctor/checks/fields_manipulation.py | 16 ++--- .../parsing/fields_manipulation_parsing.py | 63 ++++++++++++++++--- 2 files changed, 64 insertions(+), 15 deletions(-) diff --git a/geos-mesh/src/geos/mesh/doctor/checks/fields_manipulation.py b/geos-mesh/src/geos/mesh/doctor/checks/fields_manipulation.py index c73edf1..0f63b3f 100644 --- a/geos-mesh/src/geos/mesh/doctor/checks/fields_manipulation.py +++ b/geos-mesh/src/geos/mesh/doctor/checks/fields_manipulation.py @@ -6,15 +6,17 @@ from vtkmodules.util.numpy_support import numpy_to_vtk, vtk_to_numpy from vtkmodules.vtkCommonCore import vtkDoubleArray -from geos.mesh.doctor.checks import vtk_utils +from geos.mesh.doctor.checks.vtk_utils import VtkOutput, read_mesh, write_mesh @dataclass( frozen=True ) class Options: + manipulation: str support: str - field_name: str + field_names: list[ str ] source: str - out_vtk: vtk_utils.VtkOutput + vtm_index: int + out_vtk: VtkOutput @dataclass( frozen=True ) @@ -89,7 +91,7 @@ def __compatible_meshes( dest_mesh, source_mesh ) -> bool: def __transfer_field( mesh, support, field_name, source ) -> bool: - from_mesh = vtk_utils.read_mesh( source ) + from_mesh = read_mesh( source ) same_mesh = __compatible_meshes( mesh, from_mesh ) if not same_mesh: logging.error( 'meshes are not the same' ) @@ -120,11 +122,11 @@ def __check( mesh, options: Options ) -> Result: if options.source == 'function': succ = __analytic_field( mesh, options.support, options.field_name ) if succ: - vtk_utils.write_mesh( mesh, options.out_vtk ) + write_mesh( mesh, options.out_vtk ) elif ( options.source[ -4: ] == '.vtu' or options.source[ -4: ] == '.vtk' ): succ = __transfer_field( mesh, options.support, options.field_name, options.source ) if succ: - vtk_utils.write_mesh( mesh, options.out_vtk 
) + write_mesh( mesh, options.out_vtk ) else: logging.error( 'incorrect source option. Options are function, *.vtu, *.vtk.' ) succ = False @@ -133,5 +135,5 @@ def __check( mesh, options: Options ) -> Result: def check( vtk_input_file: str, options: Options ) -> Result: - mesh = vtk_utils.read_mesh( vtk_input_file ) + mesh = read_mesh( vtk_input_file ) return __check( mesh, options ) diff --git a/geos-mesh/src/geos/mesh/doctor/parsing/fields_manipulation_parsing.py b/geos-mesh/src/geos/mesh/doctor/parsing/fields_manipulation_parsing.py index 90a266f..b1cbe7f 100644 --- a/geos-mesh/src/geos/mesh/doctor/parsing/fields_manipulation_parsing.py +++ b/geos-mesh/src/geos/mesh/doctor/parsing/fields_manipulation_parsing.py @@ -2,28 +2,75 @@ from geos.mesh.doctor.checks.fields_manipulation import Options, Result from geos.mesh.doctor.parsing import vtk_output_parsing, FIELDS_MANIPULATION +__MANIPULATION = "manipulation" +__MANIPULATION_CHOICES = [ "transfer" ] + __SUPPORT = "support" +__SUPPORT_CHOICES = [ "point", "cell" ] + __NAME = "name" __SOURCE = "source" +__WHICH_VTM = "which_vtm" +__WHICH_VTM_SUGGESTIONS = [ "first", "last" ] + def fill_subparser( subparsers ) -> None: - p = subparsers.add_parser( FIELDS_MANIPULATION, help=f"Add cell or point data to a mesh." ) - p.add_argument( '--' + __SUPPORT, type=str, required=True, help=f"[string]: Where to define field (point/cell)." ) - p.add_argument( '--' + __NAME, type=str, required=True, help=f"[string]: Name of the field to add." ) + p = subparsers.add_parser( FIELDS_MANIPULATION, help=f"Allows to perform an operation from a source to your input mesh." ) + p.add_argument( '--' + __MANIPULATION, + type=str, + required=True, + metavar=", ".join( map( str, __MANIPULATION_CHOICES ) ), + default=__MANIPULATION_CHOICES[ 0 ], + help="[string]: Choose what operation you want to perform from the source to your input mesh. " + f"'{__MANIPULATION_CHOICES[ 0 ]}' copies field(s) from the source to the input mesh." 
) + p.add_argument( '--' + __SUPPORT, + type=str, + required=True, + metavar=", ".join( map( str, __SUPPORT_CHOICES ) ), + default=__SUPPORT_CHOICES[ 0 ], + help="[string]: Where to define field." ) + p.add_argument( '--' + __NAME, + type=str, + required=True, + help="[list of string comma separated]: Name of the field(s) to manipulate." ) p.add_argument( '--' + __SOURCE, type=str, required=True, - help=f"[string]: Where field data to add comes from (function, mesh)." ) + help="[string]: Where field data to use for operation comes from (function, .vtu, .vtm, .pvd file)." ) + p.add_argument( '--' + __WHICH_VTM, + type=str, + required=False, + default=__WHICH_VTM_SUGGESTIONS[ 1 ], + help="[string]: If your input is a .pvd, choose which .vtm (each .vtm corresponding to a unique " + "timestep) will be used for the operation. To do so, you can choose amongst these possibilities: " + "'first' will select the initial timestep; 'last' will select the final timestep; or you can enter " + "directly the index starting from 0 of the timestep (not the time). By default, the value is set to 'last'." ) vtk_output_parsing.fill_vtk_output_subparser( p ) def convert( parsed_options ) -> Options: - """ - """ - return Options( support=parsed_options[ __SUPPORT ], - field_name=parsed_options[ __NAME ], + manipulation: str = parsed_options[ __MANIPULATION ] + if support not in __MANIPULATION_CHOICES: + raise ValueError( f"For --{__MANIPULATION}, the only choices available are {__MANIPULATION_CHOICES}." ) + support: str = parsed_options[ __SUPPORT ] + if support not in __SUPPORT_CHOICES: + raise ValueError( f"For --{__SUPPORT}, the only choices available are {__SUPPORT_CHOICES}." 
) 
+    field_names: list[ str ] = list( map( str, parsed_options[ __NAME ].split( "," ) ) )
+    which_vtm: str = parsed_options[ __WHICH_VTM ]
+    if which_vtm in __WHICH_VTM_SUGGESTIONS:
+        vtm_index: int = 0 if which_vtm == __WHICH_VTM_SUGGESTIONS[ 0 ] else -1
+    else:
+        try:
+            vtm_index = int( which_vtm )
+        except ValueError:
+            raise ValueError( f"The choice for --{__WHICH_VTM} needs to be an integer or " +
+                              f"'{__WHICH_VTM_SUGGESTIONS[ 0 ]}' or '{__WHICH_VTM_SUGGESTIONS[ 1 ]}'." )
+    return Options( manipulation=manipulation,
+                    support=support,
+                    field_names=field_names,
                     source=parsed_options[ __SOURCE ],
+                    vtm_index=vtm_index,
                     out_vtk=vtk_output_parsing.convert( parsed_options ) )

From ea4eb841d9a5c4e5cc793efc2104829e6675dea6 Mon Sep 17 00:00:00 2001
From: alexbenedicto
Date: Tue, 12 Nov 2024 16:15:54 -0800
Subject: [PATCH 18/34] Changed names from manipulations to operations

---
 ...ds_manipulation.py => field_operations.py} |  2 +-
 .../src/geos/mesh/doctor/parsing/__init__.py  |  4 +--
 ...parsing.py => field_operations_parsing.py} | 26 +++++++++----------
 geos-mesh/src/geos/mesh/doctor/register.py    |  8 +++---
 4 files changed, 20 insertions(+), 20 deletions(-)
 rename geos-mesh/src/geos/mesh/doctor/checks/{fields_manipulation.py => field_operations.py} (99%)
 rename geos-mesh/src/geos/mesh/doctor/parsing/{fields_manipulation_parsing.py => field_operations_parsing.py} (77%)

diff --git a/geos-mesh/src/geos/mesh/doctor/checks/fields_manipulation.py b/geos-mesh/src/geos/mesh/doctor/checks/field_operations.py
similarity index 99%
rename from geos-mesh/src/geos/mesh/doctor/checks/fields_manipulation.py
rename to geos-mesh/src/geos/mesh/doctor/checks/field_operations.py
index 0f63b3f..65a62a1 100644
--- a/geos-mesh/src/geos/mesh/doctor/checks/fields_manipulation.py
+++ b/geos-mesh/src/geos/mesh/doctor/checks/field_operations.py
@@ -11,7 +11,7 @@
 @dataclass( frozen=True )
 class Options:
-    manipulation: str
+    operation: str
     support: str
     field_names: list[ str ]
     source: str
diff --git 
a/geos-mesh/src/geos/mesh/doctor/parsing/__init__.py b/geos-mesh/src/geos/mesh/doctor/parsing/__init__.py index df601e8..4c9e8d2 100644 --- a/geos-mesh/src/geos/mesh/doctor/parsing/__init__.py +++ b/geos-mesh/src/geos/mesh/doctor/parsing/__init__.py @@ -4,15 +4,15 @@ COLLOCATES_NODES = "collocated_nodes" ELEMENT_VOLUMES = "element_volumes" +FIELD_OPERATIONS = "field_operations" FIX_ELEMENTS_ORDERINGS = "fix_elements_orderings" GENERATE_CUBE = "generate_cube" GENERATE_FRACTURES = "generate_fractures" GENERATE_GLOBAL_IDS = "generate_global_ids" +MESH_STATS = "mesh_stats" NON_CONFORMAL = "non_conformal" SELF_INTERSECTING_ELEMENTS = "self_intersecting_elements" SUPPORTED_ELEMENTS = "supported_elements" -MESH_STATS = "mesh_stats" -FIELDS_MANIPULATION = "fields_manipulation" @dataclass( frozen=True ) diff --git a/geos-mesh/src/geos/mesh/doctor/parsing/fields_manipulation_parsing.py b/geos-mesh/src/geos/mesh/doctor/parsing/field_operations_parsing.py similarity index 77% rename from geos-mesh/src/geos/mesh/doctor/parsing/fields_manipulation_parsing.py rename to geos-mesh/src/geos/mesh/doctor/parsing/field_operations_parsing.py index b1cbe7f..fb80671 100644 --- a/geos-mesh/src/geos/mesh/doctor/parsing/fields_manipulation_parsing.py +++ b/geos-mesh/src/geos/mesh/doctor/parsing/field_operations_parsing.py @@ -1,9 +1,9 @@ import logging -from geos.mesh.doctor.checks.fields_manipulation import Options, Result -from geos.mesh.doctor.parsing import vtk_output_parsing, FIELDS_MANIPULATION +from geos.mesh.doctor.checks.field_operations import Options, Result +from geos.mesh.doctor.parsing import vtk_output_parsing, FIELD_OPERATIONS -__MANIPULATION = "manipulation" -__MANIPULATION_CHOICES = [ "transfer" ] +__OPERATION = "operation" +__OPERATION_CHOICES = [ "transfer" ] __SUPPORT = "support" __SUPPORT_CHOICES = [ "point", "cell" ] @@ -16,14 +16,14 @@ def fill_subparser( subparsers ) -> None: - p = subparsers.add_parser( FIELDS_MANIPULATION, help=f"Allows to perform an operation from 
a source to your input mesh." )
-    p.add_argument( '--' + __MANIPULATION,
+    p = subparsers.add_parser( FIELD_OPERATIONS, help=f"Allows to perform an operation on fields from a source to your input mesh." )
+    p.add_argument( '--' + __OPERATION,
                     type=str,
                     required=True,
-                    metavar=", ".join( map( str, __MANIPULATION_CHOICES ) ),
-                    default=__MANIPULATION_CHOICES[ 0 ],
+                    metavar=", ".join( map( str, __OPERATION_CHOICES ) ),
+                    default=__OPERATION_CHOICES[ 0 ],
                     help="[string]: Choose what operation you want to perform from the source to your input mesh. "
-                    f"'{__MANIPULATION_CHOICES[ 0 ]}' copies field(s) from the source to the input mesh." )
+                    f"'{__OPERATION_CHOICES[ 0 ]}' copies field(s) from the source to the input mesh." )
     p.add_argument( '--' + __SUPPORT,
                     type=str,
                     required=True,
@@ -50,9 +50,9 @@ def fill_subparser( subparsers ) -> None:
 
 
 def convert( parsed_options ) -> Options:
-    manipulation: str = parsed_options[ __MANIPULATION ]
-    if support not in __MANIPULATION_CHOICES:
-        raise ValueError( f"For --{__MANIPULATION}, the only choices available are {__MANIPULATION_CHOICES}." )
+    operation: str = parsed_options[ __OPERATION ]
+    if operation not in __OPERATION_CHOICES:
+        raise ValueError( f"For --{__OPERATION}, the only choices available are {__OPERATION_CHOICES}." )
     support: str = parsed_options[ __SUPPORT ]
     if support not in __SUPPORT_CHOICES:
         raise ValueError( f"For --{__SUPPORT}, the only choices available are {__SUPPORT_CHOICES}." 
) - return Options( manipulation=manipulation, + return Options( operation=operation, support=support, field_names=field_names, source=parsed_options[ __SOURCE ], diff --git a/geos-mesh/src/geos/mesh/doctor/register.py b/geos-mesh/src/geos/mesh/doctor/register.py index b6c8da9..12a4519 100644 --- a/geos-mesh/src/geos/mesh/doctor/register.py +++ b/geos-mesh/src/geos/mesh/doctor/register.py @@ -53,10 +53,10 @@ def closure_trick( cn: str ): __CHECKS[ check_name ] = lambda: __load_module_check( cn ) # Register the modules to load here. - for check_name in ( parsing.COLLOCATES_NODES, parsing.ELEMENT_VOLUMES, parsing.FIX_ELEMENTS_ORDERINGS, - parsing.GENERATE_CUBE, parsing.GENERATE_FRACTURES, parsing.GENERATE_GLOBAL_IDS, - parsing.NON_CONFORMAL, parsing.SELF_INTERSECTING_ELEMENTS, parsing.SUPPORTED_ELEMENTS, - parsing.MESH_STATS, parsing.ADD_FIELDS ): + for check_name in ( parsing.COLLOCATES_NODES, parsing.ELEMENT_VOLUMES, parsing.FIELD_OPERATIONS, + parsing.FIX_ELEMENTS_ORDERINGS, parsing.GENERATE_CUBE, parsing.GENERATE_FRACTURES, + parsing.GENERATE_GLOBAL_IDS, parsing.MESH_STATS, parsing.NON_CONFORMAL, + parsing.SELF_INTERSECTING_ELEMENTS, parsing.SUPPORTED_ELEMENTS ): closure_trick( check_name ) loaded_checks: Dict[ str, Callable[ [ str, Any ], Any ] ] = __load_checks() loaded_checks_helpers: Dict[ str, CheckHelper ] = dict() From 983b3bc660396bf793007f850f98a3935f58a255 Mon Sep 17 00:00:00 2001 From: alexbenedicto Date: Thu, 14 Nov 2024 18:16:34 -0800 Subject: [PATCH 19/34] vtk_utils new functions to read .vtm file directly or from a .pvd file --- .../src/geos/mesh/doctor/checks/vtk_utils.py | 222 +++++++++++++----- 1 file changed, 158 insertions(+), 64 deletions(-) diff --git a/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py b/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py index acf9a5c..40b04d8 100644 --- a/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py +++ b/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py @@ -1,40 +1,12 @@ -from dataclasses import 
dataclass -import os.path import logging -import sys -from typing import ( - Any, - Iterator, - Optional, -) - -from vtkmodules.vtkCommonCore import ( - vtkIdList, ) -from vtkmodules.vtkCommonDataModel import ( - vtkUnstructuredGrid, ) -from vtkmodules.vtkIOLegacy import ( - vtkUnstructuredGridWriter, - vtkUnstructuredGridReader, -) -from vtkmodules.vtkIOXML import ( - vtkXMLUnstructuredGridReader, - vtkXMLUnstructuredGridWriter, -) - -from vtkmodules.vtkCommonDataModel import ( - VTK_HEXAGONAL_PRISM, - VTK_HEXAHEDRON, - VTK_PENTAGONAL_PRISM, - VTK_PYRAMID, - VTK_TETRA, - VTK_VOXEL, - VTK_WEDGE, - VTK_TRIANGLE, - VTK_QUAD, - VTK_PIXEL, - VTK_LINE, - VTK_VERTEX, -) +import os.path +import xml.etree.ElementTree as ET +from dataclasses import dataclass +from typing import Iterator, Optional +from vtkmodules.vtkCommonCore import vtkIdList +from vtkmodules.vtkCommonDataModel import vtkUnstructuredGrid, vtkMultiBlockDataSet +from vtkmodules.vtkIOLegacy import vtkUnstructuredGridWriter, vtkUnstructuredGridReader +from vtkmodules.vtkIOXML import vtkXMLUnstructuredGridReader, vtkXMLUnstructuredGridWriter, vtkXMLMultiBlockDataReader @dataclass( frozen=True ) @@ -51,7 +23,7 @@ def to_vtk_id_list( data ) -> vtkIdList: return result -def vtk_iter( l ) -> Iterator[ Any ]: +def vtk_iter( l ) -> Iterator[ any ]: """ Utility function transforming a vtk "container" (e.g. vtkIdList) into an iterable to be used for building built-ins python containers. :param l: A vtk container. @@ -65,6 +37,38 @@ def vtk_iter( l ) -> Iterator[ Any ]: yield l.GetCellType( i ) +def has_invalid_field( mesh: vtkUnstructuredGrid, invalid_fields: list[ str ] ) -> bool: + """Checks if a mesh contains at least a data arrays within its cell, field or point data + having a certain name. If so, returns True, else False. + + Args: + mesh (vtkUnstructuredGrid): An unstructured mesh. + invalid_fields (list[str]): Field name of an array in any data from the data. 
+ + Returns: + bool: True if one field found, else False. + """ + # Check the cell data fields + cell_data = mesh.GetCellData() + for i in range( cell_data.GetNumberOfArrays() ): + if cell_data.GetArrayName( i ) in invalid_fields: + logging.error( f"The mesh contains an invalid cell field name '{cell_data.GetArrayName( i )}'." ) + return True + # Check the field data fields + field_data = mesh.GetFieldData() + for i in range( field_data.GetNumberOfArrays() ): + if field_data.GetArrayName( i ) in invalid_fields: + logging.error( f"The mesh contains an invalid field name '{field_data.GetArrayName( i )}'." ) + return True + # Check the point data fields + point_data = mesh.GetPointData() + for i in range( point_data.GetNumberOfArrays() ): + if point_data.GetArrayName( i ) in invalid_fields: + logging.error( f"The mesh contains an invalid point field name '{point_data.GetArrayName( i )}'." ) + return True + return False + + def __read_vtk( vtk_input_file: str ) -> Optional[ vtkUnstructuredGrid ]: reader = vtkUnstructuredGridReader() logging.info( f"Testing file format \"{vtk_input_file}\" using legacy format reader..." ) @@ -98,6 +102,10 @@ def read_mesh( vtk_input_file: str ) -> vtkUnstructuredGrid: If first guess does not work, eventually all the others reader available will be tested. :return: A unstructured grid. """ + if not os.path.exists( vtk_input_file ): + err_msg: str = f"Invalid file path. Could not read \"{vtk_input_file}\"." + logging.error( err_msg ) + raise ValueError( err_msg ) file_extension = os.path.splitext( vtk_input_file )[ -1 ] extension_to_reader = { ".vtk": __read_vtk, ".vtu": __read_vtu } # Testing first the reader that should match @@ -111,8 +119,115 @@ def read_mesh( vtk_input_file: str ) -> vtkUnstructuredGrid: if output_mesh: return output_mesh # No reader did work. Dying. - logging.critical( f"Could not find the appropriate VTK reader for file \"{vtk_input_file}\". Dying..." 
) - sys.exit( 1 ) + err_msg = f"Could not find the appropriate VTK reader for file \"{vtk_input_file}\"." + logging.error( err_msg ) + raise ValueError( err_msg ) + + +def read_vtm( vtk_input_file: str ) -> vtkMultiBlockDataSet: + if not vtk_input_file.endswith( ".vtm" ): + raise ValueError( f"Input file '{vtk_input_file}' is not a .vtm file. Cannot read it." ) + reader = vtkXMLMultiBlockDataReader() + reader.SetFileName( vtk_input_file ) + reader.Update() + return reader.GetOutput() + + +def get_vtm_filepath_from_pvd( vtk_input_file: str, vtm_index: int ) -> str: + """From a GEOS output .pvd file, extracts one .vtm file and returns its filepath. + + Args: + vtk_input_file (str): .pvd filepath + vtm_index (int): Index that will select which .vtm to choose. + + Returns: + str: Filepath to the .vtm at the chosen index. + """ + if not vtk_input_file.endswith( ".pvd" ): + raise ValueError( f"Input file '{vtk_input_file}' is not a .pvd file. Cannot read it." ) + tree = ET.parse( vtk_input_file ) + root = tree.getroot() + # Extract all .vtm file paths contained in the .pvd + vtm_paths: list[ str ] = list() + for dataset in root.findall( ".//DataSet" ): + file_path = dataset.get( "file" ) + if file_path.endswith( ".vtm" ): + vtm_paths.append( file_path ) + number_vtms: int = len( vtm_paths ) + if number_vtms == 0: + raise ValueError( f"The '{vtk_input_file}' does not contain any .vtm path." ) + if vtm_index >= number_vtms: + raise ValueError( f"Cannot access the .vtm at index '{vtm_index}' in the '{vtk_input_file}'." + + f" The indexes available are between 0 and {number_vtms - 1}." ) + # build the complete filepath of the vtm to use + directory: str = os.path.dirname( vtk_input_file ) + vtm_filepath: str = os.path.join( directory, vtm_paths[ vtm_index ] ) + return vtm_filepath + + +def get_vtu_filepaths_from_vtm( vtm_filepath: str ) -> tuple[ str ]: + """By reading a vtm file, returns all the vtu filepaths present inside it. 
+ + Args: + vtm_filepath (str): Filepath to a .vtm + + Returns: + tuple[ str ]: ( "file/path/0.vtu", ..., "file/path/N.vtu" ) + """ + if not vtm_filepath.endswith( ".vtm" ): + raise ValueError( f"Input file '{vtm_filepath}' is not a .vtm file. Cannot read it." ) + # Parse the XML file and find all DataSet elements + tree = ET.parse( vtm_filepath ) + root = tree.getroot() + dataset_elements = root.findall( ".//DataSet" ) + # Extract the file attribute from each DataSet + vtu_filepaths: list[ str ] = [ ds.get( 'file' ) for ds in dataset_elements if ds.get( 'file' ).endswith( '.vtu' ) ] + directory: str = os.path.dirname( vtm_filepath ) + vtu_filepaths = [ os.path.join( directory, vtu_filepath ) for vtu_filepath in vtu_filepaths ] + return tuple( vtu_filepaths ) # to lock the order of the vtus like in the vtm + + +def get_number_of_cells_vtm_multiblock( multiblock: vtkMultiBlockDataSet ) -> int: + """Counts the total number of cells that are part of a vtkMultiBlockDataSet from a .vtm produced by GEOS. + + Args: + multiblock (vtkMultiBlockDataSet): Dataset obtained from reading a .vtm file from GEOS output simulation. + The tree hierarchy must look like this: + + + + + + + ... + + + ... + + + ... + + + + + + + + Returns: + int: The number of cells when combining all cell blocks. + """ + try: + cell_element_region: vtkMultiBlockDataSet = multiblock.GetBlock( 0 ).GetBlock( 0 ).GetBlock( 0 ) + assert cell_element_region.IsA( "vtkMultiBlockDataSet" ) + except AssertionError: + raise ValueError( "The multiblock provided from a .vtm is not of the expected GEOS format." 
) + total_number_cells: int = 0 + for region_id in range( cell_element_region.GetNumberOfBlocks() ): + region: vtkMultiBlockDataSet = cell_element_region.GetBlock( region_id ) + for rank_id in range( region.GetNumberOfBlocks() ): + rank: vtkUnstructuredGrid = region.GetBlock( rank_id ) + total_number_cells += rank.GetNumberOfCells() + return total_number_cells def __write_vtk( mesh: vtkUnstructuredGrid, output: str ) -> int: @@ -150,28 +265,7 @@ def write_mesh( mesh: vtkUnstructuredGrid, vtk_output: VtkOutput ) -> int: success_code = __write_vtu( mesh, vtk_output.output, vtk_output.is_data_mode_binary ) else: # No writer found did work. Dying. - logging.critical( f"Could not find the appropriate VTK writer for extension \"{file_extension}\". Dying..." ) - sys.exit( 1 ) - return 0 if success_code else 2 # the Write member function return 1 in case of success, 0 otherwise. - - -def vtkid_to_string( id: int ) -> str: - choices: dict[ int, str ] = { - 1: 'Vertex', - 3: 'Line', - 5: 'Triangle', - 7: 'Polygon', - 8: 'Pixel', - 9: 'Quad', - 10: 'Tetra', - 11: 'Voxel', - 12: 'Hex', - 13: 'Wedge', - 14: 'Pyramid', - 15: 'Pentagonal prism', - 16: 'Hexagonal Prism' - } - if id in choices: - return choices[ id ] - else: - return 'Unknown type' + err_msg = f"Could not find the appropriate VTK writer for extension \"{file_extension}\"." + logging.error( err_msg ) + raise ValueError( err_msg ) + return 0 if success_code else 2 # the Write member function return 1 in case of success, 0 otherwise. 
\ No newline at end of file From a613e08d7cb9a691525f35de74b4715963eaa2f9 Mon Sep 17 00:00:00 2001 From: alexbenedicto Date: Mon, 18 Nov 2024 11:20:27 -0800 Subject: [PATCH 20/34] First version working with "cell" as support --- .../mesh/doctor/checks/field_operations.py | 191 ++++++++++++++++-- .../parsing/field_operations_parsing.py | 18 +- 2 files changed, 184 insertions(+), 25 deletions(-) diff --git a/geos-mesh/src/geos/mesh/doctor/checks/field_operations.py b/geos-mesh/src/geos/mesh/doctor/checks/field_operations.py index 65a62a1..0f44b67 100644 --- a/geos-mesh/src/geos/mesh/doctor/checks/field_operations.py +++ b/geos-mesh/src/geos/mesh/doctor/checks/field_operations.py @@ -1,12 +1,16 @@ import logging +import numpy as np from dataclasses import dataclass from math import sqrt from numpy import empty from numpy.random import rand - +from scipy.spatial import KDTree from vtkmodules.util.numpy_support import numpy_to_vtk, vtk_to_numpy +from vtkmodules.vtkFiltersCore import vtkCellCenters from vtkmodules.vtkCommonCore import vtkDoubleArray -from geos.mesh.doctor.checks.vtk_utils import VtkOutput, read_mesh, write_mesh +from vtkmodules.vtkCommonDataModel import vtkUnstructuredGrid +from geos.mesh.doctor.checks.vtk_utils import ( VtkOutput, read_mesh, write_mesh, get_vtm_filepath_from_pvd, + get_vtu_filepaths_from_vtm ) @dataclass( frozen=True ) @@ -23,6 +27,68 @@ class Options: class Result: info: bool +__SUPPORT_CHOICES = [ "point", "cell" ] +__OPERATION_CHOICES = [ "transfer" ] + + +def get_vtu_filepaths( options: Options ) -> tuple[ str ]: + """Returns the vtu filepaths to use for the rest of the workflow. + + Args: + options (Options): Options chosen by the user. 
+ + Returns: + tuple[ str ]: ( "file/path/0.vtu", ..., "file/path/N.vtu" ) + """ + source_filepath: str = options.source + if source_filepath.endswith( ".vtu" ): + return ( source_filepath ) + elif source_filepath.endswith( ".vtm" ): + return get_vtu_filepaths_from_vtm( source_filepath ) + elif source_filepath.endswith( ".pvd" ): + vtm_filepath: str = get_vtm_filepath_from_pvd( source_filepath, options.vtm_index ) + return get_vtu_filepaths_from_vtm( vtm_filepath ) + else: + raise ValueError( f"The filepath '{options.source}' provided targets neither a .vtu, a .vtm nor a .pvd file." ) + + +def build_cell_centers_array( mesh: vtkUnstructuredGrid ) -> np.array: + """Builds an array containing the cell centers coordinates for every cell of a mesh. + + Args: + mesh (vtkUnstructuredGrid): A vtk grid. + + Returns: + np.array: Shape=( 3, mesh number of cells ) + """ + cell_centers_filter: vtkCellCenters = vtkCellCenters() + cell_centers_filter.SetInputData( mesh ) + cell_centers_filter.Update() + cell_centers = cell_centers_filter.GetOutput() + points = cell_centers.GetPoints() + num_points: int = points.GetNumberOfPoints() + cell_centers_array: np.array = np.array( [ points.GetPoint( i ) for i in range( num_points ) ], dtype=float ) + return cell_centers_array + + +def get_cells_reorder_mapping( kd_tree_grid_ref: KDTree, sub_grid: vtkUnstructuredGrid ) -> np.array: + """Builds an array containing the indexes of the reference grid linked to every cell ids of the subset grid. + + Args: + kd_tree_grid_ref (KDTree): A KDTree of the nearest neighbor cell centers for every cells of the reference grid. + sub_grid (vtkUnstructuredGrid): A vtk grid that is a subset of the reference grid. + + Returns: + np.array: [ cell_idK_grid, cell_idN_grid, ... 
] + """ + cell_centers: np.array = build_cell_centers_array( sub_grid ) + number_cells: int = sub_grid.GetNumberOfCells() + mapping: np.array = np.empty( number_cells, dtype=np.int64 ) + for cell_id in range( number_cells ): + _, index = kd_tree_grid_ref.query( cell_centers[ cell_id ] ) + mapping[ cell_id ] = index + return mapping + def __analytic_field( mesh, support, name ) -> bool: if support == 'node': @@ -118,20 +184,115 @@ def __transfer_field( mesh, support, field_name, source ) -> bool: return True -def __check( mesh, options: Options ) -> Result: - if options.source == 'function': - succ = __analytic_field( mesh, options.support, options.field_name ) - if succ: - write_mesh( mesh, options.out_vtk ) - elif ( options.source[ -4: ] == '.vtu' or options.source[ -4: ] == '.vtk' ): - succ = __transfer_field( mesh, options.support, options.field_name, options.source ) - if succ: - write_mesh( mesh, options.out_vtk ) +def perform_operation_on_array( global_array: np.array, + local_array: np.array, + mapping: np.array, + options: Options ) -> None: + """Perform an operation that will fill the values of a global_array using the values contained in a local_array + that is smaller or equal to the size as the global_array. A mapping is used to copy the values from the + local_array to the right indexes in the global_array. + + Args: + global_array (np.array): Array of size N. + local_array (np.array): Array of size M <= N that is representing a subset of the global_array. + mapping (np.array): Array of global indexes of size M. + options (Options): Options chosen by the user. + """ + size_global, size_local = global_array.shape, local_array.shape + assert size_global[ 0 ] >= size_local[ 0 ], "The array to fill is smaller than the array to use." 
+ number_columns_global: int = size_global[ 1 ] if len( size_global ) == 2 else 1 + number_columns_local: int = size_local[ 1 ] if len( size_local ) == 2 else 1 + assert number_columns_global == number_columns_local, "The arrays do not have same number of columns." + if options.operation == __OPERATION_CHOICES[ 0 ]: # transfer + if len(size_local) == 1: + local_array = local_array.reshape(-1, 1) + global_array[ mapping ] = local_array else: - logging.error( 'incorrect source option. Options are function, *.vtu, *.vtk.' ) - succ = False - return Result( info=succ ) - #TODO: Better exception handle + raise ValueError( f"Cannot perform operation '{options.operation}'. Only operations are {__OPERATION_CHOICES}" ) + + +def implement_arrays( grid_ref: vtkUnstructuredGrid, global_arrays: dict[ str, np.array ], options: Options ) -> None: + """Implement the arrays that are contained in global_arrays into the Data of a grid_ref. + + Args: + grid_ref (vtkUnstructuredGrid): A vtk grid. + global_arrays (dict[ str, np.array ]): { "array_name0": np.array, ..., "array_nameN": np.array } + options (Options): Options chosen by the user. + """ + if options.support == __SUPPORT_CHOICES[ 0 ]: + data = grid_ref.GetPointData() + number_elements: int = grid_ref.GetNumberOfPoints() + elif options.support == __SUPPORT_CHOICES[ 1 ]: + data = grid_ref.GetCellData() + number_elements = grid_ref.GetNumberOfCells() + else: + raise ValueError( f"Support choices should be one of these: {__SUPPORT_CHOICES}." 
) + + for name, array in global_arrays.items(): + dimension: int = array.shape[ 1 ] if len( array.shape ) == 2 else 1 + if dimension > 1: # Reshape the VTK array to match the original dimensions + vtk_array = numpy_to_vtk( array.flatten() ) + vtk_array.SetNumberOfComponents( dimension ) + vtk_array.SetNumberOfTuples( number_elements ) + else: + vtk_array = numpy_to_vtk( array ) + vtk_array.SetName( name ) + if options.operation == __OPERATION_CHOICES[ 0 ]: # transfer + data.AddArray( vtk_array ) + + +def __check_on_points( grid_ref: vtkUnstructuredGrid, vtu_filepaths: tuple[ str ], options: Options ) -> None: + # First part needs to create the empty arrays for each field name that we want to operate on. + # The goal is then to fill them with the actual values from the different sub vtus on which we apply the operation. + ... + + +def __check_on_cells( grid_ref: vtkUnstructuredGrid, vtu_filepaths: tuple[ str ], options: Options ) -> None: + # First part needs to create the empty arrays for each field name that we want to operate on. + # The goal is then to fill them with the actual values from the different sub vtus on which we apply the operation. + global_arrays: dict[ str, np.array ] = dict() + cell_centers_ref: np.array = build_cell_centers_array( grid_ref ) + kd_tree_ref: KDTree = KDTree( cell_centers_ref ) + for vtu_id in range( len( vtu_filepaths ) ): + sub_grid: vtkUnstructuredGrid = read_mesh( vtu_filepaths[ vtu_id ] ) + sub_data = sub_grid.GetCellData() + # We need to make sure that the arrays we are looking at exist in the sub grid + arrays_available: list[ str ] = [ sub_data.GetArrayName( i ) for i in range( sub_data.GetNumberOfArrays() ) ] + to_operate_on_indexes: list[ int ] = list() + for name in options.field_names: + if name not in arrays_available: + logging.warning( f"The field named '{name}' does not exist in '{vtu_filepaths[ vtu_id ]}'." + + " Cannot perform operation on it. Default values set to NaN." 
) + else: + array_index: int = arrays_available.index( name ) + to_operate_on_indexes.append( array_index ) + if not name in global_arrays: + dimension: int = sub_data.GetArray( array_index ).GetNumberOfComponents() + global_arrays[ name ] = np.full( ( grid_ref.GetNumberOfCells(), dimension ), np.nan ) + # If the arrays exist, we can perform the operation and fill the empty arrays + if len( to_operate_on_indexes ) > 0: + mapping: np.array = get_cells_reorder_mapping( kd_tree_ref, sub_grid ) + for index in to_operate_on_indexes: + name = arrays_available[ index ] + sub_array: np.array = vtk_to_numpy( sub_data.GetArray( index ) ) + perform_operation_on_array( global_arrays[ name ], sub_array, mapping, options ) + # the global arrays have been filled, so now we need to implement them in the grid_ref + implement_arrays( grid_ref, global_arrays, options ) + + +def __check( grid_ref: vtkUnstructuredGrid, options: Options ) -> Result: + all_sub_vtu_filepaths: tuple[ str ] = get_vtu_filepaths( options ) + # create the output grid + output_mesh: vtkUnstructuredGrid = grid_ref.NewInstance() + output_mesh.CopyStructure( grid_ref ) + output_mesh.CopyAttributes( grid_ref ) + # perform operations on the grid + if options.support == __SUPPORT_CHOICES[ 0 ]: + __check_on_points( output_mesh, all_sub_vtu_filepaths, options ) + elif options.support == __SUPPORT_CHOICES[ 1 ]: + __check_on_cells( output_mesh, all_sub_vtu_filepaths, options ) + write_mesh( output_mesh, options.out_vtk ) + return Result( info="OK" ) def check( vtk_input_file: str, options: Options ) -> Result: diff --git a/geos-mesh/src/geos/mesh/doctor/parsing/field_operations_parsing.py b/geos-mesh/src/geos/mesh/doctor/parsing/field_operations_parsing.py index fb80671..780ddb3 100644 --- a/geos-mesh/src/geos/mesh/doctor/parsing/field_operations_parsing.py +++ b/geos-mesh/src/geos/mesh/doctor/parsing/field_operations_parsing.py @@ -1,14 +1,12 @@ import logging -from geos.mesh.doctor.checks.field_operations import 
Options, Result +from geos.mesh.doctor.checks.field_operations import Options, Result, __SUPPORT_CHOICES, __OPERATION_CHOICES from geos.mesh.doctor.parsing import vtk_output_parsing, FIELD_OPERATIONS __OPERATION = "operation" -__OPERATION_CHOICES = [ "transfer" ] __SUPPORT = "support" -__SUPPORT_CHOICES = [ "point", "cell" ] -__NAME = "name" +__FIELD_NAMES = "field_names" __SOURCE = "source" __WHICH_VTM = "which_vtm" @@ -30,10 +28,10 @@ def fill_subparser( subparsers ) -> None: metavar=", ".join( map( str, __SUPPORT_CHOICES ) ), default=__SUPPORT_CHOICES[ 0 ], help="[string]: Where to define field." ) - p.add_argument( '--' + __NAME, + p.add_argument( '--' + __FIELD_NAMES, type=str, required=True, - help="[list of string comma separated]: Name of the field(s) to manipulate." ) + help="[list of string comma separated]: Name of each field to use for the operation." ) p.add_argument( '--' + __SOURCE, type=str, required=True, @@ -51,15 +49,15 @@ def fill_subparser( subparsers ) -> None: def convert( parsed_options ) -> Options: operation: str = parsed_options[ __OPERATION ] - if support not in __OPERATION_CHOICES: + if operation not in __OPERATION_CHOICES: raise ValueError( f"For --{__OPERATION}, the only choices available are {__OPERATION_CHOICES}." ) support: str = parsed_options[ __SUPPORT ] if support not in __SUPPORT_CHOICES: raise ValueError( f"For --{__SUPPORT}, the only choices available are {__SUPPORT_CHOICES}." 
) - field_names: list[ str ] = list( map( int, parsed_options[ __NAME ].split( "," ) ) ) + field_names: list[ str ] = list( map( str, parsed_options[ __FIELD_NAMES ].split( "," ) ) ) which_vtm: str = parsed_options[ __WHICH_VTM ] if which_vtm in __WHICH_VTM_SUGGESTIONS: - vtm_index: int = 0 if __WHICH_VTM_SUGGESTIONS[ 0 ] else -1 + vtm_index: int = 0 if which_vtm == __WHICH_VTM_SUGGESTIONS[ 0 ] else -1 else: try: vtm_index = int( which_vtm ) @@ -75,5 +73,5 @@ def convert( parsed_options ) -> Options: def display_results( options: Options, result: Result ): - if result.info != True: + if result.info != "OK": logging.error( f"Field addition failed" ) From e863fd15dcde56522fdfbbf82f102949dbf2eaea Mon Sep 17 00:00:00 2001 From: alexbenedicto Date: Tue, 19 Nov 2024 16:49:53 -0800 Subject: [PATCH 21/34] Added points as support --- .../mesh/doctor/checks/field_operations.py | 144 ++++++++---------- .../src/geos/mesh/doctor/checks/vtk_utils.py | 36 ++++- 2 files changed, 96 insertions(+), 84 deletions(-) diff --git a/geos-mesh/src/geos/mesh/doctor/checks/field_operations.py b/geos-mesh/src/geos/mesh/doctor/checks/field_operations.py index 0f44b67..24c8d08 100644 --- a/geos-mesh/src/geos/mesh/doctor/checks/field_operations.py +++ b/geos-mesh/src/geos/mesh/doctor/checks/field_operations.py @@ -1,16 +1,15 @@ import logging -import numpy as np from dataclasses import dataclass from math import sqrt -from numpy import empty +from numpy import array, empty, full, int64, nan from numpy.random import rand from scipy.spatial import KDTree from vtkmodules.util.numpy_support import numpy_to_vtk, vtk_to_numpy -from vtkmodules.vtkFiltersCore import vtkCellCenters from vtkmodules.vtkCommonCore import vtkDoubleArray from vtkmodules.vtkCommonDataModel import vtkUnstructuredGrid -from geos.mesh.doctor.checks.vtk_utils import ( VtkOutput, read_mesh, write_mesh, get_vtm_filepath_from_pvd, - get_vtu_filepaths_from_vtm ) +from geos.mesh.doctor.checks.vtk_utils import ( VtkOutput, 
get_points_coords_from_vtk, get_cell_centers_array, + get_vtm_filepath_from_pvd, get_vtu_filepaths_from_vtm, read_mesh, + write_mesh ) @dataclass( frozen=True ) @@ -27,8 +26,15 @@ class Options: class Result: info: bool -__SUPPORT_CHOICES = [ "point", "cell" ] + __OPERATION_CHOICES = [ "transfer" ] +__SUPPORT_CHOICES = [ "point", "cell" ] + + +support_construction: dict[ str, tuple[ any ] ] = { + __SUPPORT_CHOICES[ 0 ]: get_points_coords_from_vtk, + __SUPPORT_CHOICES[ 1 ]: get_cell_centers_array +} def get_vtu_filepaths( options: Options ) -> tuple[ str ]: @@ -52,40 +58,26 @@ def get_vtu_filepaths( options: Options ) -> tuple[ str ]: raise ValueError( f"The filepath '{options.source}' provided targets neither a .vtu, a .vtm nor a .pvd file." ) -def build_cell_centers_array( mesh: vtkUnstructuredGrid ) -> np.array: - """Builds an array containing the cell centers coordinates for every cell of a mesh. +def get_reorder_mapping( kd_tree_grid_ref: KDTree, sub_grid: vtkUnstructuredGrid, options: Options ) -> array: + """Builds an array containing the indexes of the reference grid linked to every + cell ids / point ids of the subset grid. Args: - mesh (vtkUnstructuredGrid): A vtk grid. - - Returns: - np.array: Shape=( 3, mesh number of cells ) - """ - cell_centers_filter: vtkCellCenters = vtkCellCenters() - cell_centers_filter.SetInputData( mesh ) - cell_centers_filter.Update() - cell_centers = cell_centers_filter.GetOutput() - points = cell_centers.GetPoints() - num_points: int = points.GetNumberOfPoints() - cell_centers_array: np.array = np.array( [ points.GetPoint( i ) for i in range( num_points ) ], dtype=float ) - return cell_centers_array - - -def get_cells_reorder_mapping( kd_tree_grid_ref: KDTree, sub_grid: vtkUnstructuredGrid ) -> np.array: - """Builds an array containing the indexes of the reference grid linked to every cell ids of the subset grid. 
- - Args: - kd_tree_grid_ref (KDTree): A KDTree of the nearest neighbor cell centers for every cells of the reference grid. + kd_tree_grid_ref (KDTree): A KDTree of the nearest neighbor cell centers for every cells / + points coordinates for point of the reference grid. sub_grid (vtkUnstructuredGrid): A vtk grid that is a subset of the reference grid. Returns: - np.array: [ cell_idK_grid, cell_idN_grid, ... ] + np.array: [ cell_idK_grid, cell_idN_grid, ... ] or [ point_idK_grid, point_idN_grid, ... ] """ - cell_centers: np.array = build_cell_centers_array( sub_grid ) - number_cells: int = sub_grid.GetNumberOfCells() - mapping: np.array = np.empty( number_cells, dtype=np.int64 ) - for cell_id in range( number_cells ): - _, index = kd_tree_grid_ref.query( cell_centers[ cell_id ] ) + if options.support not in __SUPPORT_CHOICES: + raise ValueError( f"Support option should be between {__SUPPORT_CHOICES}, not {options.support}." ) + support_elements: array = support_construction[ options.support ]( sub_grid ) + # now that you have the support elements, you can map them to the reference grid + number_elements: int = support_elements.shape[ 0 ] + mapping: array = empty( number_elements, dtype=int64 ) + for cell_id in range( number_elements ): + _, index = kd_tree_grid_ref.query( support_elements[ cell_id ] ) mapping[ cell_id ] = index return mapping @@ -184,10 +176,7 @@ def __transfer_field( mesh, support, field_name, source ) -> bool: return True -def perform_operation_on_array( global_array: np.array, - local_array: np.array, - mapping: np.array, - options: Options ) -> None: +def perform_operation_on_array( global_array: array, local_array: array, mapping: array, options: Options ) -> None: """Perform an operation that will fill the values of a global_array using the values contained in a local_array that is smaller or equal to the size as the global_array. A mapping is used to copy the values from the local_array to the right indexes in the global_array. 
@@ -205,13 +194,13 @@ def perform_operation_on_array( global_array: np.array, assert number_columns_global == number_columns_local, "The arrays do not have same number of columns." if options.operation == __OPERATION_CHOICES[ 0 ]: # transfer if len(size_local) == 1: - local_array = local_array.reshape(-1, 1) + local_array = local_array.reshape( -1, 1 ) global_array[ mapping ] = local_array else: raise ValueError( f"Cannot perform operation '{options.operation}'. Only operations are {__OPERATION_CHOICES}" ) -def implement_arrays( grid_ref: vtkUnstructuredGrid, global_arrays: dict[ str, np.array ], options: Options ) -> None: +def implement_arrays( grid_ref: vtkUnstructuredGrid, global_arrays: dict[ str, array ], options: Options ) -> None: """Implement the arrays that are contained in global_arrays into the Data of a grid_ref. Args: @@ -219,15 +208,12 @@ def implement_arrays( grid_ref: vtkUnstructuredGrid, global_arrays: dict[ str, n global_arrays (dict[ str, np.array ]): { "array_name0": np.array, ..., "array_nameN": np.array } options (Options): Options chosen by the user. """ - if options.support == __SUPPORT_CHOICES[ 0 ]: - data = grid_ref.GetPointData() - number_elements: int = grid_ref.GetNumberOfPoints() - elif options.support == __SUPPORT_CHOICES[ 1 ]: - data = grid_ref.GetCellData() - number_elements = grid_ref.GetNumberOfCells() - else: + if options.support not in __SUPPORT_CHOICES: raise ValueError( f"Support choices should be one of these: {__SUPPORT_CHOICES}." 
) - + data = grid_ref.GetPointData() if options.support == __SUPPORT_CHOICES[ 0 ] else grid_ref.GetCellData() + number_elements: int = grid_ref.GetNumberOfPoints() if options.support == __SUPPORT_CHOICES[ 0 ] else \ + grid_ref.GetNumberOfCells() + # once the data is selected, we can implement the global arrays inside it for name, array in global_arrays.items(): dimension: int = array.shape[ 1 ] if len( array.shape ) == 2 else 1 if dimension > 1: # Reshape the VTK array to match the original dimensions @@ -241,56 +227,48 @@ def implement_arrays( grid_ref: vtkUnstructuredGrid, global_arrays: dict[ str, n data.AddArray( vtk_array ) -def __check_on_points( grid_ref: vtkUnstructuredGrid, vtu_filepaths: tuple[ str ], options: Options ) -> None: - # First part needs to create the empty arrays for each field name that we want to operate on. - # The goal is then to fill them with the actual values from the different sub vtus on which we apply the operation. - ... - - -def __check_on_cells( grid_ref: vtkUnstructuredGrid, vtu_filepaths: tuple[ str ], options: Options ) -> None: - # First part needs to create the empty arrays for each field name that we want to operate on. - # The goal is then to fill them with the actual values from the different sub vtus on which we apply the operation. 
- global_arrays: dict[ str, np.array ] = dict() - cell_centers_ref: np.array = build_cell_centers_array( grid_ref ) - kd_tree_ref: KDTree = KDTree( cell_centers_ref ) - for vtu_id in range( len( vtu_filepaths ) ): - sub_grid: vtkUnstructuredGrid = read_mesh( vtu_filepaths[ vtu_id ] ) - sub_data = sub_grid.GetCellData() +def __check( grid_ref: vtkUnstructuredGrid, options: Options ) -> Result: + # create the output grid + output_mesh: vtkUnstructuredGrid = grid_ref.NewInstance() + output_mesh.CopyStructure( grid_ref ) + output_mesh.CopyAttributes( grid_ref ) + # find the support elements to use and construct their KDTree + if options.support not in __SUPPORT_CHOICES: + raise ValueError( f"Support option should be between {__SUPPORT_CHOICES}, not {options.support}." ) + support_elements: array = support_construction[ options.support ]( output_mesh ) + size_support: int = support_elements.shape[ 0 ] + kd_tree_ref: KDTree = KDTree( support_elements ) + # perform operations to construct the global arrays to implement in the output mesh + global_arrays: dict[ str, array ] = dict() + sub_vtu_filepaths: tuple[ str ] = get_vtu_filepaths( options ) + for vtu_id in range( len( sub_vtu_filepaths ) ): + sub_grid: vtkUnstructuredGrid = read_mesh( sub_vtu_filepaths[ vtu_id ] ) + if options.support == __SUPPORT_CHOICES[ 0 ]: + sub_data = sub_grid.GetPointData() + else: + sub_data = sub_grid.GetCellData() # We need to make sure that the arrays we are looking at exist in the sub grid arrays_available: list[ str ] = [ sub_data.GetArrayName( i ) for i in range( sub_data.GetNumberOfArrays() ) ] to_operate_on_indexes: list[ int ] = list() for name in options.field_names: if name not in arrays_available: - logging.warning( f"The field named '{name}' does not exist in '{vtu_filepaths[ vtu_id ]}'." + - " Cannot perform operation on it. Default values set to NaN." ) + logging.warning( f"The field named '{name}' does not exist in '{sub_vtu_filepaths[ vtu_id ]}'" + + " in the data. 
Cannot perform operation on it. Default values set to NaN." ) else: array_index: int = arrays_available.index( name ) to_operate_on_indexes.append( array_index ) if not name in global_arrays: dimension: int = sub_data.GetArray( array_index ).GetNumberOfComponents() - global_arrays[ name ] = np.full( ( grid_ref.GetNumberOfCells(), dimension ), np.nan ) + global_arrays[ name ] = full( ( size_support, dimension ), nan ) # If the arrays exist, we can perform the operation and fill the empty arrays if len( to_operate_on_indexes ) > 0: - mapping: np.array = get_cells_reorder_mapping( kd_tree_ref, sub_grid ) + mapping: array = get_reorder_mapping( kd_tree_ref, sub_grid, options ) for index in to_operate_on_indexes: name = arrays_available[ index ] - sub_array: np.array = vtk_to_numpy( sub_data.GetArray( index ) ) + sub_array: array = vtk_to_numpy( sub_data.GetArray( index ) ) perform_operation_on_array( global_arrays[ name ], sub_array, mapping, options ) - # the global arrays have been filled, so now we need to implement them in the grid_ref - implement_arrays( grid_ref, global_arrays, options ) - - -def __check( grid_ref: vtkUnstructuredGrid, options: Options ) -> Result: - all_sub_vtu_filepaths: tuple[ str ] = get_vtu_filepaths( options ) - # create the output grid - output_mesh: vtkUnstructuredGrid = grid_ref.NewInstance() - output_mesh.CopyStructure( grid_ref ) - output_mesh.CopyAttributes( grid_ref ) - # perform operations on the grid - if options.support == __SUPPORT_CHOICES[ 0 ]: - __check_on_points( output_mesh, all_sub_vtu_filepaths, options ) - elif options.support == __SUPPORT_CHOICES[ 1 ]: - __check_on_cells( output_mesh, all_sub_vtu_filepaths, options ) + # The global arrays have been filled, so now we need to implement them in the output_mesh + implement_arrays( output_mesh, global_arrays, options ) write_mesh( output_mesh, options.out_vtk ) return Result( info="OK" ) diff --git a/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py 
b/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py index 40b04d8..067ae6a 100644 --- a/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py +++ b/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py @@ -2,9 +2,11 @@ import os.path import xml.etree.ElementTree as ET from dataclasses import dataclass +from numpy import array from typing import Iterator, Optional +from vtkmodules.vtkFiltersCore import vtkCellCenters from vtkmodules.vtkCommonCore import vtkIdList -from vtkmodules.vtkCommonDataModel import vtkUnstructuredGrid, vtkMultiBlockDataSet +from vtkmodules.vtkCommonDataModel import vtkUnstructuredGrid, vtkMultiBlockDataSet, vtkPolyData from vtkmodules.vtkIOLegacy import vtkUnstructuredGridWriter, vtkUnstructuredGridReader from vtkmodules.vtkIOXML import vtkXMLUnstructuredGridReader, vtkXMLUnstructuredGridWriter, vtkXMLMultiBlockDataReader @@ -69,6 +71,38 @@ def has_invalid_field( mesh: vtkUnstructuredGrid, invalid_fields: list[ str ] ) return False +def get_points_coords_from_vtk( data: vtkPolyData ) -> array: + """Extracts the coordinates of every point from a vtkPolyData and returns them in a numpy array. + + Args: + data (vtkPolyData): vtkPolyData object. + + Returns: + array: Numpy array of shape( number_of_points, 3 ) + """ + points = data.GetPoints() + num_points: int = points.GetNumberOfPoints() + points_coords: array = array( [ points.GetPoint( i ) for i in range( num_points ) ], dtype=float ) + return points_coords + + +def get_cell_centers_array( mesh: vtkUnstructuredGrid ) -> array: + """Returns an array containing the cell centers coordinates for every cell of a mesh. + + Args: + mesh (vtkUnstructuredGrid): A vtk grid. 
+ + Returns: + np.array: Shape=( mesh number of cells, 3 ) + """ + cell_centers_filter: vtkCellCenters = vtkCellCenters() + cell_centers_filter.SetInputData( mesh ) + cell_centers_filter.Update() + cell_centers = cell_centers_filter.GetOutput() + cell_centers_array: array = get_points_coords_from_vtk( cell_centers ) + return cell_centers_array + + def __read_vtk( vtk_input_file: str ) -> Optional[ vtkUnstructuredGrid ]: reader = vtkUnstructuredGridReader() logging.info( f"Testing file format \"{vtk_input_file}\" using legacy format reader..." ) From 7de5d874e79b8f3b00a46bdacffc5fb2382f5686 Mon Sep 17 00:00:00 2001 From: alexbenedicto Date: Wed, 27 Nov 2024 12:08:12 -0800 Subject: [PATCH 22/34] Tests of vtk_utils added --- .../src/geos/mesh/doctor/checks/vtk_utils.py | 59 +-- geos-mesh/tests/test_vtu_utils.py | 367 ++++++++++++++++++ 2 files changed, 381 insertions(+), 45 deletions(-) create mode 100644 geos-mesh/tests/test_vtu_utils.py diff --git a/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py b/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py index 067ae6a..dccde55 100644 --- a/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py +++ b/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py @@ -8,7 +8,8 @@ from vtkmodules.vtkCommonCore import vtkIdList from vtkmodules.vtkCommonDataModel import vtkUnstructuredGrid, vtkMultiBlockDataSet, vtkPolyData from vtkmodules.vtkIOLegacy import vtkUnstructuredGridWriter, vtkUnstructuredGridReader -from vtkmodules.vtkIOXML import vtkXMLUnstructuredGridReader, vtkXMLUnstructuredGridWriter, vtkXMLMultiBlockDataReader +from vtkmodules.vtkIOXML import ( vtkXMLUnstructuredGridReader, vtkXMLUnstructuredGridWriter, + vtkXMLMultiBlockDataReader, vtkXMLMultiBlockDataWriter ) @dataclass( frozen=True ) @@ -221,49 +222,6 @@ def get_vtu_filepaths_from_vtm( vtm_filepath: str ) -> tuple[ str ]: return tuple( vtu_filepaths ) # to lock the order of the vtus like in the vtm -def get_number_of_cells_vtm_multiblock( multiblock: 
vtkMultiBlockDataSet ) -> int: - """Counts the total number of cells that are part of a vtkMultiBlockDataSet from a .vtm produced by GEOS. - - Args: - multiblock (vtkMultiBlockDataSet): Dataset obtained from reading a .vtm file from GEOS output simulation. - The tree hierarchy must look like this: - - - - - - - ... - - - ... - - - ... - - - - - - - - Returns: - int: The number of cells when combining all cell blocks. - """ - try: - cell_element_region: vtkMultiBlockDataSet = multiblock.GetBlock( 0 ).GetBlock( 0 ).GetBlock( 0 ) - assert cell_element_region.IsA( "vtkMultiBlockDataSet" ) - except AssertionError: - raise ValueError( "The multiblock provided from a .vtm is not of the expected GEOS format." ) - total_number_cells: int = 0 - for region_id in range( cell_element_region.GetNumberOfBlocks() ): - region: vtkMultiBlockDataSet = cell_element_region.GetBlock( region_id ) - for rank_id in range( region.GetNumberOfBlocks() ): - rank: vtkUnstructuredGrid = region.GetBlock( rank_id ) - total_number_cells += rank.GetNumberOfCells() - return total_number_cells - - def __write_vtk( mesh: vtkUnstructuredGrid, output: str ) -> int: logging.info( f"Writing mesh into file \"{output}\" using legacy format." ) writer = vtkUnstructuredGridWriter() @@ -302,4 +260,15 @@ def write_mesh( mesh: vtkUnstructuredGrid, vtk_output: VtkOutput ) -> int: err_msg = f"Could not find the appropriate VTK writer for extension \"{file_extension}\"." logging.error( err_msg ) raise ValueError( err_msg ) - return 0 if success_code else 2 # the Write member function return 1 in case of success, 0 otherwise. \ No newline at end of file + return 0 if success_code else 2 # the Write member function return 1 in case of success, 0 otherwise. + + +def write_VTM( multiblock: vtkMultiBlockDataSet, vtk_output: VtkOutput ) -> int: + if os.path.exists( vtk_output.output ): + logging.error( f"File \"{vtk_output.output}\" already exists, nothing done." 
) + return 1 + writer = vtkXMLMultiBlockDataWriter() + writer.SetFileName( vtk_output.output ) + writer.SetInputData( multiblock ) + writer.SetDataModeToBinary() if vtk_output.is_data_mode_binary else writer.SetDataModeToAscii() + writer.Write() diff --git a/geos-mesh/tests/test_vtu_utils.py b/geos-mesh/tests/test_vtu_utils.py new file mode 100644 index 0000000..eec6516 --- /dev/null +++ b/geos-mesh/tests/test_vtu_utils.py @@ -0,0 +1,367 @@ +import glob +import os +import shutil +import xml.etree.ElementTree as ET +from geos.mesh.doctor.checks import vtk_utils as vu +from numpy import array, ones, array_equal +from vtkmodules.util.numpy_support import numpy_to_vtk +from vtkmodules.vtkCommonCore import vtkPoints +from vtkmodules.vtkCommonDataModel import ( vtkMultiBlockDataSet, vtkUnstructuredGrid, vtkCellArray, vtkHexahedron, + vtkCompositeDataSet, VTK_HEXAHEDRON ) + + +""" +For creation of output test meshes +""" +current_file_path: str = __file__ +dir_name: str = os.path.dirname( current_file_path ) +pattern_test: str = "to_check_mesh" +filepath_mesh_for_stats: str = os.path.join( dir_name, pattern_test + ".vtu" ) +test_mesh_for_stats: vu.VtkOutput = vu.VtkOutput( filepath_mesh_for_stats, True ) +geos_hierarchy: str = "mesh/Level0" + + +""" +Utility functions for tests +""" +def split_list( initial_list: list[ any ], number_sub_lists: int ) -> list[ list[ any ] ]: + initial_len: int = len( initial_list ) + assert number_sub_lists <= initial_len + average: int = initial_len // number_sub_lists + remainder: int = initial_len % number_sub_lists + new_lists: list = list() + start: int = 0 + for i in range( initial_len ): + end: int = start + average + ( 1 if i < remainder else 0 ) + new_lists.append( initial_list[ start:end ] ) + start = end + return new_lists + + +def create_vtk_points( point_3D_coords: list[ list[ float ] ] ) -> vtkPoints: + points: vtkPoints = vtkPoints() + for coord in point_3D_coords: + points.InsertNextPoint( coord ) + return points + + +def 
create_vtk_hexahedron( point_ids: list[ int ] ) -> vtkHexahedron: + hex: vtkHexahedron = vtkHexahedron() + for i, point_id in enumerate( point_ids ): + hex.GetPointIds().SetId( i, point_id ) + return hex + + +def create_type_vtk_grid( point_3D_coords: list[ list[ float ] ], + all_point_ids: list[ list[ int ] ], + vtk_type: int ) -> vtkUnstructuredGrid: + points: vtkPoints = create_vtk_points( point_3D_coords ) + cells: vtkCellArray = vtkCellArray() + for point_ids in all_point_ids: + cells.InsertNextCell( create_vtk_hexahedron( point_ids ) ) + grid: vtkUnstructuredGrid = vtkUnstructuredGrid() + grid.SetPoints( points ) + grid.SetCells( vtk_type, cells ) + return grid + + +def create_geos_pvd( all_grids_per_vtm: dict[ str, dict[ str, list[ vtkUnstructuredGrid ] ] ], + pvd_dir_path: str ) -> str: + # Create the .pvd + os.makedirs( pvd_dir_path, exist_ok=True ) + pvd_name = os.path.basename( pvd_dir_path ) + root_pvd = ET.Element( "VTKFile", type="Collection", version="1.0" ) + collection = ET.SubElement( root_pvd, "Collection" ) + + for timestep, regions_with_grids in all_grids_per_vtm.items(): + vtm_directory: str = os.path.join( pvd_dir_path, timestep ) + os.makedirs( vtm_directory, exist_ok=True ) + vtm_sub_path: str = os.path.join( pvd_name, timestep + ".vtm" ) + ET.SubElement( collection, "DataSet", timestep=timestep, file=vtm_sub_path ) + + # Create the .vtm file respecting GEOS format + root_vtm = ET.Element( "VTKFile", type="vtkMultiBlockDataSet", version="1.0" ) + vtm = ET.SubElement( root_vtm, "vtkMultiBlockDataSet" ) + mesh_block = ET.SubElement( vtm, "Block", name="mesh" ) + level0_block = ET.SubElement( mesh_block, "Block", name="Level0" ) + cell_element_region_block = ET.SubElement( level0_block, "Block", name="CellElementRegion" ) + + for region, grids in regions_with_grids.items(): + region_directory: str = os.path.join( vtm_directory, geos_hierarchy, region ) + os.makedirs( region_directory, exist_ok=True ) + + # Create block element for regions + 
region_block = ET.SubElement( cell_element_region_block, "Block", name=region ) + for i, grid in enumerate( grids ): + rank_name: str = "rank_0" + str( i ) + vtu_name: str = rank_name + ".vtu" + path_from_vtm: str = os.path.join( timestep, geos_hierarchy, vtu_name ) + ET.SubElement( region_block, "DataSet", name=rank_name, file=path_from_vtm ) + vtu_filepath: str = os.path.join( region_directory, vtu_name ) + output_vtu: vu.VtkOutput = vu.VtkOutput( vtu_filepath, False ) + vu.write_mesh( grid, output_vtu ) + + # write the vtm for each timestep + vtm_filepath: str = os.path.join( pvd_directory, timestep + ".vtm" ) + tree_vtm = ET.ElementTree( root_vtm ) + tree_vtm.write( vtm_filepath, encoding='utf-8', xml_declaration=True ) + + # write the pvd file link to the vtms written before + tree_pvd = ET.ElementTree( root_pvd ) + pvd_filepath: str = os.path.join( os.path.dirname( pvd_dir_path ), pvd_name + ".pvd" ) + tree_pvd.write( pvd_filepath, encoding='utf-8', xml_declaration=True ) + + return pvd_filepath + + +""" +Grids to perform tests on. 
+""" +# 4 Hexahedrons +four_hex_ids: list[ list[ int ] ] = [ [ 0, 1, 4, 3, 6, 7, 10, 9 ], + [ 1, 2, 5, 4, 7, 8, 11, 10 ], + [ 6, 7, 10, 9, 12, 13, 16, 15 ], + [ 7, 8, 11, 10, 13, 14, 17, 16 ] ] + +four_hexs_points_coords: list[ list[ float ] ] = [ [ 0.0, 0.0, 0.0 ], # point0 + [ 1.0, 0.0, 0.0 ], # point1 + [ 2.0, 0.0, 0.0 ], # point2 + [ 0.0, 1.0, 0.0 ], # point3 + [ 1.0, 1.0, 0.0 ], # point4 + [ 2.0, 1.0, 0.0 ], # point5 + [ 0.0, 0.0, 1.0 ], # point6 + [ 1.0, 0.0, 1.0 ], # point7 + [ 2.0, 0.0, 1.0 ], # point8 + [ 0.0, 1.0, 1.0 ], # point9 + [ 1.0, 1.0, 1.0 ], # point10 + [ 2.0, 1.0, 1.0 ], # point11 + [ 0.0, 0.0, 2.0 ], # point12 + [ 1.0, 0.0, 2.0 ], # point13 + [ 2.0, 0.0, 2.0 ], # point14 + [ 0.0, 1.0, 2.0 ], # point15 + [ 1.0, 1.0, 2.0 ], # point16 + [ 2.0, 1.0, 2.0 ] ] # point17 +# Create grid +four_hex_grid: vtkUnstructuredGrid = create_type_vtk_grid( four_hexs_points_coords, four_hex_ids, VTK_HEXAHEDRON ) +# Create output paths +path_four_hex_vtu: str = os.path.join( dir_name, "4_hex.vtu" ) +path_four_hex_vtk: str = os.path.join( dir_name, "4_hex.vtk" ) +output_four_hex_vtu: vu.VtkOutput = vu.VtkOutput( path_four_hex_vtu, False ) +output_four_hex_vtk: vu.VtkOutput = vu.VtkOutput( path_four_hex_vtk, False ) + + +# 8 Hexahedrons divided in 2 regions and for each region into 2 ranks +two_hex_ids = [ [ 0, 1, 4, 3, 6, 7, 10, 9 ], + [ 1, 2, 5, 4, 7, 8, 11, 10 ] ] +## GRID 1 +two_hex1_points_coords: list[ list[ float ] ] = [ [ 0.0, 0.0, 0.0 ], # point0 + [ 1.0, 0.0, 0.0 ], # point1 + [ 2.0, 0.0, 0.0 ], # point2 + [ 0.0, 1.0, 0.0 ], # point3 + [ 1.0, 1.0, 0.0 ], # point4 + [ 2.0, 1.0, 0.0 ], # point5 + [ 0.0, 0.0, 1.0 ], # point6 + [ 1.0, 0.0, 1.0 ], # point7 + [ 2.0, 0.0, 1.0 ], # point8 + [ 0.0, 1.0, 1.0 ], # point9 + [ 1.0, 1.0, 1.0 ], # point10 + [ 2.0, 1.0, 1.0 ] ] # point11 +two_hex1_grid: vtkUnstructuredGrid = create_type_vtk_grid( two_hex1_points_coords, two_hex_ids, VTK_HEXAHEDRON ) + +## GRID 2 +two_hex2_points_coords: list[ list[ float ] ] = [ [ 0.0, 1.0, 
0.0 ], # point0 + [ 1.0, 1.0, 0.0 ], # point1 + [ 2.0, 1.0, 0.0 ], # point2 + [ 0.0, 2.0, 0.0 ], # point3 + [ 1.0, 2.0, 0.0 ], # point4 + [ 2.0, 2.0, 0.0 ], # point5 + [ 0.0, 1.0, 1.0 ], # point6 + [ 1.0, 1.0, 1.0 ], # point7 + [ 2.0, 1.0, 1.0 ], # point8 + [ 0.0, 2.0, 1.0 ], # point9 + [ 1.0, 2.0, 1.0 ], # point10 + [ 2.0, 2.0, 1.0 ] ] # point11 +two_hex2_grid: vtkUnstructuredGrid = create_type_vtk_grid( two_hex2_points_coords, two_hex_ids, VTK_HEXAHEDRON ) + +## GRID 3 +two_hex3_points_coords: list[ list[ float ] ] = [ [ 0.0, 0.0, 1.0 ], # point0 + [ 1.0, 0.0, 1.0 ], # point1 + [ 2.0, 0.0, 1.0 ], # point2 + [ 0.0, 1.0, 1.0 ], # point3 + [ 1.0, 1.0, 1.0 ], # point4 + [ 2.0, 1.0, 1.0 ], # point5 + [ 0.0, 0.0, 2.0 ], # point6 + [ 1.0, 0.0, 2.0 ], # point7 + [ 2.0, 0.0, 2.0 ], # point8 + [ 0.0, 1.0, 2.0 ], # point9 + [ 1.0, 1.0, 2.0 ], # point10 + [ 2.0, 1.0, 2.0 ] ] # point11 +two_hex3_grid: vtkUnstructuredGrid = create_type_vtk_grid( two_hex3_points_coords, two_hex_ids, VTK_HEXAHEDRON ) + +## GRID 4 +two_hex4_points_coords: list[ list[ float ] ] = [ [ 0.0, 1.0, 1.0 ], # point0 + [ 1.0, 1.0, 1.0 ], # point1 + [ 2.0, 1.0, 1.0 ], # point2 + [ 0.0, 2.0, 1.0 ], # point3 + [ 1.0, 2.0, 1.0 ], # point4 + [ 2.0, 2.0, 1.0 ], # point5 + [ 0.0, 1.0, 2.0 ], # point6 + [ 1.0, 1.0, 2.0 ], # point7 + [ 2.0, 1.0, 2.0 ], # point8 + [ 0.0, 2.0, 2.0 ], # point9 + [ 1.0, 2.0, 2.0 ], # point10 + [ 2.0, 2.0, 2.0 ] ] # point11 +two_hex4_grid: vtkUnstructuredGrid = create_type_vtk_grid( two_hex4_points_coords, two_hex_ids, VTK_HEXAHEDRON ) +all_two_hex_grids: list[ vtkUnstructuredGrid ] = [ two_hex1_grid, two_hex2_grid, two_hex3_grid, two_hex4_grid ] + + +## Duplicated grids but with different DataArrays per region and per timestep +number_timesteps: int = 2 +number_regions: int = 2 + +# Create the target directories for the tests and generate the vtms +pvd_name: str = "vtkOutput" +pvd_directory: str = os.path.join( dir_name, pvd_name ) +region_name: str = "region" +stored_grids: dict[ 
str, dict[ str, list[ vtkUnstructuredGrid ] ] ] = dict() +for i in range( number_timesteps ): + vtm_name: str = "time" + str( i ) + stored_grids[ vtm_name ] = dict() + splitted_grids_by_region: list[ list[ vtkUnstructuredGrid ] ] = split_list( all_two_hex_grids, number_regions ) + for j in range( number_regions ): + region: str = region_name + str( j ) + stored_grids[ vtm_name ][ region ] = list() + for k, grid in enumerate( splitted_grids_by_region[ j ] ): + new_grid: vtkUnstructuredGrid = vtkUnstructuredGrid() + new_grid.DeepCopy( grid ) + for dimension in [ 1, 2, 3 ]: + arr_np: array = ones( ( new_grid.GetNumberOfCells(), dimension ), dtype=int ) * ( i * 100 + 10 * j + k ) + arr_points = numpy_to_vtk( arr_np ) + arr_cells = numpy_to_vtk( arr_np ) + arr_points.SetName( "point_param" + str( dimension ) ) + arr_cells.SetName( "cell_param" + str( dimension ) ) + new_grid.GetPointData().AddArray( arr_points ) + new_grid.GetCellData().AddArray( arr_cells ) + stored_grids[ vtm_name ][ region ].append( new_grid ) + + +class TestClass: + + def test_to_vtk_id_list_and_vtk_iter( self ): + # vtk_id_list + data1: list[ int ] = [ 0, 1, 2 ] + data2: tuple[ int ] = ( 3, 4, 5, 6 ) + result = vu.to_vtk_id_list( data1 ) + result2 = vu.to_vtk_id_list( data2 ) + assert result.IsA("vtkIdList") + assert result2.IsA("vtkIdList") + assert result.GetNumberOfIds() == 3 + assert result2.GetNumberOfIds() == 4 + # vtk_iter + result3 = list( vu.vtk_iter( result ) ) + result4 = tuple( vu.vtk_iter( result2 ) ) + assert len( result3 ) == 3 + assert len( result4 ) == 4 + assert result3 == data1 + assert result4 == data2 + + def test_write_and_read_mesh( self ): + found_files_vtu: list[ str ] = list() + found_files_vtk: list[ str ] = list() + found_files_vtu.extend( glob.glob( os.path.join( dir_name, "*.vtu" ) ) ) + found_files_vtu.extend( glob.glob( os.path.join( dir_name, "*.vtk" ) ) ) + assert len( found_files_vtu ) == 0 + assert len( found_files_vtk ) == 0 + vu.write_mesh( four_hex_grid, 
output_four_hex_vtu ) + vu.write_mesh( four_hex_grid, output_four_hex_vtk ) + found_files_vtu.extend( glob.glob( os.path.join( dir_name, "*.vtu" ) ) ) + found_files_vtk.extend( glob.glob( os.path.join( dir_name, "*.vtk" ) ) ) + assert len( found_files_vtu ) == 1 + assert len( found_files_vtk ) == 1 + # no overwritting possible + vu.write_mesh( four_hex_grid, output_four_hex_vtu ) + vu.write_mesh( four_hex_grid, output_four_hex_vtk ) + assert len( found_files_vtu ) == 1 + assert len( found_files_vtk ) == 1 + # read the meshes + read_vtu: vtkUnstructuredGrid = vu.read_mesh( output_four_hex_vtu.output ) + read_vtk: vtkUnstructuredGrid = vu.read_mesh( output_four_hex_vtu.output ) + assert read_vtu.GetNumberOfCells() == four_hex_grid.GetNumberOfCells() + assert read_vtk.GetNumberOfCells() == four_hex_grid.GetNumberOfCells() + try: + os.remove( output_four_hex_vtu.output ) + os.remove( output_four_hex_vtk.output ) + except Exception as e: + raise ValueError( f"test_write_and_read_mesh failed because of '{e}'." 
) + + def test_write_and_read_vtm( self ): + multiblock: vtkMultiBlockDataSet = vtkMultiBlockDataSet() + for i in range( 5 ): + vtu: vtkUnstructuredGrid = vtkUnstructuredGrid() + multiblock.SetBlock( i, vtu ) + multiblock.GetMetaData( i ).Set( vtkCompositeDataSet.NAME(), "rank" + str( i ) ) + output_vtk: vu.VtkOutput = vu.VtkOutput( os.path.join( dir_name, "test.vtm" ), True ) + vu.write_VTM( multiblock, output_vtk ) + mulltiblock_read: vtkMultiBlockDataSet = vu.read_vtm( output_vtk.output ) + os.remove( output_vtk.output ) + assert multiblock.GetNumberOfBlocks() == mulltiblock_read.GetNumberOfBlocks() == 5 + + def test_get_filepath_from_pvd_and_vtm( self ): + pvd_filepath: str = create_geos_pvd( stored_grids, pvd_directory ) + result0: str = vu.get_vtm_filepath_from_pvd( pvd_filepath, 0 ) + result1: str = vu.get_vtm_filepath_from_pvd( pvd_filepath, 1 ) + assert result0.endswith( "time0.vtm" ) + assert result1.endswith( "time1.vtm" ) + result2: list[ str ] = vu.get_vtu_filepaths_from_vtm( result0 ) + for i, path2 in enumerate( result2 ): + if i % 2 == 0: + assert path2.endswith( "rank_00.vtu" ) + else: + assert path2.endswith( "rank_01.vtu" ) + try: + shutil.rmtree( pvd_directory ) + except OSError as e: + print( f"Error: {e}" ) + os.remove( pvd_filepath ) + + def test_has_invalid_field( self ): + # initialize test meshes + test_mesh_points: vtkUnstructuredGrid = four_hex_grid.NewInstance() + test_mesh_cells: vtkUnstructuredGrid = four_hex_grid.NewInstance() + test_mesh: vtkUnstructuredGrid = four_hex_grid.NewInstance() + test_mesh_points.CopyStructure( four_hex_grid ) + test_mesh_cells.CopyStructure( four_hex_grid ) + test_mesh.CopyStructure( four_hex_grid ) + test_mesh_points.CopyAttributes( four_hex_grid ) + test_mesh_cells.CopyAttributes( four_hex_grid ) + test_mesh.CopyAttributes( four_hex_grid ) + # create vtk arrays + array_for_points: array = ones( ( test_mesh_points.GetNumberOfPoints(), 1 ) ) + array_for_cells: array = ones( ( 
test_mesh_cells.GetNumberOfCells(), 1 ) ) + vtk_array_points_invalid = numpy_to_vtk( array_for_points ) + vtk_array_cells_invalid = numpy_to_vtk( array_for_cells ) + vtk_array_points_valid = numpy_to_vtk( array_for_points ) + vtk_array_cells_valid = numpy_to_vtk( array_for_cells ) + invalid_fields: list[ str ] = [ "PointsWrong", "CellsWrong" ] + vtk_array_points_invalid.SetName( invalid_fields[ 0 ] ) + vtk_array_cells_invalid.SetName( invalid_fields[ 1 ] ) + vtk_array_points_valid.SetName( "PointsValid" ) + vtk_array_cells_valid.SetName( "CellsValid" ) + # add vtk arrays + test_mesh_points.GetPointData().AddArray( vtk_array_points_invalid ) + test_mesh_cells.GetCellData().AddArray( vtk_array_cells_invalid ) + test_mesh.GetPointData().AddArray( vtk_array_points_valid ) + test_mesh.GetCellData().AddArray( vtk_array_cells_valid ) + # check invalid_fields + assert vu.has_invalid_field( test_mesh_points, invalid_fields ) == True + assert vu.has_invalid_field( test_mesh_cells, invalid_fields ) == True + assert vu.has_invalid_field( test_mesh, invalid_fields ) == False + + def test_get_points_coords_from_vtk( self ): + result: array = vu.get_points_coords_from_vtk( four_hex_grid ) + assert four_hexs_points_coords == result.tolist() + + def test_get_cell_centers_array( self ): + result: array = vu.get_cell_centers_array( four_hex_grid ) + assert array_equal( result, + array( [ [ 0.5, 0.5, 0.5 ], [ 1.5, 0.5, 0.5 ], [ 0.5, 0.5, 1.5 ], [ 1.5, 0.5, 1.5 ] ] ) ) From 60fad80eba9a51c7622b0337e09283cf68c3e2c1 Mon Sep 17 00:00:00 2001 From: alexbenedicto Date: Fri, 6 Dec 2024 09:14:15 -0800 Subject: [PATCH 23/34] Bugfix and update of test caes for vtk_utils --- .../{test_vtu_utils.py => test_vtk_utils.py} | 30 ++++++++++++------- 1 file changed, 19 insertions(+), 11 deletions(-) rename geos-mesh/tests/{test_vtu_utils.py => test_vtk_utils.py} (97%) diff --git a/geos-mesh/tests/test_vtu_utils.py b/geos-mesh/tests/test_vtk_utils.py similarity index 97% rename from 
geos-mesh/tests/test_vtu_utils.py rename to geos-mesh/tests/test_vtk_utils.py index eec6516..c35818f 100644 --- a/geos-mesh/tests/test_vtu_utils.py +++ b/geos-mesh/tests/test_vtk_utils.py @@ -18,7 +18,7 @@ pattern_test: str = "to_check_mesh" filepath_mesh_for_stats: str = os.path.join( dir_name, pattern_test + ".vtu" ) test_mesh_for_stats: vu.VtkOutput = vu.VtkOutput( filepath_mesh_for_stats, True ) -geos_hierarchy: str = "mesh/Level0" +geos_hierarchy: str = os.path.join( "mesh", "Level0" ) """ @@ -95,7 +95,7 @@ def create_geos_pvd( all_grids_per_vtm: dict[ str, dict[ str, list[ vtkUnstructu for i, grid in enumerate( grids ): rank_name: str = "rank_0" + str( i ) vtu_name: str = rank_name + ".vtu" - path_from_vtm: str = os.path.join( timestep, geos_hierarchy, vtu_name ) + path_from_vtm: str = os.path.join( timestep, geos_hierarchy, region, vtu_name ) ET.SubElement( region_block, "DataSet", name=rank_name, file=path_from_vtm ) vtu_filepath: str = os.path.join( region_directory, vtu_name ) output_vtu: vu.VtkOutput = vu.VtkOutput( vtu_filepath, False ) @@ -286,13 +286,13 @@ def test_write_and_read_mesh( self ): # read the meshes read_vtu: vtkUnstructuredGrid = vu.read_mesh( output_four_hex_vtu.output ) read_vtk: vtkUnstructuredGrid = vu.read_mesh( output_four_hex_vtu.output ) - assert read_vtu.GetNumberOfCells() == four_hex_grid.GetNumberOfCells() - assert read_vtk.GetNumberOfCells() == four_hex_grid.GetNumberOfCells() try: os.remove( output_four_hex_vtu.output ) os.remove( output_four_hex_vtk.output ) except Exception as e: raise ValueError( f"test_write_and_read_mesh failed because of '{e}'." 
) + assert read_vtu.GetNumberOfCells() == four_hex_grid.GetNumberOfCells() + assert read_vtk.GetNumberOfCells() == four_hex_grid.GetNumberOfCells() def test_write_and_read_vtm( self ): multiblock: vtkMultiBlockDataSet = vtkMultiBlockDataSet() @@ -310,20 +310,28 @@ def test_get_filepath_from_pvd_and_vtm( self ): pvd_filepath: str = create_geos_pvd( stored_grids, pvd_directory ) result0: str = vu.get_vtm_filepath_from_pvd( pvd_filepath, 0 ) result1: str = vu.get_vtm_filepath_from_pvd( pvd_filepath, 1 ) - assert result0.endswith( "time0.vtm" ) - assert result1.endswith( "time1.vtm" ) result2: list[ str ] = vu.get_vtu_filepaths_from_vtm( result0 ) - for i, path2 in enumerate( result2 ): - if i % 2 == 0: - assert path2.endswith( "rank_00.vtu" ) - else: - assert path2.endswith( "rank_01.vtu" ) + try: shutil.rmtree( pvd_directory ) except OSError as e: print( f"Error: {e}" ) os.remove( pvd_filepath ) + assert result0.endswith( "time0.vtm" ) + assert result1.endswith( "time1.vtm" ) + for i, path2 in enumerate( result2 ): + print( "path2: ", path2 ) + if i % 4 < 2: + region_name: str = "region0" + else: + region_name = "region1" + if i % 2 == 0: + assert path2.endswith( os.path.join( geos_hierarchy, region_name, "rank_00.vtu" ) ) + else: + assert path2.endswith( os.path.join( geos_hierarchy, region_name, "rank_01.vtu" ) ) + + def test_has_invalid_field( self ): # initialize test meshes test_mesh_points: vtkUnstructuredGrid = four_hex_grid.NewInstance() From 23d1f6ad96757d1d220c3d8be73b7ce2dda3ee2d Mon Sep 17 00:00:00 2001 From: alexbenedicto Date: Fri, 6 Dec 2024 15:05:54 -0800 Subject: [PATCH 24/34] Added get_all_array_names to vtk_utils --- .../src/geos/mesh/doctor/checks/vtk_utils.py | 45 +++++++++++-------- geos-mesh/tests/test_vtk_utils.py | 1 - 2 files changed, 27 insertions(+), 19 deletions(-) diff --git a/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py b/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py index dccde55..b9517cc 100644 --- 
a/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py +++ b/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py @@ -40,6 +40,27 @@ def vtk_iter( l ) -> Iterator[ any ]: yield l.GetCellType( i ) +def get_all_array_names( mesh: vtkUnstructuredGrid ) -> dict[ str, dict[ str, int ] ]: + """Returns a dict with the names of each arrays and their indexes for each type of data contained in the mesh. + + Args: + mesh (vtkUnstructuredGrid): A vtk grid. + + Returns: + dict[ str, dict[ str, int ] ]: { "CellData": { array_name0: 3, array_name1: 0, ... }, + "FieldData": { ... }, + "PointData": { ... } } + """ + data_types: dict[ str, any ] = { "CellData": mesh.GetCellData, "FieldData": mesh.GetFieldData, + "PointData": mesh.GetPointData } + all_array_names: dict[ str, dict[ str, int ] ] = { data_type: dict() for data_type in data_types } + for typ, data in data_types.items(): + for i in range( data().GetNumberOfArrays() ): + name: str = data().GetArrayName( i ) + all_array_names[ typ ][ name ] = i + return all_array_names + + def has_invalid_field( mesh: vtkUnstructuredGrid, invalid_fields: list[ str ] ) -> bool: """Checks if a mesh contains at least a data arrays within its cell, field or point data having a certain name. If so, returns True, else False. @@ -51,24 +72,12 @@ def has_invalid_field( mesh: vtkUnstructuredGrid, invalid_fields: list[ str ] ) Returns: bool: True if one field found, else False. """ - # Check the cell data fields - cell_data = mesh.GetCellData() - for i in range( cell_data.GetNumberOfArrays() ): - if cell_data.GetArrayName( i ) in invalid_fields: - logging.error( f"The mesh contains an invalid cell field name '{cell_data.GetArrayName( i )}'." ) - return True - # Check the field data fields - field_data = mesh.GetFieldData() - for i in range( field_data.GetNumberOfArrays() ): - if field_data.GetArrayName( i ) in invalid_fields: - logging.error( f"The mesh contains an invalid field name '{field_data.GetArrayName( i )}'." 
) - return True - # Check the point data fields - point_data = mesh.GetPointData() - for i in range( point_data.GetNumberOfArrays() ): - if point_data.GetArrayName( i ) in invalid_fields: - logging.error( f"The mesh contains an invalid point field name '{point_data.GetArrayName( i )}'." ) - return True + all_array_names: dict[ str, dict[ str, int ] ] = get_all_array_names( mesh ) + for data_type, array_names in all_array_names.items(): + for array_name in array_names.keys(): + if array_name in invalid_fields: + logging.error( f"The mesh contains an invalid {data_type} array name '{array_name}'." ) + return True return False diff --git a/geos-mesh/tests/test_vtk_utils.py b/geos-mesh/tests/test_vtk_utils.py index c35818f..2507c87 100644 --- a/geos-mesh/tests/test_vtk_utils.py +++ b/geos-mesh/tests/test_vtk_utils.py @@ -321,7 +321,6 @@ def test_get_filepath_from_pvd_and_vtm( self ): assert result0.endswith( "time0.vtm" ) assert result1.endswith( "time1.vtm" ) for i, path2 in enumerate( result2 ): - print( "path2: ", path2 ) if i % 4 < 2: region_name: str = "region0" else: From 01b9af1c91ff4ea11716c58fcc735561547591b3 Mon Sep 17 00:00:00 2001 From: alexbenedicto Date: Thu, 12 Dec 2024 16:30:43 -0800 Subject: [PATCH 25/34] Add numexpr package for mesh_doctor --- docs/requirements.txt | 1 + geos-mesh/pyproject.toml | 1 + 2 files changed, 2 insertions(+) diff --git a/docs/requirements.txt b/docs/requirements.txt index 8a23448..3e8d02d 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -8,3 +8,4 @@ vtk >= 9.1 networkx >= 2.4 tqdm numpy +numexpr diff --git a/geos-mesh/pyproject.toml b/geos-mesh/pyproject.toml index a15ceba..192ea25 100644 --- a/geos-mesh/pyproject.toml +++ b/geos-mesh/pyproject.toml @@ -23,6 +23,7 @@ dependencies = [ "networkx >= 2.4", "tqdm", "numpy", + "numexpr", "meshio>=5.3.2", ] From 9002e9ca11a647efa07d2bb5506c0d927596c641 Mon Sep 17 00:00:00 2001 From: alexbenedicto Date: Thu, 12 Dec 2024 18:07:26 -0800 Subject: [PATCH 26/34] copy and 
creation of fields added --- .../mesh/doctor/checks/field_operations.py | 272 ++++++++++-------- .../parsing/field_operations_parsing.py | 84 ++++-- 2 files changed, 206 insertions(+), 150 deletions(-) diff --git a/geos-mesh/src/geos/mesh/doctor/checks/field_operations.py b/geos-mesh/src/geos/mesh/doctor/checks/field_operations.py index 24c8d08..f11cd1e 100644 --- a/geos-mesh/src/geos/mesh/doctor/checks/field_operations.py +++ b/geos-mesh/src/geos/mesh/doctor/checks/field_operations.py @@ -1,4 +1,5 @@ import logging +from numexpr import evaluate from dataclasses import dataclass from math import sqrt from numpy import array, empty, full, int64, nan @@ -8,16 +9,16 @@ from vtkmodules.vtkCommonCore import vtkDoubleArray from vtkmodules.vtkCommonDataModel import vtkUnstructuredGrid from geos.mesh.doctor.checks.vtk_utils import ( VtkOutput, get_points_coords_from_vtk, get_cell_centers_array, - get_vtm_filepath_from_pvd, get_vtu_filepaths_from_vtm, read_mesh, - write_mesh ) + get_vtm_filepath_from_pvd, get_vtu_filepaths_from_vtm, + get_all_array_names, read_mesh, write_mesh ) @dataclass( frozen=True ) class Options: - operation: str support: str - field_names: list[ str ] source: str + copy_fields: dict[ str, list[ str ] ] + created_fields: dict[ str, str ] vtm_index: int out_vtk: VtkOutput @@ -27,16 +28,65 @@ class Result: info: bool -__OPERATION_CHOICES = [ "transfer" ] __SUPPORT_CHOICES = [ "point", "cell" ] - - support_construction: dict[ str, tuple[ any ] ] = { __SUPPORT_CHOICES[ 0 ]: get_points_coords_from_vtk, __SUPPORT_CHOICES[ 1 ]: get_cell_centers_array } +def get_distances_mesh_center( mesh: vtkUnstructuredGrid, support: str ) -> array: + f"""For a specific support type {__SUPPORT_CHOICES}, returns a numpy array filled with the distances between + their coordinates and the center of the mesh. + + Args: + support (str): Choice between {__SUPPORT_CHOICES}. + + Returns: + array: [ distance0, distance1, ..., distanceN ] with N being the number of support elements. 
+ """ + if support == __SUPPORT_CHOICES[ 0 ]: + coords: array = get_points_coords_from_vtk( mesh ) + elif support == __SUPPORT_CHOICES[ 1 ]: + coords = get_cell_centers_array( mesh ) + else: + raise ValueError( f"For support, the only choices available are {__SUPPORT_CHOICES}." ) + + center = ( coords.max( axis=0 ) + coords.min( axis=0 ) ) / 2 + distances = empty( coords.shape[ 0 ] ) + for i in range( coords.shape[ 0 ] ): + distance_squared: float = 0.0 + coord = coords[ i ] + for j in range( len( coord ) ): + distance_squared += ( coord[ j ] - center[ j ] ) * ( coord[ j ] - center[ j ] ) + distances[ i ] = sqrt( distance_squared ) + return distances + + +def get_random_field( mesh: vtkUnstructuredGrid, support: str ) -> array: + f"""For a specific support type {__SUPPORT_CHOICES}, an array with samples from a uniform distribution over [0, 1). + + Args: + support (str): Choice between {__SUPPORT_CHOICES}. + + Returns: + array: Array of size N being the number of support elements. + """ + if support == __SUPPORT_CHOICES[ 0 ]: + number_elements: int = mesh.GetNumberOfPoints() + elif support == __SUPPORT_CHOICES[ 1 ]: + number_elements = mesh.GetNumberOfCells() + else: + raise ValueError( f"For support, the only choices available are {__SUPPORT_CHOICES}." ) + return rand( number_elements, 1 ) + + +create_precoded_fields: dict[ str, any ] = { + "distances_mesh_center": get_distances_mesh_center, + "random": get_random_field +} + + def get_vtu_filepaths( options: Options ) -> tuple[ str ]: """Returns the vtu filepaths to use for the rest of the workflow. 
@@ -48,7 +98,7 @@ def get_vtu_filepaths( options: Options ) -> tuple[ str ]: """ source_filepath: str = options.source if source_filepath.endswith( ".vtu" ): - return ( source_filepath ) + return ( source_filepath, ) elif source_filepath.endswith( ".vtm" ): return get_vtu_filepaths_from_vtm( source_filepath ) elif source_filepath.endswith( ".pvd" ): @@ -58,7 +108,7 @@ def get_vtu_filepaths( options: Options ) -> tuple[ str ]: raise ValueError( f"The filepath '{options.source}' provided targets neither a .vtu, a .vtm nor a .pvd file." ) -def get_reorder_mapping( kd_tree_grid_ref: KDTree, sub_grid: vtkUnstructuredGrid, options: Options ) -> array: +def get_reorder_mapping( kd_tree_grid_ref: KDTree, sub_grid: vtkUnstructuredGrid, support: str ) -> array: """Builds an array containing the indexes of the reference grid linked to every cell ids / point ids of the subset grid. @@ -66,13 +116,12 @@ def get_reorder_mapping( kd_tree_grid_ref: KDTree, sub_grid: vtkUnstructuredGrid kd_tree_grid_ref (KDTree): A KDTree of the nearest neighbor cell centers for every cells / points coordinates for point of the reference grid. sub_grid (vtkUnstructuredGrid): A vtk grid that is a subset of the reference grid. + support (str): Either "point" or "cell". Returns: np.array: [ cell_idK_grid, cell_idN_grid, ... ] or [ point_idK_grid, point_idN_grid, ... ] """ - if options.support not in __SUPPORT_CHOICES: - raise ValueError( f"Support option should be between {__SUPPORT_CHOICES}, not {options.support}." 
) - support_elements: array = support_construction[ options.support ]( sub_grid ) + support_elements: array = support_construction[ support ]( sub_grid ) # now that you have the support elements, you can map them to the reference grid number_elements: int = support_elements.shape[ 0 ] mapping: array = empty( number_elements, dtype=int64 ) @@ -82,43 +131,6 @@ def get_reorder_mapping( kd_tree_grid_ref: KDTree, sub_grid: vtkUnstructuredGrid return mapping -def __analytic_field( mesh, support, name ) -> bool: - if support == 'node': - # example function: distance from mesh center - nn = mesh.GetNumberOfPoints() - coords = vtk_to_numpy( mesh.GetPoints().GetData() ) - center = ( coords.max( axis=0 ) + coords.min( axis=0 ) ) / 2 - data_arr = vtkDoubleArray() - data_np = empty( nn ) - - for i in range( nn ): - val = 0 - pt = mesh.GetPoint( i ) - for j in range( len( pt ) ): - val += ( pt[ j ] - center[ j ] ) * ( pt[ j ] - center[ j ] ) - val = sqrt( val ) - data_np[ i ] = val - - data_arr = numpy_to_vtk( data_np ) - data_arr.SetName( name ) - mesh.GetPointData().AddArray( data_arr ) - return True - - elif support == 'cell': - # example function: random field - ne = mesh.GetNumberOfCells() - data_arr = vtkDoubleArray() - data_np = rand( ne, 1 ) - - data_arr = numpy_to_vtk( data_np ) - data_arr.SetName( name ) - mesh.GetCellData().AddArray( data_arr ) - return True - else: - logging.error( 'incorrect support option. 
Options are node, cell' ) - return False - - def __compatible_meshes( dest_mesh, source_mesh ) -> bool: # for now, just check that meshes have same number of elements and same number of nodes # and require that each cell has same nodes, each node has same coordinate @@ -148,125 +160,141 @@ def __compatible_meshes( dest_mesh, source_mesh ) -> bool: return True -def __transfer_field( mesh, support, field_name, source ) -> bool: - from_mesh = read_mesh( source ) - same_mesh = __compatible_meshes( mesh, from_mesh ) - if not same_mesh: - logging.error( 'meshes are not the same' ) - return False +def get_array_names_to_collect( sub_vtu_filepath: str, options: Options ) -> list[ str ]: + """We need to have the list of array names that are required to perform copy and creation of new arrays. To build + global_arrays to perform operations, we need only these names and not all array names present in the sub meshes. - if support == 'cell': - data = from_mesh.GetCellData().GetArray( field_name ) - if data == None: - logging.error( 'Requested field does not exist on source mesh' ) - return False - else: - mesh.GetCellData().AddArray( data ) - elif support == 'node': - data = from_mesh.GetPointData().GetArray( field_name ) - if data == None: - logging.error( 'Requested field does not exist on source mesh' ) - return False + Args: + sub_vtu_filepath (str): Path to sub vtu file that can be used to find the names of the arrays within its data. + options (Options): Options chosen by the user. + + Returns: + list[ str ]: Array names. 
+ """ + ref_mesh: vtkUnstructuredGrid = read_mesh( sub_vtu_filepath ) + all_array_names: dict[ str, dict[ str, int ] ] = get_all_array_names( ref_mesh ) + if options.support == __SUPPORT_CHOICES[ 0 ]: # point + support_array_names: list[ str ] = list( all_array_names[ "PointData" ].keys() ) + else: # cell + support_array_names: list[ str ] = list( all_array_names[ "CellData" ].keys() ) + + to_use_arrays: set[ str ] = set() + for name in options.copy_fields.keys(): + if name in support_array_names: + to_use_arrays.add( name ) else: - mesh.GetPointData().AddArray( data ) - return False - else: - logging.error( 'incorrect support option. Options are node, cell' ) - return False - return True + logging.warning( f"The field named '{name}' does not exist in '{sub_vtu_filepath}' in the data. " + + "Cannot perform operations on it." ) + + for function in options.created_fields.values(): + for support_array_name in support_array_names: + if support_array_name in function: + to_use_arrays.add( support_array_name ) + + return list( to_use_arrays ) -def perform_operation_on_array( global_array: array, local_array: array, mapping: array, options: Options ) -> None: - """Perform an operation that will fill the values of a global_array using the values contained in a local_array - that is smaller or equal to the size as the global_array. A mapping is used to copy the values from the - local_array to the right indexes in the global_array. +def merge_local_in_global_array( global_array: array, local_array: array, mapping: array ) -> None: + """Fill the values of a global_array using the values contained in a local_array that is smaller or equal to the + size as the global_array. A mapping is used to copy the values from the local_array to the right indexes in + the global_array. Args: global_array (np.array): Array of size N. local_array (np.array): Array of size M <= N that is representing a subset of the global_array. mapping (np.array): Array of global indexes of size M. 
- options (Options): Options chosen by the user. """ size_global, size_local = global_array.shape, local_array.shape - assert size_global[ 0 ] >= size_local[ 0 ], "The array to fill is smaller than the array to use." + assert size_global[ 0 ] >= size_local[ 0 ], "The global array to fill is smaller than the local array to merge." number_columns_global: int = size_global[ 1 ] if len( size_global ) == 2 else 1 number_columns_local: int = size_local[ 1 ] if len( size_local ) == 2 else 1 assert number_columns_global == number_columns_local, "The arrays do not have same number of columns." - if options.operation == __OPERATION_CHOICES[ 0 ]: # transfer - if len(size_local) == 1: - local_array = local_array.reshape( -1, 1 ) - global_array[ mapping ] = local_array - else: - raise ValueError( f"Cannot perform operation '{options.operation}'. Only operations are {__OPERATION_CHOICES}" ) + # when converting a numpy array to vtk array, you need to make sure to have a 2D array + if len( size_local ) == 1: + local_array = local_array.reshape( -1, 1 ) + global_array[ mapping ] = local_array -def implement_arrays( grid_ref: vtkUnstructuredGrid, global_arrays: dict[ str, array ], options: Options ) -> None: - """Implement the arrays that are contained in global_arrays into the Data of a grid_ref. +def implement_arrays( mesh: vtkUnstructuredGrid, global_arrays: dict[ str, array ], options: Options ) -> None: + """Implement the arrays that are contained in global_arrays into the Data of a mesh. Args: - grid_ref (vtkUnstructuredGrid): A vtk grid. + mesh (vtkUnstructuredGrid): A vtk grid. global_arrays (dict[ str, np.array ]): { "array_name0": np.array, ..., "array_nameN": np.array } options (Options): Options chosen by the user. """ - if options.support not in __SUPPORT_CHOICES: - raise ValueError( f"Support choices should be one of these: {__SUPPORT_CHOICES}." 
) - data = grid_ref.GetPointData() if options.support == __SUPPORT_CHOICES[ 0 ] else grid_ref.GetCellData() - number_elements: int = grid_ref.GetNumberOfPoints() if options.support == __SUPPORT_CHOICES[ 0 ] else \ - grid_ref.GetNumberOfCells() + data = mesh.GetPointData() if options.support == __SUPPORT_CHOICES[ 0 ] else mesh.GetCellData() + number_elements: int = mesh.GetNumberOfPoints() if options.support == __SUPPORT_CHOICES[ 0 ] else \ + mesh.GetNumberOfCells() + + arrays_to_implement: dict[ str, array ] = dict() + # proceed copy operations + for name, new_name_expression in options.copy_fields.items(): + new_name: str = name + if len( new_name_expression ) > 0: + new_name: str = new_name_expression[ 0 ] + if len( new_name_expression ) == 2: + expression: str = new_name_expression[ 1 ] + copy_arr: array = evaluate( name + expression, local_dict=global_arrays ) + else: + copy_arr = global_arrays[ name ] + arrays_to_implement[ new_name ] = copy_arr + + # proceed create operations + for new_name, expression in options.created_fields.items(): + if expression in create_precoded_fields: + created_arr: array = create_precoded_fields[ expression ]( mesh, options.support ) + else: + created_arr = evaluate( expression, local_dict=global_arrays ) + arrays_to_implement[ new_name ] = created_arr + # once the data is selected, we can implement the global arrays inside it - for name, array in global_arrays.items(): - dimension: int = array.shape[ 1 ] if len( array.shape ) == 2 else 1 + for final_name, final_array in arrays_to_implement.items(): + dimension: int = final_array.shape[ 1 ] if len( final_array.shape ) == 2 else 1 if dimension > 1: # Reshape the VTK array to match the original dimensions - vtk_array = numpy_to_vtk( array.flatten() ) + vtk_array = numpy_to_vtk( final_array.flatten() ) vtk_array.SetNumberOfComponents( dimension ) vtk_array.SetNumberOfTuples( number_elements ) else: - vtk_array = numpy_to_vtk( array ) - vtk_array.SetName( name ) - if options.operation 
== __OPERATION_CHOICES[ 0 ]: # transfer - data.AddArray( vtk_array ) + vtk_array = numpy_to_vtk( final_array ) + vtk_array.SetName( final_name ) + data.AddArray( vtk_array ) def __check( grid_ref: vtkUnstructuredGrid, options: Options ) -> Result: + sub_vtu_filepaths: tuple[ str ] = get_vtu_filepaths( options ) + useful_array_names: list[ str ] = get_array_names_to_collect( sub_vtu_filepaths[ 0 ], options ) # create the output grid output_mesh: vtkUnstructuredGrid = grid_ref.NewInstance() output_mesh.CopyStructure( grid_ref ) output_mesh.CopyAttributes( grid_ref ) # find the support elements to use and construct their KDTree - if options.support not in __SUPPORT_CHOICES: - raise ValueError( f"Support option should be between {__SUPPORT_CHOICES}, not {options.support}." ) support_elements: array = support_construction[ options.support ]( output_mesh ) size_support: int = support_elements.shape[ 0 ] kd_tree_ref: KDTree = KDTree( support_elements ) - # perform operations to construct the global arrays to implement in the output mesh + # perform operations to construct the global arrays to implement in the output mesh from copy global_arrays: dict[ str, array ] = dict() - sub_vtu_filepaths: tuple[ str ] = get_vtu_filepaths( options ) for vtu_id in range( len( sub_vtu_filepaths ) ): sub_grid: vtkUnstructuredGrid = read_mesh( sub_vtu_filepaths[ vtu_id ] ) if options.support == __SUPPORT_CHOICES[ 0 ]: sub_data = sub_grid.GetPointData() else: sub_data = sub_grid.GetCellData() - # We need to make sure that the arrays we are looking at exist in the sub grid + arrays_available: list[ str ] = [ sub_data.GetArrayName( i ) for i in range( sub_data.GetNumberOfArrays() ) ] to_operate_on_indexes: list[ int ] = list() - for name in options.field_names: - if name not in arrays_available: - logging.warning( f"The field named '{name}' does not exist in '{sub_vtu_filepaths[ vtu_id ]}'" + - " in the data. Cannot perform operation on it. Default values set to NaN." 
) - else: - array_index: int = arrays_available.index( name ) - to_operate_on_indexes.append( array_index ) - if not name in global_arrays: - dimension: int = sub_data.GetArray( array_index ).GetNumberOfComponents() - global_arrays[ name ] = full( ( size_support, dimension ), nan ) - # If the arrays exist, we can perform the operation and fill the empty arrays - if len( to_operate_on_indexes ) > 0: - mapping: array = get_reorder_mapping( kd_tree_ref, sub_grid, options ) - for index in to_operate_on_indexes: - name = arrays_available[ index ] - sub_array: array = vtk_to_numpy( sub_data.GetArray( index ) ) - perform_operation_on_array( global_arrays[ name ], sub_array, mapping, options ) + for name in useful_array_names: + array_index: int = arrays_available.index( name ) + to_operate_on_indexes.append( array_index ) + if not name in global_arrays: + dimension: int = sub_data.GetArray( array_index ).GetNumberOfComponents() + global_arrays[ name ] = full( ( size_support, dimension ), nan ) + + reorder_mapping: array = get_reorder_mapping( kd_tree_ref, sub_grid, options.support ) + for index in to_operate_on_indexes: + name = arrays_available[ index ] + sub_array: array = vtk_to_numpy( sub_data.GetArray( index ) ) + merge_local_in_global_array( global_arrays[ name ], sub_array, reorder_mapping ) # The global arrays have been filled, so now we need to implement them in the output_mesh implement_arrays( output_mesh, global_arrays, options ) write_mesh( output_mesh, options.out_vtk ) diff --git a/geos-mesh/src/geos/mesh/doctor/parsing/field_operations_parsing.py b/geos-mesh/src/geos/mesh/doctor/parsing/field_operations_parsing.py index 780ddb3..57059a1 100644 --- a/geos-mesh/src/geos/mesh/doctor/parsing/field_operations_parsing.py +++ b/geos-mesh/src/geos/mesh/doctor/parsing/field_operations_parsing.py @@ -1,60 +1,87 @@ import logging -from geos.mesh.doctor.checks.field_operations import Options, Result, __SUPPORT_CHOICES, __OPERATION_CHOICES +from 
geos.mesh.doctor.checks.field_operations import Options, Result, __SUPPORT_CHOICES from geos.mesh.doctor.parsing import vtk_output_parsing, FIELD_OPERATIONS -__OPERATION = "operation" __SUPPORT = "support" - -__FIELD_NAMES = "field_names" __SOURCE = "source" +__COPY_FIELDS = "copy_fields" +__CREATE_FIELDS = "create_fields" + __WHICH_VTM = "which_vtm" __WHICH_VTM_SUGGESTIONS = [ "first", "last" ] def fill_subparser( subparsers ) -> None: - p = subparsers.add_parser( FIELD_OPERATIONS, help=f"Allows to perform an operation on fields from a source to your input mesh." ) - p.add_argument( '--' + __OPERATION, - type=str, - required=True, - metavar=", ".join( map( str, __OPERATION_CHOICES ) ), - default=__OPERATION_CHOICES[ 0 ], - help="[string]: Choose what operation you want to perform from the source to your input mesh. " - f"'{__OPERATION_CHOICES[ 0 ]}' copies field(s) from the source to the input mesh." ) + p = subparsers.add_parser( FIELD_OPERATIONS, + help=f"Allows to perform an operation on fields from a source to your input mesh." ) p.add_argument( '--' + __SUPPORT, type=str, required=True, metavar=", ".join( map( str, __SUPPORT_CHOICES ) ), default=__SUPPORT_CHOICES[ 0 ], help="[string]: Where to define field." ) - p.add_argument( '--' + __FIELD_NAMES, - type=str, - required=True, - help="[list of string comma separated]: Name of each field to use for the operation." ) p.add_argument( '--' + __SOURCE, type=str, - required=True, - help="[string]: Where field data to use for operation comes from (function, .vtu, .vtm, .pvd file)." ) + required=False, + help="[string]: Where field data to use for operation comes from .vtu, .vtm or .pvd file." ) + p.add_argument( '--' + __COPY_FIELDS, + type=str, + required=False, + help="[list of string comma separated]: Allows to copy a field from an input mesh to an output mesh. " + + "This copy can also be done while applying a coefficient on the copied field. The syntax to use " + + "is 'old_field_name:new_field_name:function'. 
Example: The available fields in your input mesh " + + "are 'poro,perm,temp,pressure,'. First, to copy 'poro' without any modification use 'poro'. " + + "Then, to copy 'perm' and change its name to 'permeability' use 'perm:permeability'. " + + "After, to copy 'temp' and change its name to 'temperature' and to increase the values by 3 use 'temp:temperature:+3'. " + + "Finally, to copy 'pressure' without changing its name and to multiply the values by 10 use 'pressure:pressure:*10'. " + + f"The combined syntax is '--{__COPY_FIELDS} poro,perm:permeability,temp:temperature:+3,pressure:pressure:*10'." ) + p.add_argument( '--' + __CREATE_FIELDS, + type=str, + required=False, + help="[list of string comma separated]: Allows to create new fields by using a function that is " + + "either pre-defined or to implement one. The syntax to use is 'new_field_name:function'. " + + "Predefined functions are: 1) 'distances_mesh_center' calculates the distance from the center. " + + "2) 'random' populates an array with samples from a uniform distribution over [0, 1). " + + "The other method, which is to implement a function using the numexpr functionalities." ) p.add_argument( '--' + __WHICH_VTM, type=str, required=False, default=__WHICH_VTM_SUGGESTIONS[ 1 ], help="[string]: If your input is a .pvd, choose which .vtm (each .vtm corresponding to a unique " - "timestep) will be used for the operation. To do so, you can choose amongst these possibilities: " - "'first' will select the initial timestep; 'last' will select the final timestep; or you can enter " - "directly the index starting from 0 of the timestep (not the time). By default, the value is set to 'last'." ) + "timestep) will be used for the operation. To do so, you can choose amongst these possibilities: " + "'first' will select the initial timestep; 'last' will select the final timestep; or you can enter " + "directly the index starting from 0 of the timestep (not the time). By default, the value is set to 'last'." 
) vtk_output_parsing.fill_vtk_output_subparser( p ) def convert( parsed_options ) -> Options: - operation: str = parsed_options[ __OPERATION ] - if operation not in __OPERATION_CHOICES: - raise ValueError( f"For --{__OPERATION}, the only choices available are {__OPERATION_CHOICES}." ) support: str = parsed_options[ __SUPPORT ] if support not in __SUPPORT_CHOICES: raise ValueError( f"For --{__SUPPORT}, the only choices available are {__SUPPORT_CHOICES}." ) - field_names: list[ str ] = list( map( str, parsed_options[ __FIELD_NAMES ].split( "," ) ) ) + + copy_fields: dict[ str, list[ str ] ] = dict() + splitted_copy_fields: list[ str ] = parsed_options[ __COPY_FIELDS ].split( "," ) + for copy_field in splitted_copy_fields: + parts: list[ str ] = copy_field.split( ":" ) + if len( parts ) > 0 and len( parts ) <= 3: + copy_fields[ parts[ 0 ] ] = list() if len( parts ) == 1 else parts[ 1: ] + else: + raise ValueError( f"The correct format for '--{__COPY_FIELDS}' is to have either: 'field_name', or " + + f"'field_name:new_field_name' or 'field_name:new_field_name:function' " + f"but not '{copy_field}'." ) + + created_fields: dict[ str, str ] = dict() + splitted_created_fields: list[ str ] = parsed_options[ __CREATE_FIELDS ].split( "," ) + for created_field in splitted_created_fields: + parts = created_field.split( ":" ) + if len( parts ) == 2: + created_fields[ parts[ 0 ] ] = parts[ 1 ] + else: + raise ValueError( f"The correct format for '--{__CREATE_FIELDS}' is to have 'new_field_name:function', " + + f"but not '{created_field}'." ) + which_vtm: str = parsed_options[ __WHICH_VTM ] if which_vtm in __WHICH_VTM_SUGGESTIONS: vtm_index: int = 0 if which_vtm == __WHICH_VTM_SUGGESTIONS[ 0 ] else -1 @@ -64,10 +91,11 @@ def convert( parsed_options ) -> Options: except ValueError: raise ValueError( f"The choice for --{__WHICH_VTM} needs to be an integer or " + f"'{__WHICH_VTM_SUGGESTIONS[ 0 ]}' or '{__WHICH_VTM_SUGGESTIONS[ 1 ]}'." 
) - return Options( operation=operation, - support=support, - field_names=field_names, + + return Options( support=support, source=parsed_options[ __SOURCE ], + copy_fields=copy_fields, + created_fields=created_fields, vtm_index=vtm_index, out_vtk=vtk_output_parsing.convert( parsed_options ) ) From c0ce7f6dca1f83b9919d547aa4a6118e0afac44e Mon Sep 17 00:00:00 2001 From: alexbenedicto Date: Thu, 12 Dec 2024 18:07:39 -0800 Subject: [PATCH 27/34] Testing of field_operations --- geos-mesh/tests/__init__.py | 1 + geos-mesh/tests/test_field_operations.py | 288 +++++++++++++++++++++++ 2 files changed, 289 insertions(+) create mode 100644 geos-mesh/tests/__init__.py create mode 100644 geos-mesh/tests/test_field_operations.py diff --git a/geos-mesh/tests/__init__.py b/geos-mesh/tests/__init__.py new file mode 100644 index 0000000..b1cfe26 --- /dev/null +++ b/geos-mesh/tests/__init__.py @@ -0,0 +1 @@ +# Empty \ No newline at end of file diff --git a/geos-mesh/tests/test_field_operations.py b/geos-mesh/tests/test_field_operations.py new file mode 100644 index 0000000..ae98454 --- /dev/null +++ b/geos-mesh/tests/test_field_operations.py @@ -0,0 +1,288 @@ +import os +import shutil +from numpy import array, arange, full, array_equal, sqrt, nan, log, log10 +from vtkmodules.util.numpy_support import numpy_to_vtk, vtk_to_numpy +from scipy.spatial import KDTree +from geos.mesh.doctor.checks import field_operations as fo +from geos.mesh.doctor.checks import vtk_utils as vu +from tests import test_vtk_utils as tvu +from vtkmodules.vtkCommonDataModel import vtkUnstructuredGrid, VTK_HEXAHEDRON + + +""" +For tests creation +""" +# yapf: disable +## GRID 1 +eight_hex_points_coords: list[ list[ float ] ] = [ [ 0.0, 0.0, 0.0 ], #point0 + [ 1.0, 0.0, 0.0 ], #point1 + [ 2.0, 0.0, 0.0 ], #point2 + [ 0.0, 1.0, 0.0 ], #point3 + [ 1.0, 1.0, 0.0 ], #point4 + [ 2.0, 1.0, 0.0 ], #point5 + [ 0.0, 2.0, 0.0 ], #point6 + [ 1.0, 2.0, 0.0 ], #point7 + [ 2.0, 2.0, 0.0 ], #point8 + [ 0.0, 0.0, 1.0 ], #point9 
+ [ 1.0, 0.0, 1.0 ], #point10 + [ 2.0, 0.0, 1.0 ], #point11 + [ 0.0, 1.0, 1.0 ], #point12 + [ 1.0, 1.0, 1.0 ], #point13 + [ 2.0, 1.0, 1.0 ], #point14 + [ 0.0, 2.0, 1.0 ], #point15 + [ 1.0, 2.0, 1.0 ], #point16 + [ 2.0, 2.0, 1.0 ], #point17 + [ 0.0, 0.0, 2.0 ], #point18 + [ 1.0, 0.0, 2.0 ], #point19 + [ 2.0, 0.0, 2.0 ], #point20 + [ 0.0, 1.0, 2.0 ], #point21 + [ 1.0, 1.0, 2.0 ], #point22 + [ 2.0, 1.0, 2.0 ], #point23 + [ 0.0, 2.0, 2.0 ], #point24 + [ 1.0, 2.0, 2.0 ], #point25 + [ 2.0, 2.0, 2.0 ] ] #point26 +eight_hex_ids = [ [ 0, 1, 4, 3, 9, 10, 13, 12 ], + [ 0 + 1, 1 + 1, 4 + 1, 3 + 1, 9 + 1, 10 + 1, 13 + 1, 12 + 1 ], + [ 0 + 3, 1 + 3, 4 + 3, 3 + 3, 9 + 3, 10 + 3, 13 + 3, 12 + 3 ], + [ 0 + 4, 1 + 4, 4 + 4, 3 + 4, 9 + 4, 10 + 4, 13 + 4, 12 + 4 ], + [ 0 + 9, 1 + 9, 4 + 9, 3 + 9, 9 + 9, 10 + 9, 13 + 9, 12 + 9 ], + [ 0 + 10, 1 + 10, 4 + 10, 3 + 10, 9 + 10, 10 + 10, 13 + 10, 12 + 10 ], + [ 0 + 12, 1 + 12, 4 + 12, 3 + 12, 9 + 12, 10 + 12, 13 + 12, 12 + 12 ], + [ 0 + 13, 1 + 13, 4 + 13, 3 + 13, 9 + 13, 10 + 13, 13 + 13, 12 + 13 ] ] +eight_hex_grid: vtkUnstructuredGrid = tvu.create_type_vtk_grid( eight_hex_points_coords, eight_hex_ids, VTK_HEXAHEDRON ) +eight_hex_grid_output = vu.VtkOutput( os.path.join( tvu.dir_name, "eight_hex.vtu" ), False ) + +eight_hex_grid_empty: vtkUnstructuredGrid = vtkUnstructuredGrid() +eight_hex_grid_empty.DeepCopy( eight_hex_grid ) +eight_hex_grid_empty_output = vu.VtkOutput( os.path.join( tvu.dir_name, "eight_hex_empty.vtu" ), False ) + +#GRID 2 which is a cell0 of GRID 1 +hex_ids = [ [ 0, 1, 2, 3, 4, 5, 6, 7 ] ] +hex0_points_coords: list[ list[ float ] ] = [ [ 0.0, 0.0, 0.0 ], #point0 + [ 1.0, 0.0, 0.0 ], #point1 + [ 1.0, 1.0, 0.0 ], #point2 + [ 0.0, 1.0, 0.0 ], #point3 + [ 0.0, 0.0, 1.0 ], #point4 + [ 1.0, 0.0, 1.0 ], #point5 + [ 1.0, 1.0, 1.0 ], #point6 + [ 0.0, 1.0, 1.0 ] ] #point7 +hex0_grid: vtkUnstructuredGrid = tvu.create_type_vtk_grid( hex0_points_coords, hex_ids, VTK_HEXAHEDRON ) + +#GRID 3 which is cell1 of GRID 1 
+hex1_points_coords: list[ list[ float ] ] = [ [ 1.0, 0.0, 0.0 ], #point0 + [ 2.0, 0.0, 0.0 ], #point1 + [ 2.0, 1.0, 0.0 ], #point2 + [ 1.0, 1.0, 0.0 ], #point3 + [ 1.0, 0.0, 1.0 ], #point4 + [ 2.0, 0.0, 1.0 ], #point5 + [ 2.0, 1.0, 1.0 ], #point6 + [ 1.0, 1.0, 1.0 ] ] #point7 +hex1_grid: vtkUnstructuredGrid = tvu.create_type_vtk_grid( hex1_points_coords, hex_ids, VTK_HEXAHEDRON ) + +sub_grids: list[ vtkUnstructuredGrid ] = [ hex0_grid, hex1_grid ] +sub_grids_values: list[ dict[ str, array ] ] = [ dict() for _ in range( len( sub_grids ) ) ] +sub_grids_output: list[ vu.VtkOutput ] = [ vu.VtkOutput( os.path.join( tvu.dir_name, f"sub_grid{i}.vtu" ), True ) for i in range( len( sub_grids ) ) ] +# yapf: enable +""" +Add arrays in each grid +""" +ncells: int = eight_hex_grid.GetNumberOfCells() +npoints: int = eight_hex_grid.GetNumberOfPoints() +eight_hex_grid_values: dict[ str, array ] = { + "cell_param0": arange( 0, ncells ).reshape( ncells, 1 ), + "cell_param1": arange( ncells, ncells * 3 ).reshape( ncells, 2 ), + "cell_param2": arange( ncells * 3, ncells * 6 ).reshape( ncells, 3 ), + "point_param0": arange( ncells * 6, ncells * 6 + npoints ).reshape( npoints, 1 ), + "point_param1": arange( ncells * 6 + npoints, ncells * 6 + npoints * 3 ).reshape( npoints, 2 ), + "point_param2": arange( ncells * 6 + npoints * 3, ncells * 6 + npoints * 6 ).reshape( npoints, 3 ) +} +for name, value in eight_hex_grid_values.items(): + arr_values = numpy_to_vtk( value ) + arr_values.SetName( name ) + if "cell" in name: + eight_hex_grid.GetCellData().AddArray( arr_values ) + else: + eight_hex_grid.GetPointData().AddArray( arr_values ) + for i in range( len( sub_grids_values ) ): + if len( value.shape ) == 1: + sub_grids_values[ i ][ name ] = value[ i ] + else: + sub_grids_values[ i ][ name ] = [ value[ i ][ j ] for j in range( value.shape[ 1 ] ) ] + +for i, sub_grid in enumerate( sub_grids ): + for name, value in sub_grids_values[ i ].items(): + arr_values = numpy_to_vtk( value ) + 
arr_values.SetName( name ) + if "cell" in name: + sub_grid.GetCellData().AddArray( arr_values ) + else: + sub_grid.GetPointData().AddArray( arr_values ) + +copy_fields_points: dict[ str, list[ str ] ] = { + "point_param0": list(), + "point_param1": [ "point_param1" + "_new" ], + "point_param2": [ "point_param2" + "_new", "*3" ] +} +copy_fields_cells: dict[ str, list[ str ] ] = { + "cell_param0": list(), + "cell_param1": [ "cell_param1" + "_new" ], + "cell_param2": [ "cell_param2" + "_new", "+ 10" ] +} + +created_fields_points: dict[ str, str ] = { + "point_param0" + "_created": "log( point_param0 )", + "point_param1" + "_created": "sqrt( point_param1 )", + "point_param2" + "_created": "point_param0 +point_param1 * 2" +} +created_fields_cells: dict[ str, str ] = { + "cell_param0" + "_created": "log( cell_param0 )", + "cell_param1" + "_created": "sqrt( cell_param1 )", + "cell_param2" + "_created": "cell_param0 + cell_param1 * 2" +} + + +out_points: vu.VtkOutput = vu.VtkOutput( os.path.join( tvu.dir_name, "points.vtu" ), True ) +out_cells: vu.VtkOutput = vu.VtkOutput( os.path.join( tvu.dir_name, "cells.vtu" ), True ) + + +class TestClass: + + def test_precoded_fields( self ): + result_points: array = fo.get_distances_mesh_center( eight_hex_grid_empty, "point" ) + result_cells: array = fo.get_distances_mesh_center( eight_hex_grid_empty, "cell" ) + sq2, sq3, sq3h = sqrt( 2 ), sqrt( 3 ), sqrt( 3 ) / 2 + expected_points: array = array( [ sq3, sq2, sq3, sq2, 1.0, sq2, sq3, sq2, sq3, sq2, 1.0, sq2, 1.0, 0.0, 1.0, + sq2, 1.0, sq2, sq3, sq2, sq3, sq2, 1.0, sq2, sq3, sq2, sq3 ] ) + expected_cells: array = array( [ sq3h, sq3h, sq3h, sq3h, sq3h, sq3h, sq3h, sq3h ] ) + assert array_equal( result_points, expected_points ) + assert array_equal( result_cells, expected_cells ) + random_points: array = fo.get_random_field( eight_hex_grid_empty, "point" ) + random_cells: array = fo.get_random_field( eight_hex_grid_empty, "cell" ) + assert eight_hex_grid_empty.GetNumberOfPoints() == 
random_points.shape[ 0 ] + assert eight_hex_grid_empty.GetNumberOfCells() == random_cells.shape[ 0 ] + + def test_get_vtu_filepaths( self ): + pvd_filepath: str = tvu.create_geos_pvd( tvu.stored_grids, tvu.pvd_directory ) + options_pvd0: fo.Options = fo.Options( support="point", + source=pvd_filepath, + copy_fields=dict(), + created_fields=dict(), + vtm_index=0, + out_vtk=out_points ) + options_pvd1: fo.Options = fo.Options( support="point", + source=pvd_filepath, + copy_fields=dict(), + created_fields=dict(), + vtm_index=-1, + out_vtk=out_points ) + result0: tuple[ str ] = fo.get_vtu_filepaths( options_pvd0 ) + result1: tuple[ str ] = fo.get_vtu_filepaths( options_pvd1 ) + try: + shutil.rmtree( tvu.pvd_directory ) + except OSError as e: + print( f"Error: {e}" ) + os.remove( pvd_filepath ) + for i in range( len( result0 ) ): + assert "time0" in result0[ i ] # looking through first vtm which is time0 + assert "time1" in result1[ i ] # looking through last vtm which is time1 + + def test_get_reorder_mapping( self ): + support_points: array = fo.support_construction[ "point" ]( eight_hex_grid ) + support_cells: array = fo.support_construction[ "cell" ]( eight_hex_grid ) + kd_tree_points: KDTree = KDTree( support_points ) + kd_tree_cells: KDTree = KDTree( support_cells ) + result_points1: array = fo.get_reorder_mapping( kd_tree_points, hex0_grid, "point" ) + result_cells1: array = fo.get_reorder_mapping( kd_tree_cells, hex0_grid, "cell" ) + result_points2: array = fo.get_reorder_mapping( kd_tree_points, hex1_grid, "point" ) + result_cells2: array = fo.get_reorder_mapping( kd_tree_cells, hex1_grid, "cell" ) + assert result_points1.tolist() == [ 0, 1, 4, 3, 9, 10, 13, 12 ] + assert result_points2.tolist() == [ 0 + 1, 1 + 1, 4 + 1, 3 + 1, 9 + 1, 10 + 1, 13 + 1, 12 + 1 ] + assert result_cells1.tolist() == [ 0 ] + assert result_cells2.tolist() == [ 1 ] + + def test_get_array_names_to_collect( self ): + vu.write_mesh( eight_hex_grid, eight_hex_grid_output ) + options1: 
fo.Options = fo.Options( "cell", eight_hex_grid_output.output, copy_fields_cells, + created_fields_cells, -1, out_cells ) + options2: fo.Options = fo.Options( "point", eight_hex_grid_output.output, copy_fields_points, + created_fields_points, -1, out_points ) + result1: list[ str ] = fo.get_array_names_to_collect( eight_hex_grid_output.output, options1 ) + result2: list[ str ] = fo.get_array_names_to_collect( eight_hex_grid_output.output, options2 ) + os.remove( eight_hex_grid_output.output ) + assert result1.sort() == list( copy_fields_cells.keys() ).sort() + assert result2.sort() == list( copy_fields_points.keys() ).sort() + + def test_merge_local_in_global_array( self ): + # create arrays filled with nan values + glob_arr_points_1D: array = full( ( 8, 1), nan ) + glob_arr_cells_1D: array = full( ( 8, 1), nan ) + glob_arr_points_3D: array = full( ( 8, 3 ), nan ) + glob_arr_cells_3D: array = full( ( 8, 3 ), nan ) + loc_arr_points_1D: array = array( list( range( 0, 4 ) ) ) + loc_arr_cells_1D: array = array( list( range( 4, 8 ) ) ) + loc_arr_points_3D: array = array( ( list( range( 0, 3 ) ), list( range( 6, 9 ) ), list( range( 12, 15 ) ), + list( range( 18, 21 ) ) ) ) + loc_arr_cells_3D: array = array( ( list( range( 3, 6 ) ), list( range( 9, 12 ) ), list( range( 15, 18 ) ), + list( range( 21, 24 ) ) ) ) + mapping_points: array = array( [ 0, 2, 4, 6 ] ) + mapping_cells: array = array( [ 7, 5, 3, 1 ] ) + fo.merge_local_in_global_array( glob_arr_points_1D, loc_arr_points_1D, mapping_points ) + fo.merge_local_in_global_array( glob_arr_cells_1D, loc_arr_cells_1D, mapping_cells ) + fo.merge_local_in_global_array( glob_arr_points_3D, loc_arr_points_3D, mapping_points ) + fo.merge_local_in_global_array( glob_arr_cells_3D, loc_arr_cells_3D, mapping_cells ) + expected_points_1D: array = array( [ 0, nan, 1, nan, 2, nan, 3, nan ] ).reshape( -1, 1 ) + expected_cells_1D: array = array( [ nan, 7, nan, 6, nan, 5, nan, 4 ] ).reshape( -1, 1 ) + expected_points_3D: array = array( [ [ 
0, 1, 2 ], [ nan, nan, nan ], [ 6, 7, 8 ], [ nan, nan, nan ], + [ 12, 13, 14 ], [ nan, nan, nan ], [ 18, 19, 20 ], [ nan, nan, nan ] ] ) + expected_cells_3D: array = array( [ [ nan, nan, nan ], [ 21, 22, 23 ], [ nan, nan, nan ], [ 15, 16, 17 ], + [ nan, nan, nan ], [ 9, 10, 11 ], [ nan, nan, nan ], [ 3, 4, 5 ] ] ) + assert array_equal( glob_arr_points_1D, expected_points_1D, equal_nan=True ) + assert array_equal( glob_arr_cells_1D, expected_cells_1D, equal_nan=True ) + assert array_equal( glob_arr_points_3D, expected_points_3D, equal_nan=True ) + assert array_equal( glob_arr_cells_3D, expected_cells_3D, equal_nan=True ) + + def test_implement_arrays( self ): + output: vu.VtkOutput = vu.VtkOutput( "filled.vtu", True ) + empty_mesh: vtkUnstructuredGrid = vtkUnstructuredGrid() + empty_mesh.DeepCopy( eight_hex_grid_empty ) + npoints: int = empty_mesh.GetNumberOfPoints() + ncells: int = empty_mesh.GetNumberOfCells() + copy_fpoints = { "point_param0": [], + "point_param1": [ "point_copy1" ], + "point_param2": [ "point_param2", "*10 + 0.1" ] } + copy_fcells = { "cell_param0": [], + "cell_param1": [ "cell_copy1" ], + "cell_param2": [ "cell_param2", "/0.1 - 0.5" ] } + create_fpoints = { "new0": "log(point_param0)", + "new1": "sqrt(point_param1)", + "new2": "distances_mesh_center" } + create_fcells = { "new3": "sqrt(cell_param0)", + "new4": "log10(cell_param1)", + "new5": "cell_param0 + cell_param1" } + options_point = fo.Options( "point", "empty.vtu", copy_fpoints, create_fpoints, -1, output ) + options_cell = fo.Options( "cell", "empty.vtu", copy_fcells, create_fcells, -1, output ) + fo.implement_arrays( empty_mesh, eight_hex_grid_values, options_point ) + fo.implement_arrays( empty_mesh, eight_hex_grid_values, options_cell ) + point_data_mesh = empty_mesh.GetPointData() + cell_data_mesh = empty_mesh.GetCellData() + expected_results: dict[ str, array ] = { + "point_param0": eight_hex_grid_values[ "point_param0" ], + "point_copy1": eight_hex_grid_values[ "point_param1" ], + 
"point_param2": eight_hex_grid_values[ "point_param2" ] * 10 + 0.1, + "cell_param0": eight_hex_grid_values[ "cell_param0" ], + "cell_copy1": eight_hex_grid_values[ "cell_param1" ], + "cell_param2": eight_hex_grid_values[ "cell_param2" ] / 0.1 - 0.5, + "new0": log( eight_hex_grid_values[ "point_param0" ] ), + "new1": sqrt( eight_hex_grid_values[ "point_param1" ] ), + "new2": fo.get_distances_mesh_center( empty_mesh, "point" ).reshape( ( npoints, 1 ) ), + "new3": sqrt( eight_hex_grid_values[ "cell_param0" ] ), + "new4": log10( eight_hex_grid_values[ "cell_param1" ] ), + "new5": eight_hex_grid_values[ "cell_param0" ] + eight_hex_grid_values[ "cell_param1" ] + } + for data, nelements in zip( [ point_data_mesh, cell_data_mesh ], [ npoints, ncells ] ): + for i in range( data.GetNumberOfArrays() ): + array_name: str = data.GetArrayName( i ) + result: array = vtk_to_numpy( data.GetArray( i ) ) + if len( result.shape ) == 1: + result = result.reshape( ( nelements, 1 ) ) + assert array_equal( result, expected_results[ array_name ] ) From bd814da0a2dd1463f3f2ee6139607f82526c6db4 Mon Sep 17 00:00:00 2001 From: alexbenedicto Date: Mon, 16 Dec 2024 08:41:23 -0800 Subject: [PATCH 28/34] Changed storages of fields from dict to tuple --- .../mesh/doctor/checks/field_operations.py | 68 +++++++++---------- .../parsing/field_operations_parsing.py | 25 ++++--- 2 files changed, 48 insertions(+), 45 deletions(-) diff --git a/geos-mesh/src/geos/mesh/doctor/checks/field_operations.py b/geos-mesh/src/geos/mesh/doctor/checks/field_operations.py index f11cd1e..f017cca 100644 --- a/geos-mesh/src/geos/mesh/doctor/checks/field_operations.py +++ b/geos-mesh/src/geos/mesh/doctor/checks/field_operations.py @@ -1,25 +1,25 @@ import logging from numexpr import evaluate from dataclasses import dataclass +from geos.mesh.doctor.checks.vtk_utils import ( VtkOutput, get_points_coords_from_vtk, get_cell_centers_array, + get_vtm_filepath_from_pvd, get_vtu_filepaths_from_vtm, + get_all_array_names, 
read_mesh, write_mesh ) from math import sqrt from numpy import array, empty, full, int64, nan from numpy.random import rand from scipy.spatial import KDTree +from tqdm import tqdm from vtkmodules.util.numpy_support import numpy_to_vtk, vtk_to_numpy -from vtkmodules.vtkCommonCore import vtkDoubleArray from vtkmodules.vtkCommonDataModel import vtkUnstructuredGrid -from geos.mesh.doctor.checks.vtk_utils import ( VtkOutput, get_points_coords_from_vtk, get_cell_centers_array, - get_vtm_filepath_from_pvd, get_vtu_filepaths_from_vtm, - get_all_array_names, read_mesh, write_mesh ) @dataclass( frozen=True ) class Options: - support: str - source: str - copy_fields: dict[ str, list[ str ] ] - created_fields: dict[ str, str ] - vtm_index: int + support: str # choice between 'cell' and 'point' to operate on fields + source: str # file from where the data is collected + copy_fields: list[ tuple[ str ] ] # [ ( old_name0, new_name0, function0 ), ... ] + created_fields: list[ tuple[ str ] ] # [ ( new_name0, function0 ), ... ] + vtm_index: int # useful when source is a .pvd or .vtm file out_vtk: VtkOutput @@ -179,16 +179,18 @@ def get_array_names_to_collect( sub_vtu_filepath: str, options: Options ) -> lis support_array_names: list[ str ] = list( all_array_names[ "CellData" ].keys() ) to_use_arrays: set[ str ] = set() - for name in options.copy_fields.keys(): + for name_newname_function in options.copy_fields: + name: str = name_newname_function[ 0 ] if name in support_array_names: to_use_arrays.add( name ) else: - logging.warning( f"The field named '{name}' does not exist in '{sub_vtu_filepath}' in the data. " + - "Cannot perform operations on it." ) + logging.warning( f"The field named '{name}' does not exist in '{sub_vtu_filepath}' in the " + + f"{options.support} data. Cannot perform operations on it." 
) - for function in options.created_fields.values(): + for newname_function in options.created_fields: + funct: str = newname_function[ 1 ] for support_array_name in support_array_names: - if support_array_name in function: + if support_array_name in funct: to_use_arrays.add( support_array_name ) return list( to_use_arrays ) @@ -229,31 +231,33 @@ def implement_arrays( mesh: vtkUnstructuredGrid, global_arrays: dict[ str, array arrays_to_implement: dict[ str, array ] = dict() # proceed copy operations - for name, new_name_expression in options.copy_fields.items(): - new_name: str = name - if len( new_name_expression ) > 0: - new_name: str = new_name_expression[ 0 ] - if len( new_name_expression ) == 2: - expression: str = new_name_expression[ 1 ] - copy_arr: array = evaluate( name + expression, local_dict=global_arrays ) + for name_newname_function in tqdm( options.copy_fields, desc="Copying fields" ): + name: str = name_newname_function[ 0 ] + new_name: str = name_newname_function[ 0 ] + if len( name_newname_function ) > 1: + new_name = name_newname_function[ 1 ] + if len( name_newname_function ) == 3: + funct: str = name_newname_function[ 2 ] + copy_arr: array = evaluate( name + funct, local_dict=global_arrays ) else: copy_arr = global_arrays[ name ] arrays_to_implement[ new_name ] = copy_arr # proceed create operations - for new_name, expression in options.created_fields.items(): - if expression in create_precoded_fields: - created_arr: array = create_precoded_fields[ expression ]( mesh, options.support ) + for newname_function in tqdm( options.created_fields, desc="Creating fields" ): + new_name, funct = newname_function + if funct in create_precoded_fields: + created_arr: array = create_precoded_fields[ funct ]( mesh, options.support ) else: - created_arr = evaluate( expression, local_dict=global_arrays ) + created_arr = evaluate( funct, local_dict=global_arrays ) arrays_to_implement[ new_name ] = created_arr # once the data is selected, we can implement the 
global arrays inside it for final_name, final_array in arrays_to_implement.items(): - dimension: int = final_array.shape[ 1 ] if len( final_array.shape ) == 2 else 1 - if dimension > 1: # Reshape the VTK array to match the original dimensions + number_columns: int = final_array.shape[ 1 ] if len( final_array.shape ) == 2 else 1 + if number_columns > 1: # Reshape the VTK array to match the original dimensions vtk_array = numpy_to_vtk( final_array.flatten() ) - vtk_array.SetNumberOfComponents( dimension ) + vtk_array.SetNumberOfComponents( number_columns ) vtk_array.SetNumberOfTuples( number_elements ) else: vtk_array = numpy_to_vtk( final_array ) @@ -274,13 +278,9 @@ def __check( grid_ref: vtkUnstructuredGrid, options: Options ) -> Result: kd_tree_ref: KDTree = KDTree( support_elements ) # perform operations to construct the global arrays to implement in the output mesh from copy global_arrays: dict[ str, array ] = dict() - for vtu_id in range( len( sub_vtu_filepaths ) ): + for vtu_id in tqdm( range( len( sub_vtu_filepaths ) ), desc="Processing VTU files" ): sub_grid: vtkUnstructuredGrid = read_mesh( sub_vtu_filepaths[ vtu_id ] ) - if options.support == __SUPPORT_CHOICES[ 0 ]: - sub_data = sub_grid.GetPointData() - else: - sub_data = sub_grid.GetCellData() - + sub_data = sub_grid.GetPointData() if options.support == __SUPPORT_CHOICES[ 0 ] else sub_grid.GetCellData() arrays_available: list[ str ] = [ sub_data.GetArrayName( i ) for i in range( sub_data.GetNumberOfArrays() ) ] to_operate_on_indexes: list[ int ] = list() for name in useful_array_names: diff --git a/geos-mesh/src/geos/mesh/doctor/parsing/field_operations_parsing.py b/geos-mesh/src/geos/mesh/doctor/parsing/field_operations_parsing.py index 57059a1..27e3752 100644 --- a/geos-mesh/src/geos/mesh/doctor/parsing/field_operations_parsing.py +++ b/geos-mesh/src/geos/mesh/doctor/parsing/field_operations_parsing.py @@ -43,8 +43,11 @@ def fill_subparser( subparsers ) -> None: help="[list of string comma separated]: 
Allows to create new fields by using a function that is " + "either pre-defined or to implement one. The syntax to use is 'new_field_name:function'. " + "Predefined functions are: 1) 'distances_mesh_center' calculates the distance from the center. " + - "2) 'random' populates an array with samples from a uniform distribution over [0, 1). " + - "The other method, which is to implement a function using the numexpr functionalities." ) + "2) 'random' populates an array with samples from a uniform distribution over [0, 1). An example " + + f" would be '--{__CREATE_FIELDS} new_distances:distances_mesh_center'." + + "The other method is to implement a function using the 'numexpr' library functionalities. For " + + "example, if in your source vtk data you have a cell array called 'PERMEABILITY' and you want to " + + f"create a new field that is the log of this field, you can use: '--{__CREATE_FIELDS} log_perm:log(PERMEABILITY)'.") p.add_argument( '--' + __WHICH_VTM, type=str, required=False, @@ -61,23 +64,23 @@ def convert( parsed_options ) -> Options: if support not in __SUPPORT_CHOICES: raise ValueError( f"For --{__SUPPORT}, the only choices available are {__SUPPORT_CHOICES}." ) - copy_fields: dict[ str, list[ str ] ] = dict() + copy_fields: list[ tuple[ str ] ] = list() splitted_copy_fields: list[ str ] = parsed_options[ __COPY_FIELDS ].split( "," ) for copy_field in splitted_copy_fields: - parts: list[ str ] = copy_field.split( ":" ) - if len( parts ) > 0 and len( parts ) <= 3: - copy_fields[ parts[ 0 ] ] = list() if len( parts ) == 1 else parts[ 1: ] - else: + name_newname_function: tuple[ str ] = tuple( copy_field.split( ":" ) ) + if len( name_newname_function ) == 0 or len( name_newname_function ) > 3: raise ValueError( f"The correct format for '--{__COPY_FIELDS}' is to have either: 'field_name', or " + f"'field_name:new_field_name' or 'field_name:new_field_name:function' " f"but not '{copy_field}'." 
) + else: + copy_fields.append( name_newname_function ) - created_fields: dict[ str, str ] = dict() + created_fields: list[ tuple[ str ] ] = list() splitted_created_fields: list[ str ] = parsed_options[ __CREATE_FIELDS ].split( "," ) for created_field in splitted_created_fields: - parts = created_field.split( ":" ) - if len( parts ) == 2: - created_fields[ parts[ 0 ] ] = parts[ 1 ] + newname_function = tuple( created_field.split( ":" ) ) + if len( newname_function ) == 2: + created_fields.append( newname_function ) else: raise ValueError( f"The correct format for '--{__CREATE_FIELDS}' is to have 'new_field_name:function', " + f"but not '{created_field}'." ) From 8c14003dc2dae6f8740259d89963ca8da2cc344d Mon Sep 17 00:00:00 2001 From: alexbenedicto Date: Tue, 17 Dec 2024 21:55:05 -0800 Subject: [PATCH 29/34] Allow modification of options copy and create used depending on the arrays available in the source data + better error handling --- .../mesh/doctor/checks/field_operations.py | 84 ++++++++----- .../parsing/field_operations_parsing.py | 119 ++++++++++-------- 2 files changed, 123 insertions(+), 80 deletions(-) diff --git a/geos-mesh/src/geos/mesh/doctor/checks/field_operations.py b/geos-mesh/src/geos/mesh/doctor/checks/field_operations.py index f017cca..b18dd62 100644 --- a/geos-mesh/src/geos/mesh/doctor/checks/field_operations.py +++ b/geos-mesh/src/geos/mesh/doctor/checks/field_operations.py @@ -20,7 +20,7 @@ class Options: copy_fields: list[ tuple[ str ] ] # [ ( old_name0, new_name0, function0 ), ... ] created_fields: list[ tuple[ str ] ] # [ ( new_name0, function0 ), ... 
] vtm_index: int # useful when source is a .pvd or .vtm file - out_vtk: VtkOutput + vtk_output: VtkOutput @dataclass( frozen=True ) @@ -105,7 +105,8 @@ def get_vtu_filepaths( options: Options ) -> tuple[ str ]: vtm_filepath: str = get_vtm_filepath_from_pvd( source_filepath, options.vtm_index ) return get_vtu_filepaths_from_vtm( vtm_filepath ) else: - raise ValueError( f"The filepath '{options.source}' provided targets neither a .vtu, a .vtm nor a .pvd file." ) + raise ValueError( f"The source filepath '{options.source}' provided does not target a .vtu, a .vtm nor a " + + ".pvd file." ) def get_reorder_mapping( kd_tree_grid_ref: KDTree, sub_grid: vtkUnstructuredGrid, support: str ) -> array: @@ -160,7 +161,8 @@ def __compatible_meshes( dest_mesh, source_mesh ) -> bool: return True -def get_array_names_to_collect( sub_vtu_filepath: str, options: Options ) -> list[ str ]: +def get_array_names_to_collect_and_options( sub_vtu_filepath: str, + options: Options ) -> tuple[ list[ tuple[ str ] ], Options ]: """We need to have the list of array names that are required to perform copy and creation of new arrays. To build global_arrays to perform operations, we need only these names and not all array names present in the sub meshes. 
@@ -176,24 +178,40 @@ def get_array_names_to_collect( sub_vtu_filepath: str, options: Options ) -> lis if options.support == __SUPPORT_CHOICES[ 0 ]: # point support_array_names: list[ str ] = list( all_array_names[ "PointData" ].keys() ) else: # cell - support_array_names: list[ str ] = list( all_array_names[ "CellData" ].keys() ) + support_array_names = list( all_array_names[ "CellData" ].keys() ) to_use_arrays: set[ str ] = set() + to_use_copy: list[ tuple[ str ] ] = list() for name_newname_function in options.copy_fields: name: str = name_newname_function[ 0 ] if name in support_array_names: to_use_arrays.add( name ) + to_use_copy.append( name_newname_function ) else: - logging.warning( f"The field named '{name}' does not exist in '{sub_vtu_filepath}' in the " + - f"{options.support} data. Cannot perform operations on it." ) + logging.warning( f"The field named '{name}' does not exist in '{sub_vtu_filepath}' " + + f"{options.support} data. Cannot perform copy operation on it." ) + to_use_create: list[ tuple[ str ] ] = list() for newname_function in options.created_fields: funct: str = newname_function[ 1 ] + if funct in create_precoded_fields: + to_use_create.append( newname_function ) + continue + + is_usable: bool = False for support_array_name in support_array_names: if support_array_name in funct: to_use_arrays.add( support_array_name ) + is_usable = True + if is_usable: + to_use_create.append( newname_function ) + else: + logging.warning( f"Cannot perform create operations with '{funct}' because some or all the fields do not " + + f"exist in '{sub_vtu_filepath}'." 
) - return list( to_use_arrays ) + updated_options: Options = Options( options.support, options.source, to_use_copy, to_use_create, options.vtm_index, + options.vtk_output ) + return ( list( to_use_arrays ), updated_options ) def merge_local_in_global_array( global_array: array, local_array: array, mapping: array ) -> None: @@ -207,10 +225,12 @@ def merge_local_in_global_array( global_array: array, local_array: array, mappin mapping (np.array): Array of global indexes of size M. """ size_global, size_local = global_array.shape, local_array.shape - assert size_global[ 0 ] >= size_local[ 0 ], "The global array to fill is smaller than the local array to merge." + if size_global[ 0 ] < size_local[ 0 ]: + raise ValueError( "The global array to fill is smaller than the local array to merge." ) number_columns_global: int = size_global[ 1 ] if len( size_global ) == 2 else 1 number_columns_local: int = size_local[ 1 ] if len( size_local ) == 2 else 1 - assert number_columns_global == number_columns_local, "The arrays do not have same number of columns." + if number_columns_global != number_columns_local: + raise ValueError( "The arrays do not have same number of columns." 
) # when converting a numpy array to vtk array, you need to make sure to have a 2D array if len( size_local ) == 1: local_array = local_array.reshape( -1, 1 ) @@ -232,8 +252,7 @@ def implement_arrays( mesh: vtkUnstructuredGrid, global_arrays: dict[ str, array arrays_to_implement: dict[ str, array ] = dict() # proceed copy operations for name_newname_function in tqdm( options.copy_fields, desc="Copying fields" ): - name: str = name_newname_function[ 0 ] - new_name: str = name_newname_function[ 0 ] + name, new_name = name_newname_function[ 0 ], name_newname_function[ 0 ] if len( name_newname_function ) > 1: new_name = name_newname_function[ 1 ] if len( name_newname_function ) == 3: @@ -267,37 +286,40 @@ def implement_arrays( mesh: vtkUnstructuredGrid, global_arrays: dict[ str, array def __check( grid_ref: vtkUnstructuredGrid, options: Options ) -> Result: sub_vtu_filepaths: tuple[ str ] = get_vtu_filepaths( options ) - useful_array_names: list[ str ] = get_array_names_to_collect( sub_vtu_filepaths[ 0 ], options ) + array_names_to_collect, new_options = get_array_names_to_collect_and_options( sub_vtu_filepaths[ 0 ], options ) + if len( array_names_to_collect ) == 0: + raise ValueError( "No array corresponding to the operations suggested for either copy or creation was found " + + f"in the source {new_options.support} data. Check your support and source file." 
) # create the output grid output_mesh: vtkUnstructuredGrid = grid_ref.NewInstance() output_mesh.CopyStructure( grid_ref ) output_mesh.CopyAttributes( grid_ref ) # find the support elements to use and construct their KDTree - support_elements: array = support_construction[ options.support ]( output_mesh ) - size_support: int = support_elements.shape[ 0 ] + support_elements: array = support_construction[ new_options.support ]( output_mesh ) + number_elements: int = support_elements.shape[ 0 ] kd_tree_ref: KDTree = KDTree( support_elements ) # perform operations to construct the global arrays to implement in the output mesh from copy global_arrays: dict[ str, array ] = dict() for vtu_id in tqdm( range( len( sub_vtu_filepaths ) ), desc="Processing VTU files" ): sub_grid: vtkUnstructuredGrid = read_mesh( sub_vtu_filepaths[ vtu_id ] ) - sub_data = sub_grid.GetPointData() if options.support == __SUPPORT_CHOICES[ 0 ] else sub_grid.GetCellData() - arrays_available: list[ str ] = [ sub_data.GetArrayName( i ) for i in range( sub_data.GetNumberOfArrays() ) ] - to_operate_on_indexes: list[ int ] = list() - for name in useful_array_names: - array_index: int = arrays_available.index( name ) - to_operate_on_indexes.append( array_index ) - if not name in global_arrays: - dimension: int = sub_data.GetArray( array_index ).GetNumberOfComponents() - global_arrays[ name ] = full( ( size_support, dimension ), nan ) - - reorder_mapping: array = get_reorder_mapping( kd_tree_ref, sub_grid, options.support ) - for index in to_operate_on_indexes: - name = arrays_available[ index ] - sub_array: array = vtk_to_numpy( sub_data.GetArray( index ) ) - merge_local_in_global_array( global_arrays[ name ], sub_array, reorder_mapping ) + sub_data = sub_grid.GetPointData() if new_options.support == __SUPPORT_CHOICES[ 0 ] else sub_grid.GetCellData() + usable_arrays: list[ tuple[ int, str ] ] = list() + for array_index in range( sub_data.GetNumberOfArrays() ): + array_name: str = sub_data.GetArrayName( 
array_index ) + if array_name in array_names_to_collect: + usable_arrays.append( ( array_index, array_name ) ) + if not array_name in global_arrays: + number_components: int = sub_data.GetArray( array_index ).GetNumberOfComponents() + global_arrays[ array_name ] = full( ( number_elements, number_components ), nan ) + + if len( usable_arrays ) > 0: + reorder_mapping: array = get_reorder_mapping( kd_tree_ref, sub_grid, new_options.support ) + for index_name in usable_arrays: + sub_array: array = vtk_to_numpy( sub_data.GetArray( index_name[ 0 ] ) ) + merge_local_in_global_array( global_arrays[ index_name[ 1 ] ], sub_array, reorder_mapping ) # The global arrays have been filled, so now we need to implement them in the output_mesh - implement_arrays( output_mesh, global_arrays, options ) - write_mesh( output_mesh, options.out_vtk ) + implement_arrays( output_mesh, global_arrays, new_options ) + write_mesh( output_mesh, new_options.vtk_output ) return Result( info="OK" ) diff --git a/geos-mesh/src/geos/mesh/doctor/parsing/field_operations_parsing.py b/geos-mesh/src/geos/mesh/doctor/parsing/field_operations_parsing.py index 27e3752..cd5ad19 100644 --- a/geos-mesh/src/geos/mesh/doctor/parsing/field_operations_parsing.py +++ b/geos-mesh/src/geos/mesh/doctor/parsing/field_operations_parsing.py @@ -2,12 +2,12 @@ from geos.mesh.doctor.checks.field_operations import Options, Result, __SUPPORT_CHOICES from geos.mesh.doctor.parsing import vtk_output_parsing, FIELD_OPERATIONS - __SUPPORT = "support" __SOURCE = "source" __COPY_FIELDS = "copy_fields" __CREATE_FIELDS = "create_fields" +__FIELDS_DEFAULT = "" __WHICH_VTM = "which_vtm" __WHICH_VTM_SUGGESTIONS = [ "first", "last" ] @@ -26,36 +26,45 @@ def fill_subparser( subparsers ) -> None: type=str, required=False, help="[string]: Where field data to use for operation comes from .vtu, .vtm or .pvd file." 
) - p.add_argument( '--' + __COPY_FIELDS, - type=str, - required=False, - help="[list of string comma separated]: Allows to copy a field from an input mesh to an output mesh. " + - "This copy can also be done while applying a coefficient on the copied field. The syntax to use " + - "is 'old_field_name:new_field_name:function'. Example: The available fields in your input mesh " + - "are 'poro,perm,temp,pressure,'. First, to copy 'poro' without any modification use 'poro'. " + - "Then, to copy 'perm' and change its name to 'permeability' use 'perm:permeability'. " + - "After, to copy 'temp' and change its name to 'temperature' and to increase the values by 3 use 'temp:temperature:+3'. " + - "Finally, to copy 'pressure' without changing its name and to multiply the values by 10 use 'pressure:pressure:*10'. " + - f"The combined syntax is '--{__COPY_FIELDS} poro,perm:permeability,temp:temperature:+3,pressure:pressure:*10'." ) - p.add_argument( '--' + __CREATE_FIELDS, - type=str, - required=False, - help="[list of string comma separated]: Allows to create new fields by using a function that is " + - "either pre-defined or to implement one. The syntax to use is 'new_field_name:function'. " + - "Predefined functions are: 1) 'distances_mesh_center' calculates the distance from the center. " + - "2) 'random' populates an array with samples from a uniform distribution over [0, 1). An example " + - f" would be '--{__CREATE_FIELDS} new_distances:distances_mesh_center'." + - "The other method is to implement a function using the 'numexpr' library functionalities. 
For " + - "example, if in your source vtk data you have a cell array called 'PERMEABILITY' and you want to " + - f"create a new field that is the log of this field, you can use: '--{__CREATE_FIELDS} log_perm:log(PERMEABILITY)'.") - p.add_argument( '--' + __WHICH_VTM, - type=str, - required=False, - default=__WHICH_VTM_SUGGESTIONS[ 1 ], - help="[string]: If your input is a .pvd, choose which .vtm (each .vtm corresponding to a unique " - "timestep) will be used for the operation. To do so, you can choose amongst these possibilities: " - "'first' will select the initial timestep; 'last' will select the final timestep; or you can enter " - "directly the index starting from 0 of the timestep (not the time). By default, the value is set to 'last'." ) + p.add_argument( + '--' + __COPY_FIELDS, + type=str, + required=False, + default=__FIELDS_DEFAULT, + help="[list of string comma separated]: Allows to copy a field from an input mesh to an output mesh. " + + "This copy can also be done while applying a coefficient on the copied field. The syntax to use " + + "is 'old_field_name:new_field_name:function'. Example: The available fields in your input mesh " + + "are 'poro,perm,temp,pressure,'. First, to copy 'poro' without any modification use 'poro'. " + + "Then, to copy 'perm' and change its name to 'permeability' use 'perm:permeability'. " + + "After, to copy 'temp' and change its name to 'temperature' and to increase the values by 3 use 'temp:temperature:+3'. " + + + "Finally, to copy 'pressure' without changing its name and to multiply the values by 10 use 'pressure:pressure:*10'. " + + + f"The combined syntax is '--{__COPY_FIELDS} poro,perm:permeability,temp:temperature:+3,pressure:pressure:*10'." + ) + p.add_argument( + '--' + __CREATE_FIELDS, + type=str, + required=False, + default=__FIELDS_DEFAULT, + help="[list of string comma separated]: Allows to create new fields by using a function that is " + + "either pre-defined or to implement one. 
The syntax to use is 'new_field_name:function'. " + + "Predefined functions are: 1) 'distances_mesh_center' calculates the distance from the center. " + + "2) 'random' populates an array with samples from a uniform distribution over [0, 1). An example " + + f" would be '--{__CREATE_FIELDS} new_distances:distances_mesh_center'." + + "The other method is to implement a function using the 'numexpr' library functionalities. For " + + "example, if in your source vtk data you have a cell array called 'PERMEABILITY' and you want to " + + f"create a new field that is the log of this field, you can use: '--{__CREATE_FIELDS} log_perm:log(PERMEABILITY)'." + ) + p.add_argument( + '--' + __WHICH_VTM, + type=str, + required=False, + default=__WHICH_VTM_SUGGESTIONS[ 1 ], + help="[string]: If your input is a .pvd, choose which .vtm (each .vtm corresponding to a unique " + "timestep) will be used for the operation. To do so, you can choose amongst these possibilities: " + "'first' will select the initial timestep; 'last' will select the final timestep; or you can enter " + "directly the index starting from 0 of the timestep (not the time). By default, the value is set to 'last'." ) vtk_output_parsing.fill_vtk_output_subparser( p ) @@ -65,25 +74,37 @@ def convert( parsed_options ) -> Options: raise ValueError( f"For --{__SUPPORT}, the only choices available are {__SUPPORT_CHOICES}." ) copy_fields: list[ tuple[ str ] ] = list() - splitted_copy_fields: list[ str ] = parsed_options[ __COPY_FIELDS ].split( "," ) - for copy_field in splitted_copy_fields: - name_newname_function: tuple[ str ] = tuple( copy_field.split( ":" ) ) - if len( name_newname_function ) == 0 or len( name_newname_function ) > 3: - raise ValueError( f"The correct format for '--{__COPY_FIELDS}' is to have either: 'field_name', or " + - f"'field_name:new_field_name' or 'field_name:new_field_name:function' " - f"but not '{copy_field}'." 
) - else: - copy_fields.append( name_newname_function ) + parsed_copy_fields: str = parsed_options[ __COPY_FIELDS ] + if parsed_copy_fields == __FIELDS_DEFAULT: + logging.info( "No field will be copied because none was provided." ) + else: + splitted_copy_fields: list[ str ] = parsed_copy_fields.split( "," ) + for copy_field in splitted_copy_fields: + name_newname_function: tuple[ str ] = tuple( copy_field.split( ":" ) ) + if len( name_newname_function ) == 0 or len( name_newname_function ) > 3: + raise ValueError( f"The correct format for '--{__COPY_FIELDS}' is to have either: 'field_name', or " + + f"'field_name:new_field_name' or 'field_name:new_field_name:function' " + f"but not '{copy_field}'." ) + else: + copy_fields.append( name_newname_function ) created_fields: list[ tuple[ str ] ] = list() - splitted_created_fields: list[ str ] = parsed_options[ __CREATE_FIELDS ].split( "," ) - for created_field in splitted_created_fields: - newname_function = tuple( created_field.split( ":" ) ) - if len( newname_function ) == 2: - created_fields.append( newname_function ) - else: - raise ValueError( f"The correct format for '--{__CREATE_FIELDS}' is to have 'new_field_name:function', " + - f"but not '{created_field}'." ) + parsed_create_fields: str = parsed_options[ __CREATE_FIELDS ] + if parsed_create_fields == __FIELDS_DEFAULT: + logging.info( "No field will be created because none was provided." ) + else: + splitted_created_fields: list[ str ] = parsed_create_fields.split( "," ) + for created_field in splitted_created_fields: + newname_function = tuple( created_field.split( ":" ) ) + if len( newname_function ) == 2: + created_fields.append( newname_function ) + else: + raise ValueError( + f"The correct format for '--{__CREATE_FIELDS}' is to have 'new_field_name:function', " + + f"but not '{created_field}'." ) + + if len( copy_fields ) == len( created_fields ) == 0: + raise ValueError( f"No copy nor creation of field was found. 
No operation can be executed with this feature." ) which_vtm: str = parsed_options[ __WHICH_VTM ] if which_vtm in __WHICH_VTM_SUGGESTIONS: @@ -100,7 +121,7 @@ def convert( parsed_options ) -> Options: copy_fields=copy_fields, created_fields=created_fields, vtm_index=vtm_index, - out_vtk=vtk_output_parsing.convert( parsed_options ) ) + vtk_output=vtk_output_parsing.convert( parsed_options ) ) def display_results( options: Options, result: Result ): From 67c7d0bf22b85e6c73ca8f18f89d422b1371e7ec Mon Sep 17 00:00:00 2001 From: alexbenedicto Date: Tue, 17 Dec 2024 21:55:48 -0800 Subject: [PATCH 30/34] Update of tests --- geos-mesh/tests/test_field_operations.py | 89 ++++++++++-------------- 1 file changed, 38 insertions(+), 51 deletions(-) diff --git a/geos-mesh/tests/test_field_operations.py b/geos-mesh/tests/test_field_operations.py index ae98454..e14283b 100644 --- a/geos-mesh/tests/test_field_operations.py +++ b/geos-mesh/tests/test_field_operations.py @@ -7,8 +7,6 @@ from geos.mesh.doctor.checks import vtk_utils as vu from tests import test_vtk_utils as tvu from vtkmodules.vtkCommonDataModel import vtkUnstructuredGrid, VTK_HEXAHEDRON - - """ For tests creation """ @@ -77,7 +75,7 @@ [ 2.0, 0.0, 1.0 ], #point5 [ 2.0, 1.0, 1.0 ], #point6 [ 1.0, 1.0, 1.0 ] ] #point7 -hex1_grid: vtkUnstructuredGrid = tvu.create_type_vtk_grid( hex1_points_coords, hex_ids, VTK_HEXAHEDRON ) +hex1_grid: vtkUnstructuredGrid = tvu.create_type_vtk_grid( hex1_points_coords, hex_ids, VTK_HEXAHEDRON ) sub_grids: list[ vtkUnstructuredGrid ] = [ hex0_grid, hex1_grid ] sub_grids_values: list[ dict[ str, array ] ] = [ dict() for _ in range( len( sub_grids ) ) ] @@ -118,28 +116,17 @@ else: sub_grid.GetPointData().AddArray( arr_values ) -copy_fields_points: dict[ str, list[ str ] ] = { - "point_param0": list(), - "point_param1": [ "point_param1" + "_new" ], - "point_param2": [ "point_param2" + "_new", "*3" ] -} -copy_fields_cells: dict[ str, list[ str ] ] = { - "cell_param0": list(), - "cell_param1": [ 
"cell_param1" + "_new" ], - "cell_param2": [ "cell_param2" + "_new", "+ 10" ] -} - -created_fields_points: dict[ str, str ] = { - "point_param0" + "_created": "log( point_param0 )", - "point_param1" + "_created": "sqrt( point_param1 )", - "point_param2" + "_created": "point_param0 +point_param1 * 2" -} -created_fields_cells: dict[ str, str ] = { - "cell_param0" + "_created": "log( cell_param0 )", - "cell_param1" + "_created": "sqrt( cell_param1 )", - "cell_param2" + "_created": "cell_param0 + cell_param1 * 2" -} +copy_fields_points: list[ tuple[ str ] ] = [ ( "point_param0", ), ( "point_param1", "point_param1" + "_new" ), + ( "point_param2", "point_param2" + "_new", "*3" ) ] +copy_fields_cells: list[ tuple[ str ] ] = [ ( "cell_param0", ), ( "cell_param1", "cell_param1" + "_new" ), + ( "cell_param2", "cell_param2" + "_new", "+ 10" ) ] +created_fields_points: list[ tuple[ str ] ] = [ ( "point_param0" + "_created", "log( point_param0 )" ), + ( "point_param1" + "_created", "sqrt( point_param1 )" ), + ( "point_param2" + "_created", "point_param0 +point_param1 * 2" ) ] +created_fields_cells: dict[ str, str ] = [ ( "cell_param0" + "_created", "log( cell_param0 )" ), + ( "cell_param1" + "_created", "sqrt( cell_param1 )" ), + ( "cell_param2" + "_created", "cell_param0 + cell_param1 * 2" ) ] out_points: vu.VtkOutput = vu.VtkOutput( os.path.join( tvu.dir_name, "points.vtu" ), True ) out_cells: vu.VtkOutput = vu.VtkOutput( os.path.join( tvu.dir_name, "cells.vtu" ), True ) @@ -151,8 +138,10 @@ def test_precoded_fields( self ): result_points: array = fo.get_distances_mesh_center( eight_hex_grid_empty, "point" ) result_cells: array = fo.get_distances_mesh_center( eight_hex_grid_empty, "cell" ) sq2, sq3, sq3h = sqrt( 2 ), sqrt( 3 ), sqrt( 3 ) / 2 - expected_points: array = array( [ sq3, sq2, sq3, sq2, 1.0, sq2, sq3, sq2, sq3, sq2, 1.0, sq2, 1.0, 0.0, 1.0, - sq2, 1.0, sq2, sq3, sq2, sq3, sq2, 1.0, sq2, sq3, sq2, sq3 ] ) + expected_points: array = array( [ + sq3, sq2, sq3, sq2, 1.0, 
sq2, sq3, sq2, sq3, sq2, 1.0, sq2, 1.0, 0.0, 1.0, sq2, 1.0, sq2, sq3, sq2, sq3, + sq2, 1.0, sq2, sq3, sq2, sq3 + ] ) expected_cells: array = array( [ sq3h, sq3h, sq3h, sq3h, sq3h, sq3h, sq3h, sq3h ] ) assert array_equal( result_points, expected_points ) assert array_equal( result_cells, expected_cells ) @@ -168,13 +157,13 @@ def test_get_vtu_filepaths( self ): copy_fields=dict(), created_fields=dict(), vtm_index=0, - out_vtk=out_points ) + vtk_output=out_points ) options_pvd1: fo.Options = fo.Options( support="point", source=pvd_filepath, copy_fields=dict(), created_fields=dict(), vtm_index=-1, - out_vtk=out_points ) + vtk_output=out_points ) result0: tuple[ str ] = fo.get_vtu_filepaths( options_pvd0 ) result1: tuple[ str ] = fo.get_vtu_filepaths( options_pvd1 ) try: @@ -200,30 +189,32 @@ def test_get_reorder_mapping( self ): assert result_cells1.tolist() == [ 0 ] assert result_cells2.tolist() == [ 1 ] - def test_get_array_names_to_collect( self ): + def test_get_array_names_to_collect_and_options( self ): vu.write_mesh( eight_hex_grid, eight_hex_grid_output ) options1: fo.Options = fo.Options( "cell", eight_hex_grid_output.output, copy_fields_cells, created_fields_cells, -1, out_cells ) options2: fo.Options = fo.Options( "point", eight_hex_grid_output.output, copy_fields_points, created_fields_points, -1, out_points ) - result1: list[ str ] = fo.get_array_names_to_collect( eight_hex_grid_output.output, options1 ) - result2: list[ str ] = fo.get_array_names_to_collect( eight_hex_grid_output.output, options2 ) + result1, options1_new = fo.get_array_names_to_collect_and_options( eight_hex_grid_output.output, options1 ) + result2, options2_new = fo.get_array_names_to_collect_and_options( eight_hex_grid_output.output, options2 ) os.remove( eight_hex_grid_output.output ) - assert result1.sort() == list( copy_fields_cells.keys() ).sort() - assert result2.sort() == list( copy_fields_points.keys() ).sort() + assert result1.sort() == [ fc[ 0 ] for fc in copy_fields_cells 
].sort() + assert result2.sort() == [ fp[ 0 ] for fp in copy_fields_points ].sort() + assert options1_new.copy_fields.sort() == copy_fields_cells.sort() + assert options2_new.copy_fields.sort() == copy_fields_points.sort() def test_merge_local_in_global_array( self ): # create arrays filled with nan values - glob_arr_points_1D: array = full( ( 8, 1), nan ) - glob_arr_cells_1D: array = full( ( 8, 1), nan ) + glob_arr_points_1D: array = full( ( 8, 1 ), nan ) + glob_arr_cells_1D: array = full( ( 8, 1 ), nan ) glob_arr_points_3D: array = full( ( 8, 3 ), nan ) glob_arr_cells_3D: array = full( ( 8, 3 ), nan ) loc_arr_points_1D: array = array( list( range( 0, 4 ) ) ) loc_arr_cells_1D: array = array( list( range( 4, 8 ) ) ) - loc_arr_points_3D: array = array( ( list( range( 0, 3 ) ), list( range( 6, 9 ) ), list( range( 12, 15 ) ), - list( range( 18, 21 ) ) ) ) - loc_arr_cells_3D: array = array( ( list( range( 3, 6 ) ), list( range( 9, 12 ) ), list( range( 15, 18 ) ), - list( range( 21, 24 ) ) ) ) + loc_arr_points_3D: array = array( + ( list( range( 0, 3 ) ), list( range( 6, 9 ) ), list( range( 12, 15 ) ), list( range( 18, 21 ) ) ) ) + loc_arr_cells_3D: array = array( + ( list( range( 3, 6 ) ), list( range( 9, 12 ) ), list( range( 15, 18 ) ), list( range( 21, 24 ) ) ) ) mapping_points: array = array( [ 0, 2, 4, 6 ] ) mapping_cells: array = array( [ 7, 5, 3, 1 ] ) fo.merge_local_in_global_array( glob_arr_points_1D, loc_arr_points_1D, mapping_points ) @@ -247,18 +238,14 @@ def test_implement_arrays( self ): empty_mesh.DeepCopy( eight_hex_grid_empty ) npoints: int = empty_mesh.GetNumberOfPoints() ncells: int = empty_mesh.GetNumberOfCells() - copy_fpoints = { "point_param0": [], - "point_param1": [ "point_copy1" ], - "point_param2": [ "point_param2", "*10 + 0.1" ] } - copy_fcells = { "cell_param0": [], - "cell_param1": [ "cell_copy1" ], - "cell_param2": [ "cell_param2", "/0.1 - 0.5" ] } - create_fpoints = { "new0": "log(point_param0)", - "new1": "sqrt(point_param1)", - "new2": 
"distances_mesh_center" } - create_fcells = { "new3": "sqrt(cell_param0)", - "new4": "log10(cell_param1)", - "new5": "cell_param0 + cell_param1" } + copy_fpoints = [ ( "point_param0", ), ( "point_param1", "point_copy1" ), + ( "point_param2", "point_param2", "*10 + 0.1" ) ] + copy_fcells = [ ( "cell_param0", ), ( "cell_param1", "cell_copy1" ), + ( "cell_param2", "cell_param2", "/0.1 - 0.5" ) ] + create_fpoints = [ ( "new0", "log(point_param0)" ), ( "new1", "sqrt(point_param1)" ), + ( "new2", "distances_mesh_center" ) ] + create_fcells = [ ( "new3", "sqrt(cell_param0)" ), ( "new4", "log10(cell_param1)" ), + ( "new5", "cell_param0 + cell_param1" ) ] options_point = fo.Options( "point", "empty.vtu", copy_fpoints, create_fpoints, -1, output ) options_cell = fo.Options( "cell", "empty.vtu", copy_fcells, create_fcells, -1, output ) fo.implement_arrays( empty_mesh, eight_hex_grid_values, options_point ) From 479a1b2ef75ab790e046d2a4b1f53cbf5908fda5 Mon Sep 17 00:00:00 2001 From: alexbenedicto Date: Wed, 18 Dec 2024 09:06:37 -0800 Subject: [PATCH 31/34] Documentation of mesh_stats and field_operations with update from develop --- docs/geos-mesh.rst | 280 ++++++++++++++++++++++++++++++++++++++++----- 1 file changed, 253 insertions(+), 27 deletions(-) diff --git a/docs/geos-mesh.rst b/docs/geos-mesh.rst index 8582f10..7ae1b29 100644 --- a/docs/geos-mesh.rst +++ b/docs/geos-mesh.rst @@ -15,14 +15,59 @@ Modules To list all the modules available through ``mesh-doctor``, you can simply use the ``--help`` option, which will list all available modules as well as a quick summary. -.. command-output:: python src/geos/mesh/doctor/mesh_doctor.py --help - :cwd: ../geos-mesh +.. code-block:: + + $ python src/geos/mesh/doctor/mesh_doctor.py --help + usage: mesh_doctor.py [-h] [-v] [-q] -i VTK_MESH_FILE + {collocated_nodes,element_volumes,fix_elements_orderings,generate_cube,generate_fractures,generate_global_ids,non_conformal,self_intersecting_elements,supported_elements} + ... 
+ + Inspects meshes for GEOSX. + + positional arguments: + {collocated_nodes,element_volumes,fix_elements_orderings,generate_cube,generate_fractures,generate_global_ids,non_conformal,self_intersecting_elements,supported_elements} + Modules + collocated_nodes + Checks if nodes are collocated. + element_volumes + Checks if the volumes of the elements are greater than "min". + fix_elements_orderings + Reorders the support nodes for the given cell types. + generate_cube + Generate a cube and its fields. + generate_fractures + Splits the mesh to generate the faults and fractures. [EXPERIMENTAL] + generate_global_ids + Adds globals ids for points and cells. + non_conformal + Detects non conformal elements. [EXPERIMENTAL] + self_intersecting_elements + Checks if the faces of the elements are self intersecting. + supported_elements + Check that all the elements of the mesh are supported by GEOSX. + + options: + -h, --help + show this help message and exit + -v Use -v 'INFO', -vv for 'DEBUG'. Defaults to 'WARNING'. + -q Use -q to reduce the verbosity of the output. + -i VTK_MESH_FILE, --vtk-input-file VTK_MESH_FILE + + Note that checks are dynamically loaded. + An option may be missing because of an unloaded module. + Increase verbosity (-v, -vv) to get full information. Then, if you are interested in a specific module, you can ask for its documentation using the ``mesh-doctor module_name --help`` pattern. For example -.. command-output:: python src/geos/mesh/doctor/mesh_doctor.py collocated_nodes --help - :cwd: ../geos-mesh +.. code-block:: + + $ python src/geos/mesh/doctor/mesh_doctor.py collocated_nodes --help + usage: mesh_doctor.py collocated_nodes [-h] --tolerance TOLERANCE + + options: + -h, --help show this help message and exit + --tolerance TOLERANCE [float]: The absolute distance between two nodes for them to be considered collocated. ``mesh-doctor`` loads its module dynamically. 
If a module can't be loaded, ``mesh-doctor`` will proceed and try to load other modules. @@ -44,8 +89,14 @@ Here is a list and brief description of all the modules available. Displays the neighboring nodes that are closer to each other than a prescribed threshold. It is not uncommon to define multiple nodes for the exact same position, which will typically be an issue for ``geos`` and should be fixed. -.. command-output:: python src/geos/mesh/doctor/mesh_doctor.py collocated_nodes --help - :cwd: ../geos-mesh +.. code-block:: + + $ python src/geos/mesh/doctor/mesh_doctor.py collocated_nodes --help + usage: mesh_doctor.py collocated_nodes [-h] --tolerance TOLERANCE + + options: + -h, --help show this help message and exit + --tolerance TOLERANCE [float]: The absolute distance between two nodes for them to be considered collocated. ``element_volumes`` """"""""""""""""""" @@ -53,8 +104,63 @@ It is not uncommon to define multiple nodes for the exact same position, which w Computes the volumes of all the cells and displays the ones that are below a prescribed threshold. Cells with negative volumes will typically be an issue for ``geos`` and should be fixed. -.. command-output:: python src/geos/mesh/doctor/mesh_doctor.py element_volumes --help - :cwd: ../geos-mesh +.. code-block:: + + $ python src/geos/mesh/doctor/mesh_doctor.py element_volumes --help + usage: mesh_doctor.py element_volumes [-h] --min 0.0 + + options: + -h, --help show this help message and exit + --min 0.0 [float]: The minimum acceptable volume. Defaults to 0.0. + +``field_operations`` +"""""""""""""""""""""""""" + +Using a source file containing PointData or CellData, allows to perform operations on that data from the source file to an output .vtu file. +The source file can be a .vtu, .vtm or .pvd file as long as the geometry of the multiblock corresponds to the geometry of the output .vtu file. 
+An example of a source file can be the vtkOutput.pvd from a GEOS simulation and the output file can be your VTK mesh used in this simulation.
+The term 'operations' covers two distinct categories:
+'COPY' operations, which copy data arrays from the source file to the output file, with the possibility to rename the copied arrays and to apply multiplication or addition to these arrays.
+'CREATE' operations, which use the source file data to create new arrays by performing addition between several arrays, applying log or sqrt functions, etc., as allowed by the numexpr library.
+ --create_fields CREATE_FIELDS + [list of string comma separated]: Allows to create new fields by using a function that is either pre-defined or to implement one. + The syntax to use is 'new_field_name:function'. + Predefined functions are: 'distances_mesh_center' calculates the distance from the center. + random' populates an array with samples from a uniform distribution over (0, 1). + An example would be '--create_fields new_distances:distances_mesh_center'. + The other method is to implement a function using the 'numexpr' library functionalities. + For example, if in your source vtk data you have a cell array called 'PERMEABILITY' and you want to create a new field that is the log of this field, you can use: '--create_fields log_perm:log(PERMEABILITY)'. + --which_vtm WHICH_VTM + [string]: If your input is a .pvd, choose which .vtm (each .vtm corresponding to a unique timestep) will be used for the operation. + To do so, you can choose amongst these possibilities: 'first' will select the initial timestep; + 'last' will select the final timestep; or you can enter directly the index starting from 0 of the timestep (not the time). + By default, the value is set to 'last'. + --output OUTPUT [string]: The vtk output file destination. + --data-mode binary, ascii + [string]: For ".vtu" output format, the data mode can be binary or ascii. Defaults to binary. ``fix_elements_orderings`` """""""""""""""""""""""""" @@ -63,8 +169,29 @@ It sometimes happens that an exported mesh does not abide by the ``vtk`` orderin The ``fix_elements_orderings`` module can rearrange the nodes of given types of elements. This can be convenient if you cannot regenerate the mesh. -.. command-output:: python src/geos/mesh/doctor/mesh_doctor.py fix_elements_orderings --help - :cwd: ../geos-mesh +.. 
code-block:: + + $ python src/geos/mesh/doctor/mesh_doctor.py fix_elements_orderings --help + usage: mesh_doctor.py fix_elements_orderings [-h] [--Hexahedron 1,6,5,4,7,0,2,3] [--Prism5 8,2,0,7,6,9,5,1,4,3] + [--Prism6 11,2,8,10,5,0,9,7,6,1,4,3] [--Pyramid 3,4,0,2,1] + [--Tetrahedron 2,0,3,1] [--Voxel 1,6,5,4,7,0,2,3] + [--Wedge 3,5,4,0,2,1] --output OUTPUT [--data-mode binary, ascii] + + options: + -h, --help show this help message and exit + --Hexahedron 1,6,5,4,7,0,2,3 + [list of integers]: node permutation for "Hexahedron". + --Prism5 8,2,0,7,6,9,5,1,4,3 + [list of integers]: node permutation for "Prism5". + --Prism6 11,2,8,10,5,0,9,7,6,1,4,3 + [list of integers]: node permutation for "Prism6". + --Pyramid 3,4,0,2,1 [list of integers]: node permutation for "Pyramid". + --Tetrahedron 2,0,3,1 [list of integers]: node permutation for "Tetrahedron". + --Voxel 1,6,5,4,7,0,2,3 [list of integers]: node permutation for "Voxel". + --Wedge 3,5,4,0,2,1 [list of integers]: node permutation for "Wedge". + --output OUTPUT [string]: The vtk output file destination. + --data-mode binary, ascii + [string]: For ".vtu" output format, the data mode can be binary or ascii. Defaults to binary. ``generate_cube`` """"""""""""""""" @@ -73,8 +200,30 @@ This module conveniently generates cubic meshes in ``vtk``. It can also generate fields with simple values. This tool can also be useful to generate a trial mesh that will later be refined or customized. -.. command-output:: python src/geos/mesh/doctor/mesh_doctor.py generate_cube --help - :cwd: ../geos-mesh +.. 
code-block:: + + $ python src/geos/mesh/doctor/mesh_doctor.py generate_cube --help + usage: mesh_doctor.py generate_cube [-h] [--x 0:1.5:3] [--y 0:5:10] [--z 0:1] [--nx 2:2] [--ny 1:1] [--nz 4] + [--fields name:support:dim [name:support:dim ...]] [--cells] [--no-cells] + [--points] [--no-points] --output OUTPUT [--data-mode binary, ascii] + + options: + -h, --help show this help message and exit + --x 0:1.5:3 [list of floats]: X coordinates of the points. + --y 0:5:10 [list of floats]: Y coordinates of the points. + --z 0:1 [list of floats]: Z coordinates of the points. + --nx 2:2 [list of integers]: Number of elements in the X direction. + --ny 1:1 [list of integers]: Number of elements in the Y direction. + --nz 4 [list of integers]: Number of elements in the Z direction. + --fields name:support:dim + [name:support:dim ...]: Create fields on CELLS or POINTS, with given dimension (typically 1 or 3). + --cells [bool]: Generate global ids for cells. Defaults to true. + --no-cells [bool]: Don't generate global ids for cells. + --points [bool]: Generate global ids for points. Defaults to true. + --no-points [bool]: Don't generate global ids for points. + --output OUTPUT [string]: The vtk output file destination. + --data-mode binary, ascii + [string]: For ".vtu" output format, the data mode can be binary or ascii. Defaults to binary. ``generate_fractures`` """""""""""""""""""""" @@ -82,8 +231,30 @@ This tool can also be useful to generate a trial mesh that will later be refined For a conformal fracture to be defined in a mesh, ``geos`` requires the mesh to be split at the faces where the fracture gets across the mesh. The ``generate_fractures`` module will split the mesh and generate the multi-block ``vtk`` files. -.. command-output:: python src/geos/mesh/doctor/mesh_doctor.py generate_fractures --help - :cwd: ../geos-mesh +.. 
code-block:: + + $ python src/geos/mesh/doctor/mesh_doctor.py generate_fractures --help + usage: mesh_doctor.py generate_fractures [-h] --policy field, internal_surfaces [--name NAME] [--values VALUES] --output OUTPUT + [--data-mode binary, ascii] [--fractures_output_dir FRACTURES_OUTPUT_DIR] + + options: + -h, --help show this help message and exit + --policy field, internal_surfaces + [string]: The criterion to define the surfaces that will be changed into fracture zones. Possible values are "field, internal_surfaces" + --name NAME [string]: If the "field" policy is selected, defines which field will be considered to define the fractures. + If the "internal_surfaces" policy is selected, defines the name of the attribute will be considered to identify the fractures. + --values VALUES [list of comma separated integers]: If the "field" policy is selected, which changes of the field will be considered as a fracture. + If the "internal_surfaces" policy is selected, list of the fracture attributes. + You can create multiple fractures by separating the values with ':' like shown in this example. + --values 10,12:13,14,16,18:22 will create 3 fractures identified respectively with the values (10,12), (13,14,16,18) and (22). + If no ':' is found, all values specified will be assumed to create only 1 single fracture. + --output OUTPUT [string]: The vtk output file destination. + --data-mode binary, ascii + [string]: For ".vtu" output format, the data mode can be binary or ascii. Defaults to binary. + --fractures_output_dir FRACTURES_OUTPUT_DIR + [string]: The output directory for the fractures meshes that will be generated from the mesh. + --fractures_data_mode FRACTURES_DATA_MODE + [string]: For ".vtu" output format, the data mode can be binary or ascii. Defaults to binary. 
``generate_global_ids`` """"""""""""""""""""""" @@ -91,8 +262,42 @@ The ``generate_fractures`` module will split the mesh and generate the multi-blo When running ``geos`` in parallel, `global ids` can be used to refer to data across multiple ranks. The ``generate_global_ids`` can generate `global ids` for the imported ``vtk`` mesh. -.. command-output:: python src/geos/mesh/doctor/mesh_doctor.py generate_global_ids --help - :cwd: ../geos-mesh +.. code-block:: + + $ python src/geos/mesh/doctor/mesh_doctor.py generate_global_ids --help + usage: mesh_doctor.py generate_global_ids [-h] [--cells] [--no-cells] [--points] [--no-points] --output OUTPUT + [--data-mode binary, ascii] + + options: + -h, --help show this help message and exit + --cells [bool]: Generate global ids for cells. Defaults to true. + --no-cells [bool]: Don't generate global ids for cells. + --points [bool]: Generate global ids for points. Defaults to true. + --no-points [bool]: Don't generate global ids for points. + --output OUTPUT [string]: The vtk output file destination. + --data-mode binary, ascii + [string]: For ".vtu" output format, the data mode can be binary or ascii. Defaults to binary. + +``mesh_stats`` +""""""""""""""""" + +Performs a summary over certain geometrical, topological and data painting mesh properties. +The future goal for this feature would be to provide a deeper mesh analysis and to evaluate the 'quality' of this mesh before using it in GEOS. + +.. code-block:: + + $ python src/geos/mesh/doctor/mesh_doctor.py mesh_stats --help + usage: mesh_doctor.py mesh_stats [-h] --write_stats [0, 1] [--disconnected [0, 1]] + [--field_values [0, 1]] [--output OUTPUT] + + options: + -h, --help show this help message and exit + --write_stats [0, 1] [int]: The stats of the mesh will be printed in a file to the folder specified in --output. + --disconnected [0, 1] + [int]: Display all disconnected nodes ids and disconnected cell ids. 
+ --field_values [0, 1] + [int]: Display all range of field values that seem not realistic. + --output OUTPUT [string]: The output folder destination where the stats will be written. ``non_conformal`` """"""""""""""""" @@ -102,8 +307,19 @@ This module will detect elements which are close enough (there's a user defined The angle between two faces can also be precribed. This module can be a bit time consuming. -.. command-output:: python src/geos/mesh/doctor/mesh_doctor.py non_conformal --help - :cwd: ../geos-mesh +.. code-block:: + + $ python src/geos/mesh/doctor/mesh_doctor.py non_conformal --help + usage: mesh_doctor.py non_conformal [-h] [--angle_tolerance 10.0] [--point_tolerance POINT_TOLERANCE] + [--face_tolerance FACE_TOLERANCE] + + options: + -h, --help show this help message and exit + --angle_tolerance 10.0 [float]: angle tolerance in degrees. Defaults to 10.0 + --point_tolerance POINT_TOLERANCE + [float]: tolerance for two points to be considered collocated. + --face_tolerance FACE_TOLERANCE + [float]: tolerance for two faces to be considered "touching". ``self_intersecting_elements`` """""""""""""""""""""""""""""" @@ -111,8 +327,15 @@ This module can be a bit time consuming. Some meshes can have cells that auto-intersect. This module will display the elements that have faces intersecting. -.. command-output:: python src/geos/mesh/doctor/mesh_doctor.py self_intersecting_elements --help - :cwd: ../geos-mesh +.. code-block:: + + $ python src/geos/mesh/doctor/mesh_doctor.py self_intersecting_elements --help + usage: mesh_doctor.py self_intersecting_elements [-h] [--min 2.220446049250313e-16] + + options: + -h, --help show this help message and exit + --min 2.220446049250313e-16 + [float]: The tolerance in the computation. Defaults to your machine precision 2.220446049250313e-16. ``supported_elements`` """""""""""""""""""""" @@ -125,8 +348,15 @@ But also prismes up to 11 faces. 
The ``supported_elements`` check will validate that no unsupported element is included in the input mesh. It will also verify that the ``VTK_POLYHEDRON`` cells can effectively get converted into a supported type of element. -.. command-output:: python src/geos/mesh/doctor/mesh_doctor.py supported_elements --help - :cwd: ../geos-mesh +.. code-block:: + + $ python src/geos/mesh/doctor/mesh_doctor.py supported_elements --help + usage: mesh_doctor.py supported_elements [-h] [--chunck_size 1] [--nproc 8] + + options: + -h, --help show this help message and exit + --chunck_size 1 [int]: Defaults chunk size for parallel processing to 1 + --nproc 8 [int]: Number of threads used for parallel processing. Defaults to your CPU count 8. @@ -179,8 +409,4 @@ API ^^^ .. automodule:: geos.mesh.conversion.abaqus_converter - :members: - - - - + :members: \ No newline at end of file From 4b200a7fe7e283db715dc0a2131148888ebd9ebe Mon Sep 17 00:00:00 2001 From: alexbenedicto Date: Wed, 18 Dec 2024 14:11:02 -0800 Subject: [PATCH 32/34] Improved implementation of operations --- docs/geos-mesh.rst | 34 +++--- .../mesh/doctor/checks/field_operations.py | 84 +++---------- .../parsing/field_operations_parsing.py | 110 ++++++------------ geos-mesh/tests/test_field_operations.py | 59 +++++----- 4 files changed, 90 insertions(+), 197 deletions(-) diff --git a/docs/geos-mesh.rst b/docs/geos-mesh.rst index 7ae1b29..adefeeb 100644 --- a/docs/geos-mesh.rst +++ b/docs/geos-mesh.rst @@ -126,33 +126,25 @@ The term 'operations' covers two distinct categories: .. 
code-block:: $ python src/geos/mesh/doctor/mesh_doctor.py field_operations --help - usage: mesh_doctor.py field_operations [-h] [--support point, cell] [--source SOURCE] [--copy_fields COPY_FIELDS] - [--create_fields CREATE_FIELDS] [--which_vtm WHICH_VTM] --output OUTPUT - [--data-mode binary, ascii] + usage: mesh_doctor.py field_operations [-h] [--support point, cell] [--source SOURCE] [--operations OPERATIONS] + [--which_vtm WHICH_VTM] --output OUTPUT [--data-mode binary, ascii] options: -h, --help show this help message and exit --support point, cell [string]: Where to define field. --source SOURCE [string]: Where field data to use for operation comes from .vtu, .vtm or .pvd file. - --copy_fields COPY_FIELDS - [list of string comma separated]: Allows to copy a field from an input mesh to an output mesh. - This copy can also be done while applying a coefficient on the copied field. - The syntax to use is 'old_field_name:new_field_name:function'. - Example: The available fields in your input mesh are 'poro,perm,temp,pressure'. - First, to copy 'poro' without any modification use 'poro'. - Then, to copy 'perm' and change its name to 'permeability' use 'perm:permeability'. - After, to copy 'temp' and change its name to 'temperature' and to increase the values by 3 use 'temp:temperature:+3'. - Finally, to copy 'pressure' without changing its name and to multiply the values by 10 use 'pressure:pressure:*10'. - The combined syntax is '--copy_fields poro,perm:permeability,temp:temperature:+3,pressure:pressure:*10'. - --create_fields CREATE_FIELDS - [list of string comma separated]: Allows to create new fields by using a function that is either pre-defined or to implement one. - The syntax to use is 'new_field_name:function'. - Predefined functions are: 'distances_mesh_center' calculates the distance from the center. - random' populates an array with samples from a uniform distribution over (0, 1). 
-                           An example would be '--create_fields new_distances:distances_mesh_center'.
-                           The other method is to implement a function using the 'numexpr' library functionalities.
-                           For example, if in your source vtk data you have a cell array called 'PERMEABILITY' and you want to create a new field that is the log of this field, you can use: '--create_fields log_perm:log(PERMEABILITY)'.
+     --operations OPERATIONS
+                           [list of string comma separated]: The syntax here is function0:new_name0, function1:new_name1, ...
+                           Allows to perform a wide array of operations to add new data to your output mesh using the source file data.
+                           Examples are the following:
+                           1. Copy of the field 'poro' from the input to the output with 'poro:poro'.
+                           2. Copy of the field 'PERM' from the input to the output with a multiplication of the values by 10 with 'PERM*10:PERM'.
+                           3. Copy of the field 'TEMP' from the input to the output with an addition to the values by 0.5 and change the name of the field to 'temperature' with 'TEMP+0.5:TEMPERATURE'.
+                           4. Create a new field 'NEW_PARAM' using the input 'PERM' field and having the square root of it with 'sqrt(PERM):NEW_PARAM'.
+                           Another method is to use precoded functions available which are:
+                           1. 'distances_mesh_center' will create a field where the distances from the mesh center are calculated for all the elements chosen as support. To use: 'distances_mesh_center:NEW_FIELD_NAME'.
+                           2. 'random' will create a field with samples from a uniform distribution over (0, 1). To use: 'random:NEW_FIELD_NAME'.
To do so, you can choose amongst these possibilities: 'first' will select the initial timestep; diff --git a/geos-mesh/src/geos/mesh/doctor/checks/field_operations.py b/geos-mesh/src/geos/mesh/doctor/checks/field_operations.py index b18dd62..7c62e8d 100644 --- a/geos-mesh/src/geos/mesh/doctor/checks/field_operations.py +++ b/geos-mesh/src/geos/mesh/doctor/checks/field_operations.py @@ -4,8 +4,7 @@ from geos.mesh.doctor.checks.vtk_utils import ( VtkOutput, get_points_coords_from_vtk, get_cell_centers_array, get_vtm_filepath_from_pvd, get_vtu_filepaths_from_vtm, get_all_array_names, read_mesh, write_mesh ) -from math import sqrt -from numpy import array, empty, full, int64, nan +from numpy import array, empty, full, sqrt, int64, nan from numpy.random import rand from scipy.spatial import KDTree from tqdm import tqdm @@ -17,8 +16,7 @@ class Options: support: str # choice between 'cell' and 'point' to operate on fields source: str # file from where the data is collected - copy_fields: list[ tuple[ str ] ] # [ ( old_name0, new_name0, function0 ), ... ] - created_fields: list[ tuple[ str ] ] # [ ( new_name0, function0 ), ... ] + operations: list[ tuple[ str ] ] # [ ( function0, new_name0 ), ... 
] vtm_index: int # useful when source is a .pvd or .vtm file vtk_output: VtkOutput @@ -59,7 +57,8 @@ def get_distances_mesh_center( mesh: vtkUnstructuredGrid, support: str ) -> arra coord = coords[ i ] for j in range( len( coord ) ): distance_squared += ( coord[ j ] - center[ j ] ) * ( coord[ j ] - center[ j ] ) - distances[ i ] = sqrt( distance_squared ) + distances[ i ] = distance_squared + distances = sqrt( distances ) return distances @@ -132,35 +131,6 @@ def get_reorder_mapping( kd_tree_grid_ref: KDTree, sub_grid: vtkUnstructuredGrid return mapping -def __compatible_meshes( dest_mesh, source_mesh ) -> bool: - # for now, just check that meshes have same number of elements and same number of nodes - # and require that each cell has same nodes, each node has same coordinate - dest_ne = dest_mesh.GetNumberOfCells() - dest_nn = dest_mesh.GetNumberOfPoints() - source_ne = source_mesh.GetNumberOfCells() - source_nn = source_mesh.GetNumberOfPoints() - - if dest_ne != source_ne: - logging.error( 'meshes have different number of cells' ) - return False - if dest_nn != source_nn: - logging.error( 'meshes have different number of nodes' ) - return False - - for i in range( dest_nn ): - if not ( ( dest_mesh.GetPoint( i ) ) == ( source_mesh.GetPoint( i ) ) ): - logging.error( 'at least one node is in a different location' ) - return False - - for i in range( dest_ne ): - if not ( vtk_to_numpy( dest_mesh.GetCell( i ).GetPoints().GetData() ) == vtk_to_numpy( - source_mesh.GetCell( i ).GetPoints().GetData() ) ).all(): - logging.error( 'at least one cell has different nodes' ) - return False - - return True - - def get_array_names_to_collect_and_options( sub_vtu_filepath: str, options: Options ) -> tuple[ list[ tuple[ str ] ], Options ]: """We need to have the list of array names that are required to perform copy and creation of new arrays. 
To build @@ -181,21 +151,11 @@ def get_array_names_to_collect_and_options( sub_vtu_filepath: str, support_array_names = list( all_array_names[ "CellData" ].keys() ) to_use_arrays: set[ str ] = set() - to_use_copy: list[ tuple[ str ] ] = list() - for name_newname_function in options.copy_fields: - name: str = name_newname_function[ 0 ] - if name in support_array_names: - to_use_arrays.add( name ) - to_use_copy.append( name_newname_function ) - else: - logging.warning( f"The field named '{name}' does not exist in '{sub_vtu_filepath}' " + - f"{options.support} data. Cannot perform copy operation on it." ) - - to_use_create: list[ tuple[ str ] ] = list() - for newname_function in options.created_fields: - funct: str = newname_function[ 1 ] + to_use_operate: list[ tuple[ str ] ] = list() + for function_newname in options.operations: + funct: str = function_newname[ 0 ] if funct in create_precoded_fields: - to_use_create.append( newname_function ) + to_use_operate.append( function_newname ) continue is_usable: bool = False @@ -204,12 +164,12 @@ def get_array_names_to_collect_and_options( sub_vtu_filepath: str, to_use_arrays.add( support_array_name ) is_usable = True if is_usable: - to_use_create.append( newname_function ) + to_use_operate.append( function_newname ) else: - logging.warning( f"Cannot perform create operations with '{funct}' because some or all the fields do not " + + logging.warning( f"Cannot perform operations with '{funct}' because some or all the fields do not " + f"exist in '{sub_vtu_filepath}'." 
) - updated_options: Options = Options( options.support, options.source, to_use_copy, to_use_create, options.vtm_index, + updated_options: Options = Options( options.support, options.source, to_use_operate, options.vtm_index, options.vtk_output ) return ( list( to_use_arrays ), updated_options ) @@ -250,21 +210,9 @@ def implement_arrays( mesh: vtkUnstructuredGrid, global_arrays: dict[ str, array mesh.GetNumberOfCells() arrays_to_implement: dict[ str, array ] = dict() - # proceed copy operations - for name_newname_function in tqdm( options.copy_fields, desc="Copying fields" ): - name, new_name = name_newname_function[ 0 ], name_newname_function[ 0 ] - if len( name_newname_function ) > 1: - new_name = name_newname_function[ 1 ] - if len( name_newname_function ) == 3: - funct: str = name_newname_function[ 2 ] - copy_arr: array = evaluate( name + funct, local_dict=global_arrays ) - else: - copy_arr = global_arrays[ name ] - arrays_to_implement[ new_name ] = copy_arr - - # proceed create operations - for newname_function in tqdm( options.created_fields, desc="Creating fields" ): - new_name, funct = newname_function + # proceed operations + for function_newname in tqdm( options.operations, desc="Performing operations" ): + funct, new_name = function_newname if funct in create_precoded_fields: created_arr: array = create_precoded_fields[ funct ]( mesh, options.support ) else: @@ -288,8 +236,8 @@ def __check( grid_ref: vtkUnstructuredGrid, options: Options ) -> Result: sub_vtu_filepaths: tuple[ str ] = get_vtu_filepaths( options ) array_names_to_collect, new_options = get_array_names_to_collect_and_options( sub_vtu_filepaths[ 0 ], options ) if len( array_names_to_collect ) == 0: - raise ValueError( "No array corresponding to the operations suggested for either copy or creation was found " + - f"in the source {new_options.support} data. Check your support and source file." 
) + raise ValueError( "No array corresponding to the operations suggested was found in the source" + + f" {new_options.support} data. Check your support and source file." ) # create the output grid output_mesh: vtkUnstructuredGrid = grid_ref.NewInstance() output_mesh.CopyStructure( grid_ref ) diff --git a/geos-mesh/src/geos/mesh/doctor/parsing/field_operations_parsing.py b/geos-mesh/src/geos/mesh/doctor/parsing/field_operations_parsing.py index cd5ad19..dfc408e 100644 --- a/geos-mesh/src/geos/mesh/doctor/parsing/field_operations_parsing.py +++ b/geos-mesh/src/geos/mesh/doctor/parsing/field_operations_parsing.py @@ -5,9 +5,8 @@ __SUPPORT = "support" __SOURCE = "source" -__COPY_FIELDS = "copy_fields" -__CREATE_FIELDS = "create_fields" -__FIELDS_DEFAULT = "" +__OPERATIONS = "operations" +__OPERATIONS_DEFAULT = "" __WHICH_VTM = "which_vtm" __WHICH_VTM_SUGGESTIONS = [ "first", "last" ] @@ -24,47 +23,29 @@ def fill_subparser( subparsers ) -> None: help="[string]: Where to define field." ) p.add_argument( '--' + __SOURCE, type=str, - required=False, + required=True, help="[string]: Where field data to use for operation comes from .vtu, .vtm or .pvd file." ) - p.add_argument( - '--' + __COPY_FIELDS, - type=str, - required=False, - default=__FIELDS_DEFAULT, - help="[list of string comma separated]: Allows to copy a field from an input mesh to an output mesh. " + - "This copy can also be done while applying a coefficient on the copied field. The syntax to use " + - "is 'old_field_name:new_field_name:function'. Example: The available fields in your input mesh " + - "are 'poro,perm,temp,pressure,'. First, to copy 'poro' without any modification use 'poro'. " + - "Then, to copy 'perm' and change its name to 'permeability' use 'perm:permeability'. " + - "After, to copy 'temp' and change its name to 'temperature' and to increase the values by 3 use 'temp:temperature:+3'. 
" - + - "Finally, to copy 'pressure' without changing its name and to multiply the values by 10 use 'pressure:pressure:*10'. " - + - f"The combined syntax is '--{__COPY_FIELDS} poro,perm:permeability,temp:temperature:+3,pressure:pressure:*10'." - ) - p.add_argument( - '--' + __CREATE_FIELDS, - type=str, - required=False, - default=__FIELDS_DEFAULT, - help="[list of string comma separated]: Allows to create new fields by using a function that is " + - "either pre-defined or to implement one. The syntax to use is 'new_field_name:function'. " + - "Predefined functions are: 1) 'distances_mesh_center' calculates the distance from the center. " + - "2) 'random' populates an array with samples from a uniform distribution over [0, 1). An example " + - f" would be '--{__CREATE_FIELDS} new_distances:distances_mesh_center'." + - "The other method is to implement a function using the 'numexpr' library functionalities. For " + - "example, if in your source vtk data you have a cell array called 'PERMEABILITY' and you want to " + - f"create a new field that is the log of this field, you can use: '--{__CREATE_FIELDS} log_perm:log(PERMEABILITY)'." - ) - p.add_argument( - '--' + __WHICH_VTM, - type=str, - required=False, - default=__WHICH_VTM_SUGGESTIONS[ 1 ], - help="[string]: If your input is a .pvd, choose which .vtm (each .vtm corresponding to a unique " - "timestep) will be used for the operation. To do so, you can choose amongst these possibilities: " - "'first' will select the initial timestep; 'last' will select the final timestep; or you can enter " - "directly the index starting from 0 of the timestep (not the time). By default, the value is set to 'last'." ) + p.add_argument( '--' + __OPERATIONS, + type=str, + required=True, + default=__OPERATIONS_DEFAULT, + help="[list of string comma separated]: The syntax here is function0:new_name0, function1:new_name1, ... 
" + + "Allows to perform a wide arrays of operations to add new data to your output mesh using the source file data. " + + "Examples are the following: 1. Copy of the field 'poro' from the input to the ouput with 'poro:poro'. " + + "2. Copy of the field 'PERM' from the input to the ouput with a multiplication of the values by 10 with 'PERM*10:PERM'. " + + "3. Copy of the field 'TEMP' from the input to the ouput with an addition to the values by 0.5 and change the name of the field to 'temperature' with 'TEMP+0.5:TEMPERATURE'. " + + "4. Create a new field 'NEW_PARAM' using the input 'PERM' field and having the square root of it with 'sqrt(PERM):NEW_PARAM'. " + + "Another method is to use precoded functions available which are: " + + "1. 'distances_mesh_center' will create a field where the distances from the mesh centerare calculated for all the elements chosen as support. To use: 'distances_mesh_center:NEW_FIELD_NAME'. " + + "2. 'random' will create a field with samples from a uniform distribution over (0, 1). To use: 'random:NEW_FIELD_NAME'." ) + p.add_argument( '--' + __WHICH_VTM, + type=str, + required=False, + default=__WHICH_VTM_SUGGESTIONS[ 1 ], + help="[string]: If your input is a .pvd, choose which .vtm (each .vtm corresponding to a unique " + "timestep) will be used for the operation. To do so, you can choose amongst these possibilities: " + "'first' will select the initial timestep; 'last' will select the final timestep; or you can enter " + "directly the index starting from 0 of the timestep (not the time). By default, the value is set to 'last'." ) vtk_output_parsing.fill_vtk_output_subparser( p ) @@ -73,38 +54,18 @@ def convert( parsed_options ) -> Options: if support not in __SUPPORT_CHOICES: raise ValueError( f"For --{__SUPPORT}, the only choices available are {__SUPPORT_CHOICES}." 
) - copy_fields: list[ tuple[ str ] ] = list() - parsed_copy_fields: str = parsed_options[ __COPY_FIELDS ] - if parsed_copy_fields == __FIELDS_DEFAULT: - logging.info( "No field will be copied because none was provided." ) + operations: list[ tuple[ str ] ] = list() + parsed_operations: str = parsed_options[ __OPERATIONS ] + if parsed_operations == __OPERATIONS_DEFAULT: + raise ValueError( f"No operation was found. Cannot execute this feature." ) else: - splitted_copy_fields: list[ str ] = parsed_copy_fields.split( "," ) - for copy_field in splitted_copy_fields: - name_newname_function: tuple[ str ] = tuple( copy_field.split( ":" ) ) - if len( name_newname_function ) == 0 or len( name_newname_function ) > 3: - raise ValueError( f"The correct format for '--{__COPY_FIELDS}' is to have either: 'field_name', or " + - f"'field_name:new_field_name' or 'field_name:new_field_name:function' " - f"but not '{copy_field}'." ) + splitted_operations: list[ str ] = parsed_operations.split( "," ) + for operation in splitted_operations: + function_newname: tuple[ str ] = tuple( operation.split( ":" ) ) + if len( function_newname ) == 0 or len( function_newname ) > 2: + raise ValueError( f"The correct format for '--{__OPERATIONS}' is to have 'function:newname'." ) else: - copy_fields.append( name_newname_function ) - - created_fields: list[ tuple[ str ] ] = list() - parsed_create_fields: str = parsed_options[ __CREATE_FIELDS ] - if parsed_create_fields == __FIELDS_DEFAULT: - logging.info( "No field will be created because none was provided." ) - else: - splitted_created_fields: list[ str ] = parsed_create_fields.split( "," ) - for created_field in splitted_created_fields: - newname_function = tuple( created_field.split( ":" ) ) - if len( newname_function ) == 2: - created_fields.append( newname_function ) - else: - raise ValueError( - f"The correct format for '--{__CREATE_FIELDS}' is to have 'new_field_name:function', " + - f"but not '{created_field}'." 
) - - if len( copy_fields ) == len( created_fields ) == 0: - raise ValueError( f"No copy nor creation of field was found. No operation can be executed with this feature." ) + operations.append( function_newname ) which_vtm: str = parsed_options[ __WHICH_VTM ] if which_vtm in __WHICH_VTM_SUGGESTIONS: @@ -118,8 +79,7 @@ def convert( parsed_options ) -> Options: return Options( support=support, source=parsed_options[ __SOURCE ], - copy_fields=copy_fields, - created_fields=created_fields, + operations=operations, vtm_index=vtm_index, vtk_output=vtk_output_parsing.convert( parsed_options ) ) diff --git a/geos-mesh/tests/test_field_operations.py b/geos-mesh/tests/test_field_operations.py index e14283b..d3397b8 100644 --- a/geos-mesh/tests/test_field_operations.py +++ b/geos-mesh/tests/test_field_operations.py @@ -116,17 +116,16 @@ else: sub_grid.GetPointData().AddArray( arr_values ) -copy_fields_points: list[ tuple[ str ] ] = [ ( "point_param0", ), ( "point_param1", "point_param1" + "_new" ), - ( "point_param2", "point_param2" + "_new", "*3" ) ] -copy_fields_cells: list[ tuple[ str ] ] = [ ( "cell_param0", ), ( "cell_param1", "cell_param1" + "_new" ), - ( "cell_param2", "cell_param2" + "_new", "+ 10" ) ] - -created_fields_points: list[ tuple[ str ] ] = [ ( "point_param0" + "_created", "log( point_param0 )" ), - ( "point_param1" + "_created", "sqrt( point_param1 )" ), - ( "point_param2" + "_created", "point_param0 +point_param1 * 2" ) ] -created_fields_cells: dict[ str, str ] = [ ( "cell_param0" + "_created", "log( cell_param0 )" ), - ( "cell_param1" + "_created", "sqrt( cell_param1 )" ), - ( "cell_param2" + "_created", "cell_param0 + cell_param1 * 2" ) ] +operations_points: list[ tuple[ str ] ] = [ ( "point_param0", "point_param0" ), ( "point_param1", "point_param1_new" ), + ( "point_param2 * 3", "point_param2_new" ), + ( "log( point_param0 )", "point_param0_created" ), + ( "sqrt( point_param1 )", "point_param1_created" ), + ( "point_param0 + point_param1 * 2", 
"point_param2_created" ) ] +operations_cells: list[ tuple[ str ] ] = [ ( "cell_param0", "cell_param0" ), ( "cell_param1", "cell_param1_new" ), + ( "cell_param2 + 10", "cell_param2_new" ), + ( "log( cell_param0 )", "cell_param0_created" ), + ( "sqrt( cell_param1 )", "cell_param1_created" ), + ( "cell_param0 + cell_param1 * 2", "cell_param2_created" ) ] out_points: vu.VtkOutput = vu.VtkOutput( os.path.join( tvu.dir_name, "points.vtu" ), True ) out_cells: vu.VtkOutput = vu.VtkOutput( os.path.join( tvu.dir_name, "cells.vtu" ), True ) @@ -154,14 +153,12 @@ def test_get_vtu_filepaths( self ): pvd_filepath: str = tvu.create_geos_pvd( tvu.stored_grids, tvu.pvd_directory ) options_pvd0: fo.Options = fo.Options( support="point", source=pvd_filepath, - copy_fields=dict(), - created_fields=dict(), + operations=dict(), vtm_index=0, vtk_output=out_points ) options_pvd1: fo.Options = fo.Options( support="point", source=pvd_filepath, - copy_fields=dict(), - created_fields=dict(), + operations=dict(), vtm_index=-1, vtk_output=out_points ) result0: tuple[ str ] = fo.get_vtu_filepaths( options_pvd0 ) @@ -191,17 +188,15 @@ def test_get_reorder_mapping( self ): def test_get_array_names_to_collect_and_options( self ): vu.write_mesh( eight_hex_grid, eight_hex_grid_output ) - options1: fo.Options = fo.Options( "cell", eight_hex_grid_output.output, copy_fields_cells, - created_fields_cells, -1, out_cells ) - options2: fo.Options = fo.Options( "point", eight_hex_grid_output.output, copy_fields_points, - created_fields_points, -1, out_points ) + options1: fo.Options = fo.Options( "cell", eight_hex_grid_output.output, operations_cells, -1, out_cells ) + options2: fo.Options = fo.Options( "point", eight_hex_grid_output.output, operations_points, -1, out_points ) result1, options1_new = fo.get_array_names_to_collect_and_options( eight_hex_grid_output.output, options1 ) result2, options2_new = fo.get_array_names_to_collect_and_options( eight_hex_grid_output.output, options2 ) os.remove( 
eight_hex_grid_output.output ) - assert result1.sort() == [ fc[ 0 ] for fc in copy_fields_cells ].sort() - assert result2.sort() == [ fp[ 0 ] for fp in copy_fields_points ].sort() - assert options1_new.copy_fields.sort() == copy_fields_cells.sort() - assert options2_new.copy_fields.sort() == copy_fields_points.sort() + assert result1.sort() == [ fc[ 0 ] for fc in operations_cells ].sort() + assert result2.sort() == [ fp[ 0 ] for fp in operations_points ].sort() + assert options1_new.operations.sort() == operations_cells.sort() + assert options2_new.operations.sort() == operations_points.sort() def test_merge_local_in_global_array( self ): # create arrays filled with nan values @@ -238,16 +233,14 @@ def test_implement_arrays( self ): empty_mesh.DeepCopy( eight_hex_grid_empty ) npoints: int = empty_mesh.GetNumberOfPoints() ncells: int = empty_mesh.GetNumberOfCells() - copy_fpoints = [ ( "point_param0", ), ( "point_param1", "point_copy1" ), - ( "point_param2", "point_param2", "*10 + 0.1" ) ] - copy_fcells = [ ( "cell_param0", ), ( "cell_param1", "cell_copy1" ), - ( "cell_param2", "cell_param2", "/0.1 - 0.5" ) ] - create_fpoints = [ ( "new0", "log(point_param0)" ), ( "new1", "sqrt(point_param1)" ), - ( "new2", "distances_mesh_center" ) ] - create_fcells = [ ( "new3", "sqrt(cell_param0)" ), ( "new4", "log10(cell_param1)" ), - ( "new5", "cell_param0 + cell_param1" ) ] - options_point = fo.Options( "point", "empty.vtu", copy_fpoints, create_fpoints, -1, output ) - options_cell = fo.Options( "cell", "empty.vtu", copy_fcells, create_fcells, -1, output ) + ope_points = [ ( "point_param0", "point_param0" ), ( "point_param1", "point_copy1" ), + ( "point_param2 * 10 + 0.1", "point_param2" ), ( "log(point_param0)", "new0" ), + ( "sqrt(point_param1)", "new1" ), ( "distances_mesh_center", "new2" ) ] + ope_cells = [ ( "cell_param0", "cell_param0" ), ( "cell_param1", "cell_copy1" ), + ( "cell_param2 / 0.1 - 0.5", "cell_param2" ), ( "sqrt(cell_param0)", "new3" ), + ( 
"log10(cell_param1)", "new4" ), ( "cell_param0 + cell_param1", "new5" ) ] + options_point = fo.Options( "point", "empty.vtu", ope_points, -1, output ) + options_cell = fo.Options( "cell", "empty.vtu", ope_cells, -1, output ) fo.implement_arrays( empty_mesh, eight_hex_grid_values, options_point ) fo.implement_arrays( empty_mesh, eight_hex_grid_values, options_cell ) point_data_mesh = empty_mesh.GetPointData() From 72741ee2bd29c3bb639eed4183b0530a60980687 Mon Sep 17 00:00:00 2001 From: alexbenedicto Date: Wed, 18 Dec 2024 17:39:50 -0800 Subject: [PATCH 33/34] yapf formatting --- .../src/geos/mesh/doctor/checks/mesh_stats.py | 6 ++-- .../src/geos/mesh/doctor/checks/vtk_utils.py | 7 +++-- .../parsing/field_operations_parsing.py | 31 +++++++++++-------- .../mesh/doctor/parsing/mesh_stats_parsing.py | 29 ++++++++--------- geos-mesh/tests/test_mesh_stats.py | 20 +++++------- geos-mesh/tests/test_vtk_utils.py | 17 +++++----- 6 files changed, 56 insertions(+), 54 deletions(-) diff --git a/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py b/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py index 182e5d3..1bf6d33 100644 --- a/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py +++ b/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py @@ -224,8 +224,8 @@ def build_MeshComponentData( mesh: vtkUnstructuredGrid, componentType: str = "po logging.error( f"Invalid component type chosen to build MeshComponentData. Defaulted to point." 
) scalar_names: list[ str ] = list() - scalar_min_values: list[ float] = list() - scalar_max_values: list[ float] = list() + scalar_min_values: list[ float ] = list() + scalar_max_values: list[ float ] = list() tensor_names: list[ str ] = list() tensor_min_values: list[ list[ float ] ] = list() tensor_max_values: list[ list[ float ] ] = list() @@ -393,7 +393,7 @@ def get_cells_neighbors_number( mesh: vtkUnstructuredGrid ) -> np.array: # If a cell_id ends up having no neighbor = cell is disconnected cells_neighbors_number: np.array = np.zeros( ( mesh.GetNumberOfCells(), 1 ), dtype=int ) for cell_ids in faces_node_ids.values(): - if len(cell_ids) > 1: # if a face node ids is shared by more than 1 cell = all cells sharing are neighbors + if len( cell_ids ) > 1: # if a face node ids is shared by more than 1 cell = all cells sharing are neighbors for cell_id in cell_ids: cells_neighbors_number[ cell_id ] += 1 return cells_neighbors_number diff --git a/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py b/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py index b9517cc..7b18aee 100644 --- a/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py +++ b/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py @@ -51,8 +51,11 @@ def get_all_array_names( mesh: vtkUnstructuredGrid ) -> dict[ str, dict[ str, in "FieldData": { ... }, "PointData": { ... 
} } """ - data_types: dict[ str, any ] = { "CellData": mesh.GetCellData, "FieldData": mesh.GetFieldData, - "PointData": mesh.GetPointData } + data_types: dict[ str, any ] = { + "CellData": mesh.GetCellData, + "FieldData": mesh.GetFieldData, + "PointData": mesh.GetPointData + } all_array_names: dict[ str, dict[ str, int ] ] = { data_type: dict() for data_type in data_types } for typ, data in data_types.items(): for i in range( data().GetNumberOfArrays() ): diff --git a/geos-mesh/src/geos/mesh/doctor/parsing/field_operations_parsing.py b/geos-mesh/src/geos/mesh/doctor/parsing/field_operations_parsing.py index dfc408e..3bbc290 100644 --- a/geos-mesh/src/geos/mesh/doctor/parsing/field_operations_parsing.py +++ b/geos-mesh/src/geos/mesh/doctor/parsing/field_operations_parsing.py @@ -29,23 +29,28 @@ def fill_subparser( subparsers ) -> None: type=str, required=True, default=__OPERATIONS_DEFAULT, - help="[list of string comma separated]: The syntax here is function0:new_name0, function1:new_name1, ... " + - "Allows to perform a wide arrays of operations to add new data to your output mesh using the source file data. " + - "Examples are the following: 1. Copy of the field 'poro' from the input to the ouput with 'poro:poro'. " + - "2. Copy of the field 'PERM' from the input to the ouput with a multiplication of the values by 10 with 'PERM*10:PERM'. " + - "3. Copy of the field 'TEMP' from the input to the ouput with an addition to the values by 0.5 and change the name of the field to 'temperature' with 'TEMP+0.5:TEMPERATURE'. " + - "4. Create a new field 'NEW_PARAM' using the input 'PERM' field and having the square root of it with 'sqrt(PERM):NEW_PARAM'. " + - "Another method is to use precoded functions available which are: " + - "1. 'distances_mesh_center' will create a field where the distances from the mesh centerare calculated for all the elements chosen as support. To use: 'distances_mesh_center:NEW_FIELD_NAME'. " + - "2. 
'random' will create a field with samples from a uniform distribution over (0, 1). To use: 'random:NEW_FIELD_NAME'." ) + help="[list of string comma separated]: The syntax here is function0:new_name0, " + + "function1:new_name1, ... Allows to perform a wide arrays of operations to add new data to your " + + "output mesh using the source file data. Examples are the following: 1. Copy of the field " + + " 'poro' from the input to the ouput with 'poro:poro'. 2. Copy of the field 'PERM' from the " + + "input to the ouput with a multiplication of the values by 10 with 'PERM*10:PERM'. " + + "3. Copy of the field 'TEMP' from the input to the ouput with an addition to the values by 0.5 " + + "and change the name of the field to 'temperature' with 'TEMP+0.5:TEMPERATURE'. 4. Create a new " + + "field 'NEW_PARAM' using the input 'PERM' field and having the square root of it with " + + "'sqrt(PERM):NEW_PARAM'. Another method is to use precoded functions available which are: " + + "1. 'distances_mesh_center' will create a field where the distances from the mesh center are " + + "calculated for all the elements chosen as support. To use: " + + "'distances_mesh_center:NEW_FIELD_NAME'. 2. 'random' will create a field with samples from " + + "a uniform distribution over (0, 1). To use: 'random:NEW_FIELD_NAME'." ) p.add_argument( '--' + __WHICH_VTM, type=str, required=False, default=__WHICH_VTM_SUGGESTIONS[ 1 ], - help="[string]: If your input is a .pvd, choose which .vtm (each .vtm corresponding to a unique " - "timestep) will be used for the operation. To do so, you can choose amongst these possibilities: " - "'first' will select the initial timestep; 'last' will select the final timestep; or you can enter " - "directly the index starting from 0 of the timestep (not the time). By default, the value is set to 'last'." ) + help="[string]: If your input is a .pvd, choose which .vtm (each .vtm corresponding to a unique " + + "timestep) will be used for the operation. 
To do so, you can choose amongst these possibilities: " + + "'first' will select the initial timestep; 'last' will select the final timestep; or you can " + + "enter directly the index starting from 0 of the timestep (not the time). By default, the value" + + " is set to 'last'." ) vtk_output_parsing.fill_vtk_output_subparser( p ) diff --git a/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py b/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py index f65b09e..45b7937 100644 --- a/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py +++ b/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py @@ -24,20 +24,20 @@ def fill_subparser( subparsers ) -> None: p.add_argument( '--' + __WRITE_STATS, type=int, required=True, - metavar=[0, 1], + metavar=[ 0, 1 ], default=__WRITE_STATS_DEFAULT, help=( f"\t[int]: The stats of the mesh will be printed in a file" + " to the folder specified in --output." ) ) p.add_argument( '--' + __DISCONNECTED, type=int, required=False, - metavar=[0, 1], + metavar=[ 0, 1 ], default=__DISCONNECTED_DEFAULT, help=f"\t[int]: Display all disconnected nodes ids and disconnected cell ids." ) p.add_argument( '--' + __FIELD_VALUES, type=int, required=False, - metavar=[0, 1], + metavar=[ 0, 1 ], default=__FIELD_VALUES_DEFAULT, help=f"\t[int]: Display all range of field values that seem not realistic." ) p.add_argument( '--' + __OUTPUT, @@ -45,6 +45,7 @@ def fill_subparser( subparsers ) -> None: required=False, help=f"[string]: The output folder destination where the stats will be written." 
) + def convert( parsed_options ) -> Options: write_stats = parsed_options[ __WRITE_STATS ] output_folder = parsed_options[ __OUTPUT ] @@ -60,13 +61,13 @@ def convert( parsed_options ) -> Options: def display_results( options: Options, result: Result ): log_stream = StringIO() - stream_handler = logging.StreamHandler(log_stream) - stream_handler.setLevel(logging.INFO) + stream_handler = logging.StreamHandler( log_stream ) + stream_handler.setLevel( logging.INFO ) # Get the root logger and add the StreamHandler to it to possibly output the log to an external file logger = logging.getLogger() - logger.addHandler(stream_handler) - logger.setLevel(logging.INFO) + logger.addHandler( stream_handler ) + logger.setLevel( logging.INFO ) logging.info( f"The mesh has {result.number_cells} cells and {result.number_points} points." ) logging.info( f"There are {result.number_cell_types} different types of cells in the mesh:" ) @@ -78,20 +79,20 @@ def display_results( options: Options, result: Result ): logging.info( "\tNeighbors\tNumber of cells concerned" ) for number_neighbors, count in zip( unique_numbers_neighbors, counts ): logging.info( f"\t{number_neighbors}\t\t{count}" ) - + logging.info( "Number of nodes being shared by exactly N cells:" ) logging.info( "\tCells\t\tNumber of nodes" ) for number_cells_per_node, number_of_occurences in result.sum_number_cells_per_nodes.items(): logging.info( f"\t{number_cells_per_node}\t\t{number_of_occurences}" ) - if 0 in unique_numbers_neighbors: # unique_numbers_neighbors sorted in ascending order from minimum positive number + if 0 in unique_numbers_neighbors: # unique_numbers_neighbors sorted in ascending order from minimum positive number number_cells_disconnected: int = unique_numbers_neighbors[ 0 ] else: number_cells_disconnected = 0 logging.info( f"Number of disconnected cells in the mesh: {number_cells_disconnected}" ) if number_cells_disconnected > 0: logging.info( "\tIndexes of disconnected cells" ) - indexes = 
where(result.cells_neighbors_number == 0) + indexes = where( result.cells_neighbors_number == 0 ) logging.info( f"{indexes[ 0 ]}" ) logging.info( f"Number of disconnected nodes in the mesh: {len( result.disconnected_nodes )}" ) @@ -124,14 +125,14 @@ def display_results( options: Options, result: Result ): logging.info( f"There are {len( data.scalar_names )} scalar fields from the {data_type}:" ) for i in range( len( data.scalar_names ) ): logging.info( f"\t{data.scalar_names[i]}" + harmonious_spacing( data.scalar_names, i, space_size ) + - f"min = {data.scalar_min_values[ i ]}" + " " * space_size + - f"max = {data.scalar_max_values[ i ]}" ) + f"min = {data.scalar_min_values[ i ]}" + " " * space_size + + f"max = {data.scalar_max_values[ i ]}" ) logging.info( f"There are {len( data.tensor_names )} vector/tensor fields from the {data_type}:" ) for i in range( len( data.tensor_names ) ): logging.info( f"\t{data.tensor_names[ i ]}" + harmonious_spacing( data.tensor_names, i, space_size ) + - f"min = {data.tensor_min_values[ i ]}" + " " * space_size + - f"max = {data.tensor_max_values[ i ]}" ) + f"min = {data.tensor_min_values[ i ]}" + " " * space_size + + f"max = {data.tensor_max_values[ i ]}" ) fields_validity_types: dict[ str, any ] = { "CellData": result.fields_validity_cell_data, diff --git a/geos-mesh/tests/test_mesh_stats.py b/geos-mesh/tests/test_mesh_stats.py index f5bf0af..6f1406c 100644 --- a/geos-mesh/tests/test_mesh_stats.py +++ b/geos-mesh/tests/test_mesh_stats.py @@ -9,8 +9,6 @@ from geos.mesh.doctor.checks.vtk_utils import VtkOutput, write_mesh from vtkmodules.vtkCommonDataModel import vtkUnstructuredGrid, vtkHexahedron from vtkmodules.util.numpy_support import numpy_to_vtk - - """ For creation of output test meshes """ @@ -103,7 +101,7 @@ cell_data.AddArray( vtk_array_poro ) point_data.AddArray( vtk_array_temp ) -# cube4 is a cube with an extra hex cell disconnected added +# cube4 is a cube with an extra hex cell disconnected added options_cube4: Options 
= Options( vtk_output=out, generate_cells_global_ids=False, generate_points_global_ids=False, @@ -117,14 +115,13 @@ cube4: vtkUnstructuredGrid = __build( options_cube4 ) number_cells_cube4: int = cube4.GetNumberOfCells() hex = vtkHexahedron() -coords_new_hex = ( (3.0, 0.0, 0.0), (4.0, 0.0, 0.0), (4.0, 1.0, 0.0), (3.0, 1.0, 0.0), - (3.0, 0.0, 1.0), (4.0, 0.0, 1.0), (4.0, 1.0, 1.0), (3.0, 1.0, 1.0) ) +coords_new_hex = ( ( 3.0, 0.0, 0.0 ), ( 4.0, 0.0, 0.0 ), ( 4.0, 1.0, 0.0 ), ( 3.0, 1.0, 0.0 ), ( 3.0, 0.0, 1.0 ), + ( 4.0, 0.0, 1.0 ), ( 4.0, 1.0, 1.0 ), ( 3.0, 1.0, 1.0 ) ) for i in range( len( coords_new_hex ) ): hex.GetPoints().InsertNextPoint( coords_new_hex[ i ] ) hex.GetPointIds().SetId( i, number_cells_cube4 + i ) cube4.InsertNextCell( hex.GetCellType(), hex.GetPointIds() ) - # Last mesh: test mesh for output and check of execution of mesh_stats f_poro: FieldInfo = FieldInfo( "POROSITY", 1, "CELLS" ) f_perm: FieldInfo = FieldInfo( "PERMEABILITY", 3, "CELLS" ) @@ -148,14 +145,14 @@ a_poro: np.array = np.linspace( 0, 1, number_cells ) a_perm: np.array = np.empty( ( number_cells, f_perm.dimension ) ) for i in range( f_perm.dimension ): - a_perm[:, i] = np.linspace( 1e-14 * 10**i, 1e-12 * 10**i, number_cells ) + a_perm[ :, i ] = np.linspace( 1e-14 * 10**i, 1e-12 * 10**i, number_cells ) a_density: np.array = np.linspace( 500, 40000, number_cells ) a_pressure: np.array = np.linspace( 1e5, 1e7, number_cells ) a_temp: np.array = np.linspace( 1e2, 5e3, number_points ) a_temp = a_temp.reshape( number_points, 1 ) a_displacement: np.array = np.empty( ( number_points, f_displacement.dimension ) ) for i in range( f_displacement.dimension ): - a_displacement[:, i] = np.linspace( 1e-4 * 10**i, 1e-2 * 10**i, number_points ) + a_displacement[ :, i ] = np.linspace( 1e-4 * 10**i, 1e-2 * 10**i, number_points ) for array in [ a_density, a_pressure, a_poro ]: array = array.reshape( number_cells, 1 ) @@ -269,13 +266,12 @@ def test_get_cells_neighbors_number( self ): expected2[ 8 ] = 0 
assert np.array_equal( result2, expected2 ) - def test_mesh_stats_execution( self ): write_mesh( cube_output, test_mesh_for_stats ) invalidTest = False command = [ - "python", MESH_DOCTOR_FILEPATH, "-v", "-i", test_mesh_for_stats.output, "mesh_stats", "--write_stats", - "0", "--output", dir_name, "--disconnected", "0", "--field_values", "0" + "python", MESH_DOCTOR_FILEPATH, "-v", "-i", test_mesh_for_stats.output, "mesh_stats", "--write_stats", "0", + "--output", dir_name, "--disconnected", "0", "--field_values", "0" ] try: result = subprocess.run( command, shell=True, stderr=subprocess.PIPE, universal_newlines=True ) @@ -341,4 +337,4 @@ def test_mesh_stats_execution( self ): invalidTest = True if invalidTest: - raise ValueError( "test_mesh_stats_execution has failed." ) \ No newline at end of file + raise ValueError( "test_mesh_stats_execution has failed." ) diff --git a/geos-mesh/tests/test_vtk_utils.py b/geos-mesh/tests/test_vtk_utils.py index 2507c87..1f5eaea 100644 --- a/geos-mesh/tests/test_vtk_utils.py +++ b/geos-mesh/tests/test_vtk_utils.py @@ -8,8 +8,6 @@ from vtkmodules.vtkCommonCore import vtkPoints from vtkmodules.vtkCommonDataModel import ( vtkMultiBlockDataSet, vtkUnstructuredGrid, vtkCellArray, vtkHexahedron, vtkCompositeDataSet, VTK_HEXAHEDRON ) - - """ For creation of output test meshes """ @@ -19,11 +17,11 @@ filepath_mesh_for_stats: str = os.path.join( dir_name, pattern_test + ".vtu" ) test_mesh_for_stats: vu.VtkOutput = vu.VtkOutput( filepath_mesh_for_stats, True ) geos_hierarchy: str = os.path.join( "mesh", "Level0" ) - - """ Utility functions for tests """ + + def split_list( initial_list: list[ any ], number_sub_lists: int ) -> list[ list[ any ] ]: initial_len: int = len( initial_list ) assert number_sub_lists <= initial_len @@ -52,8 +50,7 @@ def create_vtk_hexahedron( point_ids: list[ int ] ) -> vtkHexahedron: return hex -def create_type_vtk_grid( point_3D_coords: list[ list[ float ] ], - all_point_ids: list[ list[ int ] ], +def 
create_type_vtk_grid( point_3D_coords: list[ list[ float ] ], all_point_ids: list[ list[ int ] ], vtk_type: int ) -> vtkUnstructuredGrid: points: vtkPoints = create_vtk_points( point_3D_coords ) cells: vtkCellArray = vtkCellArray() @@ -117,6 +114,7 @@ def create_geos_pvd( all_grids_per_vtm: dict[ str, dict[ str, list[ vtkUnstructu """ Grids to perform tests on. """ +# yapf: disable # 4 Hexahedrons four_hex_ids: list[ list[ int ] ] = [ [ 0, 1, 4, 3, 6, 7, 10, 9 ], [ 1, 2, 5, 4, 7, 8, 11, 10 ], @@ -213,7 +211,7 @@ def create_geos_pvd( all_grids_per_vtm: dict[ str, dict[ str, list[ vtkUnstructu [ 2.0, 2.0, 2.0 ] ] # point11 two_hex4_grid: vtkUnstructuredGrid = create_type_vtk_grid( two_hex4_points_coords, two_hex_ids, VTK_HEXAHEDRON ) all_two_hex_grids: list[ vtkUnstructuredGrid ] = [ two_hex1_grid, two_hex2_grid, two_hex3_grid, two_hex4_grid ] - +# yapf: enable ## Duplicated grids but with different DataArrays per region and per timestep number_timesteps: int = 2 @@ -253,8 +251,8 @@ def test_to_vtk_id_list_and_vtk_iter( self ): data2: tuple[ int ] = ( 3, 4, 5, 6 ) result = vu.to_vtk_id_list( data1 ) result2 = vu.to_vtk_id_list( data2 ) - assert result.IsA("vtkIdList") - assert result2.IsA("vtkIdList") + assert result.IsA( "vtkIdList" ) + assert result2.IsA( "vtkIdList" ) assert result.GetNumberOfIds() == 3 assert result2.GetNumberOfIds() == 4 # vtk_iter @@ -329,7 +327,6 @@ def test_get_filepath_from_pvd_and_vtm( self ): assert path2.endswith( os.path.join( geos_hierarchy, region_name, "rank_00.vtu" ) ) else: assert path2.endswith( os.path.join( geos_hierarchy, region_name, "rank_01.vtu" ) ) - def test_has_invalid_field( self ): # initialize test meshes From f8ed3bca12025951399438760156c65b6fc8d2c8 Mon Sep 17 00:00:00 2001 From: alexbenedicto Date: Fri, 20 Dec 2024 18:32:13 -0800 Subject: [PATCH 34/34] Updates after review --- .../mesh/doctor/checks/field_operations.py | 168 +++++++++--------- .../src/geos/mesh/doctor/checks/mesh_stats.py | 58 +++--- 
.../src/geos/mesh/doctor/checks/vtk_utils.py | 56 +++++- .../parsing/field_operations_parsing.py | 23 ++- .../mesh/doctor/parsing/mesh_stats_parsing.py | 41 ++--- geos-mesh/tests/test_field_operations.py | 37 +--- geos-mesh/tests/test_vtk_utils.py | 13 ++ 7 files changed, 205 insertions(+), 191 deletions(-) diff --git a/geos-mesh/src/geos/mesh/doctor/checks/field_operations.py b/geos-mesh/src/geos/mesh/doctor/checks/field_operations.py index 7c62e8d..060cb08 100644 --- a/geos-mesh/src/geos/mesh/doctor/checks/field_operations.py +++ b/geos-mesh/src/geos/mesh/doctor/checks/field_operations.py @@ -1,22 +1,21 @@ import logging -from numexpr import evaluate from dataclasses import dataclass -from geos.mesh.doctor.checks.vtk_utils import ( VtkOutput, get_points_coords_from_vtk, get_cell_centers_array, - get_vtm_filepath_from_pvd, get_vtu_filepaths_from_vtm, - get_all_array_names, read_mesh, write_mesh ) +from numexpr import evaluate from numpy import array, empty, full, sqrt, int64, nan from numpy.random import rand from scipy.spatial import KDTree from tqdm import tqdm from vtkmodules.util.numpy_support import numpy_to_vtk, vtk_to_numpy -from vtkmodules.vtkCommonDataModel import vtkUnstructuredGrid +from vtkmodules.vtkCommonDataModel import vtkUnstructuredGrid, vtkDataSetAttributes +from geos.mesh.doctor.checks.vtk_utils import ( VtkOutput, get_points_coords_from_vtk, get_cell_centers_array, + get_vtu_filepaths, get_all_array_names, read_mesh, write_mesh ) @dataclass( frozen=True ) class Options: support: str # choice between 'cell' and 'point' to operate on fields source: str # file from where the data is collected - operations: list[ tuple[ str ] ] # [ ( function0, new_name0 ), ... ] + operations: list[ tuple[ str, str ] ] # [ ( function0, new_name0 ), ... 
] vtm_index: int # useful when source is a .pvd or .vtm file vtk_output: VtkOutput @@ -27,85 +26,96 @@ class Result: __SUPPORT_CHOICES = [ "point", "cell" ] -support_construction: dict[ str, tuple[ any ] ] = { - __SUPPORT_CHOICES[ 0 ]: get_points_coords_from_vtk, - __SUPPORT_CHOICES[ 1 ]: get_cell_centers_array -} -def get_distances_mesh_center( mesh: vtkUnstructuredGrid, support: str ) -> array: - f"""For a specific support type {__SUPPORT_CHOICES}, returns a numpy array filled with the distances between - their coordinates and the center of the mesh. +def check_valid_support( support: str ) -> None: + if support not in __SUPPORT_CHOICES: + raise ValueError( f"For support, the only choices available are '{__SUPPORT_CHOICES}', not '{support}'." ) + + +def get_support_data( mesh: vtkUnstructuredGrid, support: str ) -> vtkDataSetAttributes: + f"""Returns the support vtkPointData or vtkCellData. Args: + mesh (vtkUnstructuredGrid): A vtk grid. support (str): Choice between {__SUPPORT_CHOICES}. Returns: - array: [ distance0, distance1, ..., distanceN ] with N being the number of support elements. + any: vtkPointData or vtkCellData. """ - if support == __SUPPORT_CHOICES[ 0 ]: - coords: array = get_points_coords_from_vtk( mesh ) - elif support == __SUPPORT_CHOICES[ 1 ]: - coords = get_cell_centers_array( mesh ) - else: - raise ValueError( f"For support, the only choices available are {__SUPPORT_CHOICES}." ) + check_valid_support( support ) + support_data: dict[ str, any ] = { "point": mesh.GetPointData, "cell": mesh.GetCellData } + if list( support_data.keys() ).sort() != __SUPPORT_CHOICES.sort(): + raise ValueError( f"No implementation defined to access the {support} data." 
) + return support_data[ support ]() - center = ( coords.max( axis=0 ) + coords.min( axis=0 ) ) / 2 - distances = empty( coords.shape[ 0 ] ) - for i in range( coords.shape[ 0 ] ): - distance_squared: float = 0.0 - coord = coords[ i ] - for j in range( len( coord ) ): - distance_squared += ( coord[ j ] - center[ j ] ) * ( coord[ j ] - center[ j ] ) - distances[ i ] = distance_squared - distances = sqrt( distances ) - return distances +def get_support_elements( mesh: vtkUnstructuredGrid, support: str ) -> array: + f"""Returns the support elements which are either points coordinates or cell centers coordinates. -def get_random_field( mesh: vtkUnstructuredGrid, support: str ) -> array: - f"""For a specific support type {__SUPPORT_CHOICES}, an array with samples from a uniform distribution over [0, 1). + Args: + mesh (vtkUnstructuredGrid): A vtk grid. + support (str): Choice between {__SUPPORT_CHOICES}. + + Returns: + int: Number of points or cells. + """ + check_valid_support( support ) + support_elements: dict[ str, any ] = { "point": get_points_coords_from_vtk, "cell": get_cell_centers_array } + if list( support_elements.keys() ).sort() != __SUPPORT_CHOICES.sort(): + raise ValueError( f"No implementation defined to access the {support} data." ) + return support_elements[ support ]( mesh ) + + +def get_number_elements( mesh: vtkUnstructuredGrid, support: str ) -> int: + f"""Returns the number of points or cells depending on the support. Args: + mesh (vtkUnstructuredGrid): A vtk grid. support (str): Choice between {__SUPPORT_CHOICES}. Returns: - array: Array of size N being the number of support elements. + int: Number of points or cells. """ - if support == __SUPPORT_CHOICES[ 0 ]: - number_elements: int = mesh.GetNumberOfPoints() - elif support == __SUPPORT_CHOICES[ 1 ]: - number_elements = mesh.GetNumberOfCells() - else: - raise ValueError( f"For support, the only choices available are {__SUPPORT_CHOICES}." 
) - return rand( number_elements, 1 ) + check_valid_support( support ) + number_funct: dict[ str, any ] = { 'point': mesh.GetNumberOfPoints, 'cell': mesh.GetNumberOfCells } + if list( number_funct.keys() ).sort() != __SUPPORT_CHOICES.sort(): + raise ValueError( f"No implementation defined to return the number of elements for {support} data." ) + return number_funct[ support ]() -create_precoded_fields: dict[ str, any ] = { - "distances_mesh_center": get_distances_mesh_center, - "random": get_random_field -} +def build_distances_mesh_center( mesh: vtkUnstructuredGrid, support: str ) -> array: + f"""For a specific support type {__SUPPORT_CHOICES}, returns an array filled with the distances between + their coordinates and the center of the mesh. + + Args: + support (str): Choice between {__SUPPORT_CHOICES}. + + Returns: + array: [ distance0, distance1, ..., distanceN ] with N being the number of support elements. + """ + coords: array = get_support_elements( mesh, support ) + center = ( coords.max( axis=0 ) + coords.min( axis=0 ) ) / 2 + distances = sqrt( ( ( coords - center )**2 ).sum( axis=1 ) ) + return distances -def get_vtu_filepaths( options: Options ) -> tuple[ str ]: - """Returns the vtu filepaths to use for the rest of the workflow. +def build_random_uniform_distribution( mesh: vtkUnstructuredGrid, support: str ) -> array: + f"""For a specific support type {__SUPPORT_CHOICES}, an array with samples from a uniform distribution over [0, 1). Args: - options (Options): Options chosen by the user. + support (str): Choice between {__SUPPORT_CHOICES}. Returns: - tuple[ str ]: ( "file/path/0.vtu", ..., "file/path/N.vtu" ) + array: Array of size N being the number of support elements. 
""" - source_filepath: str = options.source - if source_filepath.endswith( ".vtu" ): - return ( source_filepath, ) - elif source_filepath.endswith( ".vtm" ): - return get_vtu_filepaths_from_vtm( source_filepath ) - elif source_filepath.endswith( ".pvd" ): - vtm_filepath: str = get_vtm_filepath_from_pvd( source_filepath, options.vtm_index ) - return get_vtu_filepaths_from_vtm( vtm_filepath ) - else: - raise ValueError( f"The source filepath '{options.source}' provided does not target a .vtu, a .vtm nor a " + - ".pvd file." ) + return rand( get_number_elements( mesh, support ), 1 ) + + +create_precoded_fields: dict[ str, any ] = { + "distances_mesh_center": build_distances_mesh_center, + "random_uniform_distribution": build_random_uniform_distribution +} def get_reorder_mapping( kd_tree_grid_ref: KDTree, sub_grid: vtkUnstructuredGrid, support: str ) -> array: @@ -121,9 +131,9 @@ def get_reorder_mapping( kd_tree_grid_ref: KDTree, sub_grid: vtkUnstructuredGrid Returns: np.array: [ cell_idK_grid, cell_idN_grid, ... ] or [ point_idK_grid, point_idN_grid, ... ] """ - support_elements: array = support_construction[ support ]( sub_grid ) + support_elements: array = get_support_elements( sub_grid, support ) # now that you have the support elements, you can map them to the reference grid - number_elements: int = support_elements.shape[ 0 ] + number_elements: int = get_number_elements( sub_grid, support ) mapping: array = empty( number_elements, dtype=int64 ) for cell_id in range( number_elements ): _, index = kd_tree_grid_ref.query( support_elements[ cell_id ] ) @@ -143,12 +153,10 @@ def get_array_names_to_collect_and_options( sub_vtu_filepath: str, Returns: list[ str ]: Array names. 
""" + check_valid_support( options.support ) ref_mesh: vtkUnstructuredGrid = read_mesh( sub_vtu_filepath ) all_array_names: dict[ str, dict[ str, int ] ] = get_all_array_names( ref_mesh ) - if options.support == __SUPPORT_CHOICES[ 0 ]: # point - support_array_names: list[ str ] = list( all_array_names[ "PointData" ].keys() ) - else: # cell - support_array_names = list( all_array_names[ "CellData" ].keys() ) + support_array_names: list[ str ] = list( all_array_names[ options.support ].keys() ) to_use_arrays: set[ str ] = set() to_use_operate: list[ tuple[ str ] ] = list() @@ -158,12 +166,8 @@ def get_array_names_to_collect_and_options( sub_vtu_filepath: str, to_use_operate.append( function_newname ) continue - is_usable: bool = False - for support_array_name in support_array_names: - if support_array_name in funct: - to_use_arrays.add( support_array_name ) - is_usable = True - if is_usable: + if any( name in funct for name in support_array_names ): + to_use_arrays.update( name for name in support_array_names if name in funct ) to_use_operate.append( function_newname ) else: logging.warning( f"Cannot perform operations with '{funct}' because some or all the fields do not " + @@ -184,15 +188,15 @@ def merge_local_in_global_array( global_array: array, local_array: array, mappin local_array (np.array): Array of size M <= N that is representing a subset of the global_array. mapping (np.array): Array of global indexes of size M. """ - size_global, size_local = global_array.shape, local_array.shape - if size_global[ 0 ] < size_local[ 0 ]: + global_shape, local_shape = global_array.shape, local_array.shape + if global_shape[ 0 ] < local_shape[ 0 ]: raise ValueError( "The global array to fill is smaller than the local array to merge." 
) - number_columns_global: int = size_global[ 1 ] if len( size_global ) == 2 else 1 - number_columns_local: int = size_local[ 1 ] if len( size_local ) == 2 else 1 + number_columns_global: int = global_shape[ 1 ] if len( global_shape ) == 2 else 1 + number_columns_local: int = local_shape[ 1 ] if len( local_shape ) == 2 else 1 if number_columns_global != number_columns_local: raise ValueError( "The arrays do not have same number of columns." ) # when converting a numpy array to vtk array, you need to make sure to have a 2D array - if len( size_local ) == 1: + if len( local_shape ) == 1: local_array = local_array.reshape( -1, 1 ) global_array[ mapping ] = local_array @@ -205,10 +209,8 @@ def implement_arrays( mesh: vtkUnstructuredGrid, global_arrays: dict[ str, array global_arrays (dict[ str, np.array ]): { "array_name0": np.array, ..., "array_nameN": np.array } options (Options): Options chosen by the user. """ - data = mesh.GetPointData() if options.support == __SUPPORT_CHOICES[ 0 ] else mesh.GetCellData() - number_elements: int = mesh.GetNumberOfPoints() if options.support == __SUPPORT_CHOICES[ 0 ] else \ - mesh.GetNumberOfCells() - + support_data: vtkDataSetAttributes = get_support_data( mesh, options.support ) + number_elements: int = get_number_elements( mesh, options.support ) arrays_to_implement: dict[ str, array ] = dict() # proceed operations for function_newname in tqdm( options.operations, desc="Performing operations" ): @@ -229,7 +231,7 @@ def implement_arrays( mesh: vtkUnstructuredGrid, global_arrays: dict[ str, array else: vtk_array = numpy_to_vtk( final_array ) vtk_array.SetName( final_name ) - data.AddArray( vtk_array ) + support_data.AddArray( vtk_array ) def __check( grid_ref: vtkUnstructuredGrid, options: Options ) -> Result: @@ -243,14 +245,14 @@ def __check( grid_ref: vtkUnstructuredGrid, options: Options ) -> Result: output_mesh.CopyStructure( grid_ref ) output_mesh.CopyAttributes( grid_ref ) # find the support elements to use and construct 
their KDTree - support_elements: array = support_construction[ new_options.support ]( output_mesh ) + support_elements: array = get_support_elements( output_mesh, options.support ) number_elements: int = support_elements.shape[ 0 ] kd_tree_ref: KDTree = KDTree( support_elements ) # perform operations to construct the global arrays to implement in the output mesh from copy global_arrays: dict[ str, array ] = dict() for vtu_id in tqdm( range( len( sub_vtu_filepaths ) ), desc="Processing VTU files" ): sub_grid: vtkUnstructuredGrid = read_mesh( sub_vtu_filepaths[ vtu_id ] ) - sub_data = sub_grid.GetPointData() if new_options.support == __SUPPORT_CHOICES[ 0 ] else sub_grid.GetCellData() + sub_data: vtkDataSetAttributes = get_support_data( sub_grid, options.support ) usable_arrays: list[ tuple[ int, str ] ] = list() for array_index in range( sub_data.GetNumberOfArrays() ): array_name: str = sub_data.GetArrayName( array_index ) diff --git a/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py b/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py index 1bf6d33..e2d5941 100644 --- a/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py +++ b/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py @@ -5,8 +5,8 @@ from enum import Enum from typing import TypeAlias from vtkmodules.util.numpy_support import vtk_to_numpy -from vtkmodules.vtkCommonDataModel import vtkUnstructuredGrid, vtkCell -from geos.mesh.doctor.checks import vtk_utils +from vtkmodules.vtkCommonDataModel import vtkUnstructuredGrid, vtkCell, vtkFieldData +from geos.mesh.doctor.checks.vtk_utils import read_mesh, vtkid_to_string """ TypeAliases for this file """ @@ -94,7 +94,7 @@ def associate_min_max_field_values() -> dict[ str, tuple[ float ] ]: Returns: dict[ str, tuple[ float ] ]: { poro: (min_value, max_value), perm: (min_value, max_value), ... 
} """ - assoc_min_max_field_values: dict[ str, tuple[ float ] ] = {} + assoc_min_max_field_values: dict[ str, tuple[ float ] ] = dict() for name in MIN_FIELD.__members__: mini = MIN_FIELD[ name ] maxi = MAX_FIELD[ name ] @@ -120,12 +120,12 @@ def get_cell_types_and_counts( mesh: vtkUnstructuredGrid ) -> tuple[ int, int, l # Get the different cell types in the mesh cell_types: list[ str ] = list() for cell_type in range( number_cell_types ): - cell_types.append( vtk_utils.vtkid_to_string( distinct_array_types.GetTuple( cell_type )[ 0 ] ) ) + cell_types.append( vtkid_to_string( distinct_array_types.GetTuple( cell_type )[ 0 ] ) ) # Counts how many of each type are present cell_type_counts: list[ int ] = [ 0 ] * number_cell_types for cell in range( number_cells ): for cell_type in range( number_cell_types ): - if vtk_utils.vtkid_to_string( mesh.GetCell( cell ).GetCellType() ) == cell_types[ cell_type ]: + if vtkid_to_string( mesh.GetCell( cell ).GetCellType() ) == cell_types[ cell_type ]: cell_type_counts[ cell_type ] += 1 break return ( number_cells, number_cell_types, cell_types, cell_type_counts ) @@ -140,7 +140,7 @@ def get_number_cells_per_nodes( mesh: vtkUnstructuredGrid ) -> dict[ int, int ]: Returns: dict[ int, int ]: { point_id0: 8, ..., point_idN: 4 } """ - number_cells_per_nodes: dict[ int, int ] = {} + number_cells_per_nodes: dict[ int, int ] = dict() for point_id in range( mesh.GetNumberOfPoints() ): number_cells_per_nodes[ point_id ] = 0 for cell_id in range( mesh.GetNumberOfCells() ): @@ -161,7 +161,7 @@ def summary_number_cells_per_nodes( number_cells_per_nodes: dict[ int, int ] ) - dict[ int, int ]: Connected to N cells as key, Number of nodes concerned as value """ unique_number_cells = set( [ value for value in number_cells_per_nodes.values() ] ) - summary: dict[ int, int ] = {} + summary: dict[ int, int ] = dict() for unique_number in unique_number_cells: summary[ unique_number ] = 0 for number_cells in number_cells_per_nodes.values(): @@ -196,10 
+196,10 @@ def check_NaN_fields( mesh: vtkUnstructuredGrid ) -> dict[ str, int ]: Returns: dict[ str, int ]: { array_mame0: 12, array_name4: 2, ... } """ - fields_number_of_NaNs: dict[ str, int ] = {} + fields_number_of_NaNs: dict[ str, int ] = dict() data_to_use = ( mesh.GetCellData, mesh.GetPointData, mesh.GetFieldData ) for getDataFuncion in data_to_use: - data = getDataFuncion() + data: vtkFieldData = getDataFuncion() for i in range( data.GetNumberOfArrays() ): array = data.GetArray( i ) array_name: str = data.GetArrayName( i ) @@ -221,7 +221,7 @@ def build_MeshComponentData( mesh: vtkUnstructuredGrid, componentType: str = "po """ if componentType not in [ "point", "cell", "field" ]: componentType = "point" - logging.error( f"Invalid component type chosen to build MeshComponentData. Defaulted to point." ) + logging.error( "Invalid component type chosen to build MeshComponentData. Defaulted to point." ) scalar_names: list[ str ] = list() scalar_min_values: list[ float ] = list() @@ -231,7 +231,7 @@ def build_MeshComponentData( mesh: vtkUnstructuredGrid, componentType: str = "po tensor_max_values: list[ list[ float ] ] = list() data_to_use = { "cell": mesh.GetCellData, "point": mesh.GetPointData, "field": mesh.GetFieldData } - data = data_to_use[ componentType ]() + data: vtkFieldData = data_to_use[ componentType ]() for i in range( data.GetNumberOfArrays() ): data_array = data.GetArray( i ) data_array_name: str = data_array.GetName() @@ -272,25 +272,24 @@ def field_values_validity( mcdata: MeshComponentData ) -> FieldValidity: Returns: FieldValidity: {poro: (True, Min_Max_poro), perm: (False, Min_Max_perm), ...} """ - field_values_validity: dict[ str, tuple[ bool, tuple[ float ] ] ] = {} + field_values_validity: dict[ str, tuple[ bool, tuple[ float ] ] ] = dict() assoc_min_max_field: dict[ str, tuple[ float ] ] = associate_min_max_field_values() # for scalar values - for i in range( len( mcdata.scalar_names ) ): + for i, scalar_name in enumerate( 
mcdata.scalar_names ): for field_param, min_max in assoc_min_max_field.items(): - if field_param in mcdata.scalar_names[ i ].lower(): - field_values_validity[ mcdata.scalar_names[ i ] ] = ( True, min_max ) - if mcdata.scalar_min_values[ i ] < min_max[ 0 ] or mcdata.scalar_max_values[ i ] > min_max[ 1 ]: - field_values_validity[ mcdata.scalar_names[ i ] ] = ( False, min_max ) + if field_param in scalar_name.lower(): + is_valid = mcdata.scalar_min_values[ i ] >= min_max[ 0 ] \ + and mcdata.scalar_max_values[ i ] <= min_max[ 1 ] + field_values_validity[ scalar_name ] = ( is_valid, min_max ) break # for tensor values - for i in range( len( mcdata.tensor_names ) ): + for i, tensor_name in enumerate( mcdata.tensor_names ): for field_param, min_max in assoc_min_max_field.items(): - if field_param in mcdata.tensor_names[ i ].lower(): - field_values_validity[ mcdata.tensor_names[ i ] ] = ( True, min_max ) + if field_param in tensor_name.lower(): for sub_value_min, sub_value_max in zip( mcdata.tensor_min_values[ i ], mcdata.tensor_max_values[ i ] ): - if sub_value_min < min_max[ 0 ] or sub_value_max > min_max[ 1 ]: - field_values_validity[ mcdata.tensor_names[ i ] ] = ( False, min_max ) - break + is_valid = sub_value_min >= min_max[ 0 ] and sub_value_max <= min_max[ 1 ] + field_values_validity[ tensor_name ] = ( is_valid, min_max ) + break break return field_values_validity @@ -331,12 +330,8 @@ def get_disconnected_nodes_coords( mesh: vtkUnstructuredGrid ) -> dict[ int, tup dict[ int, tuple[ float ] ]: {nodeId0: (x0, y0, z0), nodeId23: (x23, y23, z23), ..., nodeIdM: (xM, yM, zM)} """ disconnected_nodes_id: list[ int ] = get_disconnected_nodes_id( mesh ) - disconnected_nodes_coords: dict[ int, tuple[ float ] ] = {} points = mesh.GetPoints() - for node_id in disconnected_nodes_id: - node_coords: tuple[ float ] = points.GetPoint( node_id ) - disconnected_nodes_coords[ node_id ] = node_coords - return disconnected_nodes_coords + return { node_id: points.GetPoint( node_id ) for 
node_id in disconnected_nodes_id } def get_cell_faces_node_ids( cell: vtkCell, sort_ids: bool = False ) -> tuple[ tuple[ int ] ]: @@ -356,8 +351,7 @@ def get_cell_faces_node_ids( cell: vtkCell, sort_ids: bool = False ) -> tuple[ t node_ids: list[ int ] = list() for i in range( face.GetNumberOfPoints() ): node_ids.append( face.GetPointId( i ) ) - if sort_ids: - node_ids.sort() + node_ids.sort() if sort_ids else None cell_faces_node_ids.append( tuple( node_ids ) ) return tuple( cell_faces_node_ids ) @@ -380,7 +374,7 @@ def get_cells_neighbors_number( mesh: vtkUnstructuredGrid ) -> np.array: """ # First we need to get the node ids for all faces of every cell in the mesh. # The keys are face node ids, values are cell_id of cells that have this face node ids in common - faces_node_ids: dict[ tuple[ int ], list[ int ] ] = {} + faces_node_ids: dict[ tuple[ int ], list[ int ] ] = dict() for cell_id in range( mesh.GetNumberOfCells() ): cell_faces_node_ids: tuple[ tuple[ int ] ] = get_cell_faces_node_ids( mesh.GetCell( cell_id ), True ) for cell_face_node_ids in cell_faces_node_ids: @@ -443,6 +437,6 @@ def __check( mesh: vtkUnstructuredGrid, options: Options ) -> Result: def check( vtk_input_file: str, options: Options ) -> Result: - mesh = vtk_utils.read_mesh( vtk_input_file ) + mesh = read_mesh( vtk_input_file ) options.input_filepath = vtk_input_file return __check( mesh, options ) diff --git a/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py b/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py index 7b18aee..b679128 100644 --- a/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py +++ b/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py @@ -18,6 +18,27 @@ class VtkOutput: is_data_mode_binary: bool +vtk_type_name_mapping: dict[ int, str ] = { + 1: 'Vertex', + 3: 'Line', + 5: 'Triangle', + 8: 'Pixel', + 9: 'Quad', + 10: 'Tetra', + 11: 'Voxel', + 12: 'Hex', + 13: 'Wedge', + 14: 'Pyramid', + 15: 'Pentagonal prism', + 16: 'Hexagonal Prism', + 42: 'Polyhedron' +} + + +def 
vtkid_to_string( id: int ) -> str: + return vtk_type_name_mapping.get( id, 'Unknown type' ) + + def to_vtk_id_list( data ) -> vtkIdList: result = vtkIdList() result.Allocate( len( data ) ) @@ -47,15 +68,11 @@ def get_all_array_names( mesh: vtkUnstructuredGrid ) -> dict[ str, dict[ str, in mesh (vtkUnstructuredGrid): A vtk grid. Returns: - dict[ str, dict[ str, int ] ]: { "CellData": { array_name0: 3, array_name1: 0, ... }, - "FieldData": { ... }, - "PointData": { ... } } + dict[ str, dict[ str, int ] ]: { "cell": { array_name0: 3, array_name1: 0, ... }, - "field": { ... }, + "field": { ... }, + "point": { ... } } """ - data_types: dict[ str, any ] = { - "CellData": mesh.GetCellData, - "FieldData": mesh.GetFieldData, - "PointData": mesh.GetPointData - } + data_types: dict[ str, any ] = { "point": mesh.GetPointData, "cell": mesh.GetCellData, "field": mesh.GetFieldData } all_array_names: dict[ str, dict[ str, int ] ] = { data_type: dict() for data_type in data_types } for typ, data in data_types.items(): for i in range( data().GetNumberOfArrays() ): @@ -234,6 +251,29 @@ def get_vtu_filepaths_from_vtm( vtm_filepath: str ) -> tuple[ str ]: return tuple( vtu_filepaths ) # to lock the order of the vtus like in the vtm +def get_vtu_filepaths( vtk_filepath: str, vtm_index: int = -1 ) -> tuple[ str ]: + """When dealing with a .pvd or .vtm file, returns the filepaths that are contained inside that file. + When dealing with a .vtu, just returns the filepath of the given file. + + Args: + vtk_filepath (str): Filepath to a .pvd, .vtm or .vtu file. + vtm_index (int, optional): When dealing with a .pvd file, selects which vtm index to use. Defaults to -1. 
+ + Returns: + tuple[ str ]: ( "file/path/0.vtu", ..., "file/path/N.vtu" ) + """ + if vtk_filepath.endswith( ".vtu" ): + return ( vtk_filepath, ) + elif vtk_filepath.endswith( ".vtm" ): + return get_vtu_filepaths_from_vtm( vtk_filepath ) + elif vtk_filepath.endswith( ".pvd" ): + vtm_filepath: str = get_vtm_filepath_from_pvd( vtk_filepath, vtm_index ) + return get_vtu_filepaths_from_vtm( vtm_filepath ) + else: + raise ValueError( f"The source filepath '{vtk_filepath}' provided does not target a .vtu, a .vtm nor a " + + ".pvd file." ) + + def __write_vtk( mesh: vtkUnstructuredGrid, output: str ) -> int: logging.info( f"Writing mesh into file \"{output}\" using legacy format." ) writer = vtkUnstructuredGridWriter() diff --git a/geos-mesh/src/geos/mesh/doctor/parsing/field_operations_parsing.py b/geos-mesh/src/geos/mesh/doctor/parsing/field_operations_parsing.py index 3bbc290..676ad45 100644 --- a/geos-mesh/src/geos/mesh/doctor/parsing/field_operations_parsing.py +++ b/geos-mesh/src/geos/mesh/doctor/parsing/field_operations_parsing.py @@ -1,5 +1,5 @@ import logging -from geos.mesh.doctor.checks.field_operations import Options, Result, __SUPPORT_CHOICES +from geos.mesh.doctor.checks.field_operations import Options, Result, check_valid_support, __SUPPORT_CHOICES from geos.mesh.doctor.parsing import vtk_output_parsing, FIELD_OPERATIONS __SUPPORT = "support" @@ -40,8 +40,8 @@ def fill_subparser( subparsers ) -> None: "'sqrt(PERM):NEW_PARAM'. Another method is to use precoded functions available which are: " + "1. 'distances_mesh_center' will create a field where the distances from the mesh center are " + "calculated for all the elements chosen as support. To use: " + - "'distances_mesh_center:NEW_FIELD_NAME'. 2. 'random' will create a field with samples from " + - "a uniform distribution over (0, 1). To use: 'random:NEW_FIELD_NAME'." ) + "'distances_mesh_center:NEW_FIELD_NAME'. 2. 
'random_uniform_distribution' will create a field " + + "with samples from a uniform distribution over (0, 1). To use: 'random:NEW_FIELD_NAME'." ) p.add_argument( '--' + __WHICH_VTM, type=str, required=False, @@ -56,21 +56,18 @@ def fill_subparser( subparsers ) -> None: def convert( parsed_options ) -> Options: support: str = parsed_options[ __SUPPORT ] - if support not in __SUPPORT_CHOICES: - raise ValueError( f"For --{__SUPPORT}, the only choices available are {__SUPPORT_CHOICES}." ) + check_valid_support( support ) operations: list[ tuple[ str ] ] = list() parsed_operations: str = parsed_options[ __OPERATIONS ] if parsed_operations == __OPERATIONS_DEFAULT: raise ValueError( f"No operation was found. Cannot execute this feature." ) - else: - splitted_operations: list[ str ] = parsed_operations.split( "," ) - for operation in splitted_operations: - function_newname: tuple[ str ] = tuple( operation.split( ":" ) ) - if len( function_newname ) == 0 or len( function_newname ) > 2: - raise ValueError( f"The correct format for '--{__OPERATIONS}' is to have 'function:newname'." ) - else: - operations.append( function_newname ) + splitted_operations: list[ str ] = parsed_operations.split( "," ) + for operation in splitted_operations: + function_newname: tuple[ str ] = tuple( operation.split( ":" ) ) + if not len( function_newname ) == 2: + raise ValueError( f"The correct format for '--{__OPERATIONS}' is to have 'function:newname'." 
) + operations.append( function_newname ) which_vtm: str = parsed_options[ __WHICH_VTM ] if which_vtm in __WHICH_VTM_SUGGESTIONS: diff --git a/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py b/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py index 45b7937..402c8ae 100644 --- a/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py +++ b/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py @@ -1,11 +1,11 @@ import logging import os -from io import StringIO from datetime import datetime -from typing import Iterable +from io import StringIO from numpy import unique, where +from typing import Iterable from geos.mesh.doctor.checks.mesh_stats import Options, Result -from . import MESH_STATS +from geos.mesh.doctor.parsing import MESH_STATS __WRITE_STATS = "write_stats" __WRITE_STATS_DEFAULT = 0 @@ -47,16 +47,12 @@ def fill_subparser( subparsers ) -> None: def convert( parsed_options ) -> Options: - write_stats = parsed_options[ __WRITE_STATS ] - output_folder = parsed_options[ __OUTPUT ] - disconnected = parsed_options[ __DISCONNECTED ] - field_values = parsed_options[ __FIELD_VALUES ] # input_filepath will be defined in check function before calling __check - return Options( write_stats=write_stats, - output_folder=output_folder, + return Options( write_stats=parsed_options[ __WRITE_STATS ], + output_folder=parsed_options[ __OUTPUT ], input_filepath="", - disconnected=disconnected, - field_values=field_values ) + disconnected=parsed_options[ __DISCONNECTED ], + field_values=parsed_options[ __FIELD_VALUES ] ) def display_results( options: Options, result: Result ): @@ -71,8 +67,8 @@ def display_results( options: Options, result: Result ): logging.info( f"The mesh has {result.number_cells} cells and {result.number_points} points." 
) logging.info( f"There are {result.number_cell_types} different types of cells in the mesh:" ) - for i in range( result.number_cell_types ): - logging.info( f"\t{result.cell_types[ i ]}\t\t({result.cell_type_counts[ i ]} cells)" ) + for cell_type, type_count in zip( result.cell_types, result.cell_type_counts ): + logging.info( f"\t{cell_type}\t\t({type_count} cells)" ) logging.info( f"Number of cells that have exactly N neighbors:" ) unique_numbers_neighbors, counts = unique( result.cells_neighbors_number, return_counts=True ) @@ -85,10 +81,8 @@ def display_results( options: Options, result: Result ): for number_cells_per_node, number_of_occurences in result.sum_number_cells_per_nodes.items(): logging.info( f"\t{number_cells_per_node}\t\t{number_of_occurences}" ) - if 0 in unique_numbers_neighbors: # unique_numbers_neighbors sorted in ascending order from minimum positive number - number_cells_disconnected: int = unique_numbers_neighbors[ 0 ] - else: - number_cells_disconnected = 0 + # unique_numbers_neighbors sorted in ascending order from minimum positive number + number_cells_disconnected: int = unique_numbers_neighbors[ 0 ] if 0 in unique_numbers_neighbors else 0 logging.info( f"Number of disconnected cells in the mesh: {number_cells_disconnected}" ) if number_cells_disconnected > 0: logging.info( "\tIndexes of disconnected cells" ) @@ -141,17 +135,14 @@ def display_results( options: Options, result: Result ): } for field_vailidity_type, data in fields_validity_types.items(): logging.info( f"Unexpected range of values for vector/tensor fields from the {field_vailidity_type}:" ) - for field_name, validity_range in data.items(): - is_valid: bool = validity_range[ 0 ] - min_max: tuple[ float ] = validity_range[ 1 ] + for field_name, ( is_valid, min_max ) in data.items(): if not is_valid: logging.info( f"{field_name} expected to be between {min_max[ 0 ]} and {min_max[ 1 ]}." 
) - if options.write_stats: - if is_valid_to_write_folder( options.output_folder ): - filepath: str = build_filepath_output_file( options ) - with open( filepath, 'w' ) as file: - file.writelines( log_stream.getvalue() ) + if options.write_stats and is_valid_to_write_folder( options.output_folder ): + filepath: str = build_filepath_output_file( options ) + with open( filepath, 'w' ) as file: + file.writelines( log_stream.getvalue() ) def harmonious_spacing( iterable_objs: Iterable[ Iterable ], indexIter: int, space_size: int = 3 ) -> str: diff --git a/geos-mesh/tests/test_field_operations.py b/geos-mesh/tests/test_field_operations.py index d3397b8..6e377a7 100644 --- a/geos-mesh/tests/test_field_operations.py +++ b/geos-mesh/tests/test_field_operations.py @@ -134,8 +134,8 @@ class TestClass: def test_precoded_fields( self ): - result_points: array = fo.get_distances_mesh_center( eight_hex_grid_empty, "point" ) - result_cells: array = fo.get_distances_mesh_center( eight_hex_grid_empty, "cell" ) + result_points: array = fo.build_distances_mesh_center( eight_hex_grid_empty, "point" ) + result_cells: array = fo.build_distances_mesh_center( eight_hex_grid_empty, "cell" ) sq2, sq3, sq3h = sqrt( 2 ), sqrt( 3 ), sqrt( 3 ) / 2 expected_points: array = array( [ sq3, sq2, sq3, sq2, 1.0, sq2, sq3, sq2, sq3, sq2, 1.0, sq2, 1.0, 0.0, 1.0, sq2, 1.0, sq2, sq3, sq2, sq3, @@ -144,37 +144,14 @@ def test_precoded_fields( self ): expected_cells: array = array( [ sq3h, sq3h, sq3h, sq3h, sq3h, sq3h, sq3h, sq3h ] ) assert array_equal( result_points, expected_points ) assert array_equal( result_cells, expected_cells ) - random_points: array = fo.get_random_field( eight_hex_grid_empty, "point" ) - random_cells: array = fo.get_random_field( eight_hex_grid_empty, "cell" ) + random_points: array = fo.build_random_uniform_distribution( eight_hex_grid_empty, "point" ) + random_cells: array = fo.build_random_uniform_distribution( eight_hex_grid_empty, "cell" ) assert 
eight_hex_grid_empty.GetNumberOfPoints() == random_points.shape[ 0 ] assert eight_hex_grid_empty.GetNumberOfCells() == random_cells.shape[ 0 ] - def test_get_vtu_filepaths( self ): - pvd_filepath: str = tvu.create_geos_pvd( tvu.stored_grids, tvu.pvd_directory ) - options_pvd0: fo.Options = fo.Options( support="point", - source=pvd_filepath, - operations=dict(), - vtm_index=0, - vtk_output=out_points ) - options_pvd1: fo.Options = fo.Options( support="point", - source=pvd_filepath, - operations=dict(), - vtm_index=-1, - vtk_output=out_points ) - result0: tuple[ str ] = fo.get_vtu_filepaths( options_pvd0 ) - result1: tuple[ str ] = fo.get_vtu_filepaths( options_pvd1 ) - try: - shutil.rmtree( tvu.pvd_directory ) - except OSError as e: - print( f"Error: {e}" ) - os.remove( pvd_filepath ) - for i in range( len( result0 ) ): - assert "time0" in result0[ i ] # looking through first vtm which is time0 - assert "time1" in result1[ i ] # looking through last vtm which is time1 - def test_get_reorder_mapping( self ): - support_points: array = fo.support_construction[ "point" ]( eight_hex_grid ) - support_cells: array = fo.support_construction[ "cell" ]( eight_hex_grid ) + support_points: array = fo.get_support_elements( eight_hex_grid, "point" ) + support_cells: array = fo.get_support_elements( eight_hex_grid, "cell" ) kd_tree_points: KDTree = KDTree( support_points ) kd_tree_cells: KDTree = KDTree( support_cells ) result_points1: array = fo.get_reorder_mapping( kd_tree_points, hex0_grid, "point" ) @@ -254,7 +231,7 @@ def test_implement_arrays( self ): "cell_param2": eight_hex_grid_values[ "cell_param2" ] / 0.1 - 0.5, "new0": log( eight_hex_grid_values[ "point_param0" ] ), "new1": sqrt( eight_hex_grid_values[ "point_param1" ] ), - "new2": fo.get_distances_mesh_center( empty_mesh, "point" ).reshape( ( npoints, 1 ) ), + "new2": fo.build_distances_mesh_center( empty_mesh, "point" ).reshape( ( npoints, 1 ) ), "new3": sqrt( eight_hex_grid_values[ "cell_param0" ] ), "new4": log10( 
eight_hex_grid_values[ "cell_param1" ] ), "new5": eight_hex_grid_values[ "cell_param0" ] + eight_hex_grid_values[ "cell_param1" ] diff --git a/geos-mesh/tests/test_vtk_utils.py b/geos-mesh/tests/test_vtk_utils.py index 1f5eaea..23756c4 100644 --- a/geos-mesh/tests/test_vtk_utils.py +++ b/geos-mesh/tests/test_vtk_utils.py @@ -328,6 +328,19 @@ def test_get_filepath_from_pvd_and_vtm( self ): else: assert path2.endswith( os.path.join( geos_hierarchy, region_name, "rank_01.vtu" ) ) + def test_get_vtu_filepaths( self ): + pvd_filepath: str = create_geos_pvd( stored_grids, pvd_directory ) + result0: tuple[ str ] = vu.get_vtu_filepaths( pvd_filepath, 0 ) + result1: tuple[ str ] = vu.get_vtu_filepaths( pvd_filepath, 1 ) + try: + shutil.rmtree( pvd_directory ) + except OSError as e: + print( f"Error: {e}" ) + os.remove( pvd_filepath ) + for i in range( len( result0 ) ): + assert "time0" in result0[ i ] # looking through first vtm which is time0 + assert "time1" in result1[ i ] # looking through last vtm which is time1 + def test_has_invalid_field( self ): # initialize test meshes test_mesh_points: vtkUnstructuredGrid = four_hex_grid.NewInstance()