diff --git a/docs/geos-mesh.rst b/docs/geos-mesh.rst index 82b8551..adefeeb 100644 --- a/docs/geos-mesh.rst +++ b/docs/geos-mesh.rst @@ -113,6 +113,47 @@ Cells with negative volumes will typically be an issue for ``geos`` and should b -h, --help show this help message and exit --min 0.0 [float]: The minimum acceptable volume. Defaults to 0.0. +``field_operations`` +"""""""""""""""""""""""""" + +Using a source file containing PointData or CellData, allows to perform operations on that data from the source file to an output .vtu file. +The source file can be a .vtu, .vtm or .pvd file as long as the geometry of the multiblock corresponds to the geometry of the output .vtu file. +An example of source file can be the vtkOutput.pvd from a GEOS simulation and the output file can be your VTK mesh used in this simulation. +The term 'operations' covers two distinct categories: +'COPY' operations which copies data arrays from the source file to the output file, with the possibility to rename the arrays copied and to apply multiplication or addition to these arrays. +'CREATE' operations which uses the source file data to create new arrays by performing addition between several arrays, applying log or sqrt functions ... allowed by the numexpr library. + +.. code-block:: + + $ python src/geos/mesh/doctor/mesh_doctor.py field_operations --help + usage: mesh_doctor.py field_operations [-h] [--support point, cell] [--source SOURCE] [--operations OPERATIONS] + [--which_vtm WHICH_VTM] --output OUTPUT [--data-mode binary, ascii] + + options: + -h, --help show this help message and exit + --support point, cell + [string]: Where to define field. + --source SOURCE [string]: Where field data to use for operation comes from .vtu, .vtm or .pvd file. + --operations OPERATIONS + [list of string comma separated]: The syntax here is function0:new_name0, function1:new_name1, ... + Allows to perform a wide arrays of operations to add new data to your output mesh using the source file data. 
+ Examples are the following: + 1. Copy of the field 'poro' from the input to the output with 'poro:poro'. + 2. Copy of the field 'PERM' from the input to the output with a multiplication of the values by 10 with 'PERM*10:PERM'. + 3. Copy of the field 'TEMP' from the input to the output with an addition to the values by 0.5 and change the name of the field to 'temperature' with 'TEMP+0.5:TEMPERATURE'. + 4. Create a new field 'NEW_PARAM' using the input 'PERM' field and having the square root of it with 'sqrt(PERM):NEW_PARAM'. + Another method is to use precoded functions available which are: + 1. 'distances_mesh_center' will create a field where the distances from the mesh center are calculated for all the elements chosen as support. To use: 'distances_mesh_center:NEW_FIELD_NAME'. + 2. 'random' will create a field with samples from a uniform distribution over (0, 1). To use: 'random:NEW_FIELD_NAME'. + --which_vtm WHICH_VTM + [string]: If your input is a .pvd, choose which .vtm (each .vtm corresponding to a unique timestep) will be used for the operation. + To do so, you can choose amongst these possibilities: 'first' will select the initial timestep; + 'last' will select the final timestep; or you can enter directly the index starting from 0 of the timestep (not the time). + By default, the value is set to 'last'. + --output OUTPUT [string]: The vtk output file destination. + --data-mode binary, ascii + [string]: For ".vtu" output format, the data mode can be binary or ascii. Defaults to binary. + ``fix_elements_orderings`` """""""""""""""""""""""""" @@ -229,6 +270,27 @@ The ``generate_global_ids`` can generate `global ids` for the imported ``vtk`` m --data-mode binary, ascii [string]: For ".vtu" output format, the data mode can be binary or ascii. Defaults to binary. +``mesh_stats`` +""""""""""""""""" + +Performs a summary over certain geometrical, topological and data painting mesh properties. 
+The future goal for this feature would be to provide a deeper mesh analysis and to evaluate the 'quality' of this mesh before using it in GEOS. + +.. code-block:: + + $ python src/geos/mesh/doctor/mesh_doctor.py mesh_stats --help + usage: mesh_doctor.py mesh_stats [-h] --write_stats [0, 1] [--disconnected [0, 1]] + [--field_values [0, 1]] [--output OUTPUT] + + options: + -h, --help show this help message and exit + --write_stats [0, 1] [int]: The stats of the mesh will be printed in a file to the folder specified in --output. + --disconnected [0, 1] + [int]: Display all disconnected nodes ids and disconnected cell ids. + --field_values [0, 1] + [int]: Display all range of field values that seem not realistic. + --output OUTPUT [string]: The output folder destination where the stats will be written. + ``non_conformal`` """"""""""""""""" @@ -339,8 +401,4 @@ API ^^^ .. automodule:: geos.mesh.conversion.abaqus_converter - :members: - - - - + :members: \ No newline at end of file diff --git a/docs/requirements.txt b/docs/requirements.txt index 6ecdf80..af812eb 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -9,3 +9,4 @@ vtk >= 9.1 networkx >= 2.4 tqdm numpy +numexpr diff --git a/geos-mesh/pyproject.toml b/geos-mesh/pyproject.toml index a15ceba..192ea25 100644 --- a/geos-mesh/pyproject.toml +++ b/geos-mesh/pyproject.toml @@ -23,6 +23,7 @@ dependencies = [ "networkx >= 2.4", "tqdm", "numpy", + "numexpr", "meshio>=5.3.2", ] diff --git a/geos-mesh/src/geos/mesh/doctor/checks/field_operations.py b/geos-mesh/src/geos/mesh/doctor/checks/field_operations.py new file mode 100644 index 0000000..060cb08 --- /dev/null +++ b/geos-mesh/src/geos/mesh/doctor/checks/field_operations.py @@ -0,0 +1,278 @@ +import logging +from dataclasses import dataclass +from numexpr import evaluate +from numpy import array, empty, full, sqrt, int64, nan +from numpy.random import rand +from scipy.spatial import KDTree +from tqdm import tqdm +from vtkmodules.util.numpy_support import 
numpy_to_vtk, vtk_to_numpy +from vtkmodules.vtkCommonDataModel import vtkUnstructuredGrid, vtkDataSetAttributes +from geos.mesh.doctor.checks.vtk_utils import ( VtkOutput, get_points_coords_from_vtk, get_cell_centers_array, + get_vtu_filepaths, get_all_array_names, read_mesh, write_mesh ) + + +@dataclass( frozen=True ) +class Options: + support: str # choice between 'cell' and 'point' to operate on fields + source: str # file from where the data is collected + operations: list[ tuple[ str, str ] ] # [ ( function0, new_name0 ), ... ] + vtm_index: int # useful when source is a .pvd or .vtm file + vtk_output: VtkOutput + + +@dataclass( frozen=True ) +class Result: + info: bool + + +__SUPPORT_CHOICES = [ "point", "cell" ] + + +def check_valid_support( support: str ) -> None: + if support not in __SUPPORT_CHOICES: + raise ValueError( f"For support, the only choices available are '{__SUPPORT_CHOICES}', not '{support}'." ) + + +def get_support_data( mesh: vtkUnstructuredGrid, support: str ) -> vtkDataSetAttributes: + f"""Returns the support vtkPointData or vtkCellData. + + Args: + mesh (vtkUnstructuredGrid): A vtk grid. + support (str): Choice between {__SUPPORT_CHOICES}. + + Returns: + any: vtkPointData or vtkCellData. + """ + check_valid_support( support ) + support_data: dict[ str, any ] = { "point": mesh.GetPointData, "cell": mesh.GetCellData } + if list( support_data.keys() ).sort() != __SUPPORT_CHOICES.sort(): + raise ValueError( f"No implementation defined to access the {support} data." ) + return support_data[ support ]() + + +def get_support_elements( mesh: vtkUnstructuredGrid, support: str ) -> array: + f"""Returns the support elements which are either points coordinates or cell centers coordinates. + + Args: + mesh (vtkUnstructuredGrid): A vtk grid. + support (str): Choice between {__SUPPORT_CHOICES}. + + Returns: + int: Number of points or cells. 
+ """ + check_valid_support( support ) + support_elements: dict[ str, any ] = { "point": get_points_coords_from_vtk, "cell": get_cell_centers_array } + if list( support_elements.keys() ).sort() != __SUPPORT_CHOICES.sort(): + raise ValueError( f"No implementation defined to access the {support} data." ) + return support_elements[ support ]( mesh ) + + +def get_number_elements( mesh: vtkUnstructuredGrid, support: str ) -> int: + f"""Returns the number of points or cells depending on the support. + + Args: + mesh (vtkUnstructuredGrid): A vtk grid. + support (str): Choice between {__SUPPORT_CHOICES}. + + Returns: + int: Number of points or cells. + """ + check_valid_support( support ) + number_funct: dict[ str, any ] = { 'point': mesh.GetNumberOfPoints, 'cell': mesh.GetNumberOfCells } + if list( number_funct.keys() ).sort() != __SUPPORT_CHOICES.sort(): + raise ValueError( f"No implementation defined to return the number of elements for {support} data." ) + return number_funct[ support ]() + + +def build_distances_mesh_center( mesh: vtkUnstructuredGrid, support: str ) -> array: + f"""For a specific support type {__SUPPORT_CHOICES}, returns an array filled with the distances between + their coordinates and the center of the mesh. + + Args: + support (str): Choice between {__SUPPORT_CHOICES}. + + Returns: + array: [ distance0, distance1, ..., distanceN ] with N being the number of support elements. + """ + coords: array = get_support_elements( mesh, support ) + center = ( coords.max( axis=0 ) + coords.min( axis=0 ) ) / 2 + distances = sqrt( ( ( coords - center )**2 ).sum( axis=1 ) ) + return distances + + +def build_random_uniform_distribution( mesh: vtkUnstructuredGrid, support: str ) -> array: + f"""For a specific support type {__SUPPORT_CHOICES}, an array with samples from a uniform distribution over [0, 1). + + Args: + support (str): Choice between {__SUPPORT_CHOICES}. + + Returns: + array: Array of size N being the number of support elements. 
+ """ + return rand( get_number_elements( mesh, support ), 1 ) + + +create_precoded_fields: dict[ str, any ] = { + "distances_mesh_center": build_distances_mesh_center, + "random_uniform_distribution": build_random_uniform_distribution +} + + +def get_reorder_mapping( kd_tree_grid_ref: KDTree, sub_grid: vtkUnstructuredGrid, support: str ) -> array: + """Builds an array containing the indexes of the reference grid linked to every + cell ids / point ids of the subset grid. + + Args: + kd_tree_grid_ref (KDTree): A KDTree of the nearest neighbor cell centers for every cells / + points coordinates for point of the reference grid. + sub_grid (vtkUnstructuredGrid): A vtk grid that is a subset of the reference grid. + support (str): Either "point" or "cell". + + Returns: + np.array: [ cell_idK_grid, cell_idN_grid, ... ] or [ point_idK_grid, point_idN_grid, ... ] + """ + support_elements: array = get_support_elements( sub_grid, support ) + # now that you have the support elements, you can map them to the reference grid + number_elements: int = get_number_elements( sub_grid, support ) + mapping: array = empty( number_elements, dtype=int64 ) + for cell_id in range( number_elements ): + _, index = kd_tree_grid_ref.query( support_elements[ cell_id ] ) + mapping[ cell_id ] = index + return mapping + + +def get_array_names_to_collect_and_options( sub_vtu_filepath: str, + options: Options ) -> tuple[ list[ tuple[ str ] ], Options ]: + """We need to have the list of array names that are required to perform copy and creation of new arrays. To build + global_arrays to perform operations, we need only these names and not all array names present in the sub meshes. + + Args: + sub_vtu_filepath (str): Path to sub vtu file that can be used to find the names of the arrays within its data. + options (Options): Options chosen by the user. + + Returns: + list[ str ]: Array names. 
+ """ + check_valid_support( options.support ) + ref_mesh: vtkUnstructuredGrid = read_mesh( sub_vtu_filepath ) + all_array_names: dict[ str, dict[ str, int ] ] = get_all_array_names( ref_mesh ) + support_array_names: list[ str ] = list( all_array_names[ options.support ].keys() ) + + to_use_arrays: set[ str ] = set() + to_use_operate: list[ tuple[ str ] ] = list() + for function_newname in options.operations: + funct: str = function_newname[ 0 ] + if funct in create_precoded_fields: + to_use_operate.append( function_newname ) + continue + + if any( name in funct for name in support_array_names ): + to_use_arrays.update( name for name in support_array_names if name in funct ) + to_use_operate.append( function_newname ) + else: + logging.warning( f"Cannot perform operations with '{funct}' because some or all the fields do not " + + f"exist in '{sub_vtu_filepath}'." ) + + updated_options: Options = Options( options.support, options.source, to_use_operate, options.vtm_index, + options.vtk_output ) + return ( list( to_use_arrays ), updated_options ) + + +def merge_local_in_global_array( global_array: array, local_array: array, mapping: array ) -> None: + """Fill the values of a global_array using the values contained in a local_array that is smaller or equal to the + size as the global_array. A mapping is used to copy the values from the local_array to the right indexes in + the global_array. + + Args: + global_array (np.array): Array of size N. + local_array (np.array): Array of size M <= N that is representing a subset of the global_array. + mapping (np.array): Array of global indexes of size M. + """ + global_shape, local_shape = global_array.shape, local_array.shape + if global_shape[ 0 ] < local_shape[ 0 ]: + raise ValueError( "The global array to fill is smaller than the local array to merge." 
) + number_columns_global: int = global_shape[ 1 ] if len( global_shape ) == 2 else 1 + number_columns_local: int = local_shape[ 1 ] if len( local_shape ) == 2 else 1 + if number_columns_global != number_columns_local: + raise ValueError( "The arrays do not have same number of columns." ) + # when converting a numpy array to vtk array, you need to make sure to have a 2D array + if len( local_shape ) == 1: + local_array = local_array.reshape( -1, 1 ) + global_array[ mapping ] = local_array + + +def implement_arrays( mesh: vtkUnstructuredGrid, global_arrays: dict[ str, array ], options: Options ) -> None: + """Implement the arrays that are contained in global_arrays into the Data of a mesh. + + Args: + mesh (vtkUnstructuredGrid): A vtk grid. + global_arrays (dict[ str, np.array ]): { "array_name0": np.array, ..., "array_nameN": np.array } + options (Options): Options chosen by the user. + """ + support_data: vtkDataSetAttributes = get_support_data( mesh, options.support ) + number_elements: int = get_number_elements( mesh, options.support ) + arrays_to_implement: dict[ str, array ] = dict() + # proceed operations + for function_newname in tqdm( options.operations, desc="Performing operations" ): + funct, new_name = function_newname + if funct in create_precoded_fields: + created_arr: array = create_precoded_fields[ funct ]( mesh, options.support ) + else: + created_arr = evaluate( funct, local_dict=global_arrays ) + arrays_to_implement[ new_name ] = created_arr + + # once the data is selected, we can implement the global arrays inside it + for final_name, final_array in arrays_to_implement.items(): + number_columns: int = final_array.shape[ 1 ] if len( final_array.shape ) == 2 else 1 + if number_columns > 1: # Reshape the VTK array to match the original dimensions + vtk_array = numpy_to_vtk( final_array.flatten() ) + vtk_array.SetNumberOfComponents( number_columns ) + vtk_array.SetNumberOfTuples( number_elements ) + else: + vtk_array = numpy_to_vtk( final_array ) + 
vtk_array.SetName( final_name ) + support_data.AddArray( vtk_array ) + + +def __check( grid_ref: vtkUnstructuredGrid, options: Options ) -> Result: + sub_vtu_filepaths: tuple[ str ] = get_vtu_filepaths( options ) + array_names_to_collect, new_options = get_array_names_to_collect_and_options( sub_vtu_filepaths[ 0 ], options ) + if len( array_names_to_collect ) == 0: + raise ValueError( "No array corresponding to the operations suggested was found in the source" + + f" {new_options.support} data. Check your support and source file." ) + # create the output grid + output_mesh: vtkUnstructuredGrid = grid_ref.NewInstance() + output_mesh.CopyStructure( grid_ref ) + output_mesh.CopyAttributes( grid_ref ) + # find the support elements to use and construct their KDTree + support_elements: array = get_support_elements( output_mesh, options.support ) + number_elements: int = support_elements.shape[ 0 ] + kd_tree_ref: KDTree = KDTree( support_elements ) + # perform operations to construct the global arrays to implement in the output mesh from copy + global_arrays: dict[ str, array ] = dict() + for vtu_id in tqdm( range( len( sub_vtu_filepaths ) ), desc="Processing VTU files" ): + sub_grid: vtkUnstructuredGrid = read_mesh( sub_vtu_filepaths[ vtu_id ] ) + sub_data: vtkDataSetAttributes = get_support_data( sub_grid, options.support ) + usable_arrays: list[ tuple[ int, str ] ] = list() + for array_index in range( sub_data.GetNumberOfArrays() ): + array_name: str = sub_data.GetArrayName( array_index ) + if array_name in array_names_to_collect: + usable_arrays.append( ( array_index, array_name ) ) + if not array_name in global_arrays: + number_components: int = sub_data.GetArray( array_index ).GetNumberOfComponents() + global_arrays[ array_name ] = full( ( number_elements, number_components ), nan ) + + if len( usable_arrays ) > 0: + reorder_mapping: array = get_reorder_mapping( kd_tree_ref, sub_grid, new_options.support ) + for index_name in usable_arrays: + sub_array: array = 
vtk_to_numpy( sub_data.GetArray( index_name[ 0 ] ) ) + merge_local_in_global_array( global_arrays[ index_name[ 1 ] ], sub_array, reorder_mapping ) + # The global arrays have been filled, so now we need to implement them in the output_mesh + implement_arrays( output_mesh, global_arrays, new_options ) + write_mesh( output_mesh, new_options.vtk_output ) + return Result( info="OK" ) + + +def check( vtk_input_file: str, options: Options ) -> Result: + mesh = read_mesh( vtk_input_file ) + return __check( mesh, options ) diff --git a/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py b/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py new file mode 100644 index 0000000..e2d5941 --- /dev/null +++ b/geos-mesh/src/geos/mesh/doctor/checks/mesh_stats.py @@ -0,0 +1,442 @@ +import logging +import numpy as np +import numpy.typing as npt +from dataclasses import dataclass +from enum import Enum +from typing import TypeAlias +from vtkmodules.util.numpy_support import vtk_to_numpy +from vtkmodules.vtkCommonDataModel import vtkUnstructuredGrid, vtkCell, vtkFieldData +from geos.mesh.doctor.checks.vtk_utils import read_mesh, vtkid_to_string +""" +TypeAliases for this file +""" +ArrayGeneric: TypeAlias = npt.NDArray[ np.generic ] +FieldValidity: TypeAlias = dict[ str, tuple[ bool, tuple[ float ] ] ] + + +@dataclass( frozen=False ) +class Options: + write_stats: int + output_folder: str + input_filepath: str + disconnected: int + field_values: int + + +@dataclass( frozen=True ) +class MeshComponentData: + componentType: str + scalar_names: list[ str ] + scalar_min_values: list[ np.generic ] # base class for all scalar types numpy + scalar_max_values: list[ np.generic ] + tensor_names: list[ str ] + tensor_min_values: list[ ArrayGeneric ] + tensor_max_values: list[ ArrayGeneric ] + + +@dataclass( frozen=True ) +class Result: + number_cells: int + number_points: int + number_cell_types: int + cell_types: list[ str ] + cell_type_counts: list[ int ] + sum_number_cells_per_nodes: dict[ 
int, int ] + disconnected_nodes: dict[ int, tuple[ float ] ] + cells_neighbors_number: np.array + min_coords: np.ndarray + max_coords: np.ndarray + is_empty_point_global_ids: bool + is_empty_cell_global_ids: bool + fields_with_NaNs: dict[ int, str ] + point_data: MeshComponentData + cell_data: MeshComponentData + field_data: MeshComponentData + fields_validity_point_data: FieldValidity + fields_validity_cell_data: FieldValidity + fields_validity_field_data: FieldValidity + + +class MIN_FIELD( float, Enum ): # SI Units + PORO = 0.0 + PERM = 0.0 + FLUIDCOMP = 0.0 + PRESSURE = 0.0 + BHP = 0.0 + TEMPERATURE = 0.0 + DENSITY = 0.0 + COMPRESSIBILITY = 0.0 + VISCOSITY = 0.0 + NTG = 0.0 + BULKMOD = 0.0 + SHEARMOD = 0.0 + + +class MAX_FIELD( float, Enum ): # SI Units + PORO = 1.0 + PERM = 1.0 + FLUIDCOMP = 1.0 + PRESSURE = 1.0e9 + BHP = 1.0e9 + TEMPERATURE = 2.0e3 + DENSITY = 2.5e4 + COMPRESSIBILITY = 1.0e-4 + VISCOSITY = 1.0e24 + NTG = 1.0 + BULKMOD = 1.0e12 + SHEARMOD = 1.0e12 + + +def associate_min_max_field_values() -> dict[ str, tuple[ float ] ]: + """Using MIN_FIELD and MAX_FIELD, associate the min and max value reachable for a + property in GEOS to a property tag such as poro, perm etc... + + Returns: + dict[ str, tuple[ float ] ]: { poro: (min_value, max_value), perm: (min_value, max_value), ... } + """ + assoc_min_max_field_values: dict[ str, tuple[ float ] ] = dict() + for name in MIN_FIELD.__members__: + mini = MIN_FIELD[ name ] + maxi = MAX_FIELD[ name ] + assoc_min_max_field_values[ name.lower() ] = ( mini.value, maxi.value ) + return assoc_min_max_field_values + + +def get_cell_types_and_counts( mesh: vtkUnstructuredGrid ) -> tuple[ int, int, list[ str ], list[ int ] ]: + """From an unstructured grid, collects the number of cells, + the number of cell types, the list of cell types and the counts + of each cell element. + + Args: + mesh (vtkUnstructuredGrid): An unstructured grid. 
+ + Returns: + tuple[int, int, list[str], list[int]]: In order, + (number_cells, number_cell_types, cell_types, cell_type_counts) + """ + number_cells: int = mesh.GetNumberOfCells() + distinct_array_types = mesh.GetDistinctCellTypesArray() + number_cell_types: int = distinct_array_types.GetNumberOfTuples() + # Get the different cell types in the mesh + cell_types: list[ str ] = list() + for cell_type in range( number_cell_types ): + cell_types.append( vtkid_to_string( distinct_array_types.GetTuple( cell_type )[ 0 ] ) ) + # Counts how many of each type are present + cell_type_counts: list[ int ] = [ 0 ] * number_cell_types + for cell in range( number_cells ): + for cell_type in range( number_cell_types ): + if vtkid_to_string( mesh.GetCell( cell ).GetCellType() ) == cell_types[ cell_type ]: + cell_type_counts[ cell_type ] += 1 + break + return ( number_cells, number_cell_types, cell_types, cell_type_counts ) + + +def get_number_cells_per_nodes( mesh: vtkUnstructuredGrid ) -> dict[ int, int ]: + """Finds for each point_id the number of cells sharing that same node. + + Args: + mesh (vtkUnstructuredGrid): An unstructured grid. + + Returns: + dict[ int, int ]: { point_id0: 8, ..., point_idN: 4 } + """ + number_cells_per_nodes: dict[ int, int ] = dict() + for point_id in range( mesh.GetNumberOfPoints() ): + number_cells_per_nodes[ point_id ] = 0 + for cell_id in range( mesh.GetNumberOfCells() ): + cell = mesh.GetCell( cell_id ) + for v in range( cell.GetNumberOfPoints() ): + point_id = cell.GetPointId( v ) + number_cells_per_nodes[ point_id ] += 1 + return number_cells_per_nodes + + +def summary_number_cells_per_nodes( number_cells_per_nodes: dict[ int, int ] ) -> dict[ int, int ]: + """Obtain the number of nodes that are a node of X number of cells. 
+ + Args: + number_cells_per_nodes (dict[ int, int ]): { point_id0: 8, ..., point_idN: 4 } + + Returns: + dict[ int, int ]: Connected to N cells as key, Number of nodes concerned as value + """ + unique_number_cells = set( [ value for value in number_cells_per_nodes.values() ] ) + summary: dict[ int, int ] = dict() + for unique_number in unique_number_cells: + summary[ unique_number ] = 0 + for number_cells in number_cells_per_nodes.values(): + summary[ number_cells ] += 1 + return summary + + +def get_coords_min_max( mesh: vtkUnstructuredGrid ) -> tuple[ np.ndarray ]: + """From an unstructured mesh, returns the coordinates of + the minimum and maximum points. + + Args: + mesh (vtkUnstructuredGrid): An unstructured grid. + + Returns: + tuple[np.ndarray]: Min and Max coordinates. + """ + coords: np.ndarray = vtk_to_numpy( mesh.GetPoints().GetData() ) + min_coords: np.ndarray = coords.min( axis=0 ) + max_coords: np.ndarray = coords.max( axis=0 ) + return ( min_coords, max_coords ) + + +def check_NaN_fields( mesh: vtkUnstructuredGrid ) -> dict[ str, int ]: + """For every array of the mesh belonging to CellData, PointData or FieldArray, + checks that no NaN value was found. + If a NaN value is found, the name of the array is output with the number of NaNs encountered. + + Args: + mesh (vtkUnstructuredGrid): An unstructured grid. + + Returns: + dict[ str, int ]: { array_name0: 12, array_name4: 2, ... 
} + """ + fields_number_of_NaNs: dict[ str, int ] = dict() + data_to_use = ( mesh.GetCellData, mesh.GetPointData, mesh.GetFieldData ) + for getDataFuncion in data_to_use: + data: vtkFieldData = getDataFuncion() + for i in range( data.GetNumberOfArrays() ): + array = data.GetArray( i ) + array_name: str = data.GetArrayName( i ) + number_nans: int = np.count_nonzero( np.isnan( vtk_to_numpy( array ) ) ) + if number_nans > 0: + fields_number_of_NaNs[ array_name ] = number_nans + return fields_number_of_NaNs + + +def build_MeshComponentData( mesh: vtkUnstructuredGrid, componentType: str = "point" ) -> MeshComponentData: + """Builds a MeshComponentData object for a specific component ("point", "cell", "field") + If the component type chosen is invalid, chooses "point" by default. + + Args: + mesh (vtkUnstructuredGrid): An unstructured grid. + + Returns: + meshCD (MeshComponentData): Object that gathers data regarding a mesh component. + """ + if componentType not in [ "point", "cell", "field" ]: + componentType = "point" + logging.error( "Invalid component type chosen to build MeshComponentData. Defaulted to point." 
) + + scalar_names: list[ str ] = list() + scalar_min_values: list[ float ] = list() + scalar_max_values: list[ float ] = list() + tensor_names: list[ str ] = list() + tensor_min_values: list[ list[ float ] ] = list() + tensor_max_values: list[ list[ float ] ] = list() + + data_to_use = { "cell": mesh.GetCellData, "point": mesh.GetPointData, "field": mesh.GetFieldData } + data: vtkFieldData = data_to_use[ componentType ]() + for i in range( data.GetNumberOfArrays() ): + data_array = data.GetArray( i ) + data_array_name: str = data_array.GetName() + number_components: int = data_array.GetNumberOfComponents() + if number_components == 1: # assumes scalar cell data for max and min + scalar_names.append( data_array_name ) + min_value, max_value = data_array.GetRange() + scalar_min_values.append( min_value ) + scalar_max_values.append( max_value ) + else: + tensor_names.append( data_array_name ) + min_values: list[ float ] = list() + max_values: list[ float ] = list() + for component_index in range( number_components ): + min_value, max_value = data_array.GetRange( component_index ) + min_values.append( min_value ) + max_values.append( max_value ) + tensor_min_values.append( min_values ) + tensor_max_values.append( max_values ) + + return MeshComponentData( componentType=componentType, + scalar_names=scalar_names, + scalar_min_values=scalar_min_values, + scalar_max_values=scalar_max_values, + tensor_names=tensor_names, + tensor_min_values=tensor_min_values, + tensor_max_values=tensor_max_values ) + + +def field_values_validity( mcdata: MeshComponentData ) -> FieldValidity: + """Check that for every min and max values found in the scalar and tensor fields, + none of these values is out of bounds. If the value is out of bound, False validity flag + is given to the field, True if no problem. + + Args: + mcdata (MeshComponentData): Object that gathers data regarding a mesh component. 
+ + Returns: + FieldValidity: {poro: (True, Min_Max_poro), perm: (False, Min_Max_perm), ...} + """ + field_values_validity: dict[ str, tuple[ bool, tuple[ float ] ] ] = dict() + assoc_min_max_field: dict[ str, tuple[ float ] ] = associate_min_max_field_values() + # for scalar values + for i, scalar_name in enumerate( mcdata.scalar_names ): + for field_param, min_max in assoc_min_max_field.items(): + if field_param in scalar_name.lower(): + is_valid = mcdata.scalar_min_values[ i ] >= min_max[ 0 ] \ + and mcdata.scalar_max_values[ i ] <= min_max[ 1 ] + field_values_validity[ scalar_name ] = ( is_valid, min_max ) + break + # for tensor values + for i, tensor_name in enumerate( mcdata.tensor_names ): + for field_param, min_max in assoc_min_max_field.items(): + if field_param in tensor_name.lower(): + for sub_value_min, sub_value_max in zip( mcdata.tensor_min_values[ i ], mcdata.tensor_max_values[ i ] ): + is_valid = sub_value_min >= min_max[ 0 ] and sub_value_max <= min_max[ 1 ] + field_values_validity[ tensor_name ] = ( is_valid, min_max ) + break + break + return field_values_validity + + +def get_disconnected_nodes_id( mesh: vtkUnstructuredGrid ) -> list[ int ]: + """Checks the nodes of the mesh to see if they are disconnected. + If a node does not appear in connectivity graph, we can assume that it is disconnected. + Returns the list of node ids that are disconnected. + + Args: + mesh (vtkUnstructuredGrid): An unstructured grid. 
+ + Returns: + list[ int ]: [nodeId0, nodeId23, ..., nodeIdM] + """ + disconnected_nodes_id: list[ int ] = list() + connectivity = mesh.GetCells().GetConnectivityArray() + connectivity_unique_points: set = set() + for i in range( connectivity.GetNumberOfValues() ): + connectivity_unique_points.add( connectivity.GetValue( i ) ) + for v in range( mesh.GetNumberOfPoints() ): + if v in connectivity_unique_points: + connectivity_unique_points.remove( v ) + else: + disconnected_nodes_id.append( v ) + return disconnected_nodes_id + + +def get_disconnected_nodes_coords( mesh: vtkUnstructuredGrid ) -> dict[ int, tuple[ float ] ]: + """Checks the nodes of the mesh to see if they are disconnected. + If a node does not appear in connectivity graph, we can assume that it is disconnected. + Returns a dict where the keys are the node id of disconnected nodes and the values are their coordinates. + + Args: + mesh (vtkUnstructuredGrid): An unstructured grid. + + Returns: + dict[ int, tuple[ float ] ]: {nodeId0: (x0, y0, z0), nodeId23: (x23, y23, z23), ..., nodeIdM: (xM, yM, zM)} + """ + disconnected_nodes_id: list[ int ] = get_disconnected_nodes_id( mesh ) + points = mesh.GetPoints() + return { node_id: points.GetPoint( node_id ) for node_id in disconnected_nodes_id } + + +def get_cell_faces_node_ids( cell: vtkCell, sort_ids: bool = False ) -> tuple[ tuple[ int ] ]: + """For any vtkCell given, returns the list of faces node ids. + + Args: + cell (vtkCell): A vtk cell object. + sort_ids (bool, optional): If you want the node ids to be sorted by increasing value, use True. + Defaults to False. 
+ + Returns: + tuple[ tuple[ int ] ]: [ [face0_nodeId0, ..., face0_nodeIdN], ..., [faceN_nodeId0, ..., faceN_nodeIdN] ] + """ + cell_faces_node_ids: list[ tuple[ int ] ] = list() + for f in range( cell.GetNumberOfFaces() ): + face = cell.GetFace( f ) + node_ids: list[ int ] = list() + for i in range( face.GetNumberOfPoints() ): + node_ids.append( face.GetPointId( i ) ) + node_ids.sort() if sort_ids else None + cell_faces_node_ids.append( tuple( node_ids ) ) + return tuple( cell_faces_node_ids ) + + +def get_cells_neighbors_number( mesh: vtkUnstructuredGrid ) -> np.array: + """For every cell of a mesh, returns the number of neighbors that it has.\n + WARNINGS:\n + 1) Will give invalid numbers if "supposedly" neighbor cells faces do not share node ids + because of collocated nodes. + 2) Node ids for each face are sorted to avoid comparison issues, because cell faces node ids + can be read in different order regarding spatial orientation. Therefore, we lose the ordering of + the nodes that construct the face. It should not cause problems unless you have degenerated faces. + + Args: + mesh (vtkUnstructuredGrid): An unstructured grid. + + Returns: + np.array: Every index of this array represents a cell_id of the mesh, the value contained at this index + is the number of neighbors for that cell. + """ + # First we need to get the node ids for all faces of every cell in the mesh. + # The keys are face node ids, values are cell_id of cells that have this face node ids in common + faces_node_ids: dict[ tuple[ int ], list[ int ] ] = dict() + for cell_id in range( mesh.GetNumberOfCells() ): + cell_faces_node_ids: tuple[ tuple[ int ] ] = get_cell_faces_node_ids( mesh.GetCell( cell_id ), True ) + for cell_face_node_ids in cell_faces_node_ids: + if cell_face_node_ids not in faces_node_ids: + faces_node_ids[ cell_face_node_ids ] = [ cell_id ] + else: + faces_node_ids[ cell_face_node_ids ].append( cell_id ) + # Now that we know for each face node ids, which cell_ids share it. 
+ # We can identify if a cell is disconnected by checking that one of its face node ids is shared with another cell. + # If a cell_id ends up having no neighbor = cell is disconnected + cells_neighbors_number: np.array = np.zeros( ( mesh.GetNumberOfCells(), 1 ), dtype=int ) + for cell_ids in faces_node_ids.values(): + if len( cell_ids ) > 1: # if a face node ids is shared by more than 1 cell = all cells sharing are neighbors + for cell_id in cell_ids: + cells_neighbors_number[ cell_id ] += 1 + return cells_neighbors_number + + +def __check( mesh: vtkUnstructuredGrid, options: Options ) -> Result: + number_points: int = mesh.GetNumberOfPoints() + cells_info = get_cell_types_and_counts( mesh ) + number_cells: int = cells_info[ 0 ] + number_cell_types: int = cells_info[ 1 ] + cell_types: int = cells_info[ 2 ] + cell_type_counts: int = cells_info[ 3 ] + number_cells_per_nodes: dict[ int, int ] = get_number_cells_per_nodes( mesh ) + sum_number_cells_per_nodes: dict[ int, int ] = summary_number_cells_per_nodes( number_cells_per_nodes ) + disconnected_nodes: dict[ int, tuple[ float ] ] = get_disconnected_nodes_coords( mesh ) + cells_neighbors_number: np.array = get_cells_neighbors_number( mesh ) + min_coords, max_coords = get_coords_min_max( mesh ) + point_ids: bool = not bool( mesh.GetPointData().GetGlobalIds() ) + cell_ids: bool = not bool( mesh.GetCellData().GetGlobalIds() ) + fields_with_NaNs: dict[ str, int ] = check_NaN_fields( mesh ) + point_data: MeshComponentData = build_MeshComponentData( mesh, "point" ) + cell_data: MeshComponentData = build_MeshComponentData( mesh, "cell" ) + field_data: MeshComponentData = build_MeshComponentData( mesh, "field" ) + fields_validity_point_data: FieldValidity = field_values_validity( point_data ) + fields_validity_cell_data: FieldValidity = field_values_validity( cell_data ) + fields_validity_field_data: FieldValidity = field_values_validity( field_data ) + + return Result( number_points=number_points, + 
number_cells=number_cells, + number_cell_types=number_cell_types, + cell_types=cell_types, + cell_type_counts=cell_type_counts, + sum_number_cells_per_nodes=sum_number_cells_per_nodes, + disconnected_nodes=disconnected_nodes, + cells_neighbors_number=cells_neighbors_number, + min_coords=min_coords, + max_coords=max_coords, + is_empty_point_global_ids=point_ids, + is_empty_cell_global_ids=cell_ids, + fields_with_NaNs=fields_with_NaNs, + point_data=point_data, + cell_data=cell_data, + field_data=field_data, + fields_validity_point_data=fields_validity_point_data, + fields_validity_cell_data=fields_validity_cell_data, + fields_validity_field_data=fields_validity_field_data ) + + +def check( vtk_input_file: str, options: Options ) -> Result: + mesh = read_mesh( vtk_input_file ) + options.input_filepath = vtk_input_file + return __check( mesh, options ) diff --git a/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py b/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py index 3b581b7..b679128 100644 --- a/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py +++ b/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py @@ -1,11 +1,15 @@ -import os.path import logging +import os.path +import xml.etree.ElementTree as ET from dataclasses import dataclass +from numpy import array from typing import Iterator, Optional +from vtkmodules.vtkFiltersCore import vtkCellCenters from vtkmodules.vtkCommonCore import vtkIdList -from vtkmodules.vtkCommonDataModel import vtkUnstructuredGrid +from vtkmodules.vtkCommonDataModel import vtkUnstructuredGrid, vtkMultiBlockDataSet, vtkPolyData from vtkmodules.vtkIOLegacy import vtkUnstructuredGridWriter, vtkUnstructuredGridReader -from vtkmodules.vtkIOXML import vtkXMLUnstructuredGridReader, vtkXMLUnstructuredGridWriter +from vtkmodules.vtkIOXML import ( vtkXMLUnstructuredGridReader, vtkXMLUnstructuredGridWriter, + vtkXMLMultiBlockDataReader, vtkXMLMultiBlockDataWriter ) @dataclass( frozen=True ) @@ -14,6 +18,27 @@ class VtkOutput: is_data_mode_binary: 
bool +vtk_type_name_mapping: dict[ int, str ] = { + 1: 'Vertex', + 3: 'Line', + 5: 'Triangle', + 8: 'Pixel', + 9: 'Quad', + 10: 'Tetra', + 11: 'Voxel', + 12: 'Hex', + 13: 'Wedge', + 14: 'Pyramid', + 15: 'Pentagonal prism', + 16: 'Hexagonal Prism', + 42: 'Polyhedron' +} + + +def vtkid_to_string( id: int ) -> str: + return vtk_type_name_mapping.get( id, 'Unknown type' ) + + def to_vtk_id_list( data ) -> vtkIdList: result = vtkIdList() result.Allocate( len( data ) ) @@ -36,6 +61,26 @@ def vtk_iter( l ) -> Iterator[ any ]: yield l.GetCellType( i ) +def get_all_array_names( mesh: vtkUnstructuredGrid ) -> dict[ str, dict[ str, int ] ]: + """Returns a dict with the names of each arrays and their indexes for each type of data contained in the mesh. + + Args: + mesh (vtkUnstructuredGrid): A vtk grid. + + Returns: + dict[ str, dict[ str, int ] ]: { "cell": { array_name0: 3, array_name1: 0, ... }, + "field": { ... }, + "point": { ... } } + """ + data_types: dict[ str, any ] = { "point": mesh.GetCellData, "cell": mesh.GetFieldData, "field": mesh.GetPointData } + all_array_names: dict[ str, dict[ str, int ] ] = { data_type: dict() for data_type in data_types } + for typ, data in data_types.items(): + for i in range( data().GetNumberOfArrays() ): + name: str = data().GetArrayName( i ) + all_array_names[ typ ][ name ] = i + return all_array_names + + def has_invalid_field( mesh: vtkUnstructuredGrid, invalid_fields: list[ str ] ) -> bool: """Checks if a mesh contains at least a data arrays within its cell, field or point data having a certain name. If so, returns True, else False. @@ -47,27 +92,47 @@ def has_invalid_field( mesh: vtkUnstructuredGrid, invalid_fields: list[ str ] ) Returns: bool: True if one field found, else False. 
""" - # Check the cell data fields - cell_data = mesh.GetCellData() - for i in range( cell_data.GetNumberOfArrays() ): - if cell_data.GetArrayName( i ) in invalid_fields: - logging.error( f"The mesh contains an invalid cell field name '{cell_data.GetArrayName( i )}'." ) - return True - # Check the field data fields - field_data = mesh.GetFieldData() - for i in range( field_data.GetNumberOfArrays() ): - if field_data.GetArrayName( i ) in invalid_fields: - logging.error( f"The mesh contains an invalid field name '{field_data.GetArrayName( i )}'." ) - return True - # Check the point data fields - point_data = mesh.GetPointData() - for i in range( point_data.GetNumberOfArrays() ): - if point_data.GetArrayName( i ) in invalid_fields: - logging.error( f"The mesh contains an invalid point field name '{point_data.GetArrayName( i )}'." ) - return True + all_array_names: dict[ str, dict[ str, int ] ] = get_all_array_names( mesh ) + for data_type, array_names in all_array_names.items(): + for array_name in array_names.keys(): + if array_name in invalid_fields: + logging.error( f"The mesh contains an invalid {data_type} array name '{array_name}'." ) + return True return False +def get_points_coords_from_vtk( data: vtkPolyData ) -> array: + """Extracts the coordinates of every point from a vtkPolyData and returns them in a numpy array. + + Args: + data (vtkPolyData): vtkPolyData object. + + Returns: + array: Numpy array of shape( number_of_points, 3 ) + """ + points = data.GetPoints() + num_points: int = points.GetNumberOfPoints() + points_coords: array = array( [ points.GetPoint( i ) for i in range( num_points ) ], dtype=float ) + return points_coords + + +def get_cell_centers_array( mesh: vtkUnstructuredGrid ) -> array: + """Returns an array containing the cell centers coordinates for every cell of a mesh. + + Args: + mesh (vtkUnstructuredGrid): A vtk grid. 
+ + Returns: + np.array: Shape=( mesh number of cells, 3 ) + """ + cell_centers_filter: vtkCellCenters = vtkCellCenters() + cell_centers_filter.SetInputData( mesh ) + cell_centers_filter.Update() + cell_centers = cell_centers_filter.GetOutput() + cell_centers_array: array = get_points_coords_from_vtk( cell_centers ) + return cell_centers_array + + def __read_vtk( vtk_input_file: str ) -> Optional[ vtkUnstructuredGrid ]: reader = vtkUnstructuredGridReader() logging.info( f"Testing file format \"{vtk_input_file}\" using legacy format reader..." ) @@ -102,7 +167,7 @@ def read_mesh( vtk_input_file: str ) -> vtkUnstructuredGrid: :return: A unstructured grid. """ if not os.path.exists( vtk_input_file ): - err_msg: str = f"Invalid file path. Could not read \"{vtk_input_file}\". Dying..." + err_msg: str = f"Invalid file path. Could not read \"{vtk_input_file}\"." logging.error( err_msg ) raise ValueError( err_msg ) file_extension = os.path.splitext( vtk_input_file )[ -1 ] @@ -118,11 +183,97 @@ def read_mesh( vtk_input_file: str ) -> vtkUnstructuredGrid: if output_mesh: return output_mesh # No reader did work. Dying. - err_msg = f"Could not find the appropriate VTK reader for file \"{vtk_input_file}\". Dying..." + err_msg = f"Could not find the appropriate VTK reader for file \"{vtk_input_file}\"." logging.error( err_msg ) raise ValueError( err_msg ) +def read_vtm( vtk_input_file: str ) -> vtkMultiBlockDataSet: + if not vtk_input_file.endswith( ".vtm" ): + raise ValueError( f"Input file '{vtk_input_file}' is not a .vtm file. Cannot read it." ) + reader = vtkXMLMultiBlockDataReader() + reader.SetFileName( vtk_input_file ) + reader.Update() + return reader.GetOutput() + + +def get_vtm_filepath_from_pvd( vtk_input_file: str, vtm_index: int ) -> str: + """From a GEOS output .pvd file, extracts one .vtm file and returns its filepath. + + Args: + vtk_input_file (str): .pvd filepath + vtm_index (int): Index that will select which .vtm to choose. 
+ + Returns: + str: Filepath to the .vtm at the chosen index. + """ + if not vtk_input_file.endswith( ".pvd" ): + raise ValueError( f"Input file '{vtk_input_file}' is not a .pvd file. Cannot read it." ) + tree = ET.parse( vtk_input_file ) + root = tree.getroot() + # Extract all .vtm file paths contained in the .pvd + vtm_paths: list[ str ] = list() + for dataset in root.findall( ".//DataSet" ): + file_path = dataset.get( "file" ) + if file_path.endswith( ".vtm" ): + vtm_paths.append( file_path ) + number_vtms: int = len( vtm_paths ) + if number_vtms == 0: + raise ValueError( f"The '{vtk_input_file}' does not contain any .vtm path." ) + if vtm_index >= number_vtms: + raise ValueError( f"Cannot access the .vtm at index '{vtm_index}' in the '{vtk_input_file}'." + + f" The indexes available are between 0 and {number_vtms - 1}." ) + # build the complete filepath of the vtm to use + directory: str = os.path.dirname( vtk_input_file ) + vtm_filepath: str = os.path.join( directory, vtm_paths[ vtm_index ] ) + return vtm_filepath + + +def get_vtu_filepaths_from_vtm( vtm_filepath: str ) -> tuple[ str ]: + """By reading a vtm file, returns all the vtu filepaths present inside it. + + Args: + vtm_filepath (str): Filepath to a .vtm + + Returns: + tuple[ str ]: ( "file/path/0.vtu", ..., "file/path/N.vtu" ) + """ + if not vtm_filepath.endswith( ".vtm" ): + raise ValueError( f"Input file '{vtm_filepath}' is not a .vtm file. Cannot read it." 
) + # Parse the XML file and find all DataSet elements + tree = ET.parse( vtm_filepath ) + root = tree.getroot() + dataset_elements = root.findall( ".//DataSet" ) + # Extract the file attribute from each DataSet + vtu_filepaths: list[ str ] = [ ds.get( 'file' ) for ds in dataset_elements if ds.get( 'file' ).endswith( '.vtu' ) ] + directory: str = os.path.dirname( vtm_filepath ) + vtu_filepaths = [ os.path.join( directory, vtu_filepath ) for vtu_filepath in vtu_filepaths ] + return tuple( vtu_filepaths ) # to lock the order of the vtus like in the vtm + + +def get_vtu_filepaths( vtk_filepath: str, vtm_index: int = -1 ) -> tuple[ str ]: + """When dealing with a .pvd or .vtm file, returns the filepaths that are contained inside that file. + When dealing with a .vtu, just returns the filepath of the given file. + + Args: + vtk_filepath (str): Filepath to a .pvd, .vtm or .vtu file. + vtm_index (int, optional): When dealing with a .pvd file, selects which vtm index to use. Defaults to -1. + + Returns: + tuple[ str ]: ( "file/path/0.vtu", ..., "file/path/N.vtu" ) + """ + if vtk_filepath.endswith( ".vtu" ): + return ( vtk_filepath, ) + elif vtk_filepath.endswith( ".vtm" ): + return get_vtu_filepaths_from_vtm( vtk_filepath ) + elif vtk_filepath.endswith( ".pvd" ): + vtm_filepath: str = get_vtm_filepath_from_pvd( vtk_filepath, vtm_index ) + return get_vtu_filepaths_from_vtm( vtm_filepath ) + else: + raise ValueError( f"The source filepath '{vtk_filepath}' provided does not target a .vtu, a .vtm nor a " + + ".pvd file." ) + + def __write_vtk( mesh: vtkUnstructuredGrid, output: str ) -> int: logging.info( f"Writing mesh into file \"{output}\" using legacy format." ) writer = vtkUnstructuredGridWriter() @@ -158,7 +309,18 @@ def write_mesh( mesh: vtkUnstructuredGrid, vtk_output: VtkOutput ) -> int: success_code = __write_vtu( mesh, vtk_output.output, vtk_output.is_data_mode_binary ) else: # No writer found did work. Dying. 
-        err_msg = f"Could not find the appropriate VTK writer for extension \"{file_extension}\". Dying..."
+        err_msg = f"Could not find the appropriate VTK writer for extension \"{file_extension}\"."
         logging.error( err_msg )
         raise ValueError( err_msg )
     return 0 if success_code else 2 # the Write member function return 1 in case of success, 0 otherwise.
+
+
+def write_VTM( multiblock: vtkMultiBlockDataSet, vtk_output: VtkOutput ) -> int:
+    if os.path.exists( vtk_output.output ):
+        logging.error( f"File \"{vtk_output.output}\" already exists, nothing done." )
+        return 1
+    writer = vtkXMLMultiBlockDataWriter()
+    writer.SetFileName( vtk_output.output )
+    writer.SetInputData( multiblock )
+    writer.SetDataModeToBinary() if vtk_output.is_data_mode_binary else writer.SetDataModeToAscii()
+    return 0 if writer.Write() else 2 # the Write member function return 1 in case of success, 0 otherwise.
diff --git a/geos-mesh/src/geos/mesh/doctor/mesh_doctor.py b/geos-mesh/src/geos/mesh/doctor/mesh_doctor.py
index ea1bfe8..1311145 100644
--- a/geos-mesh/src/geos/mesh/doctor/mesh_doctor.py
+++ b/geos-mesh/src/geos/mesh/doctor/mesh_doctor.py
@@ -1,17 +1,17 @@
 import sys
+import logging
+from geos.mesh.doctor.parsing import CheckHelper
+from geos.mesh.doctor.parsing.cli_parsing import parse_and_set_verbosity
+import geos.mesh.doctor.register as register
+min_python_version = ( 3, 7 )
 try:
-    min_python_version = ( 3, 7 )
     assert sys.version_info >= min_python_version
 except AssertionError as e:
     print( f"Please update python to at least version {'.'.join(map(str, min_python_version))}."
) sys.exit( 1 ) -import logging - -from geos.mesh.doctor.parsing import CheckHelper -from geos.mesh.doctor.parsing.cli_parsing import parse_and_set_verbosity -import geos.mesh.doctor.register as register +MESH_DOCTOR_FILEPATH = __file__ def main(): diff --git a/geos-mesh/src/geos/mesh/doctor/parsing/__init__.py b/geos-mesh/src/geos/mesh/doctor/parsing/__init__.py index 679f880..4c9e8d2 100644 --- a/geos-mesh/src/geos/mesh/doctor/parsing/__init__.py +++ b/geos-mesh/src/geos/mesh/doctor/parsing/__init__.py @@ -4,10 +4,12 @@ COLLOCATES_NODES = "collocated_nodes" ELEMENT_VOLUMES = "element_volumes" +FIELD_OPERATIONS = "field_operations" FIX_ELEMENTS_ORDERINGS = "fix_elements_orderings" GENERATE_CUBE = "generate_cube" GENERATE_FRACTURES = "generate_fractures" GENERATE_GLOBAL_IDS = "generate_global_ids" +MESH_STATS = "mesh_stats" NON_CONFORMAL = "non_conformal" SELF_INTERSECTING_ELEMENTS = "self_intersecting_elements" SUPPORTED_ELEMENTS = "supported_elements" diff --git a/geos-mesh/src/geos/mesh/doctor/parsing/field_operations_parsing.py b/geos-mesh/src/geos/mesh/doctor/parsing/field_operations_parsing.py new file mode 100644 index 0000000..676ad45 --- /dev/null +++ b/geos-mesh/src/geos/mesh/doctor/parsing/field_operations_parsing.py @@ -0,0 +1,91 @@ +import logging +from geos.mesh.doctor.checks.field_operations import Options, Result, check_valid_support, __SUPPORT_CHOICES +from geos.mesh.doctor.parsing import vtk_output_parsing, FIELD_OPERATIONS + +__SUPPORT = "support" +__SOURCE = "source" + +__OPERATIONS = "operations" +__OPERATIONS_DEFAULT = "" + +__WHICH_VTM = "which_vtm" +__WHICH_VTM_SUGGESTIONS = [ "first", "last" ] + + +def fill_subparser( subparsers ) -> None: + p = subparsers.add_parser( FIELD_OPERATIONS, + help=f"Allows to perform an operation on fields from a source to your input mesh." 
)
+    p.add_argument( '--' + __SUPPORT,
+                    type=str,
+                    required=True,
+                    metavar=", ".join( map( str, __SUPPORT_CHOICES ) ),
+                    default=__SUPPORT_CHOICES[ 0 ],
+                    help="[string]: Where to define field." )
+    p.add_argument( '--' + __SOURCE,
+                    type=str,
+                    required=True,
+                    help="[string]: Where field data to use for operation comes from .vtu, .vtm or .pvd file." )
+    p.add_argument( '--' + __OPERATIONS,
+                    type=str,
+                    required=True,
+                    default=__OPERATIONS_DEFAULT,
+                    help="[list of string comma separated]: The syntax here is function0:new_name0, " +
+                    "function1:new_name1, ... Allows to perform a wide array of operations to add new data to your " +
+                    "output mesh using the source file data. Examples are the following: 1. Copy of the field " +
+                    " 'poro' from the input to the output with 'poro:poro'. 2. Copy of the field 'PERM' from the " +
+                    "input to the output with a multiplication of the values by 10 with 'PERM*10:PERM'. " +
+                    "3. Copy of the field 'TEMP' from the input to the output with an addition to the values by 0.5 " +
+                    "and change the name of the field to 'temperature' with 'TEMP+0.5:TEMPERATURE'. 4. Create a new " +
+                    "field 'NEW_PARAM' using the input 'PERM' field and having the square root of it with " +
+                    "'sqrt(PERM):NEW_PARAM'. Another method is to use precoded functions available which are: " +
+                    "1. 'distances_mesh_center' will create a field where the distances from the mesh center are " +
+                    "calculated for all the elements chosen as support. To use: " +
+                    "'distances_mesh_center:NEW_FIELD_NAME'. 2. 'random' will create a field " +
+                    "with samples from a uniform distribution over (0, 1). To use: 'random:NEW_FIELD_NAME'." )
+    p.add_argument( '--' + __WHICH_VTM,
+                    type=str,
+                    required=False,
+                    default=__WHICH_VTM_SUGGESTIONS[ 1 ],
+                    help="[string]: If your input is a .pvd, choose which .vtm (each .vtm corresponding to a unique " +
+                    "timestep) will be used for the operation. 
To do so, you can choose amongst these possibilities: " + + "'first' will select the initial timestep; 'last' will select the final timestep; or you can " + + "enter directly the index starting from 0 of the timestep (not the time). By default, the value" + + " is set to 'last'." ) + vtk_output_parsing.fill_vtk_output_subparser( p ) + + +def convert( parsed_options ) -> Options: + support: str = parsed_options[ __SUPPORT ] + check_valid_support( support ) + + operations: list[ tuple[ str ] ] = list() + parsed_operations: str = parsed_options[ __OPERATIONS ] + if parsed_operations == __OPERATIONS_DEFAULT: + raise ValueError( f"No operation was found. Cannot execute this feature." ) + splitted_operations: list[ str ] = parsed_operations.split( "," ) + for operation in splitted_operations: + function_newname: tuple[ str ] = tuple( operation.split( ":" ) ) + if not len( function_newname ) == 2: + raise ValueError( f"The correct format for '--{__OPERATIONS}' is to have 'function:newname'." ) + operations.append( function_newname ) + + which_vtm: str = parsed_options[ __WHICH_VTM ] + if which_vtm in __WHICH_VTM_SUGGESTIONS: + vtm_index: int = 0 if which_vtm == __WHICH_VTM_SUGGESTIONS[ 0 ] else -1 + else: + try: + vtm_index = int( which_vtm ) + except ValueError: + raise ValueError( f"The choice for --{__WHICH_VTM} needs to be an integer or " + + f"'{__WHICH_VTM_SUGGESTIONS[ 0 ]}' or '{__WHICH_VTM_SUGGESTIONS[ 1 ]}'." 
) + + return Options( support=support, + source=parsed_options[ __SOURCE ], + operations=operations, + vtm_index=vtm_index, + vtk_output=vtk_output_parsing.convert( parsed_options ) ) + + +def display_results( options: Options, result: Result ): + if result.info != "OK": + logging.error( f"Field addition failed" ) diff --git a/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py b/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py new file mode 100644 index 0000000..402c8ae --- /dev/null +++ b/geos-mesh/src/geos/mesh/doctor/parsing/mesh_stats_parsing.py @@ -0,0 +1,193 @@ +import logging +import os +from datetime import datetime +from io import StringIO +from numpy import unique, where +from typing import Iterable +from geos.mesh.doctor.checks.mesh_stats import Options, Result +from geos.mesh.doctor.parsing import MESH_STATS + +__WRITE_STATS = "write_stats" +__WRITE_STATS_DEFAULT = 0 + +__OUTPUT = "output" + +__DISCONNECTED = "disconnected" +__DISCONNECTED_DEFAULT = 0 + +__FIELD_VALUES = "field_values" +__FIELD_VALUES_DEFAULT = 0 + + +def fill_subparser( subparsers ) -> None: + p = subparsers.add_parser( MESH_STATS, help=f"Outputs basic properties of a mesh." ) + p.add_argument( '--' + __WRITE_STATS, + type=int, + required=True, + metavar=[ 0, 1 ], + default=__WRITE_STATS_DEFAULT, + help=( f"\t[int]: The stats of the mesh will be printed in a file" + + " to the folder specified in --output." ) ) + p.add_argument( '--' + __DISCONNECTED, + type=int, + required=False, + metavar=[ 0, 1 ], + default=__DISCONNECTED_DEFAULT, + help=f"\t[int]: Display all disconnected nodes ids and disconnected cell ids." ) + p.add_argument( '--' + __FIELD_VALUES, + type=int, + required=False, + metavar=[ 0, 1 ], + default=__FIELD_VALUES_DEFAULT, + help=f"\t[int]: Display all range of field values that seem not realistic." ) + p.add_argument( '--' + __OUTPUT, + type=str, + required=False, + help=f"[string]: The output folder destination where the stats will be written." 
)
+
+
+def convert( parsed_options ) -> Options:
+    # input_filepath will be defined in check function before calling __check
+    return Options( write_stats=parsed_options[ __WRITE_STATS ],
+                    output_folder=parsed_options[ __OUTPUT ],
+                    input_filepath="",
+                    disconnected=parsed_options[ __DISCONNECTED ],
+                    field_values=parsed_options[ __FIELD_VALUES ] )
+
+
+def display_results( options: Options, result: Result ):
+    log_stream = StringIO()
+    stream_handler = logging.StreamHandler( log_stream )
+    stream_handler.setLevel( logging.INFO )
+
+    # Get the root logger and add the StreamHandler to it to possibly output the log to an external file
+    logger = logging.getLogger()
+    logger.addHandler( stream_handler )
+    logger.setLevel( logging.INFO )
+
+    logging.info( f"The mesh has {result.number_cells} cells and {result.number_points} points." )
+    logging.info( f"There are {result.number_cell_types} different types of cells in the mesh:" )
+    for cell_type, type_count in zip( result.cell_types, result.cell_type_counts ):
+        logging.info( f"\t{cell_type}\t\t({type_count} cells)" )
+
+    logging.info( f"Number of cells that have exactly N neighbors:" )
+    unique_numbers_neighbors, counts = unique( result.cells_neighbors_number, return_counts=True )
+    logging.info( "\tNeighbors\tNumber of cells concerned" )
+    for number_neighbors, count in zip( unique_numbers_neighbors, counts ):
+        logging.info( f"\t{number_neighbors}\t\t{count}" )
+
+    logging.info( "Number of nodes being shared by exactly N cells:" )
+    logging.info( "\tCells\t\tNumber of nodes" )
+    for number_cells_per_node, number_of_occurrences in result.sum_number_cells_per_nodes.items():
+        logging.info( f"\t{number_cells_per_node}\t\t{number_of_occurrences}" )
+
+    # unique_numbers_neighbors sorted in ascending order from minimum positive number
+    number_cells_disconnected: int = counts[ 0 ] if 0 in unique_numbers_neighbors else 0
+    logging.info( f"Number of disconnected cells in the mesh: 
{number_cells_disconnected}" ) + if number_cells_disconnected > 0: + logging.info( "\tIndexes of disconnected cells" ) + indexes = where( result.cells_neighbors_number == 0 ) + logging.info( f"{indexes[ 0 ]}" ) + + logging.info( f"Number of disconnected nodes in the mesh: {len( result.disconnected_nodes )}" ) + if len( result.disconnected_nodes ) > 0: + logging.info( "\tNodeId\t\tCoordinates" ) + for node_id, coordinates in result.disconnected_nodes.items(): + logging.info( f"\t{node_id}\t\t{coordinates}" ) + + logging.info( "The domain is contained in:" ) + logging.info( f"\t{result.min_coords[ 0 ]} <= x <= {result.max_coords[ 0 ]}" ) + logging.info( f"\t{result.min_coords[ 1 ]} <= y <= {result.max_coords[ 1 ]}" ) + logging.info( f"\t{result.min_coords[ 2 ]} <= z <= {result.max_coords[ 2 ]}" ) + + logging.info( f"Does the mesh have global point ids: {not result.is_empty_point_global_ids}" ) + logging.info( f"Does the mesh have global cell ids: {not result.is_empty_cell_global_ids}" ) + + logging.info( f"Number of fields data containing NaNs values: {len( result.fields_with_NaNs )}" ) + if len( result.fields_with_NaNs ) > 0: + logging.info( "\tFieldName\tNumber of NaNs" ) + for field_name, number_NaNs in result.fields_with_NaNs.items(): + logging.info( f"\t{field_name}\t{number_NaNs}" ) + + space_size: int = 3 + data_types: dict[ str, any ] = { + "CellData": result.cell_data, + "PointData": result.point_data, + "FieldData": result.field_data + } + for data_type, data in data_types.items(): + logging.info( f"There are {len( data.scalar_names )} scalar fields from the {data_type}:" ) + for i in range( len( data.scalar_names ) ): + logging.info( f"\t{data.scalar_names[i]}" + harmonious_spacing( data.scalar_names, i, space_size ) + + f"min = {data.scalar_min_values[ i ]}" + " " * space_size + + f"max = {data.scalar_max_values[ i ]}" ) + + logging.info( f"There are {len( data.tensor_names )} vector/tensor fields from the {data_type}:" ) + for i in range( len( 
data.tensor_names ) ):
+            logging.info( f"\t{data.tensor_names[ i ]}" + harmonious_spacing( data.tensor_names, i, space_size ) +
+                          f"min = {data.tensor_min_values[ i ]}" + " " * space_size +
+                          f"max = {data.tensor_max_values[ i ]}" )
+
+    fields_validity_types: dict[ str, any ] = {
+        "CellData": result.fields_validity_cell_data,
+        "PointData": result.fields_validity_point_data,
+        "FieldData": result.fields_validity_field_data
+    }
+    for field_validity_type, data in fields_validity_types.items():
+        logging.info( f"Unexpected range of values for vector/tensor fields from the {field_validity_type}:" )
+        for field_name, ( is_valid, min_max ) in data.items():
+            if not is_valid:
+                logging.info( f"{field_name} expected to be between {min_max[ 0 ]} and {min_max[ 1 ]}." )
+
+    if options.write_stats and is_valid_to_write_folder( options.output_folder ):
+        filepath: str = build_filepath_output_file( options )
+        with open( filepath, 'w' ) as file:
+            file.writelines( log_stream.getvalue() )
+
+
+def harmonious_spacing( iterable_objs: Iterable[ Iterable ], indexIter: int, space_size: int = 3 ) -> str:
+    longest_element: Iterable = max( iterable_objs, key=len )
+    ideal_space: int = len( longest_element ) - len( iterable_objs[ indexIter ] ) + space_size
+    return " " * ideal_space
+
+
+def is_valid_to_write_folder( folder_path: str ) -> bool:
+    """Checks if a folder path is valid to write a file within it.
+
+    Args:
+        folder_path (str): Path to a folder.
+
+    Returns:
+        bool:
+    """
+    if not os.path.exists( folder_path ):
+        logging.error( f"The folder path given '{folder_path}' to write the log in does not exist. No file written." )
+        return False
+    if not os.path.isdir( folder_path ):
+        logging.error( f"The path given '{folder_path}' to write the log is not a directory path. No file written." )
+        return False
+    if not os.access( folder_path, os.W_OK ):
+        logging.error( f"You do not have writing access to the folder chosen '{folder_path}' to write the log." +
+                       " No file written."
) + return False + return True + + +def build_filepath_output_file( options: Options ) -> str: + """Knowing the filepath of the mesh on which the stats will be gathered, and the directory path to where the user + wants to save the stats, builds a unique filename. + + Args: + options (Options): Options given by the user. + + Returns: + str: Complete filepath for the creation of the output file. + """ + base_name = os.path.basename( options.input_filepath ) + # Split the base name into a mesh name and extension + mesh_name, _ = os.path.splitext( base_name ) + current_time = datetime.now() + time = current_time.strftime( "%Y%m%d_%H%M%S" ) + filename: str = mesh_name + "_stats_" + time + ".txt" + filepath: str = os.path.join( options.output_folder, filename ) + return filepath diff --git a/geos-mesh/src/geos/mesh/doctor/register.py b/geos-mesh/src/geos/mesh/doctor/register.py index 75e8d48..12a4519 100644 --- a/geos-mesh/src/geos/mesh/doctor/register.py +++ b/geos-mesh/src/geos/mesh/doctor/register.py @@ -53,9 +53,10 @@ def closure_trick( cn: str ): __CHECKS[ check_name ] = lambda: __load_module_check( cn ) # Register the modules to load here. 
- for check_name in ( parsing.COLLOCATES_NODES, parsing.ELEMENT_VOLUMES, parsing.FIX_ELEMENTS_ORDERINGS, - parsing.GENERATE_CUBE, parsing.GENERATE_FRACTURES, parsing.GENERATE_GLOBAL_IDS, - parsing.NON_CONFORMAL, parsing.SELF_INTERSECTING_ELEMENTS, parsing.SUPPORTED_ELEMENTS ): + for check_name in ( parsing.COLLOCATES_NODES, parsing.ELEMENT_VOLUMES, parsing.FIELD_OPERATIONS, + parsing.FIX_ELEMENTS_ORDERINGS, parsing.GENERATE_CUBE, parsing.GENERATE_FRACTURES, + parsing.GENERATE_GLOBAL_IDS, parsing.MESH_STATS, parsing.NON_CONFORMAL, + parsing.SELF_INTERSECTING_ELEMENTS, parsing.SUPPORTED_ELEMENTS ): closure_trick( check_name ) loaded_checks: Dict[ str, Callable[ [ str, Any ], Any ] ] = __load_checks() loaded_checks_helpers: Dict[ str, CheckHelper ] = dict() diff --git a/geos-mesh/tests/__init__.py b/geos-mesh/tests/__init__.py new file mode 100644 index 0000000..b1cfe26 --- /dev/null +++ b/geos-mesh/tests/__init__.py @@ -0,0 +1 @@ +# Empty \ No newline at end of file diff --git a/geos-mesh/tests/test_field_operations.py b/geos-mesh/tests/test_field_operations.py new file mode 100644 index 0000000..6e377a7 --- /dev/null +++ b/geos-mesh/tests/test_field_operations.py @@ -0,0 +1,245 @@ +import os +import shutil +from numpy import array, arange, full, array_equal, sqrt, nan, log, log10 +from vtkmodules.util.numpy_support import numpy_to_vtk, vtk_to_numpy +from scipy.spatial import KDTree +from geos.mesh.doctor.checks import field_operations as fo +from geos.mesh.doctor.checks import vtk_utils as vu +from tests import test_vtk_utils as tvu +from vtkmodules.vtkCommonDataModel import vtkUnstructuredGrid, VTK_HEXAHEDRON +""" +For tests creation +""" +# yapf: disable +## GRID 1 +eight_hex_points_coords: list[ list[ float ] ] = [ [ 0.0, 0.0, 0.0 ], #point0 + [ 1.0, 0.0, 0.0 ], #point1 + [ 2.0, 0.0, 0.0 ], #point2 + [ 0.0, 1.0, 0.0 ], #point3 + [ 1.0, 1.0, 0.0 ], #point4 + [ 2.0, 1.0, 0.0 ], #point5 + [ 0.0, 2.0, 0.0 ], #point6 + [ 1.0, 2.0, 0.0 ], #point7 + [ 2.0, 2.0, 0.0 ], 
#point8 + [ 0.0, 0.0, 1.0 ], #point9 + [ 1.0, 0.0, 1.0 ], #point10 + [ 2.0, 0.0, 1.0 ], #point11 + [ 0.0, 1.0, 1.0 ], #point12 + [ 1.0, 1.0, 1.0 ], #point13 + [ 2.0, 1.0, 1.0 ], #point14 + [ 0.0, 2.0, 1.0 ], #point15 + [ 1.0, 2.0, 1.0 ], #point16 + [ 2.0, 2.0, 1.0 ], #point17 + [ 0.0, 0.0, 2.0 ], #point18 + [ 1.0, 0.0, 2.0 ], #point19 + [ 2.0, 0.0, 2.0 ], #point20 + [ 0.0, 1.0, 2.0 ], #point21 + [ 1.0, 1.0, 2.0 ], #point22 + [ 2.0, 1.0, 2.0 ], #point23 + [ 0.0, 2.0, 2.0 ], #point24 + [ 1.0, 2.0, 2.0 ], #point25 + [ 2.0, 2.0, 2.0 ] ] #point26 +eight_hex_ids = [ [ 0, 1, 4, 3, 9, 10, 13, 12 ], + [ 0 + 1, 1 + 1, 4 + 1, 3 + 1, 9 + 1, 10 + 1, 13 + 1, 12 + 1 ], + [ 0 + 3, 1 + 3, 4 + 3, 3 + 3, 9 + 3, 10 + 3, 13 + 3, 12 + 3 ], + [ 0 + 4, 1 + 4, 4 + 4, 3 + 4, 9 + 4, 10 + 4, 13 + 4, 12 + 4 ], + [ 0 + 9, 1 + 9, 4 + 9, 3 + 9, 9 + 9, 10 + 9, 13 + 9, 12 + 9 ], + [ 0 + 10, 1 + 10, 4 + 10, 3 + 10, 9 + 10, 10 + 10, 13 + 10, 12 + 10 ], + [ 0 + 12, 1 + 12, 4 + 12, 3 + 12, 9 + 12, 10 + 12, 13 + 12, 12 + 12 ], + [ 0 + 13, 1 + 13, 4 + 13, 3 + 13, 9 + 13, 10 + 13, 13 + 13, 12 + 13 ] ] +eight_hex_grid: vtkUnstructuredGrid = tvu.create_type_vtk_grid( eight_hex_points_coords, eight_hex_ids, VTK_HEXAHEDRON ) +eight_hex_grid_output = vu.VtkOutput( os.path.join( tvu.dir_name, "eight_hex.vtu" ), False ) + +eight_hex_grid_empty: vtkUnstructuredGrid = vtkUnstructuredGrid() +eight_hex_grid_empty.DeepCopy( eight_hex_grid ) +eight_hex_grid_empty_output = vu.VtkOutput( os.path.join( tvu.dir_name, "eight_hex_empty.vtu" ), False ) + +#GRID 2 which is a cell0 of GRID 1 +hex_ids = [ [ 0, 1, 2, 3, 4, 5, 6, 7 ] ] +hex0_points_coords: list[ list[ float ] ] = [ [ 0.0, 0.0, 0.0 ], #point0 + [ 1.0, 0.0, 0.0 ], #point1 + [ 1.0, 1.0, 0.0 ], #point2 + [ 0.0, 1.0, 0.0 ], #point3 + [ 0.0, 0.0, 1.0 ], #point4 + [ 1.0, 0.0, 1.0 ], #point5 + [ 1.0, 1.0, 1.0 ], #point6 + [ 0.0, 1.0, 1.0 ] ] #point7 +hex0_grid: vtkUnstructuredGrid = tvu.create_type_vtk_grid( hex0_points_coords, hex_ids, VTK_HEXAHEDRON ) + +#GRID 3 which 
is cell1 of GRID 1 +hex1_points_coords: list[ list[ float ] ] = [ [ 1.0, 0.0, 0.0 ], #point0 + [ 2.0, 0.0, 0.0 ], #point1 + [ 2.0, 1.0, 0.0 ], #point2 + [ 1.0, 1.0, 0.0 ], #point3 + [ 1.0, 0.0, 1.0 ], #point4 + [ 2.0, 0.0, 1.0 ], #point5 + [ 2.0, 1.0, 1.0 ], #point6 + [ 1.0, 1.0, 1.0 ] ] #point7 +hex1_grid: vtkUnstructuredGrid = tvu.create_type_vtk_grid( hex1_points_coords, hex_ids, VTK_HEXAHEDRON ) + +sub_grids: list[ vtkUnstructuredGrid ] = [ hex0_grid, hex1_grid ] +sub_grids_values: list[ dict[ str, array ] ] = [ dict() for _ in range( len( sub_grids ) ) ] +sub_grids_output: list[ vu.VtkOutput ] = [ vu.VtkOutput( os.path.join( tvu.dir_name, f"sub_grid{i}.vtu" ), True ) for i in range( len( sub_grids ) ) ] +# yapf: enable +""" +Add arrays in each grid +""" +ncells: int = eight_hex_grid.GetNumberOfCells() +npoints: int = eight_hex_grid.GetNumberOfPoints() +eight_hex_grid_values: dict[ str, array ] = { + "cell_param0": arange( 0, ncells ).reshape( ncells, 1 ), + "cell_param1": arange( ncells, ncells * 3 ).reshape( ncells, 2 ), + "cell_param2": arange( ncells * 3, ncells * 6 ).reshape( ncells, 3 ), + "point_param0": arange( ncells * 6, ncells * 6 + npoints ).reshape( npoints, 1 ), + "point_param1": arange( ncells * 6 + npoints, ncells * 6 + npoints * 3 ).reshape( npoints, 2 ), + "point_param2": arange( ncells * 6 + npoints * 3, ncells * 6 + npoints * 6 ).reshape( npoints, 3 ) +} +for name, value in eight_hex_grid_values.items(): + arr_values = numpy_to_vtk( value ) + arr_values.SetName( name ) + if "cell" in name: + eight_hex_grid.GetCellData().AddArray( arr_values ) + else: + eight_hex_grid.GetPointData().AddArray( arr_values ) + for i in range( len( sub_grids_values ) ): + if len( value.shape ) == 1: + sub_grids_values[ i ][ name ] = value[ i ] + else: + sub_grids_values[ i ][ name ] = [ value[ i ][ j ] for j in range( value.shape[ 1 ] ) ] + +for i, sub_grid in enumerate( sub_grids ): + for name, value in sub_grids_values[ i ].items(): + arr_values = numpy_to_vtk( 
value ) + arr_values.SetName( name ) + if "cell" in name: + sub_grid.GetCellData().AddArray( arr_values ) + else: + sub_grid.GetPointData().AddArray( arr_values ) + +operations_points: list[ tuple[ str ] ] = [ ( "point_param0", "point_param0" ), ( "point_param1", "point_param1_new" ), + ( "point_param2 * 3", "point_param2_new" ), + ( "log( point_param0 )", "point_param0_created" ), + ( "sqrt( point_param1 )", "point_param1_created" ), + ( "point_param0 + point_param1 * 2", "point_param2_created" ) ] +operations_cells: list[ tuple[ str ] ] = [ ( "cell_param0", "cell_param0" ), ( "cell_param1", "cell_param1_new" ), + ( "cell_param2 + 10", "cell_param2_new" ), + ( "log( cell_param0 )", "cell_param0_created" ), + ( "sqrt( cell_param1 )", "cell_param1_created" ), + ( "cell_param0 + cell_param1 * 2", "cell_param2_created" ) ] + +out_points: vu.VtkOutput = vu.VtkOutput( os.path.join( tvu.dir_name, "points.vtu" ), True ) +out_cells: vu.VtkOutput = vu.VtkOutput( os.path.join( tvu.dir_name, "cells.vtu" ), True ) + + +class TestClass: + + def test_precoded_fields( self ): + result_points: array = fo.build_distances_mesh_center( eight_hex_grid_empty, "point" ) + result_cells: array = fo.build_distances_mesh_center( eight_hex_grid_empty, "cell" ) + sq2, sq3, sq3h = sqrt( 2 ), sqrt( 3 ), sqrt( 3 ) / 2 + expected_points: array = array( [ + sq3, sq2, sq3, sq2, 1.0, sq2, sq3, sq2, sq3, sq2, 1.0, sq2, 1.0, 0.0, 1.0, sq2, 1.0, sq2, sq3, sq2, sq3, + sq2, 1.0, sq2, sq3, sq2, sq3 + ] ) + expected_cells: array = array( [ sq3h, sq3h, sq3h, sq3h, sq3h, sq3h, sq3h, sq3h ] ) + assert array_equal( result_points, expected_points ) + assert array_equal( result_cells, expected_cells ) + random_points: array = fo.build_random_uniform_distribution( eight_hex_grid_empty, "point" ) + random_cells: array = fo.build_random_uniform_distribution( eight_hex_grid_empty, "cell" ) + assert eight_hex_grid_empty.GetNumberOfPoints() == random_points.shape[ 0 ] + assert eight_hex_grid_empty.GetNumberOfCells() 
== random_cells.shape[ 0 ] + + def test_get_reorder_mapping( self ): + support_points: array = fo.get_support_elements( eight_hex_grid, "point" ) + support_cells: array = fo.get_support_elements( eight_hex_grid, "cell" ) + kd_tree_points: KDTree = KDTree( support_points ) + kd_tree_cells: KDTree = KDTree( support_cells ) + result_points1: array = fo.get_reorder_mapping( kd_tree_points, hex0_grid, "point" ) + result_cells1: array = fo.get_reorder_mapping( kd_tree_cells, hex0_grid, "cell" ) + result_points2: array = fo.get_reorder_mapping( kd_tree_points, hex1_grid, "point" ) + result_cells2: array = fo.get_reorder_mapping( kd_tree_cells, hex1_grid, "cell" ) + assert result_points1.tolist() == [ 0, 1, 4, 3, 9, 10, 13, 12 ] + assert result_points2.tolist() == [ 0 + 1, 1 + 1, 4 + 1, 3 + 1, 9 + 1, 10 + 1, 13 + 1, 12 + 1 ] + assert result_cells1.tolist() == [ 0 ] + assert result_cells2.tolist() == [ 1 ] + + def test_get_array_names_to_collect_and_options( self ): + vu.write_mesh( eight_hex_grid, eight_hex_grid_output ) + options1: fo.Options = fo.Options( "cell", eight_hex_grid_output.output, operations_cells, -1, out_cells ) + options2: fo.Options = fo.Options( "point", eight_hex_grid_output.output, operations_points, -1, out_points ) + result1, options1_new = fo.get_array_names_to_collect_and_options( eight_hex_grid_output.output, options1 ) + result2, options2_new = fo.get_array_names_to_collect_and_options( eight_hex_grid_output.output, options2 ) + os.remove( eight_hex_grid_output.output ) + assert result1.sort() == [ fc[ 0 ] for fc in operations_cells ].sort() + assert result2.sort() == [ fp[ 0 ] for fp in operations_points ].sort() + assert options1_new.operations.sort() == operations_cells.sort() + assert options2_new.operations.sort() == operations_points.sort() + + def test_merge_local_in_global_array( self ): + # create arrays filled with nan values + glob_arr_points_1D: array = full( ( 8, 1 ), nan ) + glob_arr_cells_1D: array = full( ( 8, 1 ), nan ) + 
glob_arr_points_3D: array = full( ( 8, 3 ), nan ) + glob_arr_cells_3D: array = full( ( 8, 3 ), nan ) + loc_arr_points_1D: array = array( list( range( 0, 4 ) ) ) + loc_arr_cells_1D: array = array( list( range( 4, 8 ) ) ) + loc_arr_points_3D: array = array( + ( list( range( 0, 3 ) ), list( range( 6, 9 ) ), list( range( 12, 15 ) ), list( range( 18, 21 ) ) ) ) + loc_arr_cells_3D: array = array( + ( list( range( 3, 6 ) ), list( range( 9, 12 ) ), list( range( 15, 18 ) ), list( range( 21, 24 ) ) ) ) + mapping_points: array = array( [ 0, 2, 4, 6 ] ) + mapping_cells: array = array( [ 7, 5, 3, 1 ] ) + fo.merge_local_in_global_array( glob_arr_points_1D, loc_arr_points_1D, mapping_points ) + fo.merge_local_in_global_array( glob_arr_cells_1D, loc_arr_cells_1D, mapping_cells ) + fo.merge_local_in_global_array( glob_arr_points_3D, loc_arr_points_3D, mapping_points ) + fo.merge_local_in_global_array( glob_arr_cells_3D, loc_arr_cells_3D, mapping_cells ) + expected_points_1D: array = array( [ 0, nan, 1, nan, 2, nan, 3, nan ] ).reshape( -1, 1 ) + expected_cells_1D: array = array( [ nan, 7, nan, 6, nan, 5, nan, 4 ] ).reshape( -1, 1 ) + expected_points_3D: array = array( [ [ 0, 1, 2 ], [ nan, nan, nan ], [ 6, 7, 8 ], [ nan, nan, nan ], + [ 12, 13, 14 ], [ nan, nan, nan ], [ 18, 19, 20 ], [ nan, nan, nan ] ] ) + expected_cells_3D: array = array( [ [ nan, nan, nan ], [ 21, 22, 23 ], [ nan, nan, nan ], [ 15, 16, 17 ], + [ nan, nan, nan ], [ 9, 10, 11 ], [ nan, nan, nan ], [ 3, 4, 5 ] ] ) + assert array_equal( glob_arr_points_1D, expected_points_1D, equal_nan=True ) + assert array_equal( glob_arr_cells_1D, expected_cells_1D, equal_nan=True ) + assert array_equal( glob_arr_points_3D, expected_points_3D, equal_nan=True ) + assert array_equal( glob_arr_cells_3D, expected_cells_3D, equal_nan=True ) + + def test_implement_arrays( self ): + output: vu.VtkOutput = vu.VtkOutput( "filled.vtu", True ) + empty_mesh: vtkUnstructuredGrid = vtkUnstructuredGrid() + empty_mesh.DeepCopy( 
eight_hex_grid_empty ) + npoints: int = empty_mesh.GetNumberOfPoints() + ncells: int = empty_mesh.GetNumberOfCells() + ope_points = [ ( "point_param0", "point_param0" ), ( "point_param1", "point_copy1" ), + ( "point_param2 * 10 + 0.1", "point_param2" ), ( "log(point_param0)", "new0" ), + ( "sqrt(point_param1)", "new1" ), ( "distances_mesh_center", "new2" ) ] + ope_cells = [ ( "cell_param0", "cell_param0" ), ( "cell_param1", "cell_copy1" ), + ( "cell_param2 / 0.1 - 0.5", "cell_param2" ), ( "sqrt(cell_param0)", "new3" ), + ( "log10(cell_param1)", "new4" ), ( "cell_param0 + cell_param1", "new5" ) ] + options_point = fo.Options( "point", "empty.vtu", ope_points, -1, output ) + options_cell = fo.Options( "cell", "empty.vtu", ope_cells, -1, output ) + fo.implement_arrays( empty_mesh, eight_hex_grid_values, options_point ) + fo.implement_arrays( empty_mesh, eight_hex_grid_values, options_cell ) + point_data_mesh = empty_mesh.GetPointData() + cell_data_mesh = empty_mesh.GetCellData() + expected_results: dict[ str, array ] = { + "point_param0": eight_hex_grid_values[ "point_param0" ], + "point_copy1": eight_hex_grid_values[ "point_param1" ], + "point_param2": eight_hex_grid_values[ "point_param2" ] * 10 + 0.1, + "cell_param0": eight_hex_grid_values[ "cell_param0" ], + "cell_copy1": eight_hex_grid_values[ "cell_param1" ], + "cell_param2": eight_hex_grid_values[ "cell_param2" ] / 0.1 - 0.5, + "new0": log( eight_hex_grid_values[ "point_param0" ] ), + "new1": sqrt( eight_hex_grid_values[ "point_param1" ] ), + "new2": fo.build_distances_mesh_center( empty_mesh, "point" ).reshape( ( npoints, 1 ) ), + "new3": sqrt( eight_hex_grid_values[ "cell_param0" ] ), + "new4": log10( eight_hex_grid_values[ "cell_param1" ] ), + "new5": eight_hex_grid_values[ "cell_param0" ] + eight_hex_grid_values[ "cell_param1" ] + } + for data, nelements in zip( [ point_data_mesh, cell_data_mesh ], [ npoints, ncells ] ): + for i in range( data.GetNumberOfArrays() ): + array_name: str = data.GetArrayName( i 
) + result: array = vtk_to_numpy( data.GetArray( i ) ) + if len( result.shape ) == 1: + result = result.reshape( ( nelements, 1 ) ) + assert array_equal( result, expected_results[ array_name ] ) diff --git a/geos-mesh/tests/test_mesh_stats.py b/geos-mesh/tests/test_mesh_stats.py new file mode 100644 index 0000000..6f1406c --- /dev/null +++ b/geos-mesh/tests/test_mesh_stats.py @@ -0,0 +1,340 @@ +import os +import re +import logging +import subprocess +import numpy as np +from geos.mesh.doctor.mesh_doctor import MESH_DOCTOR_FILEPATH +from geos.mesh.doctor.checks import mesh_stats as ms +from geos.mesh.doctor.checks.generate_cube import Options, FieldInfo, __build +from geos.mesh.doctor.checks.vtk_utils import VtkOutput, write_mesh +from vtkmodules.vtkCommonDataModel import vtkUnstructuredGrid, vtkHexahedron +from vtkmodules.util.numpy_support import numpy_to_vtk +""" +For creation of output test meshes +""" +current_file_path: str = __file__ +dir_name: str = os.path.dirname( current_file_path ) +pattern_test: str = "to_check_mesh" +filepath_mesh_for_stats: str = os.path.join( dir_name, pattern_test + ".vtu" ) +test_mesh_for_stats: VtkOutput = VtkOutput( filepath_mesh_for_stats, True ) +""" +Grids for stats tests +""" +# First mesh: no anomalies to look for +out: VtkOutput = VtkOutput( "test", False ) +field0: FieldInfo = FieldInfo( "scalar_cells", 1, "CELLS" ) +field1: FieldInfo = FieldInfo( "tensor_cells", 3, "CELLS" ) +field2: FieldInfo = FieldInfo( "scalar_points", 1, "POINTS" ) +field3: FieldInfo = FieldInfo( "tensor_points", 3, "POINTS" ) +options_cube0: Options = Options( vtk_output=out, + generate_cells_global_ids=True, + generate_points_global_ids=True, + xs=np.array( [ 0.0, 1.0, 2.0 ] ), + ys=np.array( [ 0.0, 1.0, 2.0 ] ), + zs=np.array( [ 0.0, 1.0, 2.0 ] ), + nxs=[ 1, 1 ], + nys=[ 1, 1 ], + nzs=[ 1, 1 ], + fields=[ field0, field1, field2, field3 ] ) +cube0: vtkUnstructuredGrid = __build( options_cube0 ) + +# Second mesh: disconnected nodes are added +cube1: 
vtkUnstructuredGrid = __build( options_cube0 ) +cube1.GetPoints().InsertNextPoint( ( 3.0, 0.0, 0.0 ) ) +cube1.GetPoints().InsertNextPoint( ( 3.0, 1.0, 0.0 ) ) +cube1.GetPoints().InsertNextPoint( ( 3.0, 2.0, 0.0 ) ) + +# Third mesh: fields with invalid ranges of values are added +field_poro: FieldInfo = FieldInfo( "POROSITY", 1, "CELLS" ) +field_perm: FieldInfo = FieldInfo( "PERMEABILITY", 3, "CELLS" ) +field_density: FieldInfo = FieldInfo( "DENSITY", 1, "CELLS" ) +field_temp: FieldInfo = FieldInfo( "TEMPERATURE", 1, "POINTS" ) +field_pressure: FieldInfo = FieldInfo( "PRESSURE", 3, "POINTS" ) +options_cube2: Options = Options( vtk_output=out, + generate_cells_global_ids=True, + generate_points_global_ids=True, + xs=np.array( [ 0.0, 1.0, 2.0 ] ), + ys=np.array( [ 0.0, 1.0, 2.0 ] ), + zs=np.array( [ 0.0, 1.0, 2.0 ] ), + nxs=[ 1, 1 ], + nys=[ 1, 1 ], + nzs=[ 1, 1 ], + fields=[ field_poro, field_perm, field_density, field_temp, field_pressure ] ) +cube2: vtkUnstructuredGrid = __build( options_cube2 ) +number_cells: int = cube2.GetNumberOfCells() +number_points: int = cube2.GetNumberOfPoints() +array_poro: np.array = np.ones( ( number_cells, field_poro.dimension ), dtype=float ) * ( -1.0 ) +array_perm: np.array = np.ones( ( number_cells, field_perm.dimension ), dtype=float ) * 2.0 +array_density: np.array = np.ones( ( number_cells, field_density.dimension ), dtype=float ) * ( 100000.0 ) +array_temp: np.array = np.ones( ( number_points, field_temp.dimension ), dtype=float ) * ( -1.0 ) +array_pressure: np.array = np.ones( ( number_points, field_pressure.dimension ), dtype=float ) * ( -1.0 ) +vtk_array_poro = numpy_to_vtk( array_poro ) +vtk_array_perm = numpy_to_vtk( array_perm ) +vtk_array_density = numpy_to_vtk( array_density ) +vtk_array_temp = numpy_to_vtk( array_temp ) +vtk_array_pressure = numpy_to_vtk( array_pressure ) +vtk_array_poro.SetName( field_poro.name + "_invalid" ) +vtk_array_perm.SetName( field_perm.name + "_invalid" ) +vtk_array_density.SetName( 
field_density.name + "_invalid" ) +vtk_array_temp.SetName( field_temp.name + "_invalid" ) +vtk_array_pressure.SetName( field_pressure.name + "_invalid" ) +cell_data = cube2.GetCellData() +point_data = cube2.GetPointData() +cell_data.AddArray( vtk_array_poro ) +cell_data.AddArray( vtk_array_perm ) +cell_data.AddArray( vtk_array_density ) +point_data.AddArray( vtk_array_temp ) +point_data.AddArray( vtk_array_pressure ) + +# In this mesh, certain fields have NaN values +cube3: vtkUnstructuredGrid = __build( options_cube2 ) +array_poro = array_poro * ( -1 ) +array_temp = array_temp * ( -1 ) +array_poro[ 0 ], array_poro[ -1 ] = np.nan, np.nan +array_temp[ 0 ], array_temp[ -1 ] = np.nan, np.nan +vtk_array_poro = numpy_to_vtk( array_poro ) +vtk_array_temp = numpy_to_vtk( array_temp ) +vtk_array_poro.SetName( field_poro.name + "_invalid" ) +vtk_array_temp.SetName( field_temp.name + "_invalid" ) +cell_data = cube3.GetCellData() +point_data = cube3.GetPointData() +cell_data.AddArray( vtk_array_poro ) +point_data.AddArray( vtk_array_temp ) + +# cube4 is a cube with an extra hex cell disconnected added +options_cube4: Options = Options( vtk_output=out, + generate_cells_global_ids=False, + generate_points_global_ids=False, + xs=np.array( [ 0.0, 1.0, 2.0 ] ), + ys=np.array( [ 0.0, 1.0, 2.0 ] ), + zs=np.array( [ 0.0, 1.0, 2.0 ] ), + nxs=[ 1, 1 ], + nys=[ 1, 1 ], + nzs=[ 1, 1 ], + fields=[] ) +cube4: vtkUnstructuredGrid = __build( options_cube4 ) +number_cells_cube4: int = cube4.GetNumberOfCells() +hex = vtkHexahedron() +coords_new_hex = ( ( 3.0, 0.0, 0.0 ), ( 4.0, 0.0, 0.0 ), ( 4.0, 1.0, 0.0 ), ( 3.0, 1.0, 0.0 ), ( 3.0, 0.0, 1.0 ), + ( 4.0, 0.0, 1.0 ), ( 4.0, 1.0, 1.0 ), ( 3.0, 1.0, 1.0 ) ) +for i in range( len( coords_new_hex ) ): + hex.GetPoints().InsertNextPoint( coords_new_hex[ i ] ) + hex.GetPointIds().SetId( i, number_cells_cube4 + i ) +cube4.InsertNextCell( hex.GetCellType(), hex.GetPointIds() ) + +# Last mesh: test mesh for output and check of execution of mesh_stats 
+f_poro: FieldInfo = FieldInfo( "POROSITY", 1, "CELLS" ) +f_perm: FieldInfo = FieldInfo( "PERMEABILITY", 3, "CELLS" ) +f_density: FieldInfo = FieldInfo( "DENSITY", 1, "CELLS" ) +f_pressure: FieldInfo = FieldInfo( "PRESSURE", 1, "CELLS" ) +f_temp: FieldInfo = FieldInfo( "TEMPERATURE", 1, "POINTS" ) +f_displacement: FieldInfo = FieldInfo( "DISPLACEMENT", 3, "POINTS" ) +options_cube_output: Options = Options( vtk_output=out, + generate_cells_global_ids=True, + generate_points_global_ids=True, + xs=np.array( [ 0.0, 1.0, 2.0, 3.0 ] ), + ys=np.array( [ 0.0, 1.0, 2.0, 3.0 ] ), + zs=np.array( [ 0.0, 1.0, 2.0, 3.0 ] ), + nxs=[ 1, 1, 1 ], + nys=[ 1, 1, 1 ], + nzs=[ 1, 1, 1 ], + fields=[ f_poro, f_perm, f_density, f_pressure, f_temp, f_displacement ] ) +cube_output: vtkUnstructuredGrid = __build( options_cube_output ) +number_cells: int = cube_output.GetNumberOfCells() +number_points: int = cube_output.GetNumberOfPoints() +a_poro: np.array = np.linspace( 0, 1, number_cells ) +a_perm: np.array = np.empty( ( number_cells, f_perm.dimension ) ) +for i in range( f_perm.dimension ): + a_perm[ :, i ] = np.linspace( 1e-14 * 10**i, 1e-12 * 10**i, number_cells ) +a_density: np.array = np.linspace( 500, 40000, number_cells ) +a_pressure: np.array = np.linspace( 1e5, 1e7, number_cells ) +a_temp: np.array = np.linspace( 1e2, 5e3, number_points ) +a_temp = a_temp.reshape( number_points, 1 ) +a_displacement: np.array = np.empty( ( number_points, f_displacement.dimension ) ) +for i in range( f_displacement.dimension ): + a_displacement[ :, i ] = np.linspace( 1e-4 * 10**i, 1e-2 * 10**i, number_points ) +for array in [ a_density, a_pressure, a_poro ]: + array = array.reshape( number_cells, 1 ) + +vtk_a_poro = numpy_to_vtk( a_poro ) +vtk_a_perm = numpy_to_vtk( a_perm ) +vtk_a_density = numpy_to_vtk( a_density ) +vtk_a_pressure = numpy_to_vtk( a_pressure ) +vtk_a_temp = numpy_to_vtk( a_temp ) +vtk_a_displacement = numpy_to_vtk( a_displacement ) +vtk_a_poro.SetName( f_poro.name ) 
+vtk_a_perm.SetName( f_perm.name ) +vtk_a_density.SetName( f_density.name + "_invalid" ) +vtk_a_pressure.SetName( f_pressure.name ) +vtk_a_temp.SetName( f_temp.name + "_invalid" ) +vtk_a_displacement.SetName( f_displacement.name ) + +cell_data_output = cube_output.GetCellData() +point_data_output = cube_output.GetPointData() +cell_data_output.AddArray( vtk_a_poro ) +cell_data_output.AddArray( vtk_a_perm ) +cell_data_output.AddArray( vtk_a_density ) +cell_data_output.AddArray( vtk_a_pressure ) +point_data_output.AddArray( vtk_a_temp ) +point_data_output.AddArray( vtk_a_displacement ) + + +class TestClass: + + def test_get_cell_types_and_counts( self ): + result: tuple[ int, int, list[ str ], list[ int ] ] = ms.get_cell_types_and_counts( cube0 ) + assert result[ 0 ] == 8 + assert result[ 1 ] == 1 + assert result[ 2 ] == [ "Hex" ] + assert result[ 3 ] == [ 8 ] + + def test_get_number_cells_per_nodes( self ): + result: dict[ int, int ] = ms.get_number_cells_per_nodes( cube0 ) + for node_id in [ 0, 2, 6, 8, 18, 20, 24, 26 ]: + assert result[ node_id ] == 1 + for node_id in [ 1, 3, 5, 7, 9, 11, 15, 17, 19, 21, 23, 25 ]: + assert result[ node_id ] == 2 + for node_id in [ 4, 10, 12, 14, 16, 22 ]: + assert result[ node_id ] == 4 + assert result[ 13 ] == 8 + result2: dict[ int, int ] = ms.summary_number_cells_per_nodes( result ) + assert result2 == { 1: 8, 2: 12, 4: 6, 8: 1 } + + def test_get_coords_min_max( self ): + result: tuple[ np.ndarray ] = ms.get_coords_min_max( cube0 ) + assert np.array_equal( result[ 0 ], np.array( [ 0.0, 0.0, 0.0 ] ) ) + assert np.array_equal( result[ 1 ], np.array( [ 2.0, 2.0, 2.0 ] ) ) + + def test_build_MeshComponentData( self ): + result: ms.MeshComponentData = ms.build_MeshComponentData( cube0, "point" ) + assert result.componentType == "point" + assert result.scalar_names == [ "scalar_points", "GLOBAL_IDS_POINTS" ] + assert result.scalar_min_values == [ np.float64( 1.0 ), np.int64( 0 ) ] + assert result.scalar_max_values == [ np.float64( 1.0 
), np.int64( 26 ) ] + assert result.tensor_names == [ "tensor_points" ] + assert np.array_equal( result.tensor_min_values[ 0 ], np.array( [ 1.0, 1.0, 1.0 ] ) ) + assert np.array_equal( result.tensor_max_values[ 0 ], np.array( [ 1.0, 1.0, 1.0 ] ) ) + + result2: ms.MeshComponentData = ms.build_MeshComponentData( cube0, "cell" ) + assert result2.componentType == "cell" + assert result2.scalar_names == [ "scalar_cells", "GLOBAL_IDS_CELLS" ] + assert result2.scalar_min_values == [ np.float64( 1.0 ), np.int64( 0 ) ] + assert result2.scalar_max_values == [ np.float64( 1.0 ), np.int64( 7 ) ] + assert result2.tensor_names == [ "tensor_cells" ] + assert np.array_equal( result2.tensor_min_values[ 0 ], np.array( [ 1.0, 1.0, 1.0 ] ) ) + assert np.array_equal( result2.tensor_max_values[ 0 ], np.array( [ 1.0, 1.0, 1.0 ] ) ) + + result3: ms.MeshComponentData = ms.build_MeshComponentData( cube0, "random" ) + assert result3.componentType == "point" + + def test_get_disconnected_nodes( self ): + result: list[ int ] = ms.get_disconnected_nodes_id( cube1 ) + assert result == [ 27, 28, 29 ] + result2: dict[ int, tuple[ float ] ] = ms.get_disconnected_nodes_coords( cube1 ) + assert result2 == { 27: ( 3.0, 0.0, 0.0 ), 28: ( 3.0, 1.0, 0.0 ), 29: ( 3.0, 2.0, 0.0 ) } + + def test_field_values_validity( self ): + mcd_point: ms.MeshComponentData = ms.build_MeshComponentData( cube2, "point" ) + mcd_cell: ms.MeshComponentData = ms.build_MeshComponentData( cube2, "cell" ) + result_points: dict[ str, tuple[ bool, tuple[ float ] ] ] = ms.field_values_validity( mcd_point ) + result_cells: dict[ str, tuple[ bool, tuple[ float ] ] ] = ms.field_values_validity( mcd_cell ) + assert result_points == { + "TEMPERATURE": ( True, ( ms.MIN_FIELD.TEMPERATURE.value, ms.MAX_FIELD.TEMPERATURE.value ) ), + "PRESSURE": ( True, ( ms.MIN_FIELD.PRESSURE.value, ms.MAX_FIELD.PRESSURE.value ) ), + "TEMPERATURE_invalid": ( False, ( ms.MIN_FIELD.TEMPERATURE.value, ms.MAX_FIELD.TEMPERATURE.value ) ), + "PRESSURE_invalid": ( 
False, ( ms.MIN_FIELD.PRESSURE.value, ms.MAX_FIELD.PRESSURE.value ) ), + } + assert result_cells == { + "PERMEABILITY": ( True, ( ms.MIN_FIELD.PERM.value, ms.MAX_FIELD.PERM.value ) ), + "POROSITY": ( True, ( ms.MIN_FIELD.PORO.value, ms.MAX_FIELD.PORO.value ) ), + "DENSITY": ( True, ( ms.MIN_FIELD.DENSITY.value, ms.MAX_FIELD.DENSITY.value ) ), + "PERMEABILITY_invalid": ( False, ( ms.MIN_FIELD.PERM.value, ms.MAX_FIELD.PERM.value ) ), + "POROSITY_invalid": ( False, ( ms.MIN_FIELD.PORO.value, ms.MAX_FIELD.PORO.value ) ), + "DENSITY_invalid": ( False, ( ms.MIN_FIELD.DENSITY.value, ms.MAX_FIELD.DENSITY.value ) ), + } + + def test_check_NaN_fields( self ): + result: dict[ str, int ] = ms.check_NaN_fields( cube3 ) + assert result == { "POROSITY_invalid": 2, "TEMPERATURE_invalid": 2 } + + def test_get_cells_neighbors_number( self ): + result: np.array = ms.get_cells_neighbors_number( cube0 ) + expected: np.array = np.ones( ( 8, 1 ), dtype=int ) * 3 + assert np.array_equal( result, expected ) + result2: np.array = ms.get_cells_neighbors_number( cube4 ) + expected2: np.array = np.ones( ( 9, 1 ), dtype=int ) * 3 + expected2[ 8 ] = 0 + assert np.array_equal( result2, expected2 ) + + def test_mesh_stats_execution( self ): + write_mesh( cube_output, test_mesh_for_stats ) + invalidTest = False + command = [ + "python", MESH_DOCTOR_FILEPATH, "-v", "-i", test_mesh_for_stats.output, "mesh_stats", "--write_stats", "0", + "--output", dir_name, "--disconnected", "0", "--field_values", "0" + ] + try: + # shell=False: command is a list; with shell=True on POSIX only "python" would be executed + result = subprocess.run( command, shell=False, stderr=subprocess.PIPE, universal_newlines=True ) + os.remove( test_mesh_for_stats.output ) + stderr = result.stderr + assert result.returncode == 0 + raw_stderr = r"{}".format( stderr ) + pattern = r"\[.*?\]\[.*?\] (.*)" + matches = re.findall( pattern, raw_stderr ) + no_log = "\n".join( matches ) + mesh_output_stats: str = no_log[ no_log.index( "The mesh has" ): ] + # yapf: disable + expected_stats: str = ( "The mesh has 27 cells and 64
points.\n" + + "There are 1 different types of cells in the mesh:\n" + + "\tHex\t\t(27 cells)\n" + + "Number of cells that have exactly N neighbors:\n" + + "\tNeighbors\tNumber of cells concerned\n" + + "\t3\t\t8\n" + + "\t4\t\t12\n" + + "\t5\t\t6\n" + + "\t6\t\t1\n" + + "Number of nodes being shared by exactly N cells:\n" + + "\tCells\t\tNumber of nodes\n" + + "\t8\t\t8\n" + + "\t1\t\t8\n" + + "\t2\t\t24\n" + + "\t4\t\t24\n" + + "Number of disconnected cells in the mesh: 0\n" + + "Number of disconnected nodes in the mesh: 0\n" + + "The domain is contained in:\n" + + "\t0.0 <= x <= 3.0\n" + + "\t0.0 <= y <= 3.0\n" + + "\t0.0 <= z <= 3.0\n" + + "Does the mesh have global point ids: True\n" + + "Does the mesh have global cell ids: True\n" + + "Number of fields data containing NaNs values: 0\n" + + "There are 5 scalar fields from the CellData:\n" + + "\tPOROSITY min = 0.0 max = 1.0\n" + + "\tDENSITY min = 1.0 max = 1.0\n" + + "\tPRESSURE min = 100000.0 max = 10000000.0\n" + + "\tGLOBAL_IDS_CELLS min = 0.0 max = 26.0\n" + + "\tDENSITY_invalid min = 500.0 max = 40000.0\n" + + "There are 1 vector/tensor fields from the CellData:\n" + + "\tPERMEABILITY min = [1e-14, 1e-13, 1e-12] max = [1e-12, 1e-11, 1e-10]\n" + + "There are 3 scalar fields from the PointData:\n" + + "\tTEMPERATURE min = 1.0 max = 1.0\n" + + "\tGLOBAL_IDS_POINTS min = 0.0 max = 63.0\n" + + "\tTEMPERATURE_invalid min = 100.0 max = 5000.0\n" + + "There are 1 vector/tensor fields from the PointData:\n" + + "\tDISPLACEMENT min = [0.0001, 0.001, 0.01] max = [0.01, 0.1, 1.0]\n" + + "There are 0 scalar fields from the FieldData:\n" + + "There are 0 vector/tensor fields from the FieldData:\n" + + "Unexpected range of values for vector/tensor fields from the CellData:\n" + + "DENSITY_invalid expected to be between 0.0 and 25000.0.\n" + + "Unexpected range of values for vector/tensor fields from the PointData:\n" + + "TEMPERATURE_invalid expected to be between 0.0 and 2000.0.\n" + + "Unexpected range of values for 
vector/tensor fields from the FieldData:" ) + # yapf: enable + assert mesh_output_stats == expected_stats + except Exception as e: + logging.error( "Invalid command input. Test has failed." ) + logging.error( e ) + invalidTest = True + + if invalidTest: + raise ValueError( "test_mesh_stats_execution has failed." ) diff --git a/geos-mesh/tests/test_vtk_utils.py b/geos-mesh/tests/test_vtk_utils.py new file mode 100644 index 0000000..23756c4 --- /dev/null +++ b/geos-mesh/tests/test_vtk_utils.py @@ -0,0 +1,384 @@ +import glob +import os +import shutil +import xml.etree.ElementTree as ET +from geos.mesh.doctor.checks import vtk_utils as vu +from numpy import array, ones, array_equal +from vtkmodules.util.numpy_support import numpy_to_vtk +from vtkmodules.vtkCommonCore import vtkPoints +from vtkmodules.vtkCommonDataModel import ( vtkMultiBlockDataSet, vtkUnstructuredGrid, vtkCellArray, vtkHexahedron, + vtkCompositeDataSet, VTK_HEXAHEDRON ) +""" +For creation of output test meshes +""" +current_file_path: str = __file__ +dir_name: str = os.path.dirname( current_file_path ) +pattern_test: str = "to_check_mesh" +filepath_mesh_for_stats: str = os.path.join( dir_name, pattern_test + ".vtu" ) +test_mesh_for_stats: vu.VtkOutput = vu.VtkOutput( filepath_mesh_for_stats, True ) +geos_hierarchy: str = os.path.join( "mesh", "Level0" ) +""" +Utility functions for tests +""" + + +def split_list( initial_list: list[ any ], number_sub_lists: int ) -> list[ list[ any ] ]: + initial_len: int = len( initial_list ) + assert number_sub_lists <= initial_len + average: int = initial_len // number_sub_lists + remainder: int = initial_len % number_sub_lists + new_lists: list = list() + start: int = 0 + for i in range( initial_len ): + end: int = start + average + ( 1 if i < remainder else 0 ) + new_lists.append( initial_list[ start:end ] ) + start = end + return new_lists + + +def create_vtk_points( point_3D_coords: list[ list[ float ] ] ) -> vtkPoints: + points: vtkPoints = vtkPoints() + for 
coord in point_3D_coords: + points.InsertNextPoint( coord ) + return points + + +def create_vtk_hexahedron( point_ids: list[ int ] ) -> vtkHexahedron: + hex: vtkHexahedron = vtkHexahedron() + for i, point_id in enumerate( point_ids ): + hex.GetPointIds().SetId( i, point_id ) + return hex + + +def create_type_vtk_grid( point_3D_coords: list[ list[ float ] ], all_point_ids: list[ list[ int ] ], + vtk_type: int ) -> vtkUnstructuredGrid: + points: vtkPoints = create_vtk_points( point_3D_coords ) + cells: vtkCellArray = vtkCellArray() + for point_ids in all_point_ids: + cells.InsertNextCell( create_vtk_hexahedron( point_ids ) ) + grid: vtkUnstructuredGrid = vtkUnstructuredGrid() + grid.SetPoints( points ) + grid.SetCells( vtk_type, cells ) + return grid + + +def create_geos_pvd( all_grids_per_vtm: dict[ str, dict[ str, list[ vtkUnstructuredGrid ] ] ], + pvd_dir_path: str ) -> str: + # Create the .pvd + os.makedirs( pvd_dir_path, exist_ok=True ) + pvd_name = os.path.basename( pvd_dir_path ) + root_pvd = ET.Element( "VTKFile", type="Collection", version="1.0" ) + collection = ET.SubElement( root_pvd, "Collection" ) + + for timestep, regions_with_grids in all_grids_per_vtm.items(): + vtm_directory: str = os.path.join( pvd_dir_path, timestep ) + os.makedirs( vtm_directory, exist_ok=True ) + vtm_sub_path: str = os.path.join( pvd_name, timestep + ".vtm" ) + ET.SubElement( collection, "DataSet", timestep=timestep, file=vtm_sub_path ) + + # Create the .vtm file respecting GEOS format + root_vtm = ET.Element( "VTKFile", type="vtkMultiBlockDataSet", version="1.0" ) + vtm = ET.SubElement( root_vtm, "vtkMultiBlockDataSet" ) + mesh_block = ET.SubElement( vtm, "Block", name="mesh" ) + level0_block = ET.SubElement( mesh_block, "Block", name="Level0" ) + cell_element_region_block = ET.SubElement( level0_block, "Block", name="CellElementRegion" ) + + for region, grids in regions_with_grids.items(): + region_directory: str = os.path.join( vtm_directory, geos_hierarchy, region ) + 
os.makedirs( region_directory, exist_ok=True ) + + # Create block element for regions + region_block = ET.SubElement( cell_element_region_block, "Block", name=region ) + for i, grid in enumerate( grids ): + rank_name: str = "rank_0" + str( i ) + vtu_name: str = rank_name + ".vtu" + path_from_vtm: str = os.path.join( timestep, geos_hierarchy, region, vtu_name ) + ET.SubElement( region_block, "DataSet", name=rank_name, file=path_from_vtm ) + vtu_filepath: str = os.path.join( region_directory, vtu_name ) + output_vtu: vu.VtkOutput = vu.VtkOutput( vtu_filepath, False ) + vu.write_mesh( grid, output_vtu ) + + # write the vtm for each timestep (next to the pvd directory, as referenced by the .pvd) + vtm_filepath: str = os.path.join( pvd_dir_path, timestep + ".vtm" ) + tree_vtm = ET.ElementTree( root_vtm ) + tree_vtm.write( vtm_filepath, encoding='utf-8', xml_declaration=True ) + + # write the pvd file linking to the vtms written before + tree_pvd = ET.ElementTree( root_pvd ) + pvd_filepath: str = os.path.join( os.path.dirname( pvd_dir_path ), pvd_name + ".pvd" ) + tree_pvd.write( pvd_filepath, encoding='utf-8', xml_declaration=True ) + + return pvd_filepath + + +""" +Grids to perform tests on.
+""" +# yapf: disable +# 4 Hexahedrons +four_hex_ids: list[ list[ int ] ] = [ [ 0, 1, 4, 3, 6, 7, 10, 9 ], + [ 1, 2, 5, 4, 7, 8, 11, 10 ], + [ 6, 7, 10, 9, 12, 13, 16, 15 ], + [ 7, 8, 11, 10, 13, 14, 17, 16 ] ] + +four_hexs_points_coords: list[ list[ float ] ] = [ [ 0.0, 0.0, 0.0 ], # point0 + [ 1.0, 0.0, 0.0 ], # point1 + [ 2.0, 0.0, 0.0 ], # point2 + [ 0.0, 1.0, 0.0 ], # point3 + [ 1.0, 1.0, 0.0 ], # point4 + [ 2.0, 1.0, 0.0 ], # point5 + [ 0.0, 0.0, 1.0 ], # point6 + [ 1.0, 0.0, 1.0 ], # point7 + [ 2.0, 0.0, 1.0 ], # point8 + [ 0.0, 1.0, 1.0 ], # point9 + [ 1.0, 1.0, 1.0 ], # point10 + [ 2.0, 1.0, 1.0 ], # point11 + [ 0.0, 0.0, 2.0 ], # point12 + [ 1.0, 0.0, 2.0 ], # point13 + [ 2.0, 0.0, 2.0 ], # point14 + [ 0.0, 1.0, 2.0 ], # point15 + [ 1.0, 1.0, 2.0 ], # point16 + [ 2.0, 1.0, 2.0 ] ] # point17 +# Create grid +four_hex_grid: vtkUnstructuredGrid = create_type_vtk_grid( four_hexs_points_coords, four_hex_ids, VTK_HEXAHEDRON ) +# Create output paths +path_four_hex_vtu: str = os.path.join( dir_name, "4_hex.vtu" ) +path_four_hex_vtk: str = os.path.join( dir_name, "4_hex.vtk" ) +output_four_hex_vtu: vu.VtkOutput = vu.VtkOutput( path_four_hex_vtu, False ) +output_four_hex_vtk: vu.VtkOutput = vu.VtkOutput( path_four_hex_vtk, False ) + + +# 8 Hexahedrons divided in 2 regions and for each region into 2 ranks +two_hex_ids = [ [ 0, 1, 4, 3, 6, 7, 10, 9 ], + [ 1, 2, 5, 4, 7, 8, 11, 10 ] ] +## GRID 1 +two_hex1_points_coords: list[ list[ float ] ] = [ [ 0.0, 0.0, 0.0 ], # point0 + [ 1.0, 0.0, 0.0 ], # point1 + [ 2.0, 0.0, 0.0 ], # point2 + [ 0.0, 1.0, 0.0 ], # point3 + [ 1.0, 1.0, 0.0 ], # point4 + [ 2.0, 1.0, 0.0 ], # point5 + [ 0.0, 0.0, 1.0 ], # point6 + [ 1.0, 0.0, 1.0 ], # point7 + [ 2.0, 0.0, 1.0 ], # point8 + [ 0.0, 1.0, 1.0 ], # point9 + [ 1.0, 1.0, 1.0 ], # point10 + [ 2.0, 1.0, 1.0 ] ] # point11 +two_hex1_grid: vtkUnstructuredGrid = create_type_vtk_grid( two_hex1_points_coords, two_hex_ids, VTK_HEXAHEDRON ) + +## GRID 2 +two_hex2_points_coords: list[ list[ float ] 
] = [ [ 0.0, 1.0, 0.0 ], # point0 + [ 1.0, 1.0, 0.0 ], # point1 + [ 2.0, 1.0, 0.0 ], # point2 + [ 0.0, 2.0, 0.0 ], # point3 + [ 1.0, 2.0, 0.0 ], # point4 + [ 2.0, 2.0, 0.0 ], # point5 + [ 0.0, 1.0, 1.0 ], # point6 + [ 1.0, 1.0, 1.0 ], # point7 + [ 2.0, 1.0, 1.0 ], # point8 + [ 0.0, 2.0, 1.0 ], # point9 + [ 1.0, 2.0, 1.0 ], # point10 + [ 2.0, 2.0, 1.0 ] ] # point11 +two_hex2_grid: vtkUnstructuredGrid = create_type_vtk_grid( two_hex2_points_coords, two_hex_ids, VTK_HEXAHEDRON ) + +## GRID 3 +two_hex3_points_coords: list[ list[ float ] ] = [ [ 0.0, 0.0, 1.0 ], # point0 + [ 1.0, 0.0, 1.0 ], # point1 + [ 2.0, 0.0, 1.0 ], # point2 + [ 0.0, 1.0, 1.0 ], # point3 + [ 1.0, 1.0, 1.0 ], # point4 + [ 2.0, 1.0, 1.0 ], # point5 + [ 0.0, 0.0, 2.0 ], # point6 + [ 1.0, 0.0, 2.0 ], # point7 + [ 2.0, 0.0, 2.0 ], # point8 + [ 0.0, 1.0, 2.0 ], # point9 + [ 1.0, 1.0, 2.0 ], # point10 + [ 2.0, 1.0, 2.0 ] ] # point11 +two_hex3_grid: vtkUnstructuredGrid = create_type_vtk_grid( two_hex3_points_coords, two_hex_ids, VTK_HEXAHEDRON ) + +## GRID 4 +two_hex4_points_coords: list[ list[ float ] ] = [ [ 0.0, 1.0, 1.0 ], # point0 + [ 1.0, 1.0, 1.0 ], # point1 + [ 2.0, 1.0, 1.0 ], # point2 + [ 0.0, 2.0, 1.0 ], # point3 + [ 1.0, 2.0, 1.0 ], # point4 + [ 2.0, 2.0, 1.0 ], # point5 + [ 0.0, 1.0, 2.0 ], # point6 + [ 1.0, 1.0, 2.0 ], # point7 + [ 2.0, 1.0, 2.0 ], # point8 + [ 0.0, 2.0, 2.0 ], # point9 + [ 1.0, 2.0, 2.0 ], # point10 + [ 2.0, 2.0, 2.0 ] ] # point11 +two_hex4_grid: vtkUnstructuredGrid = create_type_vtk_grid( two_hex4_points_coords, two_hex_ids, VTK_HEXAHEDRON ) +all_two_hex_grids: list[ vtkUnstructuredGrid ] = [ two_hex1_grid, two_hex2_grid, two_hex3_grid, two_hex4_grid ] +# yapf: enable + +## Duplicated grids but with different DataArrays per region and per timestep +number_timesteps: int = 2 +number_regions: int = 2 + +# Create the target directories for the tests and generate the vtms +pvd_name: str = "vtkOutput" +pvd_directory: str = os.path.join( dir_name, pvd_name ) +region_name: str = 
"region" +stored_grids: dict[ str, dict[ str, list[ vtkUnstructuredGrid ] ] ] = dict() +for i in range( number_timesteps ): + vtm_name: str = "time" + str( i ) + stored_grids[ vtm_name ] = dict() + splitted_grids_by_region: list[ list[ vtkUnstructuredGrid ] ] = split_list( all_two_hex_grids, number_regions ) + for j in range( number_regions ): + region: str = region_name + str( j ) + stored_grids[ vtm_name ][ region ] = list() + for k, grid in enumerate( splitted_grids_by_region[ j ] ): + new_grid: vtkUnstructuredGrid = vtkUnstructuredGrid() + new_grid.DeepCopy( grid ) + for dimension in [ 1, 2, 3 ]: + arr_np: array = ones( ( new_grid.GetNumberOfCells(), dimension ), dtype=int ) * ( i * 100 + 10 * j + k ) + arr_points = numpy_to_vtk( arr_np ) + arr_cells = numpy_to_vtk( arr_np ) + arr_points.SetName( "point_param" + str( dimension ) ) + arr_cells.SetName( "cell_param" + str( dimension ) ) + new_grid.GetPointData().AddArray( arr_points ) + new_grid.GetCellData().AddArray( arr_cells ) + stored_grids[ vtm_name ][ region ].append( new_grid ) + + +class TestClass: + + def test_to_vtk_id_list_and_vtk_iter( self ): + # vtk_id_list + data1: list[ int ] = [ 0, 1, 2 ] + data2: tuple[ int ] = ( 3, 4, 5, 6 ) + result = vu.to_vtk_id_list( data1 ) + result2 = vu.to_vtk_id_list( data2 ) + assert result.IsA( "vtkIdList" ) + assert result2.IsA( "vtkIdList" ) + assert result.GetNumberOfIds() == 3 + assert result2.GetNumberOfIds() == 4 + # vtk_iter + result3 = list( vu.vtk_iter( result ) ) + result4 = tuple( vu.vtk_iter( result2 ) ) + assert len( result3 ) == 3 + assert len( result4 ) == 4 + assert result3 == data1 + assert result4 == data2 + + def test_write_and_read_mesh( self ): + found_files_vtu: list[ str ] = list() + found_files_vtk: list[ str ] = list() + found_files_vtu.extend( glob.glob( os.path.join( dir_name, "*.vtu" ) ) ) + found_files_vtu.extend( glob.glob( os.path.join( dir_name, "*.vtk" ) ) ) + assert len( found_files_vtu ) == 0 + assert len( found_files_vtk ) == 0 + 
vu.write_mesh( four_hex_grid, output_four_hex_vtu ) + vu.write_mesh( four_hex_grid, output_four_hex_vtk ) + found_files_vtu.extend( glob.glob( os.path.join( dir_name, "*.vtu" ) ) ) + found_files_vtk.extend( glob.glob( os.path.join( dir_name, "*.vtk" ) ) ) + assert len( found_files_vtu ) == 1 + assert len( found_files_vtk ) == 1 + # no overwritting possible + vu.write_mesh( four_hex_grid, output_four_hex_vtu ) + vu.write_mesh( four_hex_grid, output_four_hex_vtk ) + assert len( found_files_vtu ) == 1 + assert len( found_files_vtk ) == 1 + # read the meshes + read_vtu: vtkUnstructuredGrid = vu.read_mesh( output_four_hex_vtu.output ) + read_vtk: vtkUnstructuredGrid = vu.read_mesh( output_four_hex_vtu.output ) + try: + os.remove( output_four_hex_vtu.output ) + os.remove( output_four_hex_vtk.output ) + except Exception as e: + raise ValueError( f"test_write_and_read_mesh failed because of '{e}'." ) + assert read_vtu.GetNumberOfCells() == four_hex_grid.GetNumberOfCells() + assert read_vtk.GetNumberOfCells() == four_hex_grid.GetNumberOfCells() + + def test_write_and_read_vtm( self ): + multiblock: vtkMultiBlockDataSet = vtkMultiBlockDataSet() + for i in range( 5 ): + vtu: vtkUnstructuredGrid = vtkUnstructuredGrid() + multiblock.SetBlock( i, vtu ) + multiblock.GetMetaData( i ).Set( vtkCompositeDataSet.NAME(), "rank" + str( i ) ) + output_vtk: vu.VtkOutput = vu.VtkOutput( os.path.join( dir_name, "test.vtm" ), True ) + vu.write_VTM( multiblock, output_vtk ) + mulltiblock_read: vtkMultiBlockDataSet = vu.read_vtm( output_vtk.output ) + os.remove( output_vtk.output ) + assert multiblock.GetNumberOfBlocks() == mulltiblock_read.GetNumberOfBlocks() == 5 + + def test_get_filepath_from_pvd_and_vtm( self ): + pvd_filepath: str = create_geos_pvd( stored_grids, pvd_directory ) + result0: str = vu.get_vtm_filepath_from_pvd( pvd_filepath, 0 ) + result1: str = vu.get_vtm_filepath_from_pvd( pvd_filepath, 1 ) + result2: list[ str ] = vu.get_vtu_filepaths_from_vtm( result0 ) + + try: + 
shutil.rmtree( pvd_directory ) + except OSError as e: + print( f"Error: {e}" ) + os.remove( pvd_filepath ) + + assert result0.endswith( "time0.vtm" ) + assert result1.endswith( "time1.vtm" ) + for i, path2 in enumerate( result2 ): + if i % 4 < 2: + region_name: str = "region0" + else: + region_name = "region1" + if i % 2 == 0: + assert path2.endswith( os.path.join( geos_hierarchy, region_name, "rank_00.vtu" ) ) + else: + assert path2.endswith( os.path.join( geos_hierarchy, region_name, "rank_01.vtu" ) ) + + def test_get_vtu_filepaths( self ): + pvd_filepath: str = create_geos_pvd( stored_grids, pvd_directory ) + result0: tuple[ str ] = vu.get_vtu_filepaths( pvd_filepath, 0 ) + result1: tuple[ str ] = vu.get_vtu_filepaths( pvd_filepath, 1 ) + try: + shutil.rmtree( pvd_directory ) + except OSError as e: + print( f"Error: {e}" ) + os.remove( pvd_filepath ) + for i in range( len( result0 ) ): + assert "time0" in result0[ i ] # looking through first vtm which is time0 + assert "time1" in result1[ i ] # looking through last vtm which is time1 + + def test_has_invalid_field( self ): + # initialize test meshes + test_mesh_points: vtkUnstructuredGrid = four_hex_grid.NewInstance() + test_mesh_cells: vtkUnstructuredGrid = four_hex_grid.NewInstance() + test_mesh: vtkUnstructuredGrid = four_hex_grid.NewInstance() + test_mesh_points.CopyStructure( four_hex_grid ) + test_mesh_cells.CopyStructure( four_hex_grid ) + test_mesh.CopyStructure( four_hex_grid ) + test_mesh_points.CopyAttributes( four_hex_grid ) + test_mesh_cells.CopyAttributes( four_hex_grid ) + test_mesh.CopyAttributes( four_hex_grid ) + # create vtk arrays + array_for_points: array = ones( ( test_mesh_points.GetNumberOfPoints(), 1 ) ) + array_for_cells: array = ones( ( test_mesh_cells.GetNumberOfCells(), 1 ) ) + vtk_array_points_invalid = numpy_to_vtk( array_for_points ) + vtk_array_cells_invalid = numpy_to_vtk( array_for_cells ) + vtk_array_points_valid = numpy_to_vtk( array_for_points ) + vtk_array_cells_valid = 
numpy_to_vtk( array_for_cells ) + invalid_fields: list[ str ] = [ "PointsWrong", "CellsWrong" ] + vtk_array_points_invalid.SetName( invalid_fields[ 0 ] ) + vtk_array_cells_invalid.SetName( invalid_fields[ 1 ] ) + vtk_array_points_valid.SetName( "PointsValid" ) + vtk_array_cells_valid.SetName( "CellsValid" ) + # add vtk arrays + test_mesh_points.GetPointData().AddArray( vtk_array_points_invalid ) + test_mesh_cells.GetCellData().AddArray( vtk_array_cells_invalid ) + test_mesh.GetPointData().AddArray( vtk_array_points_valid ) + test_mesh.GetCellData().AddArray( vtk_array_cells_valid ) + # check invalid_fields + assert vu.has_invalid_field( test_mesh_points, invalid_fields ) == True + assert vu.has_invalid_field( test_mesh_cells, invalid_fields ) == True + assert vu.has_invalid_field( test_mesh, invalid_fields ) == False + + def test_get_points_coords_from_vtk( self ): + result: array = vu.get_points_coords_from_vtk( four_hex_grid ) + assert four_hexs_points_coords == result.tolist() + + def test_get_cell_centers_array( self ): + result: array = vu.get_cell_centers_array( four_hex_grid ) + assert array_equal( result, + array( [ [ 0.5, 0.5, 0.5 ], [ 1.5, 0.5, 0.5 ], [ 0.5, 0.5, 1.5 ], [ 1.5, 0.5, 1.5 ] ] ) )