From 675937606dada706694bfecda8d9bb3239fb7131 Mon Sep 17 00:00:00 2001 From: alexbenedicto Date: Mon, 5 Aug 2024 05:38:03 -0700 Subject: [PATCH 01/10] Invalid packages import for geos-mesh, impacting documentation (#32) Updating sphinx and sphinx-argparse solved the issue. --- docs/requirements.txt | 3 ++- geos-mesh/src/geos/mesh/conversion/main.py | 3 +-- geos-mesh/src/geos/mesh/doctor/__init__.py | 1 + 3 files changed, 4 insertions(+), 3 deletions(-) create mode 100644 geos-mesh/src/geos/mesh/doctor/__init__.py diff --git a/docs/requirements.txt b/docs/requirements.txt index 8a23448..6ecdf80 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,5 +1,6 @@ +sphinx >= 7.4.7 sphinx_rtd_theme -sphinx-argparse +sphinx-argparse >= 0.5.2 sphinx-design # Running CLI programs and capture outputs sphinxcontrib-programoutput>=0.17 diff --git a/geos-mesh/src/geos/mesh/conversion/main.py b/geos-mesh/src/geos/mesh/conversion/main.py index 90b048f..9579f32 100644 --- a/geos-mesh/src/geos/mesh/conversion/main.py +++ b/geos-mesh/src/geos/mesh/conversion/main.py @@ -1,6 +1,7 @@ import argparse import logging import sys +from geos.mesh.conversion import abaqus_converter def build_abaqus_converter_input_parser() -> argparse.ArgumentParser: @@ -25,8 +26,6 @@ def main() -> None: output (str): Output mesh file name -v/--verbose (flag): Increase verbosity level """ - from geosx_mesh_tools import abaqus_converter - # Parse the user arguments parser = build_abaqus_converter_input_parser() args = parser.parse_args() diff --git a/geos-mesh/src/geos/mesh/doctor/__init__.py b/geos-mesh/src/geos/mesh/doctor/__init__.py new file mode 100644 index 0000000..b1cfe26 --- /dev/null +++ b/geos-mesh/src/geos/mesh/doctor/__init__.py @@ -0,0 +1 @@ +# Empty \ No newline at end of file From 59d95768502ac5309f722cdd5761b139cb8af4b6 Mon Sep 17 00:00:00 2001 From: alexbenedicto Date: Thu, 29 Aug 2024 10:34:49 -0700 Subject: [PATCH 02/10] Change dynamic documentation for geos-mesh to static 
documentation to avoid CI errors. (#37) --- docs/geos-mesh.rst | 205 ++++++++++++++++++++++++++++++++++++++++----- 1 file changed, 183 insertions(+), 22 deletions(-) diff --git a/docs/geos-mesh.rst b/docs/geos-mesh.rst index 8582f10..cbdea0e 100644 --- a/docs/geos-mesh.rst +++ b/docs/geos-mesh.rst @@ -15,14 +15,59 @@ Modules To list all the modules available through ``mesh-doctor``, you can simply use the ``--help`` option, which will list all available modules as well as a quick summary. -.. command-output:: python src/geos/mesh/doctor/mesh_doctor.py --help - :cwd: ../geos-mesh +.. code-block:: + + $ python src/geos/mesh/doctor/mesh_doctor.py --help + usage: mesh_doctor.py [-h] [-v] [-q] -i VTK_MESH_FILE + {collocated_nodes,element_volumes,fix_elements_orderings,generate_cube,generate_fractures,generate_global_ids,non_conformal,self_intersecting_elements,supported_elements} + ... + + Inspects meshes for GEOSX. + + positional arguments: + {collocated_nodes,element_volumes,fix_elements_orderings,generate_cube,generate_fractures,generate_global_ids,non_conformal,self_intersecting_elements,supported_elements} + Modules + collocated_nodes + Checks if nodes are collocated. + element_volumes + Checks if the volumes of the elements are greater than "min". + fix_elements_orderings + Reorders the support nodes for the given cell types. + generate_cube + Generate a cube and its fields. + generate_fractures + Splits the mesh to generate the faults and fractures. [EXPERIMENTAL] + generate_global_ids + Adds globals ids for points and cells. + non_conformal + Detects non conformal elements. [EXPERIMENTAL] + self_intersecting_elements + Checks if the faces of the elements are self intersecting. + supported_elements + Check that all the elements of the mesh are supported by GEOSX. + + options: + -h, --help + show this help message and exit + -v Use -v 'INFO', -vv for 'DEBUG'. Defaults to 'WARNING'. + -q Use -q to reduce the verbosity of the output. 
+ -i VTK_MESH_FILE, --vtk-input-file VTK_MESH_FILE + + Note that checks are dynamically loaded. + An option may be missing because of an unloaded module. + Increase verbosity (-v, -vv) to get full information. Then, if you are interested in a specific module, you can ask for its documentation using the ``mesh-doctor module_name --help`` pattern. For example -.. command-output:: python src/geos/mesh/doctor/mesh_doctor.py collocated_nodes --help - :cwd: ../geos-mesh +.. code-block:: + + $ python src/geos/mesh/doctor/mesh_doctor.py collocated_nodes --help + usage: mesh_doctor.py collocated_nodes [-h] --tolerance TOLERANCE + + options: + -h, --help show this help message and exit + --tolerance TOLERANCE [float]: The absolute distance between two nodes for them to be considered collocated. ``mesh-doctor`` loads its module dynamically. If a module can't be loaded, ``mesh-doctor`` will proceed and try to load other modules. @@ -44,8 +89,14 @@ Here is a list and brief description of all the modules available. Displays the neighboring nodes that are closer to each other than a prescribed threshold. It is not uncommon to define multiple nodes for the exact same position, which will typically be an issue for ``geos`` and should be fixed. -.. command-output:: python src/geos/mesh/doctor/mesh_doctor.py collocated_nodes --help - :cwd: ../geos-mesh +.. code-block:: + + $ python src/geos/mesh/doctor/mesh_doctor.py collocated_nodes --help + usage: mesh_doctor.py collocated_nodes [-h] --tolerance TOLERANCE + + options: + -h, --help show this help message and exit + --tolerance TOLERANCE [float]: The absolute distance between two nodes for them to be considered collocated. ``element_volumes`` """"""""""""""""""" @@ -53,8 +104,14 @@ It is not uncommon to define multiple nodes for the exact same position, which w Computes the volumes of all the cells and displays the ones that are below a prescribed threshold. 
Cells with negative volumes will typically be an issue for ``geos`` and should be fixed. -.. command-output:: python src/geos/mesh/doctor/mesh_doctor.py element_volumes --help - :cwd: ../geos-mesh +.. code-block:: + + $ python src/geos/mesh/doctor/mesh_doctor.py element_volumes --help + usage: mesh_doctor.py element_volumes [-h] --min 0.0 + + options: + -h, --help show this help message and exit + --min 0.0 [float]: The minimum acceptable volume. Defaults to 0.0. ``fix_elements_orderings`` """""""""""""""""""""""""" @@ -63,8 +120,29 @@ It sometimes happens that an exported mesh does not abide by the ``vtk`` orderin The ``fix_elements_orderings`` module can rearrange the nodes of given types of elements. This can be convenient if you cannot regenerate the mesh. -.. command-output:: python src/geos/mesh/doctor/mesh_doctor.py fix_elements_orderings --help - :cwd: ../geos-mesh +.. code-block:: + + $ python src/geos/mesh/doctor/mesh_doctor.py fix_elements_orderings --help + usage: mesh_doctor.py fix_elements_orderings [-h] [--Hexahedron 1,6,5,4,7,0,2,3] [--Prism5 8,2,0,7,6,9,5,1,4,3] + [--Prism6 11,2,8,10,5,0,9,7,6,1,4,3] [--Pyramid 3,4,0,2,1] + [--Tetrahedron 2,0,3,1] [--Voxel 1,6,5,4,7,0,2,3] + [--Wedge 3,5,4,0,2,1] --output OUTPUT [--data-mode binary, ascii] + + options: + -h, --help show this help message and exit + --Hexahedron 1,6,5,4,7,0,2,3 + [list of integers]: node permutation for "Hexahedron". + --Prism5 8,2,0,7,6,9,5,1,4,3 + [list of integers]: node permutation for "Prism5". + --Prism6 11,2,8,10,5,0,9,7,6,1,4,3 + [list of integers]: node permutation for "Prism6". + --Pyramid 3,4,0,2,1 [list of integers]: node permutation for "Pyramid". + --Tetrahedron 2,0,3,1 [list of integers]: node permutation for "Tetrahedron". + --Voxel 1,6,5,4,7,0,2,3 [list of integers]: node permutation for "Voxel". + --Wedge 3,5,4,0,2,1 [list of integers]: node permutation for "Wedge". + --output OUTPUT [string]: The vtk output file destination. 
+ --data-mode binary, ascii + [string]: For ".vtu" output format, the data mode can be binary or ascii. Defaults to binary. ``generate_cube`` """"""""""""""""" @@ -73,8 +151,30 @@ This module conveniently generates cubic meshes in ``vtk``. It can also generate fields with simple values. This tool can also be useful to generate a trial mesh that will later be refined or customized. -.. command-output:: python src/geos/mesh/doctor/mesh_doctor.py generate_cube --help - :cwd: ../geos-mesh +.. code-block:: + + $ python src/geos/mesh/doctor/mesh_doctor.py generate_cube --help + usage: mesh_doctor.py generate_cube [-h] [--x 0:1.5:3] [--y 0:5:10] [--z 0:1] [--nx 2:2] [--ny 1:1] [--nz 4] + [--fields name:support:dim [name:support:dim ...]] [--cells] [--no-cells] + [--points] [--no-points] --output OUTPUT [--data-mode binary, ascii] + + options: + -h, --help show this help message and exit + --x 0:1.5:3 [list of floats]: X coordinates of the points. + --y 0:5:10 [list of floats]: Y coordinates of the points. + --z 0:1 [list of floats]: Z coordinates of the points. + --nx 2:2 [list of integers]: Number of elements in the X direction. + --ny 1:1 [list of integers]: Number of elements in the Y direction. + --nz 4 [list of integers]: Number of elements in the Z direction. + --fields name:support:dim + [name:support:dim ...]: Create fields on CELLS or POINTS, with given dimension (typically 1 or 3). + --cells [bool]: Generate global ids for cells. Defaults to true. + --no-cells [bool]: Don't generate global ids for cells. + --points [bool]: Generate global ids for points. Defaults to true. + --no-points [bool]: Don't generate global ids for points. + --output OUTPUT [string]: The vtk output file destination. + --data-mode binary, ascii + [string]: For ".vtu" output format, the data mode can be binary or ascii. Defaults to binary. 
``generate_fractures`` """""""""""""""""""""" @@ -82,8 +182,31 @@ This tool can also be useful to generate a trial mesh that will later be refined For a conformal fracture to be defined in a mesh, ``geos`` requires the mesh to be split at the faces where the fracture gets across the mesh. The ``generate_fractures`` module will split the mesh and generate the multi-block ``vtk`` files. -.. command-output:: python src/geos/mesh/doctor/mesh_doctor.py generate_fractures --help - :cwd: ../geos-mesh +.. code-block:: + + $ python src/geos/mesh/doctor/mesh_doctor.py generate_fractures --help + usage: mesh_doctor.py generate_fractures [-h] --policy field, internal_surfaces [--name NAME] [--values VALUES] + --output OUTPUT [--data-mode binary, ascii] --fracture-output + FRACTURE_OUTPUT [--fracture-data-mode binary, ascii] + + options: + -h, --help show this help message and exit + --policy field, internal_surfaces + [string]: The criterion to define the surfaces that will be changed into fracture zones. + Possible values are "field, internal_surfaces" + --name NAME [string]: If the "field" policy is selected, defines which field will be considered to + define the fractures. If the "internal_surfaces" policy is selected, defines the name of + the attribute will be considered to identify the fractures. + --values VALUES [list of comma separated integers]: If the "field" policy is selected, which changes of + the field will be considered as a fracture. If the "internal_surfaces" policy is + selected, list of the fracture attributes. + --output OUTPUT [string]: The vtk output file destination. + --data-mode binary, ascii + [string]: For ".vtu" output format, the data mode can be binary or ascii. Defaults to binary. + --fracture-output FRACTURE_OUTPUT + [string]: The vtk output file destination. + --fracture-data-mode binary, ascii + [string]: For ".vtu" output format, the data mode can be binary or ascii. Defaults to binary. 
``generate_global_ids`` """"""""""""""""""""""" @@ -91,8 +214,21 @@ The ``generate_fractures`` module will split the mesh and generate the multi-blo When running ``geos`` in parallel, `global ids` can be used to refer to data across multiple ranks. The ``generate_global_ids`` can generate `global ids` for the imported ``vtk`` mesh. -.. command-output:: python src/geos/mesh/doctor/mesh_doctor.py generate_global_ids --help - :cwd: ../geos-mesh +.. code-block:: + + $ python src/geos/mesh/doctor/mesh_doctor.py generate_global_ids --help + usage: mesh_doctor.py generate_global_ids [-h] [--cells] [--no-cells] [--points] [--no-points] --output OUTPUT + [--data-mode binary, ascii] + + options: + -h, --help show this help message and exit + --cells [bool]: Generate global ids for cells. Defaults to true. + --no-cells [bool]: Don't generate global ids for cells. + --points [bool]: Generate global ids for points. Defaults to true. + --no-points [bool]: Don't generate global ids for points. + --output OUTPUT [string]: The vtk output file destination. + --data-mode binary, ascii + [string]: For ".vtu" output format, the data mode can be binary or ascii. Defaults to binary. ``non_conformal`` """"""""""""""""" @@ -102,8 +238,19 @@ This module will detect elements which are close enough (there's a user defined The angle between two faces can also be precribed. This module can be a bit time consuming. -.. command-output:: python src/geos/mesh/doctor/mesh_doctor.py non_conformal --help - :cwd: ../geos-mesh +.. code-block:: + + $ python src/geos/mesh/doctor/mesh_doctor.py non_conformal --help + usage: mesh_doctor.py non_conformal [-h] [--angle_tolerance 10.0] [--point_tolerance POINT_TOLERANCE] + [--face_tolerance FACE_TOLERANCE] + + options: + -h, --help show this help message and exit + --angle_tolerance 10.0 [float]: angle tolerance in degrees. Defaults to 10.0 + --point_tolerance POINT_TOLERANCE + [float]: tolerance for two points to be considered collocated. 
+ --face_tolerance FACE_TOLERANCE + [float]: tolerance for two faces to be considered "touching". ``self_intersecting_elements`` """""""""""""""""""""""""""""" @@ -111,8 +258,15 @@ This module can be a bit time consuming. Some meshes can have cells that auto-intersect. This module will display the elements that have faces intersecting. -.. command-output:: python src/geos/mesh/doctor/mesh_doctor.py self_intersecting_elements --help - :cwd: ../geos-mesh +.. code-block:: + + $ python src/geos/mesh/doctor/mesh_doctor.py self_intersecting_elements --help + usage: mesh_doctor.py self_intersecting_elements [-h] [--min 2.220446049250313e-16] + + options: + -h, --help show this help message and exit + --min 2.220446049250313e-16 + [float]: The tolerance in the computation. Defaults to your machine precision 2.220446049250313e-16. ``supported_elements`` """""""""""""""""""""" @@ -125,8 +279,15 @@ But also prismes up to 11 faces. The ``supported_elements`` check will validate that no unsupported element is included in the input mesh. It will also verify that the ``VTK_POLYHEDRON`` cells can effectively get converted into a supported type of element. -.. command-output:: python src/geos/mesh/doctor/mesh_doctor.py supported_elements --help - :cwd: ../geos-mesh +.. code-block:: + + $ python src/geos/mesh/doctor/mesh_doctor.py supported_elements --help + usage: mesh_doctor.py supported_elements [-h] [--chunck_size 1] [--nproc 8] + + options: + -h, --help show this help message and exit + --chunck_size 1 [int]: Defaults chunk size for parallel processing to 1 + --nproc 8 [int]: Number of threads used for parallel processing. Defaults to your CPU count 8. 
From 98680c5b1320489fd665cc990183e2ef200707bf Mon Sep 17 00:00:00 2001 From: Christopher Sherman Date: Wed, 4 Sep 2024 21:39:10 -0700 Subject: [PATCH 03/10] fix: Update numpy deprecation of product and test dependency issues (#38) --- geos-ats/src/geos/ats/helpers/curve_check.py | 6 ++++-- geos-ats/src/geos/ats/test_case.py | 13 +++++++++---- 2 files changed, 13 insertions(+), 6 deletions(-) diff --git a/geos-ats/src/geos/ats/helpers/curve_check.py b/geos-ats/src/geos/ats/helpers/curve_check.py index a35b27f..31250c7 100644 --- a/geos-ats/src/geos/ats/helpers/curve_check.py +++ b/geos-ats/src/geos/ats/helpers/curve_check.py @@ -33,16 +33,18 @@ def interpolate_values_time( ta, xa, tb ): """ N = list( np.shape( xa ) ) M = len( tb ) + ta = np.ascontiguousarray( np.squeeze( ta ) ) + tb = np.ascontiguousarray( np.squeeze( tb ) ) if ( len( N ) == 1 ): return interp1d( ta, xa )( tb ) else: # Reshape the input array so that we can work on the non-time axes - S = np.product( N[ 1: ] ) + S = np.prod( N[ 1: ] ) xc = np.reshape( xa, ( N[ 0 ], S ) ) xd = np.zeros( ( len( tb ), S ) ) for ii in range( S ): - xd[ :, ii ] = interp1d( ta, xc[ :, ii ] )( tb ) + xd[ :, ii ] = interp1d( ta, xc[ :, ii ], bounds_error=False, fill_value='extrapolate' )( tb ) # Return the array to it's expected shape N[ 0 ] = M diff --git a/geos-ats/src/geos/ats/test_case.py b/geos-ats/src/geos/ats/test_case.py index 9c3c27f..7affe6c 100644 --- a/geos-ats/src/geos/ats/test_case.py +++ b/geos-ats/src/geos/ats/test_case.py @@ -1,4 +1,4 @@ -import ats # type: ignore[import] +import ats # type: ignore[import] import os import shutil import logging @@ -319,7 +319,7 @@ def testCreate( self ): priority = 1 # Setup a new test group - atsTest = None + parent_test = None ats.tests.AtsTest.newGroup( priority=priority, path=self.path ) for stepnum, step in enumerate( self.steps ): np = getattr( step.p, "np", 1 ) @@ -329,10 +329,10 @@ def testCreate( self ): label = "%s_%d_%s" % ( self.name, stepnum + 1, step.label() ) 
# call either 'test' or 'testif' - if atsTest is None: + if parent_test is None: func = lambda *a, **k: test( *a, **k ) else: - func = lambda *a, **k: testif( atsTest, *a, **k ) + func = lambda *a, **k: testif( parent_test, *a, **k ) # Set the time limit kw = {} @@ -359,6 +359,11 @@ def testCreate( self ): if self.last_status == PASSED: atsTest.status = SKIPPED + # Set the parent test + # Note: do not make tests dependent on the result of curvecheck + if step.label() != 'curvecheck': + parent_test = atsTest + # End the group ats.tests.AtsTest.endGroup() From 8cc89f5723fc057853e73b78a8ee06f79c1298b5 Mon Sep 17 00:00:00 2001 From: alexbenedicto Date: Mon, 14 Oct 2024 08:58:52 -0700 Subject: [PATCH 04/10] Correction of erroneous copy of pre-split tags to new split meshes (#34) * First correction now allowing for the copy of fields from the original mesh to the 2 split meshes. But the indexes of fields are not correct yet for fracture mesh. * Module import paths update * Fields are now copied correctly for fracture_mesh. * Test case added to check the __copy_fields feature. * Invalid packages import for geos-mesh, impacting documentation (#32) Updating sphinx and sphinx-argparse solved the issue. * Correction for yapf valid format. * Had to make a workaround to avoid a weird yapf formatting suggestion. * Mistake correction * First correction now allowing for the copy of fields from the original mesh to the 2 split meshes. But the indexes of fields are not correct yet for fracture mesh. * Module import paths update * Fields are now copied correctly for fracture_mesh. * Test case added to check the __copy_fields feature. * Correction for yapf valid format. * Had to make a workaround to avoid a weird yapf formatting suggestion. 
* Mistake correction * First part of review corrections * Second part correction of reviews * Last part of correction review * Updated method when linking old cells to new cells ids to copy correctly the fields --- .../mesh/doctor/checks/check_fractures.py | 53 +-- .../mesh/doctor/checks/generate_fractures.py | 315 +++++++++++------- .../src/geos/mesh/doctor/checks/vtk_utils.py | 74 ++-- geos-mesh/tests/test_generate_fractures.py | 312 ++++++++++------- 4 files changed, 457 insertions(+), 297 deletions(-) diff --git a/geos-mesh/src/geos/mesh/doctor/checks/check_fractures.py b/geos-mesh/src/geos/mesh/doctor/checks/check_fractures.py index 9f14aed..21695ad 100644 --- a/geos-mesh/src/geos/mesh/doctor/checks/check_fractures.py +++ b/geos-mesh/src/geos/mesh/doctor/checks/check_fractures.py @@ -1,30 +1,14 @@ -from dataclasses import dataclass import logging - -from typing import ( - Collection, - FrozenSet, - Iterable, - Sequence, - Set, - Tuple, -) - -from tqdm import tqdm import numpy - -from vtkmodules.vtkCommonDataModel import ( - vtkUnstructuredGrid, - vtkCell, -) -from vtkmodules.vtkCommonCore import ( - vtkPoints, ) -from vtkmodules.vtkIOXML import ( - vtkXMLMultiBlockDataReader, ) -from vtkmodules.util.numpy_support import ( - vtk_to_numpy, ) -from vtk_utils import ( - vtk_iter, ) +from dataclasses import dataclass +from typing import Collection, Iterable, Sequence +from tqdm import tqdm +from vtkmodules.vtkCommonDataModel import vtkUnstructuredGrid, vtkCell +from vtkmodules.vtkCommonCore import vtkPoints +from vtkmodules.vtkIOXML import vtkXMLMultiBlockDataReader +from vtkmodules.util.numpy_support import vtk_to_numpy +from geos.mesh.doctor.checks.vtk_utils import vtk_iter +from geos.mesh.doctor.checks.generate_fractures import Coordinates3D @dataclass( frozen=True ) @@ -44,7 +28,7 @@ class Result: def __read_multiblock( vtk_input_file: str, matrix_name: str, - fracture_name: str ) -> Tuple[ vtkUnstructuredGrid, vtkUnstructuredGrid ]: + fracture_name: str ) -> 
tuple[ vtkUnstructuredGrid, vtkUnstructuredGrid ]: reader = vtkXMLMultiBlockDataReader() reader.SetFileName( vtk_input_file ) reader.Update() @@ -73,9 +57,9 @@ def format_collocated_nodes( fracture_mesh: vtkUnstructuredGrid ) -> Sequence[ I def __check_collocated_nodes_positions( - matrix_points: Sequence[ Tuple[ float, float, float ] ], fracture_points: Sequence[ Tuple[ float, float, float ] ], - g2l: Sequence[ int ], collocated_nodes: Iterable[ Iterable[ int ] ] -) -> Collection[ Tuple[ int, Iterable[ int ], Iterable[ Tuple[ float, float, float ] ] ] ]: + matrix_points: Sequence[ Coordinates3D ], fracture_points: Sequence[ Coordinates3D ], g2l: Sequence[ int ], + collocated_nodes: Iterable[ Iterable[ int ] ] +) -> Collection[ tuple[ int, Iterable[ int ], Iterable[ Coordinates3D ] ] ]: issues = [] for li, bucket in enumerate( collocated_nodes ): matrix_nodes = ( fracture_points[ li ], ) + tuple( map( lambda gi: matrix_points[ g2l[ gi ] ], bucket ) ) @@ -98,14 +82,14 @@ def my_iter( ccc ): def __check_neighbors( matrix: vtkUnstructuredGrid, fracture: vtkUnstructuredGrid, g2l: Sequence[ int ], collocated_nodes: Sequence[ Iterable[ int ] ] ): - fracture_nodes: Set[ int ] = set() + fracture_nodes: set[ int ] = set() for bucket in collocated_nodes: for gi in bucket: fracture_nodes.add( g2l[ gi ] ) # For each face of each cell, # if all the points of the face are "made" of collocated nodes, # then this is a fracture face. 
- fracture_faces: Set[ FrozenSet[ int ] ] = set() + fracture_faces: set[ frozenset[ int ] ] = set() for c in range( matrix.GetNumberOfCells() ): cell: vtkCell = matrix.GetCell( c ) for f in range( cell.GetNumberOfFaces() ): @@ -116,7 +100,7 @@ def __check_neighbors( matrix: vtkUnstructuredGrid, fracture: vtkUnstructuredGri # Finding the cells for c in tqdm( range( fracture.GetNumberOfCells() ), desc="Finding neighbor cell pairs" ): cell: vtkCell = fracture.GetCell( c ) - cns: Set[ FrozenSet[ int ] ] = set() # subset of collocated_nodes + cns: set[ frozenset[ int ] ] = set() # subset of collocated_nodes point_ids = frozenset( vtk_iter( cell.GetPointIds() ) ) for point_id in point_ids: bucket = collocated_nodes[ point_id ] @@ -129,9 +113,8 @@ def __check_neighbors( matrix: vtkUnstructuredGrid, fracture: vtkUnstructuredGri if f in fracture_faces: found += 1 if found != 2: - logging.warning( - f"Something went wrong since we should have found 2 fractures faces (we found {found}) for collocated nodes {cns}." - ) + logging.warning( f"Something went wrong since we should have found 2 fractures faces (we found {found})" + + f" for collocated nodes {cns}." 
) def __check( vtk_input_file: str, options: Options ) -> Result: diff --git a/geos-mesh/src/geos/mesh/doctor/checks/generate_fractures.py b/geos-mesh/src/geos/mesh/doctor/checks/generate_fractures.py index c21325d..76eaf56 100644 --- a/geos-mesh/src/geos/mesh/doctor/checks/generate_fractures.py +++ b/geos-mesh/src/geos/mesh/doctor/checks/generate_fractures.py @@ -1,50 +1,26 @@ +import logging +import networkx +import numpy from collections import defaultdict from dataclasses import dataclass -import logging -from typing import ( - Collection, - Dict, - FrozenSet, - Iterable, - List, - Mapping, - Optional, - Set, - Sequence, - Tuple, -) from enum import Enum - from tqdm import tqdm -import networkx -import numpy - -from vtkmodules.vtkCommonCore import ( - vtkIdList, - vtkPoints, -) -from vtkmodules.vtkCommonDataModel import ( - vtkCell, - vtkCellArray, - vtkPolygon, - vtkUnstructuredGrid, - VTK_POLYGON, - VTK_POLYHEDRON, -) -from vtkmodules.util.numpy_support import ( - vtk_to_numpy, - numpy_to_vtk, -) +from typing import Collection, Iterable, Mapping, Optional, Sequence, TypeAlias +from vtk import vtkDataArray +from vtkmodules.vtkCommonCore import vtkIdList, vtkPoints +from vtkmodules.vtkCommonDataModel import ( vtkCell, vtkCellArray, vtkPolygon, vtkUnstructuredGrid, VTK_POLYGON, + VTK_POLYHEDRON ) +from vtkmodules.util.numpy_support import vtk_to_numpy, numpy_to_vtk from vtkmodules.util.vtkConstants import VTK_ID_TYPE - -from . 
import vtk_utils -from .vtk_utils import ( - vtk_iter, - VtkOutput, - to_vtk_id_list, -) -from .vtk_polyhedron import ( - FaceStream, ) +from geos.mesh.doctor.checks.vtk_utils import ( VtkOutput, vtk_iter, to_vtk_id_list, read_mesh, write_mesh, + has_invalid_field ) +from geos.mesh.doctor.checks.vtk_polyhedron import FaceStream +""" +TypeAliases used in this file +""" +IDMapping: TypeAlias = Mapping[ int, int ] +CellsPointsCoords: TypeAlias = dict[ int, list[ tuple[ float ] ] ] +Coordinates3D: TypeAlias = tuple[ float ] class FracturePolicy( Enum ): @@ -56,7 +32,7 @@ class FracturePolicy( Enum ): class Options: policy: FracturePolicy field: str - field_values: FrozenSet[ int ] + field_values: frozenset[ int ] vtk_output: VtkOutput vtk_fracture_output: VtkOutput @@ -73,17 +49,17 @@ class FractureInfo: def build_node_to_cells( mesh: vtkUnstructuredGrid, - face_nodes: Iterable[ Iterable[ int ] ] ) -> Mapping[ int, Iterable[ int ] ]: - node_to_cells: Dict[ int, - Set[ int ] ] = defaultdict( set ) # TODO normally, just a list and not a set should be enough. + face_nodes: Iterable[ Iterable[ int ] ] ) -> dict[ int, Iterable[ int ] ]: + # TODO normally, just a list and not a set should be enough. 
+ node_to_cells: dict[ int, set[ int ] ] = defaultdict( set ) - fracture_nodes: Set[ int ] = set() + fracture_nodes: set[ int ] = set() for fns in face_nodes: for n in fns: fracture_nodes.add( n ) for cell_id in tqdm( range( mesh.GetNumberOfCells() ), desc="Computing the node to cells mapping" ): - cell_points: FrozenSet[ int ] = frozenset( vtk_iter( mesh.GetCell( cell_id ).GetPointIds() ) ) + cell_points: frozenset[ int ] = frozenset( vtk_iter( mesh.GetCell( cell_id ).GetPointIds() ) ) intersection: Iterable[ int ] = cell_points & fracture_nodes for node in intersection: node_to_cells[ node ].add( cell_id ) @@ -92,8 +68,8 @@ def build_node_to_cells( mesh: vtkUnstructuredGrid, def __build_fracture_info_from_fields( mesh: vtkUnstructuredGrid, f: Sequence[ int ], - field_values: FrozenSet[ int ] ) -> FractureInfo: - cells_to_faces: Dict[ int, List[ int ] ] = defaultdict( list ) + field_values: frozenset[ int ] ) -> FractureInfo: + cells_to_faces: dict[ int, list[ int ] ] = defaultdict( list ) # For each face of each cell, we search for the unique neighbor cell (if it exists). # Then, if the 2 values of the two cells match the field requirements, # we store the cell and its local face index: this is indeed part of the surface that we'll need to be split. @@ -109,10 +85,10 @@ def __build_fracture_info_from_fields( mesh: vtkUnstructuredGrid, f: Sequence[ i for j in range( neighbor_cell_ids.GetNumberOfIds() ): # It's 0 or 1... neighbor_cell_id = neighbor_cell_ids.GetId( j ) if f[ neighbor_cell_id ] != f[ cell_id ] and f[ neighbor_cell_id ] in field_values: - cells_to_faces[ cell_id ].append( - i ) # TODO add this (cell_is, face_id) information to the fracture_info? - face_nodes: List[ Collection[ int ] ] = list() - face_nodes_hashes: Set[ FrozenSet[ int ] ] = set() # A temporary not to add multiple times the same face. + # TODO add this (cell_is, face_id) information to the fracture_info? 
+ cells_to_faces[ cell_id ].append( i ) + face_nodes: list[ Collection[ int ] ] = list() + face_nodes_hashes: set[ frozenset[ int ] ] = set() # A temporary not to add multiple times the same face. for cell_id, faces_ids in tqdm( cells_to_faces.items(), desc="Extracting the faces of the fractures" ): cell = mesh.GetCell( cell_id ) for face_id in faces_ids: @@ -121,23 +97,23 @@ def __build_fracture_info_from_fields( mesh: vtkUnstructuredGrid, f: Sequence[ i if fnh not in face_nodes_hashes: face_nodes_hashes.add( fnh ) face_nodes.append( fn ) - node_to_cells: Mapping[ int, Iterable[ int ] ] = build_node_to_cells( mesh, face_nodes ) + node_to_cells: dict[ int, Iterable[ int ] ] = build_node_to_cells( mesh, face_nodes ) return FractureInfo( node_to_cells=node_to_cells, face_nodes=face_nodes ) def __build_fracture_info_from_internal_surfaces( mesh: vtkUnstructuredGrid, f: Sequence[ int ], - field_values: FrozenSet[ int ] ) -> FractureInfo: - node_to_cells: Dict[ int, List[ int ] ] = {} - face_nodes: List[ Collection[ int ] ] = [] + field_values: frozenset[ int ] ) -> FractureInfo: + node_to_cells: dict[ int, list[ int ] ] = defaultdict( list ) + face_nodes: list[ Collection[ int ] ] = list() for cell_id in tqdm( range( mesh.GetNumberOfCells() ), desc="Computing the face to nodes mapping" ): cell = mesh.GetCell( cell_id ) if cell.GetCellDimension() == 2: if f[ cell_id ] in field_values: - nodes = [] + nodes = list() for v in range( cell.GetNumberOfPoints() ): point_id: int = cell.GetPointId( v ) - node_to_cells[ point_id ] = [] + node_to_cells[ point_id ] = list() nodes.append( point_id ) face_nodes.append( tuple( nodes ) ) @@ -177,24 +153,24 @@ def build_cell_to_cell_graph( mesh: vtkUnstructuredGrid, fracture: FractureInfo """ # Faces are identified by their nodes. But the order of those nodes may vary while referring to the same face. # Therefore we compute some kinds of hashes of those face to easily detect if a face is part of the fracture. 
- tmp: List[ FrozenSet[ int ] ] = [] + tmp: list[ frozenset[ int ] ] = list() for fn in fracture.face_nodes: tmp.append( frozenset( fn ) ) - face_hashes: FrozenSet[ FrozenSet[ int ] ] = frozenset( tmp ) + face_hashes: frozenset[ frozenset[ int ] ] = frozenset( tmp ) # We extract the list of the cells that touch the fracture by at least one node. - cells: Set[ int ] = set() + cells: set[ int ] = set() for cell_ids in fracture.node_to_cells.values(): for cell_id in cell_ids: cells.add( cell_id ) # Using the last precomputed containers, we're now building the dict which connects # every face (hash) of the fracture to the cells that touch the face... - face_to_cells: Dict[ FrozenSet[ int ], List[ int ] ] = defaultdict( list ) + face_to_cells: dict[ frozenset[ int ], list[ int ] ] = defaultdict( list ) for cell_id in tqdm( cells, desc="Computing the cell to cell graph" ): cell: vtkCell = mesh.GetCell( cell_id ) for face_id in range( cell.GetNumberOfFaces() ): - face_hash: FrozenSet[ int ] = frozenset( vtk_iter( cell.GetFace( face_id ).GetPointIds() ) ) + face_hash: frozenset[ int ] = frozenset( vtk_iter( cell.GetFace( face_id ).GetPointIds() ) ) if face_hash not in face_hashes: face_to_cells[ face_hash ].append( cell_id ) @@ -208,7 +184,7 @@ def build_cell_to_cell_graph( mesh: vtkUnstructuredGrid, fracture: FractureInfo def __identify_split( num_points: int, cell_to_cell: networkx.Graph, - node_to_cells: Mapping[ int, Iterable[ int ] ] ) -> Mapping[ int, Mapping[ int, int ] ]: + node_to_cells: dict[ int, Iterable[ int ] ] ) -> dict[ int, IDMapping ]: """ For each cell, compute the node indices replacements. :param num_points: Number of points in the whole mesh (not the fracture). 
@@ -228,7 +204,7 @@ class NewIndex: def __init__( self, num_nodes: int ): self.__current_last_index = num_nodes - 1 - self.__seen: Set[ int ] = set() + self.__seen: set[ int ] = set() def __call__( self, index: int ) -> int: if index in self.__seen: @@ -239,10 +215,9 @@ def __call__( self, index: int ) -> int: return index build_new_index = NewIndex( num_points ) - result: Dict[ int, Dict[ int, int ] ] = defaultdict( dict ) - for node, cells in tqdm( - sorted( node_to_cells.items() ), # Iteration over `sorted` nodes to have a predictable result for tests. - desc="Identifying the node splits" ): + result: dict[ int, IDMapping ] = defaultdict( dict ) + # Iteration over `sorted` nodes to have a predictable result for tests. + for node, cells in tqdm( sorted( node_to_cells.items() ), desc="Identifying the node splits" ): for connected_cells in networkx.connected_components( cell_to_cell.subgraph( cells ) ): # Each group of connect cells need around `node` must consider the same `node`. # Separate groups must have different (duplicated) nodes. @@ -252,24 +227,102 @@ def __call__( self, index: int ) -> int: return result -def __copy_fields( old_mesh: vtkUnstructuredGrid, new_mesh: vtkUnstructuredGrid, - collocated_nodes: Sequence[ int ] ) -> None: +def truncated_coordinates_with_id( mesh: vtkUnstructuredGrid, decimals: int = 3 ) -> dict[ Coordinates3D, int ]: + """Creates a mapping of truncated points coordinates with the their point id for every point of a mesh. + + Args: + mesh (vtkUnstructuredGrid): An unstructured mesh. + decimals (int, optional): Number of decimals to keep for truncation. Defaults to 3. + + Returns: + dict[ Coordinates3D, int ]: { coords0: pt_id0, ..., coordsN: pt_idN } """ - Copies the fields from the old mesh to the new one. - Point data will be duplicated for collocated nodes. - :param old_mesh: The mesh before the split. - :param new_mesh: The mesh after the split. Will receive the fields in place. 
- :param collocated_nodes: New index to old index. - :return: None + points: vtkPoints = mesh.GetPoints() + coords_with_id: dict[ Coordinates3D, int ] = dict() + for point_id in range( points.GetNumberOfPoints() ): + pt_coords = points.GetPoint( point_id ) + truncated_pt_coords = tuple( [ round( coord, decimals ) for coord in pt_coords ] ) + coords_with_id[ truncated_pt_coords ] = point_id + return coords_with_id + + +def link_new_cells_with_old_cells_id( old_mesh: vtkUnstructuredGrid, new_mesh: vtkUnstructuredGrid ) -> IDMapping: + """After mapping each truncated point coordinates to a list of its points ids for the old and new mesh, + it is possible to determine the link between old and new cell id by coordinates position. + + Args: + old_mesh (vtkUnstructuredGrid): An unstructured mesh before splitting. + new_mesh (vtkUnstructuredGrid): An unstructured mesh after splitting the old_mesh. + + Returns: + IDMapping: { new_cell_id: old_cell_id, ... } + """ + truncated_coords_with_id_old: dict[ Coordinates3D, int ] = truncated_coordinates_with_id( old_mesh ) + truncated_coords_with_id_new: dict[ Coordinates3D, int ] = truncated_coordinates_with_id( new_mesh ) + # Every new_mesh by convention in this workflow is a mesh extracted from the old_mesh. 
+ # So the number of elements is lower or equal in new mesh than in old mesh so we'd rather iterate over it + new_pts_to_old_pts_id: IDMapping = dict() + for coords, new_pt_id in truncated_coords_with_id_new.items(): + old_pt_id: int = truncated_coords_with_id_old[ coords ] # We can do that because new_mesh is from old_mesh + new_pts_to_old_pts_id[ new_pt_id ] = old_pt_id + # Now we have a link between point ids from the new_mesh to the old_mesh + # So we can now link the cells of new_mesh to the ones of old_mesh + new_cells_with_old_cells_id: IDMapping = dict() + for new_cell_id in range( new_mesh.GetNumberOfCells() ): + new_cell_pt_ids: tuple[ int ] = tuple( vtk_iter( new_mesh.GetCell( new_cell_id ).GetPointIds() ) ) + old_cell_pt_ids: tuple[ int ] = tuple( [ new_pts_to_old_pts_id[ new_pt_id ] for new_pt_id in new_cell_pt_ids ] ) + for old_cell_id in range( old_mesh.GetNumberOfCells() ): + pt_ids: tuple[ int ] = tuple( vtk_iter( old_mesh.GetCell( old_cell_id ).GetPointIds() ) ) + if pt_ids == old_cell_pt_ids: # the old cell was identified with the new cell + new_cells_with_old_cells_id[ new_cell_id ] = old_cell_id + break + return new_cells_with_old_cells_id + + +def __copy_cell_data( old_mesh: vtkUnstructuredGrid, new_mesh: vtkUnstructuredGrid, is_fracture_mesh: bool = False ): + """Copy the cell data arrays from the old mesh to the new mesh. + If the new mesh is a fracture_mesh, the fracture_mesh has less cells than the old mesh. + Therefore, copying the entire old cell arrays to the new one will assignate invalid + indexing of the values of these arrays. + So here, we consider the new indexing of cells to add an array with the valid size of elements + and correct indexes. + + Args: + old_mesh (vtkUnstructuredGrid): An unstructured mesh before splitting. + new_mesh (vtkUnstructuredGrid): An unstructured mesh after splitting the old_mesh. + is_fracture_mesh (bool, optional): True if dealing with a new_mesh being the fracture mesh. + Defaults to False. 
""" # Copying the cell data. # The cells are the same, just their nodes support have changed. + if is_fracture_mesh: + new_to_old_cells: IDMapping = link_new_cells_with_old_cells_id( old_mesh, new_mesh ) input_cell_data = old_mesh.GetCellData() for i in range( input_cell_data.GetNumberOfArrays() ): - input_array = input_cell_data.GetArray( i ) + input_array: vtkDataArray = input_cell_data.GetArray( i ) logging.info( f"Copying cell field \"{input_array.GetName()}\"." ) - new_mesh.GetCellData().AddArray( input_array ) + if is_fracture_mesh: + array_name: str = input_array.GetName() + old_values: numpy.array = vtk_to_numpy( input_array ) + useful_values: list = list() + for old_cell_id in new_to_old_cells.values(): + useful_values.append( old_values[ old_cell_id ] ) + useful_input_array = numpy_to_vtk( numpy.array( useful_values ) ) + useful_input_array.SetName( array_name ) + new_mesh.GetCellData().AddArray( useful_input_array ) + else: + new_mesh.GetCellData().AddArray( input_array ) + +# TODO consider the fracture_mesh case ? +def __copy_field_data( old_mesh: vtkUnstructuredGrid, new_mesh: vtkUnstructuredGrid ): + """Copy the field data arrays from the old mesh to the new mesh. + Does not consider the fracture_mesh case. + + Args: + old_mesh (vtkUnstructuredGrid): An unstructured mesh before splitting. + new_mesh (vtkUnstructuredGrid): An unstructured mesh after splitting the old_mesh. + """ # Copying field data. This data is a priori not related to geometry. input_field_data = old_mesh.GetFieldData() for i in range( input_field_data.GetNumberOfArrays() ): @@ -277,29 +330,58 @@ def __copy_fields( old_mesh: vtkUnstructuredGrid, new_mesh: vtkUnstructuredGrid, logging.info( f"Copying field data \"{input_array.GetName()}\"." ) new_mesh.GetFieldData().AddArray( input_array ) + +# TODO consider the fracture_mesh case ? +def __copy_point_data( old_mesh: vtkUnstructuredGrid, new_mesh: vtkUnstructuredGrid ): + """Copy the point data arrays from the old mesh to the new mesh. 
+ Does not consider the fracture_mesh case. + + Args: + old_mesh (vtkUnstructuredGrid): An unstructured mesh before splitting. + new_mesh (vtkUnstructuredGrid): An unstructured mesh after splitting the old_mesh. + """ # Copying the point data. input_point_data = old_mesh.GetPointData() for i in range( input_point_data.GetNumberOfArrays() ): input_array = input_point_data.GetArray( i ) - logging.info( f"Copying point field \"{input_array.GetName()}\"" ) + logging.info( f"Copying point field \"{input_array.GetName()}\"." ) tmp = input_array.NewInstance() - tmp.SetName( input_array.GetName() ) - tmp.SetNumberOfComponents( input_array.GetNumberOfComponents() ) - tmp.SetNumberOfTuples( new_mesh.GetNumberOfPoints() ) - for p in range( tmp.GetNumberOfTuples() ): - tmp.SetTuple( p, input_array.GetTuple( collocated_nodes[ p ] ) ) + tmp.DeepCopy( input_array ) new_mesh.GetPointData().AddArray( tmp ) -def __perform_split( old_mesh: vtkUnstructuredGrid, - cell_to_node_mapping: Mapping[ int, Mapping[ int, int ] ] ) -> vtkUnstructuredGrid: +def __copy_fields( old_mesh: vtkUnstructuredGrid, new_mesh: vtkUnstructuredGrid, is_fracture_mesh: bool = False ): + """ + Copies the fields from the old mesh to the new one. + Point data will be duplicated for collocated nodes. + :param old_mesh: The mesh before the split. + :param new_mesh: The mesh after the split. Will receive the fields in place. + :param collocated_nodes: New index to old index. + :param is_fracture_mesh: True when copying fields for a fracture mesh, False instead. + :return: None + """ + # Mesh cannot contains global ids before splitting. + if has_invalid_field( old_mesh, [ "GLOBAL_IDS_POINTS", "GLOBAL_IDS_CELLS" ] ): + err_msg: str = ( + "The mesh cannot contain global ids for neither cells nor points. The correct procedure is to split" + + " the mesh and then generate global ids for new split meshes. Dying ..." 
) + logging.error( err_msg ) + raise ValueError( err_msg ) + + __copy_cell_data( old_mesh, new_mesh, is_fracture_mesh ) + __copy_field_data( old_mesh, new_mesh ) + __copy_point_data( old_mesh, new_mesh ) + + +def __perform_split( old_mesh: vtkUnstructuredGrid, cell_to_node_mapping: Mapping[ int, + IDMapping ] ) -> vtkUnstructuredGrid: """ Split the main 3d mesh based on the node duplication information contained in @p cell_to_node_mapping :param old_mesh: The main 3d mesh. :param cell_to_node_mapping: For each cell, gives the nodes that must be duplicated and their new index. :return: The main 3d mesh split at the fracture location. """ - added_points: Set[ int ] = set() + added_points: set[ int ] = set() for node_mapping in cell_to_node_mapping.values(): for i, o in node_mapping.items(): if i != o: @@ -336,16 +418,16 @@ def __perform_split( old_mesh: vtkUnstructuredGrid, new_mesh.Allocate( old_mesh.GetNumberOfCells() ) for c in tqdm( range( old_mesh.GetNumberOfCells() ), desc="Performing the mesh split" ): - node_mapping: Mapping[ int, int ] = cell_to_node_mapping.get( c, {} ) + node_mapping: IDMapping = cell_to_node_mapping.get( c, {} ) cell: vtkCell = old_mesh.GetCell( c ) cell_type: int = cell.GetCellType() # For polyhedron, we'll manipulate the face stream directly. 
if cell_type == VTK_POLYHEDRON: face_stream = vtkIdList() old_mesh.GetFaceStream( c, face_stream ) - new_face_nodes: List[ List[ int ] ] = [] + new_face_nodes: list[ list[ int ] ] = list() for face_nodes in FaceStream.build_from_vtk_id_list( face_stream ).face_nodes: - new_point_ids = [] + new_point_ids = list() for current_point_id in face_nodes: new_point_id: int = node_mapping.get( current_point_id, current_point_id ) new_point_ids.append( new_point_id ) @@ -361,13 +443,13 @@ def __perform_split( old_mesh: vtkUnstructuredGrid, cell_point_ids.SetId( i, new_point_id ) new_mesh.InsertNextCell( cell_type, cell_point_ids ) - __copy_fields( old_mesh, new_mesh, collocated_nodes ) + __copy_fields( old_mesh, new_mesh ) return new_mesh -def __generate_fracture_mesh( mesh_points: vtkPoints, fracture_info: FractureInfo, - cell_to_node_mapping: Mapping[ int, Mapping[ int, int ] ] ) -> vtkUnstructuredGrid: +def __generate_fracture_mesh( old_mesh: vtkUnstructuredGrid, fracture_info: FractureInfo, + cell_to_node_mapping: Mapping[ int, IDMapping ] ) -> vtkUnstructuredGrid: """ Generates the mesh of the fracture. :param mesh_points: The points of the main 3d mesh. @@ -377,6 +459,7 @@ def __generate_fracture_mesh( mesh_points: vtkPoints, fracture_info: FractureInf """ logging.info( "Generating the meshes" ) + mesh_points: vtkPoints = old_mesh.GetPoints() is_node_duplicated = numpy.zeros( mesh_points.GetNumberOfPoints(), dtype=bool ) # defaults to False for node_mapping in cell_to_node_mapping.values(): for i, o in node_mapping.items(): @@ -386,8 +469,8 @@ def __generate_fracture_mesh( mesh_points: vtkPoints, fracture_info: FractureInf # Some elements can have all their nodes not duplicated. # In this case, it's mandatory not get rid of this element # because the neighboring 3d elements won't follow. 
- face_nodes: List[ Collection[ int ] ] = [] - discarded_face_nodes: Set[ Iterable[ int ] ] = set() + face_nodes: list[ Collection[ int ] ] = list() + discarded_face_nodes: set[ Iterable[ int ] ] = set() for ns in fracture_info.face_nodes: if any( map( is_node_duplicated.__getitem__, ns ) ): face_nodes.append( ns ) @@ -395,15 +478,16 @@ def __generate_fracture_mesh( mesh_points: vtkPoints, fracture_info: FractureInf discarded_face_nodes.add( ns ) if discarded_face_nodes: - # tmp = [] + # tmp = list() # for dfns in discarded_face_nodes: # tmp.append(", ".join(map(str, dfns))) msg: str = "(" + '), ('.join( map( lambda dfns: ", ".join( map( str, dfns ) ), discarded_face_nodes ) ) + ")" - # logging.info(f"The {len(tmp)} faces made of nodes ({'), ('.join(tmp)}) were/was discarded from the fracture mesh because none of their/its nodes were duplicated.") - # print(f"The {len(tmp)} faces made of nodes ({'), ('.join(tmp)}) were/was discarded from the fracture mesh because none of their/its nodes were duplicated.") - print( - f"The faces made of nodes [{msg}] were/was discarded from the fracture mesh because none of their/its nodes were duplicated." - ) + # logging.info(f"The {len(tmp)} faces made of nodes ({'), ('.join(tmp)}) were/was discarded" + # + "from the fracture mesh because none of their/its nodes were duplicated.") + # print(f"The {len(tmp)} faces made of nodes ({'), ('.join(tmp)}) were/was discarded" + # + "from the fracture mesh because none of their/its nodes were duplicated.") + logging.info( f"The faces made of nodes [{msg}] were/was discarded" + + "from the fracture mesh because none of their/its nodes were duplicated." 
) fracture_nodes_tmp = numpy.ones( mesh_points.GetNumberOfPoints(), dtype=int ) * -1 for ns in face_nodes: @@ -413,9 +497,9 @@ def __generate_fracture_mesh( mesh_points: vtkPoints, fracture_info: FractureInf num_points: int = len( fracture_nodes ) points = vtkPoints() points.SetNumberOfPoints( num_points ) - node_3d_to_node_2d: Dict[ int, int ] = {} # Building the node mapping, from 3d mesh nodes to 2d fracture nodes. + node_3d_to_node_2d: IDMapping = {} # Building the node mapping, from 3d mesh nodes to 2d fracture nodes. for i, n in enumerate( fracture_nodes ): - coords: Tuple[ float, float, float ] = mesh_points.GetPoint( n ) + coords: Coordinates3D = mesh_points.GetPoint( n ) points.SetPoint( i, coords ) node_3d_to_node_2d[ n ] = i @@ -427,7 +511,7 @@ def __generate_fracture_mesh( mesh_points: vtkPoints, fracture_info: FractureInf polygon.GetPointIds().SetId( i, node_3d_to_node_2d[ n ] ) polygons.InsertNextCell( polygon ) - buckets: Dict[ int, Set[ int ] ] = defaultdict( set ) + buckets: dict[ int, set[ int ] ] = defaultdict( set ) for node_mapping in cell_to_node_mapping.values(): for i, o in node_mapping.items(): k: Optional[ int ] = node_3d_to_node_2d.get( min( i, o ) ) @@ -448,31 +532,34 @@ def __generate_fracture_mesh( mesh_points: vtkPoints, fracture_info: FractureInf if polygons.GetNumberOfCells() > 0: fracture_mesh.SetCells( [ VTK_POLYGON ] * polygons.GetNumberOfCells(), polygons ) fracture_mesh.GetPointData().AddArray( array ) + + __copy_fields( old_mesh, fracture_mesh, True ) + return fracture_mesh def __split_mesh_on_fracture( mesh: vtkUnstructuredGrid, - options: Options ) -> Tuple[ vtkUnstructuredGrid, vtkUnstructuredGrid ]: + options: Options ) -> tuple[ vtkUnstructuredGrid, vtkUnstructuredGrid ]: fracture: FractureInfo = build_fracture_info( mesh, options ) cell_to_cell: networkx.Graph = build_cell_to_cell_graph( mesh, fracture ) - cell_to_node_mapping: Mapping[ int, Mapping[ int, int ] ] = __identify_split( mesh.GetNumberOfPoints(), - 
cell_to_cell, fracture.node_to_cells ) + cell_to_node_mapping: Mapping[ int, IDMapping ] = __identify_split( mesh.GetNumberOfPoints(), cell_to_cell, + fracture.node_to_cells ) output_mesh: vtkUnstructuredGrid = __perform_split( mesh, cell_to_node_mapping ) - fractured_mesh: vtkUnstructuredGrid = __generate_fracture_mesh( mesh.GetPoints(), fracture, cell_to_node_mapping ) + fractured_mesh: vtkUnstructuredGrid = __generate_fracture_mesh( mesh, fracture, cell_to_node_mapping ) return output_mesh, fractured_mesh def __check( mesh, options: Options ) -> Result: output_mesh, fracture_mesh = __split_mesh_on_fracture( mesh, options ) - vtk_utils.write_mesh( output_mesh, options.vtk_output ) - vtk_utils.write_mesh( fracture_mesh, options.vtk_fracture_output ) + write_mesh( output_mesh, options.vtk_output ) + write_mesh( fracture_mesh, options.vtk_fracture_output ) # TODO provide statistics about what was actually performed (size of the fracture, number of split nodes...). return Result( info="OK" ) def check( vtk_input_file: str, options: Options ) -> Result: try: - mesh = vtk_utils.read_mesh( vtk_input_file ) + mesh = read_mesh( vtk_input_file ) return __check( mesh, options ) except BaseException as e: logging.error( e ) diff --git a/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py b/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py index 9beb375..3b581b7 100644 --- a/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py +++ b/geos-mesh/src/geos/mesh/doctor/checks/vtk_utils.py @@ -1,25 +1,11 @@ -from dataclasses import dataclass import os.path import logging -import sys -from typing import ( - Any, - Iterator, - Optional, -) - -from vtkmodules.vtkCommonCore import ( - vtkIdList, ) -from vtkmodules.vtkCommonDataModel import ( - vtkUnstructuredGrid, ) -from vtkmodules.vtkIOLegacy import ( - vtkUnstructuredGridWriter, - vtkUnstructuredGridReader, -) -from vtkmodules.vtkIOXML import ( - vtkXMLUnstructuredGridReader, - vtkXMLUnstructuredGridWriter, -) +from dataclasses import 
dataclass +from typing import Iterator, Optional +from vtkmodules.vtkCommonCore import vtkIdList +from vtkmodules.vtkCommonDataModel import vtkUnstructuredGrid +from vtkmodules.vtkIOLegacy import vtkUnstructuredGridWriter, vtkUnstructuredGridReader +from vtkmodules.vtkIOXML import vtkXMLUnstructuredGridReader, vtkXMLUnstructuredGridWriter @dataclass( frozen=True ) @@ -36,7 +22,7 @@ def to_vtk_id_list( data ) -> vtkIdList: return result -def vtk_iter( l ) -> Iterator[ Any ]: +def vtk_iter( l ) -> Iterator[ any ]: """ Utility function transforming a vtk "container" (e.g. vtkIdList) into an iterable to be used for building built-ins python containers. :param l: A vtk container. @@ -50,6 +36,38 @@ def vtk_iter( l ) -> Iterator[ Any ]: yield l.GetCellType( i ) +def has_invalid_field( mesh: vtkUnstructuredGrid, invalid_fields: list[ str ] ) -> bool: + """Checks if a mesh contains at least a data arrays within its cell, field or point data + having a certain name. If so, returns True, else False. + + Args: + mesh (vtkUnstructuredGrid): An unstructured mesh. + invalid_fields (list[str]): Field name of an array in any data from the data. + + Returns: + bool: True if one field found, else False. + """ + # Check the cell data fields + cell_data = mesh.GetCellData() + for i in range( cell_data.GetNumberOfArrays() ): + if cell_data.GetArrayName( i ) in invalid_fields: + logging.error( f"The mesh contains an invalid cell field name '{cell_data.GetArrayName( i )}'." ) + return True + # Check the field data fields + field_data = mesh.GetFieldData() + for i in range( field_data.GetNumberOfArrays() ): + if field_data.GetArrayName( i ) in invalid_fields: + logging.error( f"The mesh contains an invalid field name '{field_data.GetArrayName( i )}'." 
) + return True + # Check the point data fields + point_data = mesh.GetPointData() + for i in range( point_data.GetNumberOfArrays() ): + if point_data.GetArrayName( i ) in invalid_fields: + logging.error( f"The mesh contains an invalid point field name '{point_data.GetArrayName( i )}'." ) + return True + return False + + def __read_vtk( vtk_input_file: str ) -> Optional[ vtkUnstructuredGrid ]: reader = vtkUnstructuredGridReader() logging.info( f"Testing file format \"{vtk_input_file}\" using legacy format reader..." ) @@ -83,6 +101,10 @@ def read_mesh( vtk_input_file: str ) -> vtkUnstructuredGrid: If first guess does not work, eventually all the others reader available will be tested. :return: A unstructured grid. """ + if not os.path.exists( vtk_input_file ): + err_msg: str = f"Invalid file path. Could not read \"{vtk_input_file}\". Dying..." + logging.error( err_msg ) + raise ValueError( err_msg ) file_extension = os.path.splitext( vtk_input_file )[ -1 ] extension_to_reader = { ".vtk": __read_vtk, ".vtu": __read_vtu } # Testing first the reader that should match @@ -96,8 +118,9 @@ def read_mesh( vtk_input_file: str ) -> vtkUnstructuredGrid: if output_mesh: return output_mesh # No reader did work. Dying. - logging.critical( f"Could not find the appropriate VTK reader for file \"{vtk_input_file}\". Dying..." ) - sys.exit( 1 ) + err_msg = f"Could not find the appropriate VTK reader for file \"{vtk_input_file}\". Dying..." + logging.error( err_msg ) + raise ValueError( err_msg ) def __write_vtk( mesh: vtkUnstructuredGrid, output: str ) -> int: @@ -135,6 +158,7 @@ def write_mesh( mesh: vtkUnstructuredGrid, vtk_output: VtkOutput ) -> int: success_code = __write_vtu( mesh, vtk_output.output, vtk_output.is_data_mode_binary ) else: # No writer found did work. Dying. - logging.critical( f"Could not find the appropriate VTK writer for extension \"{file_extension}\". Dying..." 
) - sys.exit( 1 ) + err_msg = f"Could not find the appropriate VTK writer for extension \"{file_extension}\". Dying..." + logging.error( err_msg ) + raise ValueError( err_msg ) return 0 if success_code else 2 # the Write member function return 1 in case of success, 0 otherwise. diff --git a/geos-mesh/tests/test_generate_fractures.py b/geos-mesh/tests/test_generate_fractures.py index 077d9d7..30e0e7d 100644 --- a/geos-mesh/tests/test_generate_fractures.py +++ b/geos-mesh/tests/test_generate_fractures.py @@ -1,31 +1,20 @@ -from dataclasses import dataclass - -from typing import ( - Tuple, - Iterable, - Iterator, - Sequence, -) - +import logging import numpy - import pytest - -from vtkmodules.vtkCommonDataModel import ( - vtkUnstructuredGrid, - VTK_HEXAHEDRON, - VTK_POLYHEDRON, - VTK_QUAD, -) -from vtkmodules.util.numpy_support import ( - numpy_to_vtk, ) - -from checks.vtk_utils import ( - to_vtk_id_list, ) - -from checks.check_fractures import format_collocated_nodes -from checks.generate_cube import build_rectilinear_blocks_mesh, XYZ -from checks.generate_fractures import __split_mesh_on_fracture, Options, FracturePolicy +from dataclasses import dataclass +from typing import Iterable, Iterator, Sequence, TypeAlias +from vtkmodules.vtkCommonDataModel import ( vtkUnstructuredGrid, vtkQuad, VTK_HEXAHEDRON, VTK_POLYHEDRON, VTK_QUAD ) +from vtkmodules.util.numpy_support import numpy_to_vtk, vtk_to_numpy +from geos.mesh.doctor.checks.vtk_utils import to_vtk_id_list +from geos.mesh.doctor.checks.check_fractures import format_collocated_nodes +from geos.mesh.doctor.checks.generate_cube import build_rectilinear_blocks_mesh, XYZ +from geos.mesh.doctor.checks.generate_fractures import ( __split_mesh_on_fracture, Options, FracturePolicy, + Coordinates3D, IDMapping ) +""" +TypeAliases used in this file +""" +FaceNodesCoords: TypeAlias = tuple[ tuple[ float ] ] +IDMatrix: TypeAlias = Sequence[ Sequence[ int ] ] @dataclass( frozen=True ) @@ -42,11 +31,11 @@ class TestCase: 
__test__ = False input_mesh: vtkUnstructuredGrid options: Options - collocated_nodes: Sequence[ Sequence[ int ] ] + collocated_nodes: IDMatrix result: TestResult -def __build_test_case( xs: Tuple[ numpy.ndarray, numpy.ndarray, numpy.ndarray ], +def __build_test_case( xs: tuple[ numpy.ndarray, numpy.ndarray, numpy.ndarray ], attribute: Iterable[ int ], field_values: Iterable[ int ] = None, policy: FracturePolicy = FracturePolicy.FIELD ): @@ -95,20 +84,10 @@ def __generate_test_data() -> Iterator[ TestCase ]: # Split in 3 inc = Incrementor( 27 ) - collocated_nodes: Sequence[ Sequence[ int ] ] = ( - ( 1, *inc.next( 1 ) ), - ( 3, *inc.next( 1 ) ), - ( 4, *inc.next( 2 ) ), - ( 7, *inc.next( 1 ) ), - ( 1 + 9, *inc.next( 1 ) ), - ( 3 + 9, *inc.next( 1 ) ), - ( 4 + 9, *inc.next( 2 ) ), - ( 7 + 9, *inc.next( 1 ) ), - ( 1 + 18, *inc.next( 1 ) ), - ( 3 + 18, *inc.next( 1 ) ), - ( 4 + 18, *inc.next( 2 ) ), - ( 7 + 18, *inc.next( 1 ) ), - ) + collocated_nodes: IDMatrix = ( ( 1, *inc.next( 1 ) ), ( 3, *inc.next( 1 ) ), ( 4, *inc.next( 2 ) ), + ( 7, *inc.next( 1 ) ), ( 1 + 9, *inc.next( 1 ) ), ( 3 + 9, *inc.next( 1 ) ), + ( 4 + 9, *inc.next( 2 ) ), ( 7 + 9, *inc.next( 1 ) ), ( 1 + 18, *inc.next( 1 ) ), + ( 3 + 18, *inc.next( 1 ) ), ( 4 + 18, *inc.next( 2 ) ), ( 7 + 18, *inc.next( 1 ) ) ) mesh, options = __build_test_case( ( three_nodes, three_nodes, three_nodes ), ( 0, 1, 2, 1, 0, 1, 2, 1 ) ) yield TestCase( input_mesh=mesh, options=options, @@ -117,27 +96,13 @@ def __generate_test_data() -> Iterator[ TestCase ]: # Split in 8 inc = Incrementor( 27 ) - collocated_nodes: Sequence[ Sequence[ int ] ] = ( - ( 1, *inc.next( 1 ) ), - ( 3, *inc.next( 1 ) ), - ( 4, *inc.next( 3 ) ), - ( 5, *inc.next( 1 ) ), - ( 7, *inc.next( 1 ) ), - ( 0 + 9, *inc.next( 1 ) ), - ( 1 + 9, *inc.next( 3 ) ), - ( 2 + 9, *inc.next( 1 ) ), - ( 3 + 9, *inc.next( 3 ) ), - ( 4 + 9, *inc.next( 7 ) ), - ( 5 + 9, *inc.next( 3 ) ), - ( 6 + 9, *inc.next( 1 ) ), - ( 7 + 9, *inc.next( 3 ) ), - ( 8 + 9, *inc.next( 1 ) ), 
- ( 1 + 18, *inc.next( 1 ) ), - ( 3 + 18, *inc.next( 1 ) ), - ( 4 + 18, *inc.next( 3 ) ), - ( 5 + 18, *inc.next( 1 ) ), - ( 7 + 18, *inc.next( 1 ) ), - ) + collocated_nodes: IDMatrix = ( ( 1, *inc.next( 1 ) ), ( 3, *inc.next( 1 ) ), ( 4, *inc.next( 3 ) ), + ( 5, *inc.next( 1 ) ), ( 7, *inc.next( 1 ) ), ( 0 + 9, *inc.next( 1 ) ), + ( 1 + 9, *inc.next( 3 ) ), ( 2 + 9, *inc.next( 1 ) ), ( 3 + 9, *inc.next( 3 ) ), + ( 4 + 9, *inc.next( 7 ) ), ( 5 + 9, *inc.next( 3 ) ), ( 6 + 9, *inc.next( 1 ) ), + ( 7 + 9, *inc.next( 3 ) ), ( 8 + 9, *inc.next( 1 ) ), ( 1 + 18, *inc.next( 1 ) ), + ( 3 + 18, *inc.next( 1 ) ), ( 4 + 18, *inc.next( 3 ) ), ( 5 + 18, *inc.next( 1 ) ), + ( 7 + 18, *inc.next( 1 ) ) ) mesh, options = __build_test_case( ( three_nodes, three_nodes, three_nodes ), range( 8 ) ) yield TestCase( input_mesh=mesh, options=options, @@ -146,14 +111,8 @@ def __generate_test_data() -> Iterator[ TestCase ]: # Straight notch inc = Incrementor( 27 ) - collocated_nodes: Sequence[ Sequence[ int ] ] = ( - ( 1, *inc.next( 1 ) ), - ( 4, ), - ( 1 + 9, *inc.next( 1 ) ), - ( 4 + 9, ), - ( 1 + 18, *inc.next( 1 ) ), - ( 4 + 18, ), - ) + collocated_nodes: IDMatrix = ( ( 1, *inc.next( 1 ) ), ( 4, ), ( 1 + 9, *inc.next( 1 ) ), ( 4 + 9, ), + ( 1 + 18, *inc.next( 1 ) ), ( 4 + 18, ) ) mesh, options = __build_test_case( ( three_nodes, three_nodes, three_nodes ), ( 0, 1, 2, 2, 0, 1, 2, 2 ), field_values=( 0, 1 ) ) yield TestCase( input_mesh=mesh, @@ -163,16 +122,8 @@ def __generate_test_data() -> Iterator[ TestCase ]: # L-shaped notch inc = Incrementor( 27 ) - collocated_nodes: Sequence[ Sequence[ int ] ] = ( - ( 1, *inc.next( 1 ) ), - ( 4, *inc.next( 1 ) ), - ( 7, *inc.next( 1 ) ), - ( 1 + 9, *inc.next( 1 ) ), - ( 4 + 9, ), - ( 7 + 9, ), - ( 1 + 18, *inc.next( 1 ) ), - ( 4 + 18, ), - ) + collocated_nodes: IDMatrix = ( ( 1, *inc.next( 1 ) ), ( 4, *inc.next( 1 ) ), ( 7, *inc.next( 1 ) ), + ( 1 + 9, *inc.next( 1 ) ), ( 4 + 9, ), ( 7 + 9, ), ( 19, *inc.next( 1 ) ), ( 22, ) ) mesh, options = 
__build_test_case( ( three_nodes, three_nodes, three_nodes ), ( 0, 1, 0, 1, 0, 1, 2, 2 ), field_values=( 0, 1 ) ) yield TestCase( input_mesh=mesh, @@ -182,16 +133,9 @@ def __generate_test_data() -> Iterator[ TestCase ]: # 3x1x1 split inc = Incrementor( 2 * 2 * 4 ) - collocated_nodes: Sequence[ Sequence[ int ] ] = ( - ( 1, *inc.next( 1 ) ), - ( 2, *inc.next( 1 ) ), - ( 5, *inc.next( 1 ) ), - ( 6, *inc.next( 1 ) ), - ( 1 + 8, *inc.next( 1 ) ), - ( 2 + 8, *inc.next( 1 ) ), - ( 5 + 8, *inc.next( 1 ) ), - ( 6 + 8, *inc.next( 1 ) ), - ) + collocated_nodes: IDMatrix = ( ( 1, *inc.next( 1 ) ), ( 2, *inc.next( 1 ) ), ( 5, *inc.next( 1 ) ), + ( 6, *inc.next( 1 ) ), ( 1 + 8, *inc.next( 1 ) ), ( 2 + 8, *inc.next( 1 ) ), + ( 5 + 8, *inc.next( 1 ) ), ( 6 + 8, *inc.next( 1 ) ) ) mesh, options = __build_test_case( ( four_nodes, two_nodes, two_nodes ), ( 0, 1, 2 ) ) yield TestCase( input_mesh=mesh, options=options, @@ -199,12 +143,8 @@ def __generate_test_data() -> Iterator[ TestCase ]: result=TestResult( 6 * 4, 3, 2 * 4, 2 ) ) # Discarded fracture element if no node duplication. 
- collocated_nodes: Sequence[ Sequence[ int ] ] = () - mesh, options = __build_test_case( ( three_nodes, four_nodes, four_nodes ), [ - 0, - ] * 8 + [ 1, 2 ] + [ - 0, - ] * 8, + collocated_nodes: IDMatrix = tuple() + mesh, options = __build_test_case( ( three_nodes, four_nodes, four_nodes ), ( 0, ) * 8 + ( 1, 2 ) + ( 0, ) * 8, field_values=( 1, 2 ) ) yield TestCase( input_mesh=mesh, options=options, @@ -213,20 +153,11 @@ def __generate_test_data() -> Iterator[ TestCase ]: # Fracture on a corner inc = Incrementor( 3 * 4 * 4 ) - collocated_nodes: Sequence[ Sequence[ int ] ] = ( - ( 1 + 12, ), - ( 4 + 12, ), - ( 7 + 12, ), - ( 1 + 12 * 2, *inc.next( 1 ) ), - ( 4 + 12 * 2, *inc.next( 1 ) ), - ( 7 + 12 * 2, ), - ( 1 + 12 * 3, *inc.next( 1 ) ), - ( 4 + 12 * 3, *inc.next( 1 ) ), - ( 7 + 12 * 3, ), - ) - mesh, options = __build_test_case( ( three_nodes, four_nodes, four_nodes ), [ - 0, - ] * 6 + [ 1, 2, 1, 2, 0, 0, 1, 2, 1, 2, 0, 0 ], + collocated_nodes: IDMatrix = ( ( 1 + 12, ), ( 4 + 12, ), ( 7 + 12, ), ( 1 + 12 * 2, *inc.next( 1 ) ), + ( 4 + 12 * 2, *inc.next( 1 ) ), ( 7 + 12 * 2, ), ( 1 + 12 * 3, *inc.next( 1 ) ), + ( 4 + 12 * 3, *inc.next( 1 ) ), ( 7 + 12 * 3, ) ) + mesh, options = __build_test_case( ( three_nodes, four_nodes, four_nodes ), + ( 0, ) * 6 + ( 1, 2, 1, 2, 0, 0, 1, 2, 1, 2, 0, 0 ), field_values=( 1, 2 ) ) yield TestCase( input_mesh=mesh, options=options, @@ -235,12 +166,8 @@ def __generate_test_data() -> Iterator[ TestCase ]: # Generate mesh with 2 hexs, one being a standard hex, the other a 42 hex. 
inc = Incrementor( 3 * 2 * 2 ) - collocated_nodes: Sequence[ Sequence[ int ] ] = ( - ( 1, *inc.next( 1 ) ), - ( 1 + 3, *inc.next( 1 ) ), - ( 1 + 6, *inc.next( 1 ) ), - ( 1 + 9, *inc.next( 1 ) ), - ) + collocated_nodes: IDMatrix = ( ( 1, *inc.next( 1 ) ), ( 1 + 3, *inc.next( 1 ) ), ( 1 + 6, *inc.next( 1 ) ), + ( 1 + 9, *inc.next( 1 ) ) ) mesh, options = __build_test_case( ( three_nodes, two_nodes, two_nodes ), ( 0, 1 ) ) polyhedron_mesh = vtkUnstructuredGrid() polyhedron_mesh.SetPoints( mesh.GetPoints() ) @@ -258,12 +185,8 @@ def __generate_test_data() -> Iterator[ TestCase ]: # Split in 2 using the internal fracture description inc = Incrementor( 3 * 2 * 2 ) - collocated_nodes: Sequence[ Sequence[ int ] ] = ( - ( 1, *inc.next( 1 ) ), - ( 1 + 3, *inc.next( 1 ) ), - ( 1 + 6, *inc.next( 1 ) ), - ( 1 + 9, *inc.next( 1 ) ), - ) + collocated_nodes: IDMatrix = ( ( 1, *inc.next( 1 ) ), ( 1 + 3, *inc.next( 1 ) ), ( 1 + 6, *inc.next( 1 ) ), + ( 1 + 9, *inc.next( 1 ) ) ) mesh, options = __build_test_case( ( three_nodes, two_nodes, two_nodes ), attribute=( 0, 0, 0 ), field_values=( 0, ), @@ -286,3 +209,146 @@ def test_generate_fracture( test_case: TestCase ): res = format_collocated_nodes( fracture_mesh ) assert res == test_case.collocated_nodes assert len( res ) == test_case.result.fracture_mesh_num_points + + +def add_simplified_field_for_cells( mesh: vtkUnstructuredGrid, field_name: str, field_dimension: int ): + """Reduce functionality obtained from src.geos.mesh.doctor.checks.generate_fracture.__add_fields + where the goal is to add a cell data array with incrementing values. + + Args: + mesh (vtkUnstructuredGrid): Unstructured mesh. + field_name (str): Name of the field to add to CellData + field_dimension (int): Number of components for the field. 
+ """ + data = mesh.GetCellData() + n = mesh.GetNumberOfCells() + array = numpy.ones( ( n, field_dimension ), dtype=float ) + array = numpy.arange( 1, n * field_dimension + 1 ).reshape( n, field_dimension ) + vtk_array = numpy_to_vtk( array ) + vtk_array.SetName( field_name ) + data.AddArray( vtk_array ) + + +def find_borders_faces_rectilinear_grid( mesh: vtkUnstructuredGrid ) -> tuple[ FaceNodesCoords ]: + """ + 6+--------+7 + / /| + / / | + 4+--------+5 | + | | | + | 2+ | +3 + | | / + | |/ + 0+--------+1 + + For a vtk rectilinear grid, gives the coordinates of each of its borders face nodes. + + Args: + mesh (vtkUnstructuredGrid): Unstructured mesh. + + Returns: + tuple[QuadCoords]: For a rectilinear grid, returns a tuple of 6 elements. + """ + mesh_bounds: tuple[ float ] = mesh.GetBounds() + min_bound: Coordinates3D = [ mesh_bounds[ i ] for i in range( len( mesh_bounds ) ) if i % 2 == 0 ] + max_bound: Coordinates3D = [ mesh_bounds[ i ] for i in range( len( mesh_bounds ) ) if i % 2 == 1 ] + center: Coordinates3D = mesh.GetCenter() + face_diag: tuple[ float ] = ( ( max_bound[ 0 ] - min_bound[ 0 ] ) / 2, ( max_bound[ 1 ] - min_bound[ 1 ] ) / 2, + ( max_bound[ 2 ] - min_bound[ 2 ] ) / 2 ) + node0: Coordinates3D = ( center[ 0 ] - face_diag[ 0 ], center[ 1 ] - face_diag[ 1 ], center[ 2 ] - face_diag[ 2 ] ) + node1: Coordinates3D = ( center[ 0 ] + face_diag[ 0 ], center[ 1 ] - face_diag[ 1 ], center[ 2 ] - face_diag[ 2 ] ) + node2: Coordinates3D = ( center[ 0 ] - face_diag[ 0 ], center[ 1 ] + face_diag[ 1 ], center[ 2 ] - face_diag[ 2 ] ) + node3: Coordinates3D = ( center[ 0 ] + face_diag[ 0 ], center[ 1 ] + face_diag[ 1 ], center[ 2 ] - face_diag[ 2 ] ) + node4: Coordinates3D = ( center[ 0 ] - face_diag[ 0 ], center[ 1 ] - face_diag[ 1 ], center[ 2 ] + face_diag[ 2 ] ) + node5: Coordinates3D = ( center[ 0 ] + face_diag[ 0 ], center[ 1 ] - face_diag[ 1 ], center[ 2 ] + face_diag[ 2 ] ) + node6: Coordinates3D = ( center[ 0 ] - face_diag[ 0 ], center[ 1 ] + face_diag[ 1 
], center[ 2 ] + face_diag[ 2 ] ) + node7: Coordinates3D = ( center[ 0 ] + face_diag[ 0 ], center[ 1 ] + face_diag[ 1 ], center[ 2 ] + face_diag[ 2 ] ) + faces: tuple[ FaceNodesCoords ] = ( ( node0, node1, node3, node2 ), ( node4, node5, node7, node6 ), + ( node0, node2, node6, node4 ), ( node1, node3, node7, node5 ), + ( node0, node1, node5, node4 ), ( node2, node3, node7, node6 ) ) + return faces + + +def add_quad( mesh: vtkUnstructuredGrid, face: FaceNodesCoords ): + """Adds a quad cell to each border of an unstructured mesh. + + Args: + mesh (vtkUnstructuredGrid): Unstructured mesh. + """ + points_coords = mesh.GetPoints().GetData() + quad: vtkQuad = vtkQuad() + ids_association: IDMapping = {} + for i in range( mesh.GetNumberOfPoints() ): + for j in range( len( face ) ): + if points_coords.GetTuple( i ) == face[ j ]: + ids_association[ i ] = j + break + if len( ids_association ) == 4: + break + + for vertice_id, quad_coord_index in ids_association.items(): + quad.GetPoints().InsertNextPoint( face[ quad_coord_index ] ) + quad.GetPointIds().SetId( quad_coord_index, vertice_id ) + + mesh.InsertNextCell( quad.GetCellType(), quad.GetPointIds() ) + + +def test_copy_fields_when_splitting_mesh(): + """This test is designed to check the __copy_fields method from generate_fractures, + that will be called when using __split_mesh_on_fracture method from generate_fractures. + """ + # Generating the rectilinear grid and its quads on all borders + x: numpy.array = numpy.array( [ 0, 1, 2 ] ) + y: numpy.array = numpy.array( [ 0, 1 ] ) + z: numpy.array = numpy.array( [ 0, 1 ] ) + xyzs: XYZ = XYZ( x, y, z ) + mesh: vtkUnstructuredGrid = build_rectilinear_blocks_mesh( [ xyzs ] ) + assert mesh.GetCells().GetNumberOfCells() == 2 + border_faces: tuple[ FaceNodesCoords ] = find_borders_faces_rectilinear_grid( mesh ) + for face in border_faces: + add_quad( mesh, face ) + assert mesh.GetCells().GetNumberOfCells() == 8 + # Create a quad cell to represent the fracture surface. 
+ fracture: FaceNodesCoords = ( ( 1.0, 0.0, 0.0 ), ( 1.0, 1.0, 0.0 ), ( 1.0, 1.0, 1.0 ), ( 1.0, 0.0, 1.0 ) ) + add_quad( mesh, fracture ) + assert mesh.GetCells().GetNumberOfCells() == 9 + # Add a "TestField" array + assert mesh.GetCellData().GetNumberOfArrays() == 0 + add_simplified_field_for_cells( mesh, "TestField", 1 ) + assert mesh.GetCellData().GetNumberOfArrays() == 1 + assert mesh.GetCellData().GetArrayName( 0 ) == "TestField" + testField_values: list[ int ] = vtk_to_numpy( mesh.GetCellData().GetArray( 0 ) ).tolist() + assert testField_values == [ 1, 2, 3, 4, 5, 6, 7, 8, 9 ] + # Split the mesh along the fracture surface which is number 9 on TestField + options = Options( policy=FracturePolicy.INTERNAL_SURFACES, + field="TestField", + field_values=frozenset( map( int, [ "9" ] ) ), + vtk_output=None, + vtk_fracture_output=None ) + main_mesh, fracture_mesh = __split_mesh_on_fracture( mesh, options ) + assert main_mesh.GetCellData().GetNumberOfArrays() == 1 + assert fracture_mesh.GetCellData().GetNumberOfArrays() == 1 + assert main_mesh.GetCellData().GetArrayName( 0 ) == "TestField" + assert fracture_mesh.GetCellData().GetArrayName( 0 ) == "TestField" + # Make sure that only 1 correct value is in "TestField" array for fracture_mesh, 9 values for main_mesh + fracture_mesh_values: list[ int ] = vtk_to_numpy( fracture_mesh.GetCellData().GetArray( 0 ) ).tolist() + main_mesh_values: list[ int ] = vtk_to_numpy( main_mesh.GetCellData().GetArray( 0 ) ).tolist() + assert fracture_mesh_values == [ 9 ] # The value for the fracture surface + assert main_mesh_values == [ 1, 2, 3, 4, 5, 6, 7, 8, 9 ] + # Test for invalid point field name + add_simplified_field_for_cells( mesh, "GLOBAL_IDS_POINTS", 1 ) + with pytest.raises( ValueError ) as pytest_wrapped_e: + main_mesh, fracture_mesh = __split_mesh_on_fracture( mesh, options ) + assert pytest_wrapped_e.type == ValueError + # Test for invalid cell field name + mesh: vtkUnstructuredGrid = build_rectilinear_blocks_mesh( [ xyzs ] 
) + border_faces: tuple[ FaceNodesCoords ] = find_borders_faces_rectilinear_grid( mesh ) + for face in border_faces: + add_quad( mesh, face ) + add_quad( mesh, fracture ) + add_simplified_field_for_cells( mesh, "TestField", 1 ) + add_simplified_field_for_cells( mesh, "GLOBAL_IDS_CELLS", 1 ) + assert mesh.GetCellData().GetNumberOfArrays() == 2 + with pytest.raises( ValueError ) as pytest_wrapped_e: + main_mesh, fracture_mesh = __split_mesh_on_fracture( mesh, options ) + assert pytest_wrapped_e.type == ValueError From a88a0934b552c546c83fec7573aee44da76c50fd Mon Sep 17 00:00:00 2001 From: MelReyCG <122801580+MelReyCG@users.noreply.github.com> Date: Tue, 5 Nov 2024 17:27:11 +0100 Subject: [PATCH 05/10] Add char arrays output (#43) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * ✨ add char comparison when encountering int8 arrays * adding detection of wider strings * better management of strings separators & whitespaces * removed potencially disturbing character * 🎨 code style * 🎨 code style + msg fix --- .../src/geos/ats/helpers/restart_check.py | 108 ++++++++++++++---- 1 file changed, 87 insertions(+), 21 deletions(-) diff --git a/geos-ats/src/geos/ats/helpers/restart_check.py b/geos-ats/src/geos/ats/helpers/restart_check.py index 7aa282e..f491107 100644 --- a/geos-ats/src/geos/ats/helpers/restart_check.py +++ b/geos-ats/src/geos/ats/helpers/restart_check.py @@ -7,6 +7,7 @@ import argparse import logging import time +import string from pathlib import Path try: from geos.ats.helpers.permute_array import permuteArray # type: ignore[import] @@ -375,35 +376,100 @@ def compareIntArrays( self, path, arr, base_arr ): ARR [in]: The hdf5 Dataset to compare. BASE_ARR [in]: The hdf5 Dataset to compare against. """ - # If the shapes are different they can't be compared. 
+ message = ""
 if arr.shape != base_arr.shape:
- msg = "Datasets have different shapes and therefore can't be compared: %s, %s.\n" % ( arr.shape,
- base_arr.shape )
- self.errorMsg( path, msg, True )
- return
+ message = "Datasets have different shapes and therefore can't be compared statistically: %s, %s.\n" % (
+ arr.shape, base_arr.shape )
+ else:
+ # Calculate the absolute difference.
+ difference = np.subtract( arr, base_arr )
+ np.abs( difference, out=difference )
- # Create a copy of the arrays.
+ offenders = difference != 0.0
+ n_offenders = np.sum( offenders )
- # Calculate the absolute difference.
- difference = np.subtract( arr, base_arr )
- np.abs( difference, out=difference )
+ if n_offenders != 0:
+ max_index = np.unravel_index( np.argmax( difference ), difference.shape )
+ max_difference = difference[ max_index ]
+ offenders_mean = np.mean( difference[ offenders ] )
+ offenders_std = np.std( difference[ offenders ] )
- offenders = difference != 0.0
- n_offenders = np.sum( offenders )
+ message = "Arrays of types %s and %s have %s values of which %d have differing values.\n" % (
+ arr.dtype, base_arr.dtype, offenders.size, n_offenders )
+ message += "Statistics of the differences greater than 0:\n"
+ message += "\tmax_index = %s, max = %s, mean = %s, std = %s\n" % ( max_index, max_difference,
+ offenders_mean, offenders_std )
- if n_offenders != 0:
- max_index = np.unravel_index( np.argmax( difference ), difference.shape )
- max_difference = difference[ max_index ]
- offenders_mean = np.mean( difference[ offenders ] )
- offenders_std = np.std( difference[ offenders ] )
+ # actually, int8 arrays are almost always char arrays, so we should add a character comparison.
+ if arr.dtype == np.int8 and base_arr.dtype == np.int8: + message += self.compareCharArrays( arr, base_arr ) - message = "Arrays of types %s and %s have %s values of which %d have differing values.\n" % ( - arr.dtype, base_arr.dtype, offenders.size, n_offenders ) - message += "Statistics of the differences greater than 0:\n" - message += "\tmax_index = %s, max = %s, mean = %s, std = %s\n" % ( max_index, max_difference, - offenders_mean, offenders_std ) + if message != "": self.errorMsg( path, message, True ) + def compareCharArrays( self, comp_arr, base_arr ): + """ + Compare the valid characters of two arrays and return a formatted string showing differences. + + COMP_ARR [in]: The hdf5 Dataset to compare. + BASE_ARR [in]: The hdf5 Dataset to compare against. + + Returns a formatted string highlighting the differing characters. + """ + comp_ndarr = np.array( comp_arr ).flatten() + base_ndarr = np.array( base_arr ).flatten() + + # Replace invalid characters by group-separator characters ('\x1D') + valid_chars = set( string.printable ) + invalid_char = '\x1D' + comp_str = "".join( + [ chr( x ) if ( x >= 0 and chr( x ) in valid_chars ) else invalid_char for x in comp_ndarr ] ) + base_str = "".join( + [ chr( x ) if ( x >= 0 and chr( x ) in valid_chars ) else invalid_char for x in base_ndarr ] ) + + # replace whitespaces sequences by only one space (preventing indentation / spacing changes detection) + whitespace_pattern = r"[ \t\n\r\v\f]+" + comp_str = re.sub( whitespace_pattern, " ", comp_str ) + base_str = re.sub( whitespace_pattern, " ", base_str ) + # replace invalid characters sequences by a double space (for clear display) + invalid_char_pattern = r"\x1D+" + comp_str_display = re.sub( invalid_char_pattern, " ", comp_str ) + base_str_display = re.sub( invalid_char_pattern, " ", base_str ) + + message = "" + + def limited_display( n, string ): + return string[ :n ] + f"... 
({len(string)-n} omitted chars)" if len( string ) > n else string + + if len( comp_str ) != len( base_str ): + max_display = 250 + message = f"Character arrays have different sizes: {len( comp_str )}, {len( base_str )}.\n" + message += f" {limited_display( max_display, comp_str_display )}\n" + message += f" {limited_display( max_display, base_str_display )}\n" + else: + # We need to trim arrays to the length of the shortest one for the comparisons + min_length = min( len( comp_str_display ), len( base_str_display ) ) + comp_str_trim = comp_str_display[ :min_length ] + base_str_trim = base_str_display[ :min_length ] + + differing_indices = np.where( np.array( list( comp_str_trim ) ) != np.array( list( base_str_trim ) ) )[ 0 ] + if differing_indices.size != 0: + # check for reordering + arr_set = sorted( set( comp_str.split( invalid_char ) ) ) + base_arr_set = sorted( set( base_str.split( invalid_char ) ) ) + reordering_detected = arr_set == base_arr_set + + max_display = 110 if reordering_detected else 250 + message = "Differing valid characters" + message += " (substrings reordering detected):\n" if reordering_detected else ":\n" + + message += f" {limited_display( max_display, comp_str_display )}\n" + message += f" {limited_display( max_display, base_str_display )}\n" + message += " " + "".join( + [ "^" if i in differing_indices else " " for i in range( min( max_display, min_length ) ) ] ) + "\n" + + return message + def compareStringArrays( self, path, arr, base_arr ): """ Compare two string datasets. Exact equality is used as the acceptance criteria. From 51c74e14e6c8d80d86f7df659662d74957f258c7 Mon Sep 17 00:00:00 2001 From: alexbenedicto Date: Tue, 5 Nov 2024 15:19:44 -0800 Subject: [PATCH 06/10] Create multiple fractures with generate_fractures (#46) * First attempt at outputting all fracture meshes. Ok when no more than 1 intersection with 2 faults. * Corrected first commit, now works for multiple fractures at a same time. 
* docs update * Allows for flexibility in data mode for fractures * yapf formatting * Better docs readability * Corrected tests with new formalism * TypeAlias cannot be used in prod machines because of old python version * yapf formatting --- docs/geos-mesh.rst | 27 +++--- .../mesh/doctor/checks/generate_fractures.py | 57 +++++++----- .../parsing/generate_fractures_parsing.py | 90 +++++++++++++++---- geos-mesh/tests/test_generate_fractures.py | 38 ++++---- 4 files changed, 145 insertions(+), 67 deletions(-) diff --git a/docs/geos-mesh.rst b/docs/geos-mesh.rst index cbdea0e..82b8551 100644 --- a/docs/geos-mesh.rst +++ b/docs/geos-mesh.rst @@ -185,27 +185,26 @@ The ``generate_fractures`` module will split the mesh and generate the multi-blo .. code-block:: $ python src/geos/mesh/doctor/mesh_doctor.py generate_fractures --help - usage: mesh_doctor.py generate_fractures [-h] --policy field, internal_surfaces [--name NAME] [--values VALUES] - --output OUTPUT [--data-mode binary, ascii] --fracture-output - FRACTURE_OUTPUT [--fracture-data-mode binary, ascii] + usage: mesh_doctor.py generate_fractures [-h] --policy field, internal_surfaces [--name NAME] [--values VALUES] --output OUTPUT + [--data-mode binary, ascii] [--fractures_output_dir FRACTURES_OUTPUT_DIR] options: -h, --help show this help message and exit --policy field, internal_surfaces - [string]: The criterion to define the surfaces that will be changed into fracture zones. - Possible values are "field, internal_surfaces" - --name NAME [string]: If the "field" policy is selected, defines which field will be considered to - define the fractures. If the "internal_surfaces" policy is selected, defines the name of - the attribute will be considered to identify the fractures. - --values VALUES [list of comma separated integers]: If the "field" policy is selected, which changes of - the field will be considered as a fracture. If the "internal_surfaces" policy is - selected, list of the fracture attributes. 
+ [string]: The criterion to define the surfaces that will be changed into fracture zones. Possible values are "field, internal_surfaces" + --name NAME [string]: If the "field" policy is selected, defines which field will be considered to define the fractures. + If the "internal_surfaces" policy is selected, defines the name of the attribute will be considered to identify the fractures. + --values VALUES [list of comma separated integers]: If the "field" policy is selected, which changes of the field will be considered as a fracture. + If the "internal_surfaces" policy is selected, list of the fracture attributes. + You can create multiple fractures by separating the values with ':' like shown in this example. + --values 10,12:13,14,16,18:22 will create 3 fractures identified respectively with the values (10,12), (13,14,16,18) and (22). + If no ':' is found, all values specified will be assumed to create only 1 single fracture. --output OUTPUT [string]: The vtk output file destination. --data-mode binary, ascii [string]: For ".vtu" output format, the data mode can be binary or ascii. Defaults to binary. - --fracture-output FRACTURE_OUTPUT - [string]: The vtk output file destination. - --fracture-data-mode binary, ascii + --fractures_output_dir FRACTURES_OUTPUT_DIR + [string]: The output directory for the fractures meshes that will be generated from the mesh. + --fractures_data_mode FRACTURES_DATA_MODE [string]: For ".vtu" output format, the data mode can be binary or ascii. Defaults to binary. 
``generate_global_ids`` diff --git a/geos-mesh/src/geos/mesh/doctor/checks/generate_fractures.py b/geos-mesh/src/geos/mesh/doctor/checks/generate_fractures.py index 76eaf56..9928d12 100644 --- a/geos-mesh/src/geos/mesh/doctor/checks/generate_fractures.py +++ b/geos-mesh/src/geos/mesh/doctor/checks/generate_fractures.py @@ -5,7 +5,7 @@ from dataclasses import dataclass from enum import Enum from tqdm import tqdm -from typing import Collection, Iterable, Mapping, Optional, Sequence, TypeAlias +from typing import Collection, Iterable, Mapping, Optional, Sequence from vtk import vtkDataArray from vtkmodules.vtkCommonCore import vtkIdList, vtkPoints from vtkmodules.vtkCommonDataModel import ( vtkCell, vtkCellArray, vtkPolygon, vtkUnstructuredGrid, VTK_POLYGON, @@ -16,11 +16,12 @@ has_invalid_field ) from geos.mesh.doctor.checks.vtk_polyhedron import FaceStream """ -TypeAliases used in this file +TypeAliases cannot be used with Python 3.9. A simple assignment like described there will be used: +https://docs.python.org/3/library/typing.html#typing.TypeAlias:~:text=through%20simple%20assignment%3A-,Vector%20%3D%20list%5Bfloat%5D,-Or%20marked%20with """ -IDMapping: TypeAlias = Mapping[ int, int ] -CellsPointsCoords: TypeAlias = dict[ int, list[ tuple[ float ] ] ] -Coordinates3D: TypeAlias = tuple[ float ] +IDMapping = Mapping[ int, int ] +CellsPointsCoords = dict[ int, list[ tuple[ float ] ] ] +Coordinates3D = tuple[ float ] class FracturePolicy( Enum ): @@ -32,9 +33,10 @@ class FracturePolicy( Enum ): class Options: policy: FracturePolicy field: str - field_values: frozenset[ int ] - vtk_output: VtkOutput - vtk_fracture_output: VtkOutput + field_values_combined: frozenset[ int ] + field_values_per_fracture: list[ frozenset[ int ] ] + mesh_VtkOutput: VtkOutput + all_fractures_VtkOutput: list[ VtkOutput ] @dataclass( frozen=True ) @@ -127,9 +129,15 @@ def __build_fracture_info_from_internal_surfaces( mesh: vtkUnstructuredGrid, f: return FractureInfo( 
node_to_cells=node_to_cells, face_nodes=face_nodes ) -def build_fracture_info( mesh: vtkUnstructuredGrid, options: Options ) -> FractureInfo: +def build_fracture_info( mesh: vtkUnstructuredGrid, + options: Options, + combined_fractures: bool, + fracture_id: int = 0 ) -> FractureInfo: field = options.field - field_values = options.field_values + if combined_fractures: + field_values = options.field_values_combined + else: + field_values = options.field_values_per_fracture[ fracture_id ] cell_data = mesh.GetCellData() if cell_data.HasArray( field ): f = vtk_to_numpy( cell_data.GetArray( field ) ) @@ -538,21 +546,30 @@ def __generate_fracture_mesh( old_mesh: vtkUnstructuredGrid, fracture_info: Frac return fracture_mesh -def __split_mesh_on_fracture( mesh: vtkUnstructuredGrid, - options: Options ) -> tuple[ vtkUnstructuredGrid, vtkUnstructuredGrid ]: - fracture: FractureInfo = build_fracture_info( mesh, options ) - cell_to_cell: networkx.Graph = build_cell_to_cell_graph( mesh, fracture ) +def __split_mesh_on_fractures( mesh: vtkUnstructuredGrid, + options: Options ) -> tuple[ vtkUnstructuredGrid, list[ vtkUnstructuredGrid ] ]: + all_fracture_infos: list[ FractureInfo ] = list() + for fracture_id in range( len( options.field_values_per_fracture ) ): + fracture_info: FractureInfo = build_fracture_info( mesh, options, False, fracture_id ) + all_fracture_infos.append( fracture_info ) + combined_fractures: FractureInfo = build_fracture_info( mesh, options, True ) + cell_to_cell: networkx.Graph = build_cell_to_cell_graph( mesh, combined_fractures ) cell_to_node_mapping: Mapping[ int, IDMapping ] = __identify_split( mesh.GetNumberOfPoints(), cell_to_cell, - fracture.node_to_cells ) + combined_fractures.node_to_cells ) output_mesh: vtkUnstructuredGrid = __perform_split( mesh, cell_to_node_mapping ) - fractured_mesh: vtkUnstructuredGrid = __generate_fracture_mesh( mesh, fracture, cell_to_node_mapping ) - return output_mesh, fractured_mesh + fracture_meshes: list[ 
vtkUnstructuredGrid ] = list() + for fracture_info_separated in all_fracture_infos: + fracture_mesh: vtkUnstructuredGrid = __generate_fracture_mesh( mesh, fracture_info_separated, + cell_to_node_mapping ) + fracture_meshes.append( fracture_mesh ) + return ( output_mesh, fracture_meshes ) def __check( mesh, options: Options ) -> Result: - output_mesh, fracture_mesh = __split_mesh_on_fracture( mesh, options ) - write_mesh( output_mesh, options.vtk_output ) - write_mesh( fracture_mesh, options.vtk_fracture_output ) + output_mesh, fracture_meshes = __split_mesh_on_fractures( mesh, options ) + write_mesh( output_mesh, options.mesh_VtkOutput ) + for i, fracture_mesh in enumerate( fracture_meshes ): + write_mesh( fracture_mesh, options.all_fractures_VtkOutput[ i ] ) # TODO provide statistics about what was actually performed (size of the fracture, number of split nodes...). return Result( info="OK" ) diff --git a/geos-mesh/src/geos/mesh/doctor/parsing/generate_fractures_parsing.py b/geos-mesh/src/geos/mesh/doctor/parsing/generate_fractures_parsing.py index 9d94b89..619a4a8 100644 --- a/geos-mesh/src/geos/mesh/doctor/parsing/generate_fractures_parsing.py +++ b/geos-mesh/src/geos/mesh/doctor/parsing/generate_fractures_parsing.py @@ -1,7 +1,7 @@ import logging - +import os from geos.mesh.doctor.checks.generate_fractures import Options, Result, FracturePolicy - +from geos.mesh.doctor.checks.vtk_utils import VtkOutput from . import vtk_output_parsing, GENERATE_FRACTURES __POLICY = "policy" @@ -12,7 +12,8 @@ __FIELD_NAME = "name" __FIELD_VALUES = "values" -__FRACTURE_PREFIX = "fracture" +__FRACTURES_OUTPUT_DIR = "fractures_output_dir" +__FRACTURES_DATA_MODE = "fractures_data_mode" def convert_to_fracture_policy( s: str ) -> FracturePolicy: @@ -30,8 +31,7 @@ def convert_to_fracture_policy( s: str ) -> FracturePolicy: def fill_subparser( subparsers ) -> None: - p = subparsers.add_parser( GENERATE_FRACTURES, - help="Splits the mesh to generate the faults and fractures. 
[EXPERIMENTAL]" ) + p = subparsers.add_parser( GENERATE_FRACTURES, help="Splits the mesh to generate the faults and fractures." ) p.add_argument( '--' + __POLICY, type=convert_to_fracture_policy, metavar=", ".join( __POLICIES ), @@ -43,30 +43,86 @@ def fill_subparser( subparsers ) -> None: type=str, help= f"[string]: If the \"{__FIELD_POLICY}\" {__POLICY} is selected, defines which field will be considered to define the fractures. " - f"If the \"{__INTERNAL_SURFACES_POLICY}\" {__POLICY} is selected, defines the name of the attribute will be considered to identify the fractures. " + f"If the \"{__INTERNAL_SURFACES_POLICY}\" {__POLICY} is selected, defines the name of the attribute will be considered to identify the fractures." ) p.add_argument( '--' + __FIELD_VALUES, type=str, help= - f"[list of comma separated integers]: If the \"{__FIELD_POLICY}\" {__POLICY} is selected, which changes of the field will be considered as a fracture. If the \"{__INTERNAL_SURFACES_POLICY}\" {__POLICY} is selected, list of the fracture attributes." - ) + f"[list of comma separated integers]: If the \"{__FIELD_POLICY}\" {__POLICY} is selected, which changes of the field will be considered " + f"as a fracture. If the \"{__INTERNAL_SURFACES_POLICY}\" {__POLICY} is selected, list of the fracture attributes. " + f"You can create multiple fractures by separating the values with ':' like shown in this example. " + f"--{__FIELD_VALUES} 10,12:13,14,16,18:22 will create 3 fractures identified respectively with the values (10,12), (13,14,16,18) and (22). " + f"If no ':' is found, all values specified will be assumed to create only 1 single fracture." ) vtk_output_parsing.fill_vtk_output_subparser( p ) - vtk_output_parsing.fill_vtk_output_subparser( p, prefix=__FRACTURE_PREFIX ) + p.add_argument( + '--' + __FRACTURES_OUTPUT_DIR, + type=str, + help=f"[string]: The output directory for the fractures meshes that will be generated from the mesh." 
) + p.add_argument( + '--' + __FRACTURES_DATA_MODE, + type=str, + help=f'[string]: For ".vtu" output format, the data mode can be binary or ascii. Defaults to binary.' ) def convert( parsed_options ) -> Options: - policy = parsed_options[ __POLICY ] - field = parsed_options[ __FIELD_NAME ] - field_values = frozenset( map( int, parsed_options[ __FIELD_VALUES ].split( "," ) ) ) - vtk_output = vtk_output_parsing.convert( parsed_options ) - vtk_fracture_output = vtk_output_parsing.convert( parsed_options, prefix=__FRACTURE_PREFIX ) + policy: str = parsed_options[ __POLICY ] + field: str = parsed_options[ __FIELD_NAME ] + all_values: str = parsed_options[ __FIELD_VALUES ] + if not are_values_parsable( all_values ): + raise ValueError( + f"When entering --{__FIELD_VALUES}, respect this given format example:\n--{__FIELD_VALUES} " + + "10,12:13,14,16,18:22 to create 3 fractures identified with respectively the values (10,12), (13,14,16,18) and (22)." + ) + all_values_no_separator: str = all_values.replace( ":", "," ) + field_values_combined: frozenset[ int ] = frozenset( map( int, all_values_no_separator.split( "," ) ) ) + mesh_vtk_output = vtk_output_parsing.convert( parsed_options ) + # create the different fractures + per_fracture: list[ str ] = all_values.split( ":" ) + field_values_per_fracture: list[ frozenset[ int ] ] = [ + frozenset( map( int, fracture.split( "," ) ) ) for fracture in per_fracture + ] + fracture_names: list[ str ] = [ "fracture_" + frac.replace( ",", "_" ) + ".vtu" for frac in per_fracture ] + fractures_output_dir: str = parsed_options[ __FRACTURES_OUTPUT_DIR ] + fractures_data_mode: str = parsed_options[ __FRACTURES_DATA_MODE ] + all_fractures_VtkOutput: list[ VtkOutput ] = build_all_fractures_VtkOutput( fractures_output_dir, + fractures_data_mode, mesh_vtk_output, + fracture_names ) return Options( policy=policy, field=field, - field_values=field_values, - vtk_output=vtk_output, - vtk_fracture_output=vtk_fracture_output ) + 
field_values_combined=field_values_combined, + field_values_per_fracture=field_values_per_fracture, + mesh_VtkOutput=mesh_vtk_output, + all_fractures_VtkOutput=all_fractures_VtkOutput ) def display_results( options: Options, result: Result ): pass + + +def are_values_parsable( values: str ) -> bool: + if not all( character.isdigit() or character in { ':', ',' } for character in values ): + return False + if values.startswith( ":" ) or values.startswith( "," ): + return False + if values.endswith( ":" ) or values.endswith( "," ): + return False + return True + + +def build_all_fractures_VtkOutput( fracture_output_dir: str, fractures_data_mode: str, mesh_vtk_output: VtkOutput, + fracture_names: list[ str ] ) -> list[ VtkOutput ]: + if not os.path.exists( fracture_output_dir ): + raise ValueError( f"The --{__FRACTURES_OUTPUT_DIR} given directory does not exist." ) + + if not os.access( fracture_output_dir, os.W_OK ): + raise ValueError( f"The --{__FRACTURES_OUTPUT_DIR} given directory is not writable." ) + + output_name = os.path.basename( mesh_vtk_output.output ) + splitted_name_without_expension: list[ str ] = output_name.split( "." 
)[ :-1 ] + name_without_extension: str = '_'.join( splitted_name_without_expension ) + "_" + all_fractures_VtkOuput: list[ VtkOutput ] = list() + for fracture_name in fracture_names: + fracture_path = os.path.join( fracture_output_dir, name_without_extension + fracture_name ) + all_fractures_VtkOuput.append( VtkOutput( fracture_path, fractures_data_mode ) ) + return all_fractures_VtkOuput diff --git a/geos-mesh/tests/test_generate_fractures.py b/geos-mesh/tests/test_generate_fractures.py index 30e0e7d..2592c17 100644 --- a/geos-mesh/tests/test_generate_fractures.py +++ b/geos-mesh/tests/test_generate_fractures.py @@ -2,19 +2,17 @@ import numpy import pytest from dataclasses import dataclass -from typing import Iterable, Iterator, Sequence, TypeAlias +from typing import Iterable, Iterator, Sequence from vtkmodules.vtkCommonDataModel import ( vtkUnstructuredGrid, vtkQuad, VTK_HEXAHEDRON, VTK_POLYHEDRON, VTK_QUAD ) from vtkmodules.util.numpy_support import numpy_to_vtk, vtk_to_numpy from geos.mesh.doctor.checks.vtk_utils import to_vtk_id_list from geos.mesh.doctor.checks.check_fractures import format_collocated_nodes from geos.mesh.doctor.checks.generate_cube import build_rectilinear_blocks_mesh, XYZ -from geos.mesh.doctor.checks.generate_fractures import ( __split_mesh_on_fracture, Options, FracturePolicy, +from geos.mesh.doctor.checks.generate_fractures import ( __split_mesh_on_fractures, Options, FracturePolicy, Coordinates3D, IDMapping ) -""" -TypeAliases used in this file -""" -FaceNodesCoords: TypeAlias = tuple[ tuple[ float ] ] -IDMatrix: TypeAlias = Sequence[ Sequence[ int ] ] + +FaceNodesCoords = tuple[ tuple[ float ] ] +IDMatrix = Sequence[ Sequence[ int ] ] @dataclass( frozen=True ) @@ -55,7 +53,12 @@ def __build_test_case( xs: tuple[ numpy.ndarray, numpy.ndarray, numpy.ndarray ], else: fv = frozenset( field_values ) - options = Options( policy=policy, field="attribute", field_values=fv, vtk_output=None, vtk_fracture_output=None ) + options = Options( 
policy=policy, + field="attribute", + field_values_combined=fv, + field_values_per_fracture=[ fv ], + mesh_VtkOutput=None, + all_fractures_VtkOutput=None ) return mesh, options @@ -200,7 +203,8 @@ def __generate_test_data() -> Iterator[ TestCase ]: @pytest.mark.parametrize( "test_case", __generate_test_data() ) def test_generate_fracture( test_case: TestCase ): - main_mesh, fracture_mesh = __split_mesh_on_fracture( test_case.input_mesh, test_case.options ) + main_mesh, fracture_meshes = __split_mesh_on_fractures( test_case.input_mesh, test_case.options ) + fracture_mesh: vtkUnstructuredGrid = fracture_meshes[ 0 ] assert main_mesh.GetNumberOfPoints() == test_case.result.main_mesh_num_points assert main_mesh.GetNumberOfCells() == test_case.result.main_mesh_num_cells assert fracture_mesh.GetNumberOfPoints() == test_case.result.fracture_mesh_num_points @@ -295,7 +299,7 @@ def add_quad( mesh: vtkUnstructuredGrid, face: FaceNodesCoords ): def test_copy_fields_when_splitting_mesh(): """This test is designed to check the __copy_fields method from generate_fractures, - that will be called when using __split_mesh_on_fracture method from generate_fractures. + that will be called when using __split_mesh_on_fractures method from generate_fractures. 
""" # Generating the rectilinear grid and its quads on all borders x: numpy.array = numpy.array( [ 0, 1, 2 ] ) @@ -322,10 +326,12 @@ def test_copy_fields_when_splitting_mesh(): # Split the mesh along the fracture surface which is number 9 on TestField options = Options( policy=FracturePolicy.INTERNAL_SURFACES, field="TestField", - field_values=frozenset( map( int, [ "9" ] ) ), - vtk_output=None, - vtk_fracture_output=None ) - main_mesh, fracture_mesh = __split_mesh_on_fracture( mesh, options ) + field_values_combined=frozenset( map( int, [ "9" ] ) ), + field_values_per_fracture=[ frozenset( map( int, [ "9" ] ) ) ], + mesh_VtkOutput=None, + all_fractures_VtkOutput=None ) + main_mesh, fracture_meshes = __split_mesh_on_fractures( mesh, options ) + fracture_mesh: vtkUnstructuredGrid = fracture_meshes[ 0 ] assert main_mesh.GetCellData().GetNumberOfArrays() == 1 assert fracture_mesh.GetCellData().GetNumberOfArrays() == 1 assert main_mesh.GetCellData().GetArrayName( 0 ) == "TestField" @@ -338,7 +344,7 @@ def test_copy_fields_when_splitting_mesh(): # Test for invalid point field name add_simplified_field_for_cells( mesh, "GLOBAL_IDS_POINTS", 1 ) with pytest.raises( ValueError ) as pytest_wrapped_e: - main_mesh, fracture_mesh = __split_mesh_on_fracture( mesh, options ) + main_mesh, fracture_meshes = __split_mesh_on_fractures( mesh, options ) assert pytest_wrapped_e.type == ValueError # Test for invalid cell field name mesh: vtkUnstructuredGrid = build_rectilinear_blocks_mesh( [ xyzs ] ) @@ -350,5 +356,5 @@ def test_copy_fields_when_splitting_mesh(): add_simplified_field_for_cells( mesh, "GLOBAL_IDS_CELLS", 1 ) assert mesh.GetCellData().GetNumberOfArrays() == 2 with pytest.raises( ValueError ) as pytest_wrapped_e: - main_mesh, fracture_mesh = __split_mesh_on_fracture( mesh, options ) + main_mesh, fracture_meshes = __split_mesh_on_fractures( mesh, options ) assert pytest_wrapped_e.type == ValueError From 83e92b61b544211c93f7dd9fc55d62a5c17f196f Mon Sep 17 00:00:00 2001 From: 
Aleks Novikov Date: Fri, 15 Nov 2024 11:26:55 +0100 Subject: [PATCH 07/10] feat: Python interfaces to geos from Makutu repo (#44) undefined --- pygeos-tools/pyproject.toml | 4 + .../pygeos_tools/utilities/input/GeosxArgs.py | 202 +++ .../geos/pygeos_tools/utilities/input/Xml.py | 120 ++ .../pygeos_tools/utilities/input/__init__.py | 18 + .../utilities/mesh/InternalMesh.py | 144 +++ .../utilities/mesh/VtkFieldSpecifications.py | 197 +++ .../pygeos_tools/utilities/mesh/VtkMesh.py | 632 ++++++++++ .../pygeos_tools/utilities/mesh/__init__.py | 19 + .../pygeos_tools/utilities/model/__init__ .py | 33 + .../utilities/model/utils/VtkModel.py | 1096 +++++++++++++++++ .../utilities/model/utils/vtkUtils.py | 488 ++++++++ .../utilities/solvers/AcousticSolver.py | 398 ++++++ .../utilities/solvers/ElasticSolver.py | 319 +++++ .../utilities/solvers/GeomechanicsSolver.py | 104 ++ .../utilities/solvers/ReservoirSolver.py | 216 ++++ .../pygeos_tools/utilities/solvers/Solver.py | 697 +++++++++++ .../utilities/solvers/WaveSolver.py | 520 ++++++++ .../utilities/solvers/__init__.py | 30 + .../utilities/solvers/utils/solverutils.py | 43 + 19 files changed, 5280 insertions(+) create mode 100644 pygeos-tools/src/geos/pygeos_tools/utilities/input/GeosxArgs.py create mode 100644 pygeos-tools/src/geos/pygeos_tools/utilities/input/Xml.py create mode 100644 pygeos-tools/src/geos/pygeos_tools/utilities/input/__init__.py create mode 100644 pygeos-tools/src/geos/pygeos_tools/utilities/mesh/InternalMesh.py create mode 100644 pygeos-tools/src/geos/pygeos_tools/utilities/mesh/VtkFieldSpecifications.py create mode 100644 pygeos-tools/src/geos/pygeos_tools/utilities/mesh/VtkMesh.py create mode 100644 pygeos-tools/src/geos/pygeos_tools/utilities/mesh/__init__.py create mode 100644 pygeos-tools/src/geos/pygeos_tools/utilities/model/__init__ .py create mode 100644 pygeos-tools/src/geos/pygeos_tools/utilities/model/utils/VtkModel.py create mode 100644 
pygeos-tools/src/geos/pygeos_tools/utilities/model/utils/vtkUtils.py create mode 100644 pygeos-tools/src/geos/pygeos_tools/utilities/solvers/AcousticSolver.py create mode 100644 pygeos-tools/src/geos/pygeos_tools/utilities/solvers/ElasticSolver.py create mode 100644 pygeos-tools/src/geos/pygeos_tools/utilities/solvers/GeomechanicsSolver.py create mode 100644 pygeos-tools/src/geos/pygeos_tools/utilities/solvers/ReservoirSolver.py create mode 100644 pygeos-tools/src/geos/pygeos_tools/utilities/solvers/Solver.py create mode 100644 pygeos-tools/src/geos/pygeos_tools/utilities/solvers/WaveSolver.py create mode 100644 pygeos-tools/src/geos/pygeos_tools/utilities/solvers/__init__.py create mode 100644 pygeos-tools/src/geos/pygeos_tools/utilities/solvers/utils/solverutils.py diff --git a/pygeos-tools/pyproject.toml b/pygeos-tools/pyproject.toml index 2a6f5de..24f846f 100644 --- a/pygeos-tools/pyproject.toml +++ b/pygeos-tools/pyproject.toml @@ -22,6 +22,10 @@ dependencies = [ "numpy", "scipy", "mpi4py", + "vtk", + "pyevtk", + "xmltodict", + "h5py", ] [tool.mypy] diff --git a/pygeos-tools/src/geos/pygeos_tools/utilities/input/GeosxArgs.py b/pygeos-tools/src/geos/pygeos_tools/utilities/input/GeosxArgs.py new file mode 100644 index 0000000..f6faf41 --- /dev/null +++ b/pygeos-tools/src/geos/pygeos_tools/utilities/input/GeosxArgs.py @@ -0,0 +1,202 @@ +# ------------------------------------------------------------------------------------------------------------ +# SPDX-License-Identifier: LGPL-2.1-only +# +# Copyright (c) 2016-2024 Lawrence Livermore National Security LLC +# Copyright (c) 2018-2024 TotalEnergies +# Copyright (c) 2018-2024 The Board of Trustees of the Leland Stanford Junior University +# Copyright (c) 2023-2024 Chevron +# Copyright (c) 2019- GEOS/GEOSX Contributors +# Copyright (c) 2019- INRIA project-team Makutu +# All rights reserved +# +# See top level LICENSE, COPYRIGHT, CONTRIBUTORS, NOTICE, and ACKNOWLEDGEMENTS files for details. 
+# ------------------------------------------------------------------------------------------------------------ + +import sys +import argparse + +class GeosxArgs: + """ + Class containing the main argument in command line for GEOSX + + Attributes + ---------- + cmdline : list of str + List corresponding to a splitted GEOSX submission line + options : dict + Dict containing GEOSX main options + """ + def __init__(self, args=sys.argv.copy()): + """ + Parameters + ---------- + args : list of str + List corresponding to a splitted submission line with options + """ + self.cmdline = args + self.options = self.optionsParser() + + + def optionsParser(self, cmdline=[]): + """ + Return a dict with useful geosx options parsed from a list/submission line. + + Parameters + ---------- + cmdline : list of str + List corresponding to a splitted GEOSX submission line + + Returns + -------- + dict : + Dict containing GEOSX main options + """ + if not cmdline: + cmdline = self.cmdline + desc = "Parser of GEOSX command line" + parser = argparse.ArgumentParser("GEOSX command line parser", + allow_abbrev=False, + description=desc) + + parser.add_argument("--input", "-i", "--xml", + type=str, dest="xml", default=None, + help="XML input file") + parser.add_argument("--restart", "-r", + dest="restart", type=str, default=None, + help="Target restart filename") + parser.add_argument("-x-partitions", "-x", + type=int, dest="xpart", default=None, + help="Number of partitions in the x direction.") + parser.add_argument("-y-partitions", "-y", + type=int, dest="ypart", default=None, + help="Number of partitions in the y direction.") + parser.add_argument("-z-partitions", "-z", + type=int, dest="zpart", default=None, + help="Number of partitions in the z direction.") + parser.add_argument("--output", "-o", + type=str, dest="outputdir", default=None, + help="Directory to put the output files") + parser.add_argument("--use-nonblocking", "-b", default=None, + dest="non_blocking", 
action="store_true", + help="Use non-blocking MPI communication") + parser.add_argument("--name", "-n", + dest="name", default=None, + help="Name of the problem, used for output") + parser.add_argument("--suppress-pinned", "-s", + dest="supp_pinned", action="store_true", default=None, + help="Suppress usage of pinned memory for MPI communication buffers") + parser.add_argument("--timers", "-t", + type=str, dest="timers", default=None, + help="String specifying the type of timer output") + parser.add_argument("--trace-data-migration", + dest="trace_data_mig", action="store_true", default=None, + help="Trace host-device data migration") + parser.add_argument("--pause-for", + dest="pause_for", default=None, + help="Pause geosx for a given number of seconds before starting execution") + + return vars(parser.parse_known_args(cmdline)[0]) + + + def updateArg(self, optionName=None, newValue=None): + """ + Update the GEOSX initialization arguments + + Parameters + ---------- + optionName : str + Name of the option to update + newValue : str + New value for the argument to be updated. + + Returns + ------- + bool : True if the option has been (found and) updated. False otherwise. 
+ """ + if optionName is not None: + if self.options[optionName] != newValue: + self.options.update({optionName: newValue}) + return True + + return False + + + def getCommandLine(self): + """ + Return the command line specific to GEOSX initialization + + Returns + -------- + cl : list of str + List containing all the options requested + """ + cl = [""] + for opt, val in self.options.items(): + if val is not None: + ab = GeosxAbbrevOption().getAbbrv(opt) + cl += [ab] + if not isinstance(self.options[opt], bool): + cl += [str(self.options[opt])] + + return cl + + +class GeosxAbbrevOption: + """ + Class containing GEOSX command line options abbreviations + + Attributes + ---------- + abbrvDict : dict + Dict containing lists of abbreviations for GEOSX command line options + """ + def __init__(self): + self.abbrvDict = {"xml": ("--input", "-i"), + "restart": ("--restart", "r"), + "xpart": ("-x-partitions", "-x"), + "ypart": ("-y-partitions", "-y"), + "zpart": ("-z-partitions", "-z"), + "non_blocking": ("--use-non-blocking", "-b"), + "name": ("--name", "-n"), + "supp_pinned": ("--suppress-pinned"), + "outputdir": ("--output", "-o"), + "timers": ("--timers", "-t"), + "trace_data_mig": ("--trace-data-migration"), + "pause_for": ("--pause-for") + } + + def getAbbrv(self, optionName=None): + """ + Returns the abbreviation corresponding to the requested option + + Parameters + ---------- + optionName : str + Name of the requested option + + Returns + ------- + str : Requested abbreviations + """ + return self.abbrvDict[optionName][-1] + + + def getAllAbbrv(self, optionName=None): + """ + Returns the abbreviation corresponding to the requested option + + Parameters + ---------- + optionName : str + Name of the requested option + + Returns + ------- + list of str : + Requested abbreviations + """ + try: + return self.abbrvDict[optionName] + + except: + return [""] diff --git a/pygeos-tools/src/geos/pygeos_tools/utilities/input/Xml.py 
b/pygeos-tools/src/geos/pygeos_tools/utilities/input/Xml.py new file mode 100644 index 0000000..161fdab --- /dev/null +++ b/pygeos-tools/src/geos/pygeos_tools/utilities/input/Xml.py @@ -0,0 +1,120 @@ +# ------------------------------------------------------------------------------------------------------------ +# SPDX-License-Identifier: LGPL-2.1-only +# +# Copyright (c) 2016-2024 Lawrence Livermore National Security LLC +# Copyright (c) 2018-2024 TotalEnergies +# Copyright (c) 2018-2024 The Board of Trustees of the Leland Stanford Junior University +# Copyright (c) 2023-2024 Chevron +# Copyright (c) 2019- GEOS/GEOSX Contributors +# Copyright (c) 2019- INRIA project-team Makutu +# All rights reserved +# +# See top level LICENSE, COPYRIGHT, CONTRIBUTORS, NOTICE, and ACKNOWLEDGEMENTS files for details. +# ------------------------------------------------------------------------------------------------------------ + +import os +from xml.etree import cElementTree as ET +import xmltodict +from re import findall +from ..mesh.InternalMesh import InternalMesh +from ..mesh.VtkMesh import VTKMesh + + +class XML(): + def __init__(self, xmlFile): + self.filename = xmlFile + + self.tree = ET.parse(xmlFile) + root = self.tree.getroot() + to_string = ET.tostring(root, method='xml') + + self.outputs = None + + root = xmltodict.parse(to_string, attr_prefix="" ,dict_constructor=dict) + for k, v in root['Problem'].items(): + words = findall('[A-Z][^A-Z]*', k) + words[0] = words[0].lower() + attr = "" + for word in words: + attr += word + setattr(self, attr, v) + + + + def updateSolvers(self, solverName, **kwargs): + root = self.tree.getroot() + solver = root.find("./Solvers/"+solverName) + for k, v in kwargs.items(): + if k in solver.attrib: + solver.set(k, v) + self.solvers[solverName].update({k:str(v)}) + + + def updateMesh(self, **kwargs): + root = self.tree.getroot() + mesh = root.find("./Mesh//") + for k, v in kwargs.items(): + if k in mesh.attrib: + mesh.set(k, v) + 
self.mesh[mesh.tag].update({k:str(v)}) + + + def updateGeometry(self, boxname, **kwargs): + root = self.tree.getroot() + geometry = root.find("./Geometry//*[@name="+boxname+"]") + + for i in len(self.geometry[geometry.tag]): + box = self.geometry[geometry.tag][i] + if boxname == box["name"]: + break + + for k, v in kwargs.items(): + if k in geometry.attrib: + geometry.set(k, v) + self.geometry[geometry.tag][i].update({k:str(v)}) + + + def getMeshObject(self): + + if "InternalMesh" in self.mesh.keys(): + #Not working properly for now + return InternalMesh(self) + + elif "VTKMesh" in self.mesh.keys(): + vtkFile = self.mesh["VTKMesh"]["file"] + if not os.path.isabs(vtkFile): + vtkFile = os.path.join(os.path.split(self.filename)[0], vtkFile) + return VTKMesh(vtkFile) + + def getAttribute(self, parentElement, attributeTag): + if parentElement == "root": + pElement = self.tree.find(f"./[@{attributeTag}]") + else: + pElement = self.tree.find(f"./*/{parentElement}/[@{attributeTag}]") + + return pElement.get(attributeTag) + + + def getSolverType(self): + return [k for k in self.solvers.keys() if k[0].isupper()] + + + def getSourcesAndReceivers(self): + solverType = self.getSolverType() + if len(solverType) > 1: + pass + else: + src = self.getAttribute(f"{solverType[0]}", "sourceCoordinates") + src = eval(src.replace("{", "[").replace("}", "]")) + + rcv = self.getAttribute(f"{solverType[0]}", "receiverCoordinates") + rcv = eval(rcv.replace("{", "[").replace("}", "]")) + + return src, rcv + + + def exportToXml(self, filename=None): + if filename is None: + self.tree.write(self.filename) + else: + self.tree.write(filename) diff --git a/pygeos-tools/src/geos/pygeos_tools/utilities/input/__init__.py b/pygeos-tools/src/geos/pygeos_tools/utilities/input/__init__.py new file mode 100644 index 0000000..af3b85b --- /dev/null +++ b/pygeos-tools/src/geos/pygeos_tools/utilities/input/__init__.py @@ -0,0 +1,18 @@ +# 
------------------------------------------------------------------------------------------------------------ +# SPDX-License-Identifier: LGPL-2.1-only +# +# Copyright (c) 2016-2024 Lawrence Livermore National Security LLC +# Copyright (c) 2018-2024 TotalEnergies +# Copyright (c) 2018-2024 The Board of Trustees of the Leland Stanford Junior University +# Copyright (c) 2023-2024 Chevron +# Copyright (c) 2019- GEOS/GEOSX Contributors +# Copyright (c) 2019- INRIA project-team Makutu +# All rights reserved +# +# See top level LICENSE, COPYRIGHT, CONTRIBUTORS, NOTICE, and ACKNOWLEDGEMENTS files for details. +# ------------------------------------------------------------------------------------------------------------ + +"""Input handle""" + +from .GeosxArgs import GeosxArgs, GeosxAbbrevOption +from .Xml import XML \ No newline at end of file diff --git a/pygeos-tools/src/geos/pygeos_tools/utilities/mesh/InternalMesh.py b/pygeos-tools/src/geos/pygeos_tools/utilities/mesh/InternalMesh.py new file mode 100644 index 0000000..5019b91 --- /dev/null +++ b/pygeos-tools/src/geos/pygeos_tools/utilities/mesh/InternalMesh.py @@ -0,0 +1,144 @@ +# ------------------------------------------------------------------------------------------------------------ +# SPDX-License-Identifier: LGPL-2.1-only +# +# Copyright (c) 2016-2024 Lawrence Livermore National Security LLC +# Copyright (c) 2018-2024 TotalEnergies +# Copyright (c) 2018-2024 The Board of Trustees of the Leland Stanford Junior University +# Copyright (c) 2023-2024 Chevron +# Copyright (c) 2019- GEOS/GEOSX Contributors +# Copyright (c) 2019- INRIA project-team Makutu +# All rights reserved +# +# See top level LICENSE, COPYRIGHT, CONTRIBUTORS, NOTICE, and ACKNOWLEDGEMENTS files for details. 
+# ------------------------------------------------------------------------------------------------------------ + +import numpy as np + + +class InternalMesh: + """ + GEOSX Internal Mesh + + Attributes + ---------- + xml : XML + XML object containing the information on the mesh + bounds : list of list + Real bounds of the mesh [[xmin, xmax],[ymin,ymax],[zmin,zmax]] + nx : int + Number of elements in the x direction + ny : int + Number of elements in the y direction + nz : int + Number of elements in the z direction + order : int + Mesh order + cellBlockNames : str + Names of each mesh block + cellBounds : + elementTypes : + Element types of each mesh block + numberOfCells : int + Total number of cells + numberOfPoints : int + Total number of points + fieldSpecifications : dict + Dict containing the mesh field specifications + """ + def __init__(self, xml): + """ + Parameters + ---------- + xml : XML + XML object containing the information on the mesh + """ + self.xml = xml + + mesh = xml.mesh["InternalMesh"] + elementRegion = xml.elementRegions["CellElementRegion"] + fieldSpecifications = xml.fieldSpecifications + + name = mesh["name"] + + xCoords = list(eval(mesh["xCoords"])) + yCoords = list(eval(mesh["yCoords"])) + zCoords = list(eval(mesh["zCoords"])) + + self.bounds = [[xCoords[0], xCoords[-1]], [yCoords[0], yCoords[-1]], [zCoords[0], zCoords[-1]]] + + + nxStr = mesh["nx"].strip('').replace('{','').replace('}','').split(',') + nyStr = mesh["ny"].strip('').replace('{','').replace('}','').split(',') + nzStr = mesh["nz"].strip('').replace('{','').replace('}','').split(',') + + nx = [eval(nx) for nx in nxStr] + ny = [eval(ny) for ny in nyStr] + nz = [eval(nz) for nz in nzStr] + + self.nx = nx + self.ny = ny + self.nz = nz + + order = 1 + self.order = order + + self.cellBlockNames = mesh["cellBlockNames"].strip('').replace('{','').replace('}','').split(',') + + xlayers = [] + ylayers = [] + zlayers = [] + for i in range(len(nx)): + xlayers.append([xCoords[i], 
xCoords[i+1]]) + for i in range(len(ny)): + ylayers.append([yCoords[i], yCoords[i+1]]) + for i in range(len(nz)): + zlayers.append([zCoords[i], zCoords[i+1]]) + + self.layers = [xlayers, ylayers, zlayers] + + xCellsBounds = np.zeros(sum(nx)+1) + yCellsBounds = np.zeros(sum(ny)+1) + zCellsBounds = np.zeros(sum(nz)+1) + + for i in range(len(nx)): + xstep = (xlayers[i][1]-xlayers[i][0])/nx[i] + if i == 0: + xCellsBounds[0:nx[i]] = np.arange(xlayers[i][0], xlayers[i][1], xstep) + else : + xCellsBounds[nx[i-1]:sum(nx[0:i+1])] = np.arange(xlayers[i][0], xlayers[i][1], xstep) + xCellsBounds[nx[-1]] = xlayers[i][1] + + for i in range(len(ny)): + ystep = (ylayers[i][1]-ylayers[i][0])/ny[i] + if i == 0: + yCellsBounds[0:ny[i]] = np.arange(ylayers[i][0], ylayers[i][1], ystep) + else : + xCellsBounds[ny[i-1]:sum(ny[0:i+1])] = np.arange(ylayers[i][0], ylayers[i][1], ystep) + yCellsBounds[ny[-1]] = ylayers[i][1] + + for i in range(len(nz)): + zstep = (zlayers[i][1]-zlayers[i][0])/nz[i] + if i == 0: + zCellsBounds[0:nz[i]] = np.arange(zlayers[i][0], zlayers[i][1], zstep) + else : + zCellsBounds[nz[i-1]:sum(nz[0:i+1])] = np.arange(zlayers[i][0], zlayers[i][1], zstep) + zCellsBounds[nz[-1]] = zlayers[i][1] + + self.cellBounds = [xCellsBounds, yCellsBounds, zCellsBounds] + + elementTypes = mesh["elementTypes"].strip('').replace('{','').replace('}','').split(',') + + self.elementTypes=[] + for type in elementTypes: + if type == "C3D8": + self.elementTypes.append("Hexahedron") + else: + self.elementTypes.append(type) + + + self.numberOfCells = sum(nx) * sum(ny) * sum(nz) + self.numberOfPoints = (sum(nx) + 1) * (sum(ny) + 1) * (sum(nz) + 1) + + self.fieldSpecifications = xml.fieldSpecifications + self.isSet = True + diff --git a/pygeos-tools/src/geos/pygeos_tools/utilities/mesh/VtkFieldSpecifications.py b/pygeos-tools/src/geos/pygeos_tools/utilities/mesh/VtkFieldSpecifications.py new file mode 100644 index 0000000..cba7259 --- /dev/null +++ 
b/pygeos-tools/src/geos/pygeos_tools/utilities/mesh/VtkFieldSpecifications.py @@ -0,0 +1,197 @@ +# ------------------------------------------------------------------------------------------------------------ +# SPDX-License-Identifier: LGPL-2.1-only +# +# Copyright (c) 2016-2024 Lawrence Livermore National Security LLC +# Copyright (c) 2018-2024 TotalEnergies +# Copyright (c) 2018-2024 The Board of Trustees of the Leland Stanford Junior University +# Copyright (c) 2023-2024 Chevron +# Copyright (c) 2019- GEOS/GEOSX Contributors +# Copyright (c) 2019- INRIA project-team Makutu +# All rights reserved +# +# See top level LICENSE, COPYRIGHT, CONTRIBUTORS, NOTICE, and ACKNOWLEDGEMENTS files for details. +# ------------------------------------------------------------------------------------------------------------ + +import copy +import numpy as np +from vtk.util import numpy_support as VN + + +class VTKFieldSpecifications: + """ + Object containing field dataset of a VTK format mesh + + Attributes + ---------- + arrays : dict + Dict containing the {array names : array} of the given dataset + """ + def __init__(self, fieldData): + """ + Parameters + ---------- + fieldData : vtk.vtkFieldData + Data contained in the VTK mesh + """ + self.arrays = {} + assert(fieldData.IsA("vtkFieldData")) + for i in range(fieldData.GetNumberOfArrays()): + value = fieldData.GetArray(i) + key = value.GetName() + self.arrays.update({key: value}) + self.fieldtype = None + + + def hasArray(self, name): + """ + Check if the object contains an array with the requested name + + Parameters + ---------- + name : str + Name of the cell/point data array + + Returns + ------- + bool : True if the array exists. 
False otherwise + """ + if name in self.arrays.keys(): + return True + else: + return False + + + def getArray(self, name, sorted=False): + """ + Return the requested array from the cell data + + Parameters + ---------- + name : str + Name of the cell/point data array + sorted : bool + Sort with global ids \ + Require the presence of a global ids array + + Returns + ------- + numpy array : requested array + """ + array = None + + if self.hasArray(name): + array = VN.vtk_to_numpy(self.arrays[name]) + + if sorted: + ftype = self.fieldtype.split("Data")[0] + if self.hasArray(f"Global{ftype}Ids"): + gids = self.getCopyArray(f"Global{ftype}Ids") + array = array[np.argsort(gids)] + + return array + + + def getCopyArray(self, name, **kwargs): + """ + Return a copy of the requested array from the cell data + + Parameters + ---------- + name : str + Name of the cell/point data array + + Returns + ------- + numpy array : copy of the requested array + """ + + array = self.getArray(name, **kwargs) + + if array is not None: + array = copy.deepcopy(array) + + return array + + + def getVtkArray(self, name): + """ + Return the vtkDataArray requested + + Parameters + ---------- + name : str + Name of the cell/point data array + + Returns + ------- + numpy array : copy of the requested array + """ + if self.hasArray(name): + return self.arrays[name] + else: + return None + + + def setArray(self, name, value, overwrite=False): + """ + Return a copy of the requested array from the cell data + + Parameters + ---------- + name : str + Name of the cell data array + + Returns + ------- + numpy array : copy of the requested array + """ + if self.hasArray(name) and overwrite == False: + print(f"Warning! \n This dataset already contains a cell data array named {name}. 
Set the 'overwrite' parameter to True to bypass this warning") + else: + array = VN.vtk_to_numpy(self.arrays[name]) + array[:] = value[:] + + + +class VTKCellSpecifications(VTKFieldSpecifications): + """ + Contains the cell data information from a VTK Mesh + Inherits from VTKFieldSpecifications + """ + def __init__(self, celldata): + """ + Parameters + ---------- + celldata : vtk.vtkCellData + Cell data of the mesh + """ + assert(celldata.IsA("vtkCellData")) + super().__init__(fieldData=celldata) + self.fieldtype = "CellData" + + +class VTKPointSpecifications(VTKFieldSpecifications): + """ + Contains the point data information from a VTK Mesh + Inherits from VTKFieldSpecifications + + Parameters + ---------- + pointdata : vtk.vtkPointData + Point data of the mesh + + Attributes + --------- + arrays : dict + Dict containing the {name, vtkDataArray} of each point data array + """ + def __init__(self, pointdata): + """ + Parameters + ---------- + pointdata : vtk.vtkPointData + Point data of the mesh + """ + assert(pointdata.IsA("vtkPointData")) + super().__init__(fieldData=pointdata) + self.fieldtype = "PointData" diff --git a/pygeos-tools/src/geos/pygeos_tools/utilities/mesh/VtkMesh.py b/pygeos-tools/src/geos/pygeos_tools/utilities/mesh/VtkMesh.py new file mode 100644 index 0000000..f89a1e5 --- /dev/null +++ b/pygeos-tools/src/geos/pygeos_tools/utilities/mesh/VtkMesh.py @@ -0,0 +1,632 @@ +# ------------------------------------------------------------------------------------------------------------ +# SPDX-License-Identifier: LGPL-2.1-only +# +# Copyright (c) 2016-2024 Lawrence Livermore National Security LLC +# Copyright (c) 2018-2024 TotalEnergies +# Copyright (c) 2018-2024 The Board of Trustees of the Leland Stanford Junior University +# Copyright (c) 2023-2024 Chevron +# Copyright (c) 2019- GEOS/GEOSX Contributors +# Copyright (c) 2019- INRIA project-team Makutu +# All rights reserved +# +# See top level LICENSE, COPYRIGHT, CONTRIBUTORS, NOTICE, and 
ACKNOWLEDGEMENTS files for details. +# ------------------------------------------------------------------------------------------------------------ + +import os +import vtk +from vtk.util import numpy_support as VN + +from .VtkFieldSpecifications import VTKCellSpecifications, VTKPointSpecifications +from ..model.utils.vtkUtils import cGlobalIds + + + +class VTKMesh: + """ + VTK format Mesh. Now handling .vtu .vts .pvts .pvtu + + Attributes + ---------- + meshfile : str + Mesh filename + vtktype : str + Format of the VTK mesh + bounds : tuple of int + Real bounds of the mesh (xmin, xmax, ymin, ymax, zmin, zmax) + numberOfPoints : int + Total number of points of the mesh + numberOfCells : int + Total number of cells of the mesh + isSet : bool + Whether or not the mesh properties have been set + hasLocator : bool + Whether or not the mesh cell locator has been initialized + """ + def __init__(self, meshfile): + """ + Parameters + ---------- + meshfile : str + Mesh filename + """ + self.meshfile = meshfile + self.vtktype = os.path.splitext(self.meshfile)[-1][1:] + + self.bounds = None + self.numberOfPoints = None + self.numberOfCells = None + + self.isSet = False + self.hasLocator = False + + + def getReader(self): + """Return the appropriate reader given the VTK format + + Returns + -------- + vtk.vtkXMLReader + Appropriate reader given the format of the VTK mesh file. 
+ """ + + if self.vtktype == "vtu": + return vtk.vtkXMLUnstructuredGridReader() + elif self.vtktype == "vts": + return vtk.vtkXMLStructuredGridReader() + elif self.vtktype == "pvtu": + return vtk.vtkXMLPUnstructuredGridReader() + elif self.vtktype == "pvts": + return vtk.vtkXMLPStructuredGridReader() + else: + print("This VTK format is not handled.") + return None + + + def read(self): + """Read information from the VTK file + + Returns + -------- + vtk.vtkDataObject + General representation of VTK mesh data + """ + reader = self.getReader() + reader.SetFileName(self.meshfile) + reader.Update() + + return reader.GetOutput() + + + def setMeshProperties(self): + """Read and set as attributes the bounds, number of points and cells""" + data = self.read() + + self.bounds = data.GetBounds() + self.numberOfPoints = data.GetNumberOfPoints() + self.numberOfCells = data.GetNumberOfCells() + + self.isSet = True + + + def getBounds(self): + """ + Get the bounds of the mesh in the format: + (xmin, xmax, ymin, ymax, zmin, zmax) + + Returns + ------- + tuple or None + Bounds of the mesh + """ + return self.bounds + + + def getNumberOfPoints(self): + """ + Get the total number of points of the mesh + + Returns + ------- + int + Number of points + """ + return self.numberOfPoints + + + def getNumberOfCells(self): + """ + Get the total number of cells of the mesh + + Returns + ------- + int + Number of cells + """ + return self.numberOfCells + + + def getCellData(self): + """Read the cell data + + Returns + -------- + VTKCellSpecifications + Cell data information + """ + data = self.read() + + return VTKCellSpecifications(data.GetCellData()) + + + def getPointData(self): + """Read the point data + + Returns + -------- + VTKPointSpecifications + Point data information + """ + data = self.read() + + return VTKPointSpecifications(data.GetPointData()) + + + def getArray(self, name, dtype="cell", copy=False, sorted=False): + """ + Return a cell or point data array. 
If the file is a pvtu, the array is sorted with global ids + + Parameters + ----------- + name : str + Name of the vtk cell/point data array + dtype : str + Type of vtk data \ + `cell` or `point` + copy : bool + Return a copy of the requested array + Default is False + sorted : bool + Return the array sorted with respect to GlobalPointIds or GlobalCellIds + Default is False + + Returns + -------- + array : numpy array + Requested array + """ + assert dtype.lower() in ("cell", "point") + + if dtype.lower() == "cell": + fdata = self.getCellData() + else: + fdata = self.getPointData() + + if copy: + array = fdata.getCopyArray(name, sorted=sorted) + else: + array = fdata.getArray(name, sorted=sorted) + + return array + + + def extractMesh(self, center, srootname, dist=[None, None, None], comm=None, export=True): + """ + Extract a rectangular submesh such that for each axis we have the subax: [center-dist, center+dist] + + Parameters + --------- + center : 3d float + Requested center of the subbox + srootname : str + Submesh root filename + dist : 3d float + Distance to the center in each direction + comm : MPI.COMM_WORLD + MPI communicator + + Returns + ------- + VTKMesh + Submesh extracted + """ + assert self.vtktype in ("vtu", "pvtu", "vts", "pvts") + vtktype = self.vtktype[-3:] + sfilename = ".".join((srootname, vtktype)) + + if comm is None or comm.Get_rank() == 0: + if not self.isSet: + self.setMeshProperties() + minpos = [] + maxpos = [] + + for i in range(3): + xmin, d = self.getSubAx(center[i], dist[i], ax=i+1) + minpos.append(xmin) + maxpos.append(xmin+d) + + data = self.read() + submesh = VTKSubMesh(sfilename, data, minpos, maxpos, create=export) + + else: + submesh = VTKMesh(sfilename) + + # if file creation, wait for rank 0 to finish + if export: + info = "Done" + comm.bcast(info, root=0) + + return submesh + + + def getSubAx(self, center, dist, ax): + """ + Return the min and max positions in the mesh given the center, distance and ax considered. 
If the 2*distance if greater than the bounds, the min/max is the corresponding mesh bound. + + Parameters + ---------- + center : float + Central position considered + dist : float + Max distance requested + ax : int + Ax to consider (1, 2, 3) + + Returns + ------- + min, max : float + Min and Max positions + """ + assert(type(ax) == int) + + bounds = [self.bounds[(ax-1)*2], self.bounds[ax*2-1]] + + if dist is not None: + dist = abs(dist) + ox = max(bounds[0], center-dist) + x = min(bounds[1]-ox, 2*dist) + else: + ox = bounds[0] + x = bounds[1] + + return ox, x + + + def getNumberOfBlocks(self): + """Return the number of blocks of a mesh.""" + if self.vtktype in ["pvtu", "pvts"]: + with open(self.meshfile) as ff: + nb = 0 + for line in ff: + m = line.split() + if m[0] == ' high are set to high + comm : MPI_COMM + MPI communicators + """ + root = 0 + rank = comm.Get_rank() + size = comm.Get_size() + + x = None + if rank == root: + with h5py.File( filename, 'r' ) as f: + x = f["velocity"][:] + + imin = np.where( x < low )[0] + imax = np.where( x > high )[0] + x[imin] = low + x[imax] = high + + if self.model == "1/c2": + x = np.sqrt(1/x) + elif self.model == "1/c": + x = 1/x + elif self.model == "c": + pass + else: + raise ValueError("Not implemented") + + startModel = self.bcastField( x, comm ) + + self.updateVelocityModel( startModel ) + + + def updateVelocityModel(self, vel, **kwargs): + """ + Update velocity value in GEOS + + Parameters + ---------- + vel : array + Values for velocity field + """ + super().updateVelocityModel(vel, velocityName="acousticVelocity", **kwargs) + + + def setConstantVelocityModel(self, vel, velFieldName="acousticVelocity", **kwargs): + """ + Update velocity value in GEOS, using a constant value. 
+ + Parameters + ---------- + vel : float + Value for velocity field + """ + prefix = self._getPrefixPath(**kwargs) + + velocity = self.solver.get_wrapper(prefix+velFieldName).value() + velocity.set_access_level(pygeosx.pylvarray.MODIFIABLE) + + velocity.to_numpy()[:] = vel + + + def getVelocityModel(self, filterGhost=False, **kwargs): + """ + Get the velocity values + + Parameters + ----------- + filterGhost : bool + Filter the ghost ranks + + Returns + ------- + velocity : numpy array + Velocity values + """ + velocity = super().getVelocityModel(velocityName="acousticVelocity", filterGhost=filterGhost, **kwargs) + + return velocity + + + def updateDensityModel(self, density, **kwargs): + """ + Update density values in GEOS + + Parameters + ----------- + density : array + New values for the density + """ + super().updateDensityModel( density=density, densityName="acousticDensity", **kwargs ) + + + def getGradient(self, filterGhost=False, **kwargs): + """Get the local rank gradient value + + Returns + -------- + partialGradient : Numpy Array + Array containing the element id list for the local rank + """ + partialGradient = self.getField("partialGradient", **kwargs) + + if filterGhost: + partialGradient = self.filterGhostRank(partialGradient, **kwargs) + + return partialGradient + + + def computePartialGradient( self, shotId, minDepth, comm, gradDirectory="partialGradient", **kwargs ): + """Compute the partial Gradient + + Parameters + ----------- + shotId : string + Number of the shot as string + minDepth : float + Depth at which gradient values are kept, otherwise it is set to 0. 
+ NOTE : this is done in this routine to avoid storage \ + of elementCenter coordinates in the .hdf5 \ + but might be problem for WolfeConditions later on \ + if minDepth is too large + comm : MPI_COMM + MPI communicators + gradDirectory : str, optional + Partial gradient directory \ + Default is `partialGradient` + """ + rank = comm.Get_rank() + size = comm.Get_size() + root = 0 + + # Get local gradient + grad = self.getGradient( **kwargs ) + if self.model == "1/c2": + x = self.getVelocityModel( **kwargs ) + grad = -( x * x * x / 2 ) * grad + elif self.model == "1/c": + x = self.getVelocityModel( filterGhost=True, **kwargs ) + grad = - x * x * grad + elif self.model == "c": + pass + else: + raise ValueError("Not implemented") + + grad = grad.astype( np.float64 ) + + zind = np.where(self.getElementCenter(**kwargs)[:,2] < minDepth)[0] + grad[zind] = 0.0 + + # Gather global gradient + gradFull, ntot = self.gatherField( field=grad, comm=comm, root=root ) + + if rank == root: + os.makedirs(gradDirectory, exist_ok=True) + + with h5py.File(f"{gradDirectory}/partialGradient_"+shotId+".hdf5", 'w') as h5p: + + h5p.create_dataset("partialGradient", data=np.zeros(ntot), chunks=True, maxshape=(ntot,)) + h5p["partialGradient"][:] = self.dtWaveField * gradFull + + shutil.move(f"{gradDirectory}/partialGradient_"+shotId+".hdf5", f"{gradDirectory}/partialGradient_ready_"+shotId+".hdf5") + + comm.Barrier() + + + def getPressureAtReceivers(self): + """ + Get the pressure values at receivers coordinates + + Returns + ------ + numpy Array : Array containing the pressure values at all time step at all receivers coordinates + """ + pressureAtReceivers = self.solver.get_wrapper("pressureNp1AtReceivers").value() + + return pressureAtReceivers.to_numpy() + + + def getFullPressureAtReceivers(self, comm): + """Return all pressures at receivers values on all ranks + Note that for a too large 2d array this may not work. 
+ + Parameters: + ----------- + comm : MPI_COMM + MPI communicators + """ + rank = comm.Get_rank() + + allPressure = comm.gather(self.getPressureAtReceivers(), root=0) + pressure = np.zeros(self.getPressureAtReceivers().shape) + + if rank == 0: + for p in allPressure: + for i in range(p.shape[1]): + if any(p[1:, i])==True: + pressure[:, i] = p[:, i] + + pressure = comm.bcast(pressure, root=0) + + return pressure + + + def resetWaveField(self, **kwargs): + """Reinitialize all pressure values on the Wavefield to zero in GEOSX""" + + self.geosx.get_wrapper("Solvers/"+self.name+"/indexSeismoTrace").value()[0] = 0 + meshName = self._getMeshName() + discretization = self._getDiscretization() + + nodeManagerPath = f"domain/MeshBodies/{meshName}/meshLevels/{discretization}/nodeManager/" + + + if self.type == "AcousticSEM": + for ts in ("nm1", "n", "np1"): + pressure = self.geosx.get_wrapper(nodeManagerPath + f"pressure_{ts}").value() + pressure.set_access_level(pygeosx.pylvarray.MODIFIABLE) + + pressure.to_numpy()[:] = 0.0 + + elif self.type == "AcousticFirstOrderSEM": + pressure_np1 = self.geosx.get_wrapper(nodeManagerPath + "pressure_np1").value() + pressure_np1.set_access_level(pygeosx.pylvarray.MODIFIABLE) + + pressure_np1.to_numpy()[:] = 0.0 + + prefix = self._getPrefixPath(**kwargs) + for component in ("x", "y", "z"): + velocity = self.geosx.get_wrapper(prefix + f"velocity_{component}").value() + velocity.set_access_level(pygeosx.pylvarray.MODIFIABLE) + + velocity.to_numpy()[:] = 0.0 + + + def resetPressureAtReceivers(self): + """Reinitialize pressure values at receivers to 0 + """ + pressure = self.solver.get_wrapper("pressureNp1AtReceivers").value() + pressure.set_access_level(pygeosx.pylvarray.MODIFIABLE) + + pressure.to_numpy()[:] = 0.0 + + + def getWaveField(self): + return self.getPressureAtReceivers()[:,:-1] + + + def getFullWaveFieldAtReceivers(self, comm): + return self.getFullPressureAtReceivers(comm)[:,:-1] diff --git 
a/pygeos-tools/src/geos/pygeos_tools/utilities/solvers/ElasticSolver.py b/pygeos-tools/src/geos/pygeos_tools/utilities/solvers/ElasticSolver.py new file mode 100644 index 0000000..34acbc7 --- /dev/null +++ b/pygeos-tools/src/geos/pygeos_tools/utilities/solvers/ElasticSolver.py @@ -0,0 +1,319 @@ +# ------------------------------------------------------------------------------------------------------------ +# SPDX-License-Identifier: LGPL-2.1-only +# +# Copyright (c) 2016-2024 Lawrence Livermore National Security LLC +# Copyright (c) 2018-2024 TotalEnergies +# Copyright (c) 2018-2024 The Board of Trustees of the Leland Stanford Junior University +# Copyright (c) 2023-2024 Chevron +# Copyright (c) 2019- GEOS/GEOSX Contributors +# Copyright (c) 2019- INRIA project-team Makutu +# All rights reserved +# +# See top level LICENSE, COPYRIGHT, CONTRIBUTORS, NOTICE, and ACKNOWLEDGEMENTS files for details. +# ------------------------------------------------------------------------------------------------------------ + +import numpy as np +import ctypes as ct + +import pygeosx + +from .WaveSolver import WaveSolver + + +class ElasticSolver(WaveSolver): + """ + ElasticSolver Object containing all methods to run ElasticSEM simulation with GEOSX + + Attributes + ----------- + dt : float + Time step for simulation + minTime : float + Min time to consider + maxTime : float + End time to consider + dtSeismo : float + Time step to save pressure for seismic trace + minTimeSim : float + Starting time of simulation + maxTimeSim : float + End Time of simulation + dtWaveField : float + Time step to save fields + sourceType : str + Type of source + sourceFreq : float + Frequency of the source + name : str + Solver name + type : str + Type of solver + Default is None + geosxArgs : GeosxArgs + Object containing GEOSX launching options + geosx : pygeosx instance + + alreadyInitialized : bool + Flag indicating if the initial conditions have been applied yet + firstGeosxInit : bool + Flag for 
initialize or reinitialize + collectionTargets : list + Output targets for geosx + hdf5Targets : list + HDF5 output targets for geosx + vtkTargets : list + VTK output targets for geosx + """ + def __init__(self, + dt=None, + minTime=0, + maxTime=None, + dtSeismo=None, + dtWaveField=None, + sourceType=None, + sourceFreq=None, + **kwargs): + """ + Parameters + ---------- + dt : float + Time step for simulation + minTime : float + Starting time of simulation + Default is 0 + maxTime : float + End Time of simulation + dtSeismo : float + Time step to save pressure for seismic trace + dtWaveField : float + Time step to save fields + sourceType : str + Type of source + Default is None + sourceFreq : float + Frequency of the source + Default is None + kwargs : keyword args + geosx_argv : list + GEOSX arguments or command line as a splitted line + """ + super().__init__(dt=dt, + minTime=minTime, + maxTime=maxTime, + dtSeismo=dtSeismo, + dtWaveField=dtWaveField, + sourceType=sourceType, + sourceFreq=sourceFreq, + **kwargs) + + def initialize(self, rank=0, xml=None): + super().initialize(rank, xml) + try: + useDAS = self.xml.getAttribute(parentElement=self._getType(), attributeTag="useDAS") + + except AttributeError: + useDAS = None + + if useDAS == "none": + try: + linearGEO = bool(self.xml.getAttribute(self._getType(), "linearDASGeometry")) + except AttributeError: + linearGEO = False + + if linearGEO is True: + self.useDAS = True + + + def __repr__(self): + string_list = [] + string_list.append("Solver type : " + type(self).__name__ + "\n") + string_list.append("dt : " + str(self.dt) + "\n") + string_list.append("maxTime : " + str(self.maxTime) + "\n") + string_list.append("dtSeismo : " + str(self.dtSeismo) + "\n") + string_list.append("Outputs : " + str(self.hdf5Targets) + "\n" + str(self.vtkTargets) + "\n") + rep = "" + for string in string_list: + rep += string + + return rep + + + def updateVelocityModel(self, vel, component, **kwargs): + """ + Update velocity value in 
GEOS + + Parameters + ---------- + vel : float/array + Value(s) for velocity field + component : str + Vs or Vp + """ + assert component.lower() in ("vs", "vp"), "Only Vs or Vp component accepted" + super().updateVelocityModel(vel, velocityName="elasticVelocity"+component.title(), **kwargs) + + + def getVelocityModel(self, component, filterGhost=False, **kwargs): + """ + Get the velocity values + + Parameters + ----------- + component : str + Vs or Vp + filterGhost : bool + Filter the ghost ranks + + Returns + ------- + velocity : numpy array + Array containing the velocity values + """ + assert component.lower() in ("vs", "vp"), "Only Vs or Vp component accepted" + + velocity = super().getVelocityModel(velocityName="elasticVelocity"+component.title(), filterGhost=filterGhost, **kwargs) + + return velocity + + + def getDensityModel(self, filterGhost=False, **kwargs): + """ + Get the density values + + Parameters + ----------- + filterGhost : bool + Filter the ghost ranks + + Returns + -------- + density : numpy array + Array containing the density values + """ + density = self.getField("elasticDensity", filterGhost=filterGhost, **kwargs) + + return density + + + def updateDensityModel(self, density, **kwargs): + """ + Update density values in GEOS + + Parameters + ----------- + density : array + New values for the density + """ + super().updateDensityModel( density=density, densityName="elasticDensity", **kwargs ) + + + def getDASSignalAtReceivers(self): + """ + Get the DAS signal values at receivers coordinates + + Returns + -------- + dassignal : numpy array + Array containing the DAS signal values at all time step at all receivers coordinates + """ + if self.type != "ElasticSEM": + raise TypeError(f"DAS signal not implemented for solver of type {self.type}.") + else: + dassignal = self.solver.get_wrapper(f"dasSignalNp1AtReceivers").value().to_numpy() + + return dassignal + + + def getDisplacementAtReceivers(self, component="X"): + """ + Get the displacement 
values at receivers coordinates for a given direction + + Returns + -------- + displacement : numpy array + Array containing the displacements values at all time step at all receivers coordinates + """ + assert component.upper() in ("X", "Y", "Z") + if self.type == "ElasticFirstOrderSEM": + displacement = self.solver.get_wrapper(f"displacement{component.lower()}Np1AtReceivers").value().to_numpy() + elif self.type == "ElasticSEM": + displacement = self.solver.get_wrapper(f"displacement{component.upper()}Np1AtReceivers").value().to_numpy() + + return displacement + + + def getAllDisplacementAtReceivers(self): + """ + Get the displacement for the x, y and z directions at all time step and all receivers coordinates + + Returns + -------- + displacementX : numpy array + Component X of the displacement + displacementY : numpy array + Component Y of the displacement + displacementZ : numpy array + Component Z of the displacement + """ + displacementX = self.getDisplacementAtReceivers("X") + displacementY = self.getDisplacementAtReceivers("Y") + displacementZ = self.getDisplacementAtReceivers("Z") + + return displacementX, displacementY, displacementZ + + + def resetWaveField(self, **kwargs): + """Reinitialize all displacement values on the Wavefield to zero in GEOSX""" + + self.geosx.get_wrapper("Solvers/"+self.name+"/indexSeismoTrace").value()[0] = 0 + meshName = self._getMeshName() + discretization = self._getDiscretization() + + nodeManagerPath = f"domain/MeshBodies/{meshName}/meshLevels/{discretization}/nodeManager/" + + if self.type == "ElasticSEM": + for component in ("x", "y", "z"): + for ts in ("nm1", "n", "np1"): + displacement = self.geosx.get_wrapper(nodeManagerPath+f"displacement{component}_{ts}").value() + displacement.set_access_level(pygeosx.pylvarray.MODIFIABLE) + + displacement.to_numpy()[:] = 0.0 + + elif self.type == "ElasticFirstOrderSEM": + component = ("x", "y", "z") + for c in component: + displacement_np1 = 
self.geosx.get_wrapper(nodeManagerPath+f"displacement{c}_np1").value()
+                displacement_np1.set_access_level(pygeosx.pylvarray.MODIFIABLE)
+
+                displacement_np1.to_numpy()[:] = 0.0
+
+        prefix = self._getPrefixPath(**kwargs)
+        for i, c in enumerate(component):
+            for j in range(i, len(component)):
+                cc = c + component[j]
+
+                sigma = self.solver.get_wrapper(prefix+f"stresstensor{cc}").value()
+                sigma.set_access_level(pygeosx.pylvarray.MODIFIABLE)
+
+                sigma.to_numpy()[:] = 0.0
+
+
+    def resetDisplacementAtReceivers(self):
+        """Reinitialize displacement values at receivers to 0
+        """
+        for component in ("X", "Y", "Z"):
+            displacement = self.solver.get_wrapper(f"displacement{component}Np1AtReceivers").value()
+            displacement.set_access_level(pygeosx.pylvarray.MODIFIABLE)
+
+            displacement.to_numpy()[:] = 0.0
+
+
+    def getWaveField( self ):
+        if getattr(self, "useDAS", False):
+            return self.getDASSignalAtReceivers()
+        else:
+            return self.getAllDisplacementAtReceivers()
+
+
+    def getFullWaveFieldAtReceivers(self, comm):
+        print( "This method is not implemented yet" )
diff --git a/pygeos-tools/src/geos/pygeos_tools/utilities/solvers/GeomechanicsSolver.py b/pygeos-tools/src/geos/pygeos_tools/utilities/solvers/GeomechanicsSolver.py
new file mode 100644
index 0000000..ebf16fe
--- /dev/null
+++ b/pygeos-tools/src/geos/pygeos_tools/utilities/solvers/GeomechanicsSolver.py
@@ -0,0 +1,104 @@
+# ------------------------------------------------------------------------------------------------------------
+# SPDX-License-Identifier: LGPL-2.1-only
+#
+# Copyright (c) 2016-2024 Lawrence Livermore National Security LLC
+# Copyright (c) 2018-2024 TotalEnergies
+# Copyright (c) 2018-2024 The Board of Trustees of the Leland Stanford Junior University
+# Copyright (c) 2023-2024 Chevron
+# Copyright (c) 2019- GEOS/GEOSX Contributors
+# Copyright (c) 2019- INRIA project-team Makutu
+# All rights reserved
+#
+# See top level LICENSE, COPYRIGHT, CONTRIBUTORS, NOTICE, and ACKNOWLEDGEMENTS files for details.
+# ------------------------------------------------------------------------------------------------------------ + +from .Solver import Solver + + +class GeomechanicsSolver(Solver): + """ + Geomechanics solver object containing methods to run geomechanics simulations in GEOS + + Attributes + ----------- + xml : XML + XML object containing parameters for GEOSX initialization + initialDt : float + Time step for the simulation + maxTime : float + End time of simulation + name : str + Solver name + type : str + Type of solver + Default is None + geosxArgs : GeosxArgs + Object containing GEOSX launching options + geosx : pygeosx.Group + pygeosx initialize instance + alreadyInitialized : bool + Flag indicating if the initial conditions have been applied yet + firstGeosxInit : bool + Flag for initialize or reinitialize + collectionTargets : list + Output targets for geosx + hdf5Targets : list + HDF5 output targets for geosx + vtkTargets : list + VTK output targets for geosx + """ + def __init__(self, + dt=None, + maxTime=None, + **kwargs): + """ + Parameters + ----------- + initialDt : float + Initial time step \ + Default is None + maxTime : float + End time of simulation \ + Default is None + """ + super().__init__(**kwargs) + + self.dt = dt + self.maxTime = maxTime + + + def _setTimeVariables(self): + """Set the time variables attributes""" + # Set up variables from xml + events = self.xml.events + + if self.maxTime is None: + self.maxTime = float(events["maxTime"]) + else: + self.updateTimeVariable("maxTime") + + if self.dt is None: + for event in events["PeriodicEvent"]: + if isinstance(event, dict): + poroName = "poromechanicsSolver" + if event["target"] == "/Solvers/" + poroName: + self.dt = float(event['forceDt']) + else: + if event == "target" and events["PeriodicEvent"]["target"] == "/Solvers/" + self.name: + self.dt = float(events["PeriodicEvent"]["forceDt"]) + else: + self.updateTimeVariable("dt") + + + def updateTimeVariable(self, variable): + """Overwrite a 
time variable in GEOS"""
+        if variable == "maxTime":
+            assert hasattr(self, "maxTime")
+            self.geosx.get_wrapper("Events/maxTime").value()[0] = self.maxTime
+        elif variable == "dt":
+            assert hasattr(self, "dt")
+            self.geosx.get_wrapper("Events/solverApplications/forceDt").value()[0] = self.dt
+
+
+    def initialize( self, rank=0, xml=None ):
+        super().initialize( rank, xml, stype="SinglePhasePoromechanics" )
diff --git a/pygeos-tools/src/geos/pygeos_tools/utilities/solvers/ReservoirSolver.py b/pygeos-tools/src/geos/pygeos_tools/utilities/solvers/ReservoirSolver.py
new file mode 100644
index 0000000..48f7a43
--- /dev/null
+++ b/pygeos-tools/src/geos/pygeos_tools/utilities/solvers/ReservoirSolver.py
@@ -0,0 +1,216 @@
+# ------------------------------------------------------------------------------------------------------------
+# SPDX-License-Identifier: LGPL-2.1-only
+#
+# Copyright (c) 2016-2024 Lawrence Livermore National Security LLC
+# Copyright (c) 2018-2024 TotalEnergies
+# Copyright (c) 2018-2024 The Board of Trustees of the Leland Stanford Junior University
+# Copyright (c) 2023-2024 Chevron
+# Copyright (c) 2019- GEOS/GEOSX Contributors
+# Copyright (c) 2019- INRIA project-team Makutu
+# All rights reserved
+#
+# See top level LICENSE, COPYRIGHT, CONTRIBUTORS, NOTICE, and ACKNOWLEDGEMENTS files for details.
+# ------------------------------------------------------------------------------------------------------------ + +import numpy as np + +from .Solver import Solver + + +class ReservoirSolver(Solver): + """ + Reservoir solver object containing methods to run reservoir simulations in GEOS + + Attributes + ----------- + xml : XML + XML object containing parameters for GEOSX initialization + initialDt : float + Time step for the simulation + maxTime : float + End time of simulation + name : str + Solver name + type : str + Type of solver + Default is None + geosxArgs : GeosxArgs + Object containing GEOSX launching options + geosx : pygeosx.Group + pygeosx initialize instance + alreadyInitialized : bool + Flag indicating if the initial conditions have been applied yet + firstGeosxInit : bool + Flag for initialize or reinitialize + collectionTargets : list + Output targets for geosx + hdf5Targets : list + HDF5 output targets for geosx + vtkTargets : list + VTK output targets for geosx + """ + def __init__(self, + initialDt=None, + maxTime=None, + **kwargs): + """ + Parameters + ----------- + initialDt : float + Initial time step \ + Default is None + maxTime : float + End time of simulation \ + Default is None + """ + super().__init__(**kwargs) + + self.initialDt = initialDt + self.maxTime = maxTime + + self.isCoupled = kwargs.get("coupled", False) + self.dtSeismic4D = None + + + def _setTimeVariables(self): + """Set the time variables attributes""" + # Set up variables from xml + events = self.xml.events + outputs = self.xml.outputs + + if self.isCoupled: + if self.dtSeismic4D is None: + for event in events["PeriodicEvent"]: + if event["name"] == "seismic4D": + self.dtSeismic4D = float(event["timeFrequency"]) + else: + self.updateTimeVariable("dtSeismic4D") + + if self.maxTime is None: + self.maxTime = float(events["maxTime"]) + else: + self.updateTimeVariable("maxTime") + + fvm = self.xml.solvers[self.type] + if self.initialDt is None: + for k, v in fvm.items(): + if k 
== "initialDt": + self.initialDt = np.array(v, dtype=float) + else: + self.updateTimeVariable("initialDt") + + + def updateTimeVariable(self, variable): + """Overwrite a time variable in GEOS""" + if variable == "maxTime": + assert hasattr(self, "maxTime") + self.geosx.get_wrapper("Events/maxTime").value()[0] = self.maxTime + elif variable == "initialDt": + assert hasattr(self, "initialDt") + self.geosx.get_wrapper(f"/Solvers/{self.name}/initialDt").value()[0] = self.initialDt + elif variable == "dtSeismic4D": + assert hasattr(self, "dtSeismic4D") + self.geosx.get_wrapper("Events/seismic4D/timeFrequency").value()[0] = self.dtSeismic4D + + + def execute(self, time): + """ + Do one solver iteration + + Parameters + ---------- + time : float + Current time of simulation + dt : float + Timestep + """ + self.solver.execute(time, self.initialDt) + + + def getTimeStep(self): + """ + Get the value of `initialDt` variable from GEOS + + Returns + -------- + float + initialDt value + """ + return self.solver.get_wrapper("initialDt").value()[0] + + + def updateTimeStep(self): + """Update the attribute value with the one from GEOS""" + self.initialDt = self.getTimeStep() + + + def getPressure(self, **kwargs): + """ + Get the local pressure + + Returns + -------- + pressure : numpy array + Local pressure + """ + pressure = self.getField("pressure", **kwargs) + + return pressure + + + def getDeltaPressure(self, **kwargs): + """ + Get the local delta pressure + + Returns + -------- + deltaPressure : numpy array + Local delta pressure + """ + deltaPressure = self.getField("deltaPressure", **kwargs) + + return deltaPressure + + + def getPhaseVolumeFractionGas(self, **kwargs): + """ + Get the local gas phase volume fraction + + Returns + -------- + phaseVolumeFractionGas : numpy array + Local gas phase volume fraction + """ + phaseVolumeFraction = self.getField("phaseVolumeFraction", **kwargs) + phaseVolumeFractionGas = np.ascontiguousarray(phaseVolumeFraction[:,0]) + + return 
phaseVolumeFractionGas + + + def getPhaseVolumeFractionWater(self, **kwargs): + """ + Get the local water phase volume fraction + + Returns + -------- + phaseVolumeFractionWater : numpy array + Local water phase volume fraction + """ + phaseVolumeFraction = self.getField("phaseVolumeFraction", **kwargs) + phaseVolumeFractionWater = np.ascontiguousarray(phaseVolumeFraction[:,1]) + + return phaseVolumeFractionWater + + + def getRockPorosity(self, **kwargs): + """ + Get the local rock porosity + + Returns + -------- + rockPorosity : numpy array + Local rock porosity + """ + rockPorosity = self.getField("rockPorosity_referencePorosity", **kwargs) + + return rockPorosity diff --git a/pygeos-tools/src/geos/pygeos_tools/utilities/solvers/Solver.py b/pygeos-tools/src/geos/pygeos_tools/utilities/solvers/Solver.py new file mode 100644 index 0000000..a2afd4d --- /dev/null +++ b/pygeos-tools/src/geos/pygeos_tools/utilities/solvers/Solver.py @@ -0,0 +1,697 @@ +# ------------------------------------------------------------------------------------------------------------ +# SPDX-License-Identifier: LGPL-2.1-only +# +# Copyright (c) 2016-2024 Lawrence Livermore National Security LLC +# Copyright (c) 2018-2024 TotalEnergies +# Copyright (c) 2018-2024 The Board of Trustees of the Leland Stanford Junior University +# Copyright (c) 2023-2024 Chevron +# Copyright (c) 2019- GEOS/GEOSX Contributors +# Copyright (c) 2019- INRIA project-team Makutu +# All rights reserved +# +# See top level LICENSE, COPYRIGHT, CONTRIBUTORS, NOTICE, and ACKNOWLEDGEMENTS files for details. 
+# ------------------------------------------------------------------------------------------------------------ + +import os +import sys +import numpy as np + +import pygeosx +from ..input.Xml import XML +from ..input.GeosxArgs import GeosxArgs + +from mpi4py import MPI + +class Solver: + """ + Solver class containing the main methods of a GEOS solver + """ + + def __init__(self, **kwargs): + self.alreadyInitialized = False + self.type = None + + argv = kwargs.get("geosx_argv", sys.argv) + self.geosxArgs = GeosxArgs(argv) + self.xml = None + + + def initialize(self, rank=0, xml=None, **kwargs): + """Initialization or reinitialization of GEOSX + + Parameters + ---------- + rank : int + Process rank + xml : XML + XML object containing parameters for GEOSX initialization. + Only required if not set in the __init__ OR if different from it + """ + if xml: + self.updateXml(xml) + else: + if self.xml is None: + try: + self.xml = XML(self.geosxArgs.options["xml"]) + except: + raise ValueError("You need to provide a xml input file") + + + if not self.alreadyInitialized: + # if GEOS is UNINITIALIZED + if self.getGEOSState() == 0: + self.geosx = pygeosx.initialize(rank, self.geosxArgs.getCommandLine()) + self.alreadyInitialized = True + + # elif GEOS is INITIALIZED OR READY TO RUN + elif self.getGEOSState() in (1, 2): + self.geosx = pygeosx.reinit(self.geosxArgs.getCommandLine()) + self.alreadyInitialized = True + + # else if COMPLETED state + else: + print(f"The current state of GEOS does not allow for initialization\nCurrent state: {self.getGEOSState()}") + + self.name = self._getName() + stype = kwargs.get( "stype", None ) + self._setSolverGroup( stype ) + + # Check all time variables are set/ set them from xml + self._setTimeVariables() + + # Set the outputs collections/targets + self.__setOutputs() + + + def _setSolverGroup( self, stype=None ): + if stype is None: + if self.name is not None: + if isinstance( self.name, str ): + self.solver = self.geosx.get_group( 
"/Solvers/" + self.name ) + else: + name = self._getName( stype=stype ) + self.solver = self.geosx.get_group( "/Solvers/" + name ) + + + def getGEOSState(self): + """ + Return the current GEOS state + + Returns + --------- + int + GEOS state + 0 : UNINITIALIZED + 1 : INITIALIZED + 2 : READY TO RUN + 3 : COMPLETED + """ + return pygeosx.getState() + + + def _setTimeVariables(self): + """Initialize the time variables. Specific to each solver""" + pass + + + def __setOutputs(self): + if hasattr(self.xml, "outputs"): + outputs = self.xml.outputs + self.__setOutputTargets(outputs) + + self.collections = [] + self.hdf5Outputs = [] + self.vtkOutputs = [] + + if not outputs == None: + # Set the collections + for target in self.collectionTargets: + self.collections.append(self.geosx.get_group(target)) + + for target in self.hdf5Targets: + self.hdf5Outputs.append(self.geosx.get_group(target)) + + for target in self.vtkTargets: + self.vtkOutputs.append(self.geosx.get_group(target)) + + + def __setOutputTargets(self, outputs): + """ + Set the list of outputs targets + + Parameters + ----------- + outputs : dict + Dictionnary extracted from the xml input file + """ + self.collectionTargets = [] + self.hdf5Targets = [] + self.vtkTargets = [] + + if outputs: + if isinstance(list(outputs.values())[0], list): + if "TimeHistory" in outputs.keys(): + for hdf5 in outputs["TimeHistory"]: + self.collectionTargets.append(hdf5['sources'].strip("{} ")) + self.hdf5Targets.append("Outputs/"+hdf5['name']) + + if "VTK" in outputs.keys(): + for vtk in outputs["VTK"]: + self.vtkTargets.append("Outputs/"+vtk['name']) + + else: + if "TimeHistory" in list(outputs.keys()): + hdf5 = outputs["TimeHistory"] + self.collectionTargets.append(hdf5['sources'].strip("{} ")) + self.hdf5Targets.append("Outputs/"+hdf5['name']) + + if "VTK" in list(outputs.keys()): + vtk = outputs["VTK"] + self.vtkTargets.append("Outputs/"+vtk['name']) + + + def _getType(self): + """ + Set the type of solver given in the xml 
'Solvers' block + + Raises + ------- + ValueError : if no solver is provided in the XML + """ + if self.xml is not None: + typesOfSolvers = self.xml.getSolverType() + + if len( typesOfSolvers ) == 1: + self.type = typesOfSolvers[0] + elif len( typesOfSolvers ) > 1: + self.type = typesOfSolvers + else: + raise ValueError("You must provide a Solver in the XML input file") + + + def _getName(self, stype=None): + """ + Get the solver 'name' attribute from the xml + + Returns + ------- + str or list of str + Solver name from the xml \ + List of solvers name if several solvers in xml + """ + if self.xml is not None: + if stype is None: + if self.type is None: + self._getType() + stype = self.type + + # Check only one type of solver + if isinstance(stype, str): + return self.xml.solvers[stype]["name"] + + elif isinstance( stype, list ): + return [ self.xml.solvers[solvertype]["name"] for solvertype in stype ] + + + def _getMeshName(self): + """ + Get the mesh 'name' attribute from the xml + + Returns + ------- + str + Mesh name from the xml + """ + if self.xml is not None: + meshes = [m for m in self.xml.mesh] + if len(meshes) <= 1: + return self.xml.mesh[meshes[0]]["name"] + + + def _getDiscretization(self): + """Get discretization from the XML + + Returns + -------- + discretization : str + Name of the discretization method + """ + if self.xml is not None: + if self.type is None: + self._getType() + + if isinstance(self.type, str): + return self.xml.solvers[self.type]["discretization"] + elif isinstance(self.type, list): + return [self.xml.solvers[solverType]["discretization"] for solverType in self.type] + + + def _getTargetRegion(self): + """ + Get the target region name from the xml + + Returns + ------- + str + target region from the xml + """ + if self.xml is not None: + if self.type is None: + self._getType() + + if isinstance(self.type, str): + targetRegionRaw = self.xml.solvers[self.type]["targetRegions"] + targetRegion = targetRegionRaw.strip("{ }").split(",") 
+ + if len(targetRegion) <= 1: + return targetRegion[0] + + + def _getCellBlock(self): + """ + Get the cell blocks names from the xml + + Returns + ------- + str + cell blocks from the xml + """ + if self.xml is not None: + cellElementRegion = self.xml.elementRegions["CellElementRegion"][0] + cellBlocks = cellElementRegion["cellBlocks"].strip("{ }").split(",") + + if len(cellBlocks) <= 1: + return cellBlocks[0] + + + def reinitSolver(self): + """Reinitialize Solver""" + self.solver.reinit() + + + def applyInitialConditions(self): + """Apply the initial conditions after GEOS (re)initialization""" + if self.getGEOSState() == 1: + pygeosx.apply_initial_conditions() + + + + def finalize(self): + """Terminate GEOSX""" + pygeosx._finalize() + + + def updateXml(self, xml): + """ + Update XML + + Parameters + ----------- + xml : XML + XML object corresponding to GEOSX input + """ + self.xml = xml + + if self.geosxArgs.updateArg("xml", xml.filename): + self.alreadyInitialized = False + + + def updateHdf5OutputsName(self, directory, filenames, reinit=False): + """ + Overwrite GEOSX hdf5 Outputs paths that have been read in the XML. + + Parameters + ---------- + list_of_output : list of str + List of requested output paths + reinit : bool + Perform reinitialization or not. Must be set to True if called after applyInitialConditions() + """ + + if not len(self.hdf5Outputs): + raise ValueError("No HDF5 Outputs specified in XML.") + else: + for i in range(len(filenames)): + os.makedirs(directory, exist_ok=True) + + self.hdf5Outputs[i].setOutputName(os.path.join(directory, filenames[i])) + if reinit: + self.hdf5Outputs[i].reinit() + + + def updateVtkOutputsName(self, directory): + """ + Overwrite GEOSX vtk Outputs paths that have been read in the XML. + + Parameters + ---------- + list_of_output : list of str + List of vtk output paths + reinit : bool + Perform reinitialization or not. 
Must be set to True if called after applyInitialConditions() + """ + if not len(self.vtkOutputs): + pass + else: + self.vtkOutputs[0].setOutputDir(directory) + + + def execute(self, time): + """ + Do one solver iteration + + Parameters + ---------- + time : float + Current time of simulation + """ + + self.solver.execute(time, self.dt) + + + def cleanup(self, time): + """ + Finalize simulation. Also triggers write of leftover seismogram data + + Parameters + ---------- + time : float + Current time of simulation + """ + self.solver.cleanup(time) + + + def outputVtk(self, time): + """ + Trigger the VTK output + + Parameters + ---------- + time : float + Current time of simulation + """ + for vtkOutput in self.vtkOutputs: + vtkOutput.output(time, self.dt) + + + def _getPrefixPath(self, targetRegion=None, meshName=None, cellBlock=None): + """ + Return the prefix path to get wrappers or fields in GEOS + + Parameters + ----------- + targetRegion : str, optional + Name of the target Region \ + Default value is taken from the xml + meshName : str, optional + Name of the mesh \ + Default value is taken from the xml + cellBlock : str, optional + Name of the cell blocks \ + Default value is taken from the xml + + Returns + ------- + prefix : str + Prefix path + + Raises + ------- + AssertionError : if the variables 'targetRegion', 'meshName' \ + or `cellBlock` have multiple or no values + """ + if targetRegion is None: + targetRegion = self._getTargetRegion() + if meshName is None: + meshName = self._getMeshName() + if cellBlock is None: + cellBlock = self._getCellBlock() + + discretization=self._getDiscretization() + if discretization is None: + discretization="Level0" + assert None not in (targetRegion, meshName, cellBlock, discretization), "No values or multiple values found for `targetRegion`, `meshName` and `cellBlock` arguments" + + prefix = os.path.join("/domain/MeshBodies", meshName, "meshLevels", discretization, "ElementRegions/elementRegionsGroup", targetRegion, 
"elementSubRegions", cellBlock, "") + return prefix + + + def getField(self, fieldName, **kwargs): + """ + Get the requested field as numpy array + + Parameters + ----------- + fieldName : str + Name of the field in GEOSX + + Returns + ------- + field : np.array + Field requested + """ + prefix = self._getPrefixPath(**kwargs) + field = self.solver.get_wrapper(prefix+fieldName).value() + + return field.to_numpy() + + + def getElementCenter(self, filterGhost=False, **kwargs): + """ + Get element center position as numpy array + + Returns + ------- + elementCenter : array-like + Element center coordinates + """ + elementCenter = self.getField("elementCenter", **kwargs) + + if filterGhost: + elementCenter = self.filterGhostRank(elementCenter, **kwargs) + + return elementCenter + + + def getElementCenterZ(self, **kwargs): + """ + Get the z coordinate of the element center + + Returns + ------- + elementCenterZ : array-like + Element center z coordinates + """ + elementCenter = self.getField("elementCenter", **kwargs) + elementCenterZ = np.ascontiguousarray(elementCenter[:,2]) + + return elementCenterZ + + + def getGhostRank(self, **kwargs): + """ + Get the local ghost ranks + + Returns + ------- + ghostRank : array-like + Local ghost ranks + """ + ghostRank = self.getField("ghostRank", **kwargs) + + return ghostRank + + + def getLocalToGlobalMap(self, filterGhost=False, **kwargs): + """ + Get the local rank element id list + + Returns + ------- + Numpy Array : Array containing the element id list for the local rank + """ + localToGlobalMap = self.getField("localToGlobalMap", **kwargs) + + if filterGhost: + localToGlobalMap = self.filterGhostRank(localToGlobalMap, **kwargs) + + return localToGlobalMap + + + def gatherField(self, field, comm, root=0, **kwargs): + """ + Gather a full GEOS field from all local ranks + + Parameters + ----------- + field : numpy array + Local field + comm : MPI.COMM_WORLD + MPI communicator + root : int + MPI rank used for the gather \ + 
Default is rank 0 + """ + assert isinstance(root, int) + assert root < comm.Get_size() + + rank = comm.Get_rank() + + ghostRank = self.getGhostRank(**kwargs) + localToGlobalMap = self.getLocalToGlobalMap(**kwargs) + + # Prepare buffer + nlocalElements = ghostRank.shape[0] + nmax = np.zeros( 1 ) + nmax[0] = np.max( localToGlobalMap ) # max global number of elements + + comm.Barrier() + comm.Allreduce( MPI.IN_PLACE, nmax, op=MPI.MAX ) + ntot = round( nmax[0] + 1 ) + + if rank != root: + fullField = None + nrcv = nlocalElements + comm.send(nrcv, dest=root, tag=1) + comm.Send(field, dest=root, tag=2) + comm.Send(ghostRank, dest=root, tag=3) + comm.Send(localToGlobalMap, dest=root, tag=4) + + else: + fullField = np.full( (ntot), fill_value=np.nan ) + jj = np.where( ghostRank < 0 )[0] + fullField[localToGlobalMap[jj]] = field[jj] + + for r in range( comm.Get_size() ): + if r != root: + nrcv = comm.recv(source=r, tag=1) + + fieldRcv = np.zeros(nrcv, dtype=np.float64) + ghostRankRcv = np.zeros(nrcv, dtype=np.int32) + localToGlobalMapRcv = np.zeros(nrcv, dtype=np.int64) + + comm.Recv(fieldRcv, source=r, tag=2) + comm.Recv(ghostRankRcv, source=r, tag=3) + comm.Recv(localToGlobalMapRcv, source=r, tag=4) + + jj = np.where ( ghostRankRcv < 0 )[0] + + fullField[ localToGlobalMapRcv[jj]] = fieldRcv[jj] + comm.Barrier() + return fullField, ntot + + + def bcastField( self, fullField, comm, root=0, **kwargs ): + """ + Broadcast a field to local ranks with GEOS local to global map + + Parameters + ----------- + fullField : numpy array + Full field + comm : MPI.COMM_WORLD + MPI communicator + root : int + MPI rank used for the gather \ + Default is rank 0 + + Returns + -------- + field : numpy array + Local field + """ + rank = comm.Get_rank() + size = comm.Get_size() + + ghostRank = self.getGhostRank( **kwargs ) + localToGlobalMap = self.getLocalToGlobalMap( **kwargs ) + nlocalElements = ghostRank.shape[0] + + field = np.zeros( nlocalElements ) + + if rank == root: + jj = 
np.where(ghostRank < 0)[0] + field[jj] = fullField[localToGlobalMap[jj]] + + for r in range( size ): + if r != root: + nrcv = comm.recv( source=r, tag=1 ) + fieldRcv = np.zeros( nrcv, dtype=np.float64 ) + ghostRankRcv = np.zeros( nrcv, dtype=np.int32 ) + localToGlobalMapRcv = np.zeros( nrcv, dtype=np.int64 ) + + comm.Recv( ghostRankRcv, r, 3 ) + comm.Recv( localToGlobalMapRcv, r, 4 ) + + jj = np.where(ghostRankRcv < 0)[0] + fieldRcv[jj] = fullField[localToGlobalMapRcv[jj]] + + comm.Send( fieldRcv, dest=r, tag=100+r ) + + else: + nrcv = nlocalElements + comm.send( nrcv, root, 1 ) + comm.Send( ghostRank, root, 3 ) + comm.Send( localToGlobalMap, root, 4 ) + + comm.Recv( field, source=root, tag=100+rank ) + + return field + + + def filterGhostRank(self, field, **kwargs): + """ + Filter the ghost rank from a GEOS field + + Parameters + ----------- + field : numpy array + Field to filter + + Returns + ------- + field : numpy array + Filtered field + """ + ghostRank = self.getGhostRank(**kwargs) + ind = np.where(ghostRank<0)[0] + + return field[ind] + + + def getWrapper(self, path): + """ + Get the GEOS wrapper + + Parameters + ----------- + path : str + GEOS path + + Returns + -------- + Requested wrapper + """ + if hasattr(self, "solver"): + wrapper = self.solver.get_wrapper(path) + + return wrapper + + + def getGroup(self, path): + """ + Get the GEOS group + + Parameters + ----------- + path : str + GEOS path + + Returns + -------- + Group of the path requested + """ + if hasattr(self, "solver"): + group = self.solver.get_group(path) + + return group diff --git a/pygeos-tools/src/geos/pygeos_tools/utilities/solvers/WaveSolver.py b/pygeos-tools/src/geos/pygeos_tools/utilities/solvers/WaveSolver.py new file mode 100644 index 0000000..34b1d85 --- /dev/null +++ b/pygeos-tools/src/geos/pygeos_tools/utilities/solvers/WaveSolver.py @@ -0,0 +1,520 @@ +# ------------------------------------------------------------------------------------------------------------ +# 
SPDX-License-Identifier: LGPL-2.1-only +# +# Copyright (c) 2016-2024 Lawrence Livermore National Security LLC +# Copyright (c) 2018-2024 TotalEnergies +# Copyright (c) 2018-2024 The Board of Trustees of the Leland Stanford Junior University +# Copyright (c) 2023-2024 Chevron +# Copyright (c) 2019- GEOS/GEOSX Contributors +# Copyright (c) 2019- INRIA project-team Makutu +# All rights reserved +# +# See top level LICENSE, COPYRIGHT, CONTRIBUTORS, NOTICE, and ACKNOWLEDGEMENTS files for details. +# ------------------------------------------------------------------------------------------------------------ + +from scipy.fftpack import fftfreq, ifft, fft +import numpy as np +import pygeosx + +from .Solver import Solver + +class WaveSolver(Solver): + """ + WaveSolver Object containing methods useful for simulation using wave solvers in GEOS + + Attributes + ----------- + dt : float + Time step for simulation + minTime : float + Min time to consider + maxTime : float + End time to consider + dtSeismo : float + Time step to save pressure for seismic trace + minTimeSim : float + Starting time of simulation + maxTimeSim : float + End Time of simulation + dtWaveField : float + Time step to save fields + sourceType : str + Type of source + sourceFreq : float + Frequency of the source + name : str + Solver name + type : str + Type of solver + Default is None + geosxArgs : GeosxArgs + Object containing GEOSX launching options + geosx : pygeosx instance + + alreadyInitialized : bool + Flag indicating if the initial conditions have been applied yet + firstGeosxInit : bool + Flag for initialize or reinitialize + collectionTargets : list + Output targets for geosx + hdf5Targets : list + HDF5 output targets for geosx + vtkTargets : list + VTK output targets for geosx + """ + def __init__(self, + dt=None, + minTime=0, + maxTime=None, + dtSeismo=None, + dtWaveField=None, + sourceType=None, + sourceFreq=None, + **kwargs): + """ + Parameters + ---------- + dt : float + Time step for 
simulation + minTime : float + Starting time of simulation + Default is 0 + maxTime : float + End Time of simulation + dtSeismo : float + Time step to save pressure for seismic trace + dtWaveField : float + Time step to save fields + sourceType : str + Type of source + Default is None + sourceFreq : float + Frequency of the source + Default is None + kwargs : keyword args + geosx_argv : list + GEOSX arguments or command line as a splitted line + """ + super().__init__(**kwargs) + + self.name = None + + self.dt = dt + self.minTime = minTime + self.maxTime = maxTime + + self.minTimeSim = minTime + self.maxTimeSim = maxTime + + self.dtSeismo = dtSeismo + + self.sourceType = sourceType + self.sourceFreq = sourceFreq + + self.dtWaveField = dtWaveField + + + def __repr__(self): + string_list = [] + string_list.append("Solver type : " + type(self).__name__ + "\n") + string_list.append("dt : " + str(self.dt) + "\n") + string_list.append("maxTime : " + str(self.maxTime) + "\n") + string_list.append("dtSeismo : " + str(self.dtSeismo) + "\n") + string_list.append("Outputs : " + str(self.hdf5Targets) + "\n" + str(self.vtkTargets) + "\n") + rep = "" + for string in string_list: + rep += string + + return rep + + + def _setTimeVariables(self): + """Initialize the time variables""" + if hasattr(self.xml, "events"): + events = self.xml.events + if self.dt is None: + for event in events["PeriodicEvent"]: + if isinstance(event, dict): + if event["target"] == "/Solvers/" + self.name: + self.dt = float(event['forceDt']) + else: + if event == "target" and events["PeriodicEvent"]["target"] == "/Solvers/" + self.name: + self.dt = float(events["PeriodicEvent"]["forceDt"]) + else: + self.updateTimeVariable("dt") + + if self.maxTime is None: + self.maxTime = float(events["maxTime"]) + self.maxTimeSim = self.maxTime + else: + self.updateTimeVariable("maxTime") + + if self.dtSeismo is None: + if self.type is None: + self._getType() + + solverdict = self.xml.solvers[self.type] + for k, v in 
solverdict.items(): + if k == "dtSeismoTrace": + self.dtSeismo = float(v) + else: + self.updateTimeVariable("dtSeismo") + + assert None not in (self.dt, self.dtSeismo, self.maxTime) + + + def _setSourceProperties(self): + """Set the frequency and type of source""" + if self.sourceFreq is None: + if self.type is None: + self._getType() + solverdict = self.xml.solvers[self.type] + for k, v in solverdict.items(): + if k == "timeSourceFrequency": + self.sourceFreq = v + + if self.sourceType is None: + if hasattr(self.xml, "events"): + events = self.xml.events + try: + for event in events["PeriodicEvent"]: + if isinstance(event, dict): + if event["target"] == "/Solvers/" + self.name: + self.sourceType = "ricker" + event['rickerOrder'] + else: + if event == "target" and events["PeriodicEvent"]["target"] == "/Solvers/" + self.name: + self.sourceType = "ricker" + events["PeriodicEvent"]["rickerOrder"] + except: + self.sourceType = "ricker2" + + + def initialize(self, rank=0, xml=None): + """ + Initialization or reinitialization of GEOSX + + Parameters + ---------- + rank : int + Process rank + xml : XML + XML object containing parameters for GEOSX initialization. 
+ Only required if not set in the __init__ OR if different from it + """ + super().initialize(rank, xml) + self._setSourceProperties() + + + def updateTimeStep(self, dt): + """ + Update the solver time step + + Parameters + ---------- + dt : double + Time step + """ + self.dt = dt + + + def updateTimeVariable(self, timeVariable): + """ + Overwrite a GEOS time variable + + Parameters + ---------- + timeVariable : str + Name of the time variable to update + """ + if timeVariable == "maxTime": + assert hasattr(self, "maxTime") + self.geosx.get_wrapper("Events/maxTime").value()[0] = self.maxTime + + elif timeVariable == "minTime": + assert hasattr(self, "minTime") + self.geosx.get_wrapper("Events/minTime").value()[0] = self.minTime + + elif timeVariable == "dt": + assert hasattr(self, "dt") + self.geosx.get_wrapper("Events/solverApplications/forceDt").value()[0] = self.dt + + elif timeVariable == "dtSeismo": + assert hasattr(self, "dtSeismo") + self.geosx.get_wrapper("/Solvers/"+self.name+"/dtSeismoTrace").value()[0] = self.dtSeismo + + + def updateSourceFrequency(self, freq): + """ + Overwrite GEOSX source frequency + + Parameters + ---------- + freq : float + Frequency of the source in Hz + """ + self.geosx.get_wrapper("/Solvers/"+self.name+"/timeSourceFrequency").value()[0] = freq + self.sourceFreq = freq + + + + def updateSourceAndReceivers(self, sourcesCoords=[], receiversCoords=[]): + """ + Update sources and receivers positions in GEOS + + Parameters + ---------- + sourcesCoords : list + List of coordinates for the sources + receiversCoords : list + List of coordinates for the receivers + """ + src_pos_geosx = self.solver.get_wrapper( "sourceCoordinates" ).value() + src_pos_geosx.set_access_level( pygeosx.pylvarray.RESIZEABLE ) + + rcv_pos_geosx = self.solver.get_wrapper( "receiverCoordinates" ).value() + rcv_pos_geosx.set_access_level( pygeosx.pylvarray.RESIZEABLE ) + + src_pos_geosx.resize_all(( len( sourcesCoords ), 3 )) + if len( sourcesCoords ) == 0: + 
src_pos_geosx.to_numpy()[:] = np.zeros(( 0, 3 )) + else: + src_pos_geosx.to_numpy()[:] = sourcesCoords[:] + + + rcv_pos_geosx.resize_all(( len( receiversCoords ), 3 )) + if len( receiversCoords ) == 0: + rcv_pos_geosx.to_numpy()[:] = np.zeros(( 0, 3 )) + else: + rcv_pos_geosx.to_numpy()[:] = receiversCoords[:] + + self.solver.reinit() + + + def evaluateSource(self): + """ + Evaluate source and update on GEOS + Only ricker order {0 - 4} accepted + """ + sourceTypes = ("ricker0", "ricker1", "ricker2", "ricker3", "ricker4") + assert self.sourceType in sourceTypes, f"Only {sourceTypes} are allowed" + + f0 = self.sourceFreq + delay = 1.0 / f0 + alpha = - ( f0 * np.pi )**2 + + nsamples = int( round( ( self.maxTime - self.minTime ) / self.dt )) + 1 + sourceValue = np.zeros(( nsamples, 1 )) + + order = int( self.sourceType[-1] ) + sgn = ( -1 )**( order + 1 ) + + time = self.minTime + for nt in range(nsamples): + + if self.minTime <= - 1.0 / f0: + tmin = - 2.9 / f0 + tmax = 2.9 / f0 + time_d = time + + else: + time_d = time - delay + tmin = 0.0 + tmax = 2.9 / f0 + + if (time > tmin and time < tmax) or ( self.minTime < - 1 / f0 and time == tmin ): + gaussian = np.exp( alpha * time_d**2) + + if order == 0: + sourceValue[nt, 0] = sgn * gaussian + + elif order == 1: + sourceValue[nt, 0] = sgn * ( 2 * alpha * time_d ) * gaussian + + elif order == 2: + sourceValue[nt, 0] = sgn * ( 2 * alpha + 4 * alpha**2 * time_d**2 ) * gaussian + + elif order == 3: + sourceValue[nt, 0] = sgn * ( 12 * alpha**2 * time_d + 8 * alpha**3 * time_d**3 ) * gaussian + + elif order == 4: + sourceValue[nt, 0] = sgn * ( 12 * alpha**2 + 48 * alpha**3 * time_d**2 + 16 * alpha**4 * time_d**4 ) * gaussian + + time += self.dt + + self.updateSourceFrequency(self.sourceFreq) + self.updateSourceValue(sourceValue) + self.sourceValue = sourceValue + + + def updateSourceValue(self, value): + """ + Update the value of the source in GEOS + + Parameters + ---------- + value : array/list + List/array containing the value 
of the source at each time step + """ + src_value = self.solver.get_wrapper("sourceValue").value() + src_value.set_access_level(pygeosx.pylvarray.RESIZEABLE) + + src_value.resize_all(value.shape) + src_value.to_numpy()[:] = value[:] + + self.maxTimeSim = ( value.shape[0] - 1 ) * self.dt + self.geosx.get_wrapper("Events/minTime").value()[0] = self.minTime + self.sourceValue = value[:] + + + def filterSource(self, fmax): + """ + Filter the source value and give the value to GEOSX. Note that is can also modify the start and end time of simulation to avoid discontinuity. + + Parameters + ----------- + fmax : float/string + Max frequency of the source wanted. The source then have frequencies in the interval [0,fmax+1] + """ + if str(fmax) == "all": + return + + minTime = self.minTime + maxTime = self.maxTime + dt = self.dt + f0 = self.sourceFreq + + sourceValue = self.sourceValue + + pad = int(round(sourceValue.shape[0]/2)) + n = sourceValue.shape[0] + 2 * pad + + tf = fftfreq(n, dt) + y_fft = np.zeros((n,sourceValue.shape[1]), dtype="complex_") + y = np.zeros(y_fft.shape, dtype="complex_") + + for i in range(y_fft.shape[1]): + y_fft[pad:n-pad,i] = sourceValue[:,i] + y_fft[:,i] = fft(y_fft[:,i])# Perform fourier transform + + isup = np.where(tf>=fmax)[0] + imax = np.where(tf[isup]>=fmax+1)[0][0] + i1 = isup[0] + i2 = isup[imax] + + iinf = np.where(tf<=-fmax)[0] + imin = np.where(tf[iinf]<=-fmax-1)[0][-1] + + i3 = iinf[imin] + i4 = iinf[-1] + + for i in range(y_fft.shape[1]): + y_fft[i1:i2,i] = np.cos((isup[0:imax] - i1)/(i2-i1) * np.pi/2)**2 * y_fft[i1:i2,i] + y_fft[i3:i4,i] = np.cos((iinf[imin:-1] - i4)/(i3-i4) * np.pi/2)**2 * y_fft[i3:i4,i] + y_fft[i2:i3,i] = 0 + + for i in range(y.shape[1]): + y[:,i] = ifft(y_fft[:,i])# Perform inverse fourier transform + + it0 = int(round(abs(minTime/dt))) + pad + d = int(round(1/f0/dt)) + + i1 = max(it0 - 4*d, 0) + i2 = int(round(i1 + d/4)) + + i4 = min(n,n - pad + 4*d) + i3 = int(round(i4 - d/4)) + + for i in range(y.shape[1]): + 
y[i1:i2,i] = np.cos((np.arange(i1,i2) - i2)/(i2-i1) * np.pi/2)**2 * y[i1:i2,i] + y[i3:i4,i] = np.cos((np.arange(i3,i4) - i3)/(i4-i3) * np.pi/2)**2 * y[i3:i4,i] + y[max(i1-d,0):i1,i] = 0.0 + y[i4:min(i4+d,n),i] = 0.0 + + + t = np.arange(minTime-pad*dt, maxTime+pad*dt+dt/2, dt) + + self.updateSourceValue(np.real(y[max(i1-d,0):min(i4+d,n),:])) + self.minTimeSim = t[max(i1-d,0)] + self.maxTimeSim = t[min(i4+d,n-1)] + self.geosx.get_wrapper("Events/minTime").value()[0] = self.minTimeSim + self.sourceValue = np.real(y[max(i1-d,0):min(i4+d,n),:]) + + + def updateVelocityModel(self, vel, velocityName, **kwargs): + """ + Update velocity value in GEOS + + Parameters + ---------- + vel : float/array + Value(s) for velocity field + velocityName : str + Name of the velocity array in GEOS + """ + prefix = self._getPrefixPath(**kwargs) + + velocity = self.solver.get_wrapper(prefix+velocityName).value() + velocity.set_access_level(pygeosx.pylvarray.MODIFIABLE) + + velocity.to_numpy()[vel>0] = vel[vel>0] + + + def updateDensityModel( self, density, densityName, **kwargs ): + """ + Update density values in GEOS + + Parameters + ----------- + density : array + New values for the density + densityName : str + Name of density array in GEOS + """ + prefix = self._getPrefixPath( **kwargs ) + + d = self.solver.get_wrapper( prefix + densityName ).value() + d.set_access_level( pygeosx.pylvarray.MODIFIABLE ) + + d.to_numpy()[density>0] = density[density>0] + + + def outputWaveField(self, time): + """ + Trigger the wavefield output + + Parameters + ---------- + time : float + Current time of simulation + """ + self.collections[0].collect(time, self.dt) + self.hdf5Outputs[0].output(time, self.dt) + + + def getVelocityModel(self, velocityName, filterGhost=False, **kwargs): + """ + Get the velocity values + + Parameters + ----------- + velocityName : str + Name of velocity array in GEOS + filterGhost : bool + Filter the ghost ranks + + Returns + ------- + Numpy Array : Array containing the 
velocity values + """ + velocity = self.getField(velocityName, **kwargs) + + if filterGhost: + velocity = self.filterGhostRank(velocity, **kwargs) + + return velocity + + + def getWaveField(self): + pass + + def getWaveFieldAtReceivers(self, comm): + pass diff --git a/pygeos-tools/src/geos/pygeos_tools/utilities/solvers/__init__.py b/pygeos-tools/src/geos/pygeos_tools/utilities/solvers/__init__.py new file mode 100644 index 0000000..be13e6d --- /dev/null +++ b/pygeos-tools/src/geos/pygeos_tools/utilities/solvers/__init__.py @@ -0,0 +1,30 @@ +# ------------------------------------------------------------------------------------------------------------ +# SPDX-License-Identifier: LGPL-2.1-only +# +# Copyright (c) 2016-2024 Lawrence Livermore National Security LLC +# Copyright (c) 2018-2024 TotalEnergies +# Copyright (c) 2018-2024 The Board of Trustees of the Leland Stanford Junior University +# Copyright (c) 2023-2024 Chevron +# Copyright (c) 2019- GEOS/GEOSX Contributors +# Copyright (c) 2019- INRIA project-team Makutu +# All rights reserved +# +# See top level LICENSE, COPYRIGHT, CONTRIBUTORS, NOTICE, and ACKNOWLEDGEMENTS files for details. 
+# ------------------------------------------------------------------------------------------------------------ + +"""Solvers classes""" + +from .Solver import Solver +from .AcousticSolver import AcousticSolver +from .ReservoirSolver import ReservoirSolver +from .ElasticSolver import ElasticSolver +from .WaveSolver import WaveSolver +from .GeomechanicsSolver import GeomechanicsSolver + +from .utils.solverutils import ( + print_group, + print_with_indent, + printGeosx, + printSolver, + printGroup, +) diff --git a/pygeos-tools/src/geos/pygeos_tools/utilities/solvers/utils/solverutils.py b/pygeos-tools/src/geos/pygeos_tools/utilities/solvers/utils/solverutils.py new file mode 100644 index 0000000..5c0b518 --- /dev/null +++ b/pygeos-tools/src/geos/pygeos_tools/utilities/solvers/utils/solverutils.py @@ -0,0 +1,43 @@ +# ------------------------------------------------------------------------------------------------------------ +# SPDX-License-Identifier: LGPL-2.1-only +# +# Copyright (c) 2016-2024 Lawrence Livermore National Security LLC +# Copyright (c) 2018-2024 TotalEnergies +# Copyright (c) 2018-2024 The Board of Trustees of the Leland Stanford Junior University +# Copyright (c) 2023-2024 Chevron +# Copyright (c) 2019- GEOS/GEOSX Contributors +# Copyright (c) 2019- INRIA project-team Makutu +# All rights reserved +# +# See top level LICENSE, COPYRIGHT, CONTRIBUTORS, NOTICE, and ACKNOWLEDGEMENTS files for details. 
+# ------------------------------------------------------------------------------------------------------------ + +def print_group(group, indent=0): + print("{}{}".format(" " * indent, group)) + + indent += 4 + print("{}wrappers:".format(" " * indent)) + + for wrapper in group.wrappers(): + print("{}{}".format(" " * (indent + 4), wrapper)) + print_with_indent(str(wrapper.value(False)), indent + 8) + + print("{}groups:".format(" " * indent)) + + for subgroup in group.groups(): + print_group(subgroup, indent + 4) + + +def print_with_indent(msg, indent): + indent_str = " " * indent + print(indent_str + msg.replace("\n", "\n" + indent_str)) + + +def printGeosx(solver): + print_group(solver.geosx) + +def printSolver(solver): + print_group(solver.solver) + +def printGroup(solver, path): + print_group(solver.solver.get_group(path)) \ No newline at end of file From d2eccce8f0ec6e2aae2870f7ed8504c06ec00432 Mon Sep 17 00:00:00 2001 From: Matteo Cusini <49037133+CusiniM@users.noreply.github.com> Date: Wed, 20 Nov 2024 12:13:28 -0800 Subject: [PATCH 08/10] Revert "feat: Python interfaces to geos from Makutu repo (#44)" (#47) This reverts commit 83e92b61b544211c93f7dd9fc55d62a5c17f196f. 
--- pygeos-tools/pyproject.toml | 4 - .../pygeos_tools/utilities/input/GeosxArgs.py | 202 --- .../geos/pygeos_tools/utilities/input/Xml.py | 120 -- .../pygeos_tools/utilities/input/__init__.py | 18 - .../utilities/mesh/InternalMesh.py | 144 --- .../utilities/mesh/VtkFieldSpecifications.py | 197 --- .../pygeos_tools/utilities/mesh/VtkMesh.py | 632 ---------- .../pygeos_tools/utilities/mesh/__init__.py | 19 - .../pygeos_tools/utilities/model/__init__ .py | 33 - .../utilities/model/utils/VtkModel.py | 1096 ----------------- .../utilities/model/utils/vtkUtils.py | 488 -------- .../utilities/solvers/AcousticSolver.py | 398 ------ .../utilities/solvers/ElasticSolver.py | 319 ----- .../utilities/solvers/GeomechanicsSolver.py | 104 -- .../utilities/solvers/ReservoirSolver.py | 216 ---- .../pygeos_tools/utilities/solvers/Solver.py | 697 ----------- .../utilities/solvers/WaveSolver.py | 520 -------- .../utilities/solvers/__init__.py | 30 - .../utilities/solvers/utils/solverutils.py | 43 - 19 files changed, 5280 deletions(-) delete mode 100644 pygeos-tools/src/geos/pygeos_tools/utilities/input/GeosxArgs.py delete mode 100644 pygeos-tools/src/geos/pygeos_tools/utilities/input/Xml.py delete mode 100644 pygeos-tools/src/geos/pygeos_tools/utilities/input/__init__.py delete mode 100644 pygeos-tools/src/geos/pygeos_tools/utilities/mesh/InternalMesh.py delete mode 100644 pygeos-tools/src/geos/pygeos_tools/utilities/mesh/VtkFieldSpecifications.py delete mode 100644 pygeos-tools/src/geos/pygeos_tools/utilities/mesh/VtkMesh.py delete mode 100644 pygeos-tools/src/geos/pygeos_tools/utilities/mesh/__init__.py delete mode 100644 pygeos-tools/src/geos/pygeos_tools/utilities/model/__init__ .py delete mode 100644 pygeos-tools/src/geos/pygeos_tools/utilities/model/utils/VtkModel.py delete mode 100644 pygeos-tools/src/geos/pygeos_tools/utilities/model/utils/vtkUtils.py delete mode 100644 pygeos-tools/src/geos/pygeos_tools/utilities/solvers/AcousticSolver.py delete mode 100644 
pygeos-tools/src/geos/pygeos_tools/utilities/solvers/ElasticSolver.py delete mode 100644 pygeos-tools/src/geos/pygeos_tools/utilities/solvers/GeomechanicsSolver.py delete mode 100644 pygeos-tools/src/geos/pygeos_tools/utilities/solvers/ReservoirSolver.py delete mode 100644 pygeos-tools/src/geos/pygeos_tools/utilities/solvers/Solver.py delete mode 100644 pygeos-tools/src/geos/pygeos_tools/utilities/solvers/WaveSolver.py delete mode 100644 pygeos-tools/src/geos/pygeos_tools/utilities/solvers/__init__.py delete mode 100644 pygeos-tools/src/geos/pygeos_tools/utilities/solvers/utils/solverutils.py diff --git a/pygeos-tools/pyproject.toml b/pygeos-tools/pyproject.toml index 24f846f..2a6f5de 100644 --- a/pygeos-tools/pyproject.toml +++ b/pygeos-tools/pyproject.toml @@ -22,10 +22,6 @@ dependencies = [ "numpy", "scipy", "mpi4py", - "vtk", - "pyevtk", - "xmltodict", - "h5py", ] [tool.mypy] diff --git a/pygeos-tools/src/geos/pygeos_tools/utilities/input/GeosxArgs.py b/pygeos-tools/src/geos/pygeos_tools/utilities/input/GeosxArgs.py deleted file mode 100644 index f6faf41..0000000 --- a/pygeos-tools/src/geos/pygeos_tools/utilities/input/GeosxArgs.py +++ /dev/null @@ -1,202 +0,0 @@ -# ------------------------------------------------------------------------------------------------------------ -# SPDX-License-Identifier: LGPL-2.1-only -# -# Copyright (c) 2016-2024 Lawrence Livermore National Security LLC -# Copyright (c) 2018-2024 TotalEnergies -# Copyright (c) 2018-2024 The Board of Trustees of the Leland Stanford Junior University -# Copyright (c) 2023-2024 Chevron -# Copyright (c) 2019- GEOS/GEOSX Contributors -# Copyright (c) 2019- INRIA project-team Makutu -# All rights reserved -# -# See top level LICENSE, COPYRIGHT, CONTRIBUTORS, NOTICE, and ACKNOWLEDGEMENTS files for details. 
-# ------------------------------------------------------------------------------------------------------------ - -import sys -import argparse - -class GeosxArgs: - """ - Class containing the main argument in command line for GEOSX - - Attributes - ---------- - cmdline : list of str - List corresponding to a splitted GEOSX submission line - options : dict - Dict containing GEOSX main options - """ - def __init__(self, args=sys.argv.copy()): - """ - Parameters - ---------- - args : list of str - List corresponding to a splitted submission line with options - """ - self.cmdline = args - self.options = self.optionsParser() - - - def optionsParser(self, cmdline=[]): - """ - Return a dict with useful geosx options parsed from a list/submission line. - - Parameters - ---------- - cmdline : list of str - List corresponding to a splitted GEOSX submission line - - Returns - -------- - dict : - Dict containing GEOSX main options - """ - if not cmdline: - cmdline = self.cmdline - desc = "Parser of GEOSX command line" - parser = argparse.ArgumentParser("GEOSX command line parser", - allow_abbrev=False, - description=desc) - - parser.add_argument("--input", "-i", "--xml", - type=str, dest="xml", default=None, - help="XML input file") - parser.add_argument("--restart", "-r", - dest="restart", type=str, default=None, - help="Target restart filename") - parser.add_argument("-x-partitions", "-x", - type=int, dest="xpart", default=None, - help="Number of partitions in the x direction.") - parser.add_argument("-y-partitions", "-y", - type=int, dest="ypart", default=None, - help="Number of partitions in the y direction.") - parser.add_argument("-z-partitions", "-z", - type=int, dest="zpart", default=None, - help="Number of partitions in the z direction.") - parser.add_argument("--output", "-o", - type=str, dest="outputdir", default=None, - help="Directory to put the output files") - parser.add_argument("--use-nonblocking", "-b", default=None, - dest="non_blocking", 
action="store_true", - help="Use non-blocking MPI communication") - parser.add_argument("--name", "-n", - dest="name", default=None, - help="Name of the problem, used for output") - parser.add_argument("--suppress-pinned", "-s", - dest="supp_pinned", action="store_true", default=None, - help="Suppress usage of pinned memory for MPI communication buffers") - parser.add_argument("--timers", "-t", - type=str, dest="timers", default=None, - help="String specifying the type of timer output") - parser.add_argument("--trace-data-migration", - dest="trace_data_mig", action="store_true", default=None, - help="Trace host-device data migration") - parser.add_argument("--pause-for", - dest="pause_for", default=None, - help="Pause geosx for a given number of seconds before starting execution") - - return vars(parser.parse_known_args(cmdline)[0]) - - - def updateArg(self, optionName=None, newValue=None): - """ - Update the GEOSX initialization arguments - - Parameters - ---------- - optionName : str - Name of the option to update - newValue : str - New value for the argument to be updated. - - Returns - ------- - bool : True if the option has been (found and) updated. False otherwise. 
- """ - if optionName is not None: - if self.options[optionName] != newValue: - self.options.update({optionName: newValue}) - return True - - return False - - - def getCommandLine(self): - """ - Return the command line specific to GEOSX initialization - - Returns - -------- - cl : list of str - List containing all the options requested - """ - cl = [""] - for opt, val in self.options.items(): - if val is not None: - ab = GeosxAbbrevOption().getAbbrv(opt) - cl += [ab] - if not isinstance(self.options[opt], bool): - cl += [str(self.options[opt])] - - return cl - - -class GeosxAbbrevOption: - """ - Class containing GEOSX command line options abbreviations - - Attributes - ---------- - abbrvDict : dict - Dict containing lists of abbreviations for GEOSX command line options - """ - def __init__(self): - self.abbrvDict = {"xml": ("--input", "-i"), - "restart": ("--restart", "r"), - "xpart": ("-x-partitions", "-x"), - "ypart": ("-y-partitions", "-y"), - "zpart": ("-z-partitions", "-z"), - "non_blocking": ("--use-non-blocking", "-b"), - "name": ("--name", "-n"), - "supp_pinned": ("--suppress-pinned"), - "outputdir": ("--output", "-o"), - "timers": ("--timers", "-t"), - "trace_data_mig": ("--trace-data-migration"), - "pause_for": ("--pause-for") - } - - def getAbbrv(self, optionName=None): - """ - Returns the abbreviation corresponding to the requested option - - Parameters - ---------- - optionName : str - Name of the requested option - - Returns - ------- - str : Requested abbreviations - """ - return self.abbrvDict[optionName][-1] - - - def getAllAbbrv(self, optionName=None): - """ - Returns the abbreviation corresponding to the requested option - - Parameters - ---------- - optionName : str - Name of the requested option - - Returns - ------- - list of str : - Requested abbreviations - """ - try: - return self.abbrvDict[optionName] - - except: - return [""] diff --git a/pygeos-tools/src/geos/pygeos_tools/utilities/input/Xml.py 
b/pygeos-tools/src/geos/pygeos_tools/utilities/input/Xml.py deleted file mode 100644 index 161fdab..0000000 --- a/pygeos-tools/src/geos/pygeos_tools/utilities/input/Xml.py +++ /dev/null @@ -1,120 +0,0 @@ -# ------------------------------------------------------------------------------------------------------------ -# SPDX-License-Identifier: LGPL-2.1-only -# -# Copyright (c) 2016-2024 Lawrence Livermore National Security LLC -# Copyright (c) 2018-2024 TotalEnergies -# Copyright (c) 2018-2024 The Board of Trustees of the Leland Stanford Junior University -# Copyright (c) 2023-2024 Chevron -# Copyright (c) 2019- GEOS/GEOSX Contributors -# Copyright (c) 2019- INRIA project-team Makutu -# All rights reserved -# -# See top level LICENSE, COPYRIGHT, CONTRIBUTORS, NOTICE, and ACKNOWLEDGEMENTS files for details. -# ------------------------------------------------------------------------------------------------------------ - -import os -from xml.etree import cElementTree as ET -import xmltodict -from re import findall -from ..mesh.InternalMesh import InternalMesh -from ..mesh.VtkMesh import VTKMesh - - -class XML(): - def __init__(self, xmlFile): - self.filename = xmlFile - - self.tree = ET.parse(xmlFile) - root = self.tree.getroot() - to_string = ET.tostring(root, method='xml') - - self.outputs = None - - root = xmltodict.parse(to_string, attr_prefix="" ,dict_constructor=dict) - for k, v in root['Problem'].items(): - words = findall('[A-Z][^A-Z]*', k) - words[0] = words[0].lower() - attr = "" - for word in words: - attr += word - setattr(self, attr, v) - - - - def updateSolvers(self, solverName, **kwargs): - root = self.tree.getroot() - solver = root.find("./Solvers/"+solverName) - for k, v in kwargs.items(): - if k in solver.attrib: - solver.set(k, v) - self.solvers[solverName].update({k:str(v)}) - - - def updateMesh(self, **kwargs): - root = self.tree.getroot() - mesh = root.find("./Mesh//") - for k, v in kwargs.items(): - if k in mesh.attrib: - mesh.set(k, v) - 
self.mesh[mesh.tag].update({k:str(v)}) - - - def updateGeometry(self, boxname, **kwargs): - root = self.tree.getroot() - geometry = root.find("./Geometry//*[@name="+boxname+"]") - - for i in len(self.geometry[geometry.tag]): - box = self.geometry[geometry.tag][i] - if boxname == box["name"]: - break - - for k, v in kwargs.items(): - if k in geometry.attrib: - geometry.set(k, v) - self.geometry[geometry.tag][i].update({k:str(v)}) - - - def getMeshObject(self): - - if "InternalMesh" in self.mesh.keys(): - #Not working properly for now - return InternalMesh(self) - - elif "VTKMesh" in self.mesh.keys(): - vtkFile = self.mesh["VTKMesh"]["file"] - if not os.path.isabs(vtkFile): - vtkFile = os.path.join(os.path.split(self.filename)[0], vtkFile) - return VTKMesh(vtkFile) - - def getAttribute(self, parentElement, attributeTag): - if parentElement == "root": - pElement = self.tree.find(f"./[@{attributeTag}]") - else: - pElement = self.tree.find(f"./*/{parentElement}/[@{attributeTag}]") - - return pElement.get(attributeTag) - - - def getSolverType(self): - return [k for k in self.solvers.keys() if k[0].isupper()] - - - def getSourcesAndReceivers(self): - solverType = self.getSolverType() - if len(solverType) > 1: - pass - else: - src = self.getAttribute(f"{solverType[0]}", "sourceCoordinates") - src = eval(src.replace("{", "[").replace("}", "]")) - - rcv = self.getAttribute(f"{solverType[0]}", "receiverCoordinates") - rcv = eval(rcv.replace("{", "[").replace("}", "]")) - - return src, rcv - - - def exportToXml(self, filename=None): - if filename is None: - self.tree.write(self.filename) - else: - self.tree.write(filename) diff --git a/pygeos-tools/src/geos/pygeos_tools/utilities/input/__init__.py b/pygeos-tools/src/geos/pygeos_tools/utilities/input/__init__.py deleted file mode 100644 index af3b85b..0000000 --- a/pygeos-tools/src/geos/pygeos_tools/utilities/input/__init__.py +++ /dev/null @@ -1,18 +0,0 @@ -# 
------------------------------------------------------------------------------------------------------------ -# SPDX-License-Identifier: LGPL-2.1-only -# -# Copyright (c) 2016-2024 Lawrence Livermore National Security LLC -# Copyright (c) 2018-2024 TotalEnergies -# Copyright (c) 2018-2024 The Board of Trustees of the Leland Stanford Junior University -# Copyright (c) 2023-2024 Chevron -# Copyright (c) 2019- GEOS/GEOSX Contributors -# Copyright (c) 2019- INRIA project-team Makutu -# All rights reserved -# -# See top level LICENSE, COPYRIGHT, CONTRIBUTORS, NOTICE, and ACKNOWLEDGEMENTS files for details. -# ------------------------------------------------------------------------------------------------------------ - -"""Input handle""" - -from .GeosxArgs import GeosxArgs, GeosxAbbrevOption -from .Xml import XML \ No newline at end of file diff --git a/pygeos-tools/src/geos/pygeos_tools/utilities/mesh/InternalMesh.py b/pygeos-tools/src/geos/pygeos_tools/utilities/mesh/InternalMesh.py deleted file mode 100644 index 5019b91..0000000 --- a/pygeos-tools/src/geos/pygeos_tools/utilities/mesh/InternalMesh.py +++ /dev/null @@ -1,144 +0,0 @@ -# ------------------------------------------------------------------------------------------------------------ -# SPDX-License-Identifier: LGPL-2.1-only -# -# Copyright (c) 2016-2024 Lawrence Livermore National Security LLC -# Copyright (c) 2018-2024 TotalEnergies -# Copyright (c) 2018-2024 The Board of Trustees of the Leland Stanford Junior University -# Copyright (c) 2023-2024 Chevron -# Copyright (c) 2019- GEOS/GEOSX Contributors -# Copyright (c) 2019- INRIA project-team Makutu -# All rights reserved -# -# See top level LICENSE, COPYRIGHT, CONTRIBUTORS, NOTICE, and ACKNOWLEDGEMENTS files for details. 
-# ------------------------------------------------------------------------------------------------------------ - -import numpy as np - - -class InternalMesh: - """ - GEOSX Internal Mesh - - Attributes - ---------- - xml : XML - XML object containing the information on the mesh - bounds : list of list - Real bounds of the mesh [[xmin, xmax],[ymin,ymax],[zmin,zmax]] - nx : int - Number of elements in the x direction - ny : int - Number of elements in the y direction - nz : int - Number of elements in the z direction - order : int - Mesh order - cellBlockNames : str - Names of each mesh block - cellBounds : - elementTypes : - Element types of each mesh block - numberOfCells : int - Total number of cells - numberOfPoints : int - Total number of points - fieldSpecifications : dict - Dict containing the mesh field specifications - """ - def __init__(self, xml): - """ - Parameters - ---------- - xml : XML - XML object containing the information on the mesh - """ - self.xml = xml - - mesh = xml.mesh["InternalMesh"] - elementRegion = xml.elementRegions["CellElementRegion"] - fieldSpecifications = xml.fieldSpecifications - - name = mesh["name"] - - xCoords = list(eval(mesh["xCoords"])) - yCoords = list(eval(mesh["yCoords"])) - zCoords = list(eval(mesh["zCoords"])) - - self.bounds = [[xCoords[0], xCoords[-1]], [yCoords[0], yCoords[-1]], [zCoords[0], zCoords[-1]]] - - - nxStr = mesh["nx"].strip('').replace('{','').replace('}','').split(',') - nyStr = mesh["ny"].strip('').replace('{','').replace('}','').split(',') - nzStr = mesh["nz"].strip('').replace('{','').replace('}','').split(',') - - nx = [eval(nx) for nx in nxStr] - ny = [eval(ny) for ny in nyStr] - nz = [eval(nz) for nz in nzStr] - - self.nx = nx - self.ny = ny - self.nz = nz - - order = 1 - self.order = order - - self.cellBlockNames = mesh["cellBlockNames"].strip('').replace('{','').replace('}','').split(',') - - xlayers = [] - ylayers = [] - zlayers = [] - for i in range(len(nx)): - xlayers.append([xCoords[i], 
xCoords[i+1]]) - for i in range(len(ny)): - ylayers.append([yCoords[i], yCoords[i+1]]) - for i in range(len(nz)): - zlayers.append([zCoords[i], zCoords[i+1]]) - - self.layers = [xlayers, ylayers, zlayers] - - xCellsBounds = np.zeros(sum(nx)+1) - yCellsBounds = np.zeros(sum(ny)+1) - zCellsBounds = np.zeros(sum(nz)+1) - - for i in range(len(nx)): - xstep = (xlayers[i][1]-xlayers[i][0])/nx[i] - if i == 0: - xCellsBounds[0:nx[i]] = np.arange(xlayers[i][0], xlayers[i][1], xstep) - else : - xCellsBounds[nx[i-1]:sum(nx[0:i+1])] = np.arange(xlayers[i][0], xlayers[i][1], xstep) - xCellsBounds[nx[-1]] = xlayers[i][1] - - for i in range(len(ny)): - ystep = (ylayers[i][1]-ylayers[i][0])/ny[i] - if i == 0: - yCellsBounds[0:ny[i]] = np.arange(ylayers[i][0], ylayers[i][1], ystep) - else : - xCellsBounds[ny[i-1]:sum(ny[0:i+1])] = np.arange(ylayers[i][0], ylayers[i][1], ystep) - yCellsBounds[ny[-1]] = ylayers[i][1] - - for i in range(len(nz)): - zstep = (zlayers[i][1]-zlayers[i][0])/nz[i] - if i == 0: - zCellsBounds[0:nz[i]] = np.arange(zlayers[i][0], zlayers[i][1], zstep) - else : - zCellsBounds[nz[i-1]:sum(nz[0:i+1])] = np.arange(zlayers[i][0], zlayers[i][1], zstep) - zCellsBounds[nz[-1]] = zlayers[i][1] - - self.cellBounds = [xCellsBounds, yCellsBounds, zCellsBounds] - - elementTypes = mesh["elementTypes"].strip('').replace('{','').replace('}','').split(',') - - self.elementTypes=[] - for type in elementTypes: - if type == "C3D8": - self.elementTypes.append("Hexahedron") - else: - self.elementTypes.append(type) - - - self.numberOfCells = sum(nx) * sum(ny) * sum(nz) - self.numberOfPoints = (sum(nx) + 1) * (sum(ny) + 1) * (sum(nz) + 1) - - self.fieldSpecifications = xml.fieldSpecifications - self.isSet = True - diff --git a/pygeos-tools/src/geos/pygeos_tools/utilities/mesh/VtkFieldSpecifications.py b/pygeos-tools/src/geos/pygeos_tools/utilities/mesh/VtkFieldSpecifications.py deleted file mode 100644 index cba7259..0000000 --- 
a/pygeos-tools/src/geos/pygeos_tools/utilities/mesh/VtkFieldSpecifications.py +++ /dev/null @@ -1,197 +0,0 @@ -# ------------------------------------------------------------------------------------------------------------ -# SPDX-License-Identifier: LGPL-2.1-only -# -# Copyright (c) 2016-2024 Lawrence Livermore National Security LLC -# Copyright (c) 2018-2024 TotalEnergies -# Copyright (c) 2018-2024 The Board of Trustees of the Leland Stanford Junior University -# Copyright (c) 2023-2024 Chevron -# Copyright (c) 2019- GEOS/GEOSX Contributors -# Copyright (c) 2019- INRIA project-team Makutu -# All rights reserved -# -# See top level LICENSE, COPYRIGHT, CONTRIBUTORS, NOTICE, and ACKNOWLEDGEMENTS files for details. -# ------------------------------------------------------------------------------------------------------------ - -import copy -import numpy as np -from vtk.util import numpy_support as VN - - -class VTKFieldSpecifications: - """ - Object containing field dataset of a VTK format mesh - - Attributes - ---------- - arrays : dict - Dict containing the {array names : array} of the given dataset - """ - def __init__(self, fieldData): - """ - Parameters - ---------- - fieldData : vtk.vtkFieldData - Data contained in the VTK mesh - """ - self.arrays = {} - assert(fieldData.IsA("vtkFieldData")) - for i in range(fieldData.GetNumberOfArrays()): - value = fieldData.GetArray(i) - key = value.GetName() - self.arrays.update({key: value}) - self.fieldtype = None - - - def hasArray(self, name): - """ - Check if the object contains an array with the requested name - - Parameters - ---------- - name : str - Name of the cell/point data array - - Returns - ------- - bool : True if the array exists. 
False otherwise - """ - if name in self.arrays.keys(): - return True - else: - return False - - - def getArray(self, name, sorted=False): - """ - Return the requested array from the cell data - - Parameters - ---------- - name : str - Name of the cell/point data array - sorted : bool - Sort with global ids \ - Require the presence of a global ids array - - Returns - ------- - numpy array : requested array - """ - array = None - - if self.hasArray(name): - array = VN.vtk_to_numpy(self.arrays[name]) - - if sorted: - ftype = self.fieldtype.split("Data")[0] - if self.hasArray(f"Global{ftype}Ids"): - gids = self.getCopyArray(f"Global{ftype}Ids") - array = array[np.argsort(gids)] - - return array - - - def getCopyArray(self, name, **kwargs): - """ - Return a copy of the requested array from the cell data - - Parameters - ---------- - name : str - Name of the cell/point data array - - Returns - ------- - numpy array : copy of the requested array - """ - - array = self.getArray(name, **kwargs) - - if array is not None: - array = copy.deepcopy(array) - - return array - - - def getVtkArray(self, name): - """ - Return the vtkDataArray requested - - Parameters - ---------- - name : str - Name of the cell/point data array - - Returns - ------- - numpy array : copy of the requested array - """ - if self.hasArray(name): - return self.arrays[name] - else: - return None - - - def setArray(self, name, value, overwrite=False): - """ - Return a copy of the requested array from the cell data - - Parameters - ---------- - name : str - Name of the cell data array - - Returns - ------- - numpy array : copy of the requested array - """ - if self.hasArray(name) and overwrite == False: - print(f"Warning! \n This dataset already contains a cell data array named {name}. 
Set the 'overwrite' parameter to True to bypass this warning") - else: - array = VN.vtk_to_numpy(self.arrays[name]) - array[:] = value[:] - - - -class VTKCellSpecifications(VTKFieldSpecifications): - """ - Contains the cell data information from a VTK Mesh - Inherits from VTKFieldSpecifications - """ - def __init__(self, celldata): - """ - Parameters - ---------- - celldata : vtk.vtkCellData - Cell data of the mesh - """ - assert(celldata.IsA("vtkCellData")) - super().__init__(fieldData=celldata) - self.fieldtype = "CellData" - - -class VTKPointSpecifications(VTKFieldSpecifications): - """ - Contains the point data information from a VTK Mesh - Inherits from VTKFieldSpecifications - - Parameters - ---------- - pointdata : vtk.vtkPointData - Point data of the mesh - - Attributes - --------- - arrays : dict - Dict containing the {name, vtkDataArray} of each point data array - """ - def __init__(self, pointdata): - """ - Parameters - ---------- - pointdata : vtk.vtkPointData - Point data of the mesh - """ - assert(pointdata.IsA("vtkPointData")) - super().__init__(fieldData=pointdata) - self.fieldtype = "PointData" diff --git a/pygeos-tools/src/geos/pygeos_tools/utilities/mesh/VtkMesh.py b/pygeos-tools/src/geos/pygeos_tools/utilities/mesh/VtkMesh.py deleted file mode 100644 index f89a1e5..0000000 --- a/pygeos-tools/src/geos/pygeos_tools/utilities/mesh/VtkMesh.py +++ /dev/null @@ -1,632 +0,0 @@ -# ------------------------------------------------------------------------------------------------------------ -# SPDX-License-Identifier: LGPL-2.1-only -# -# Copyright (c) 2016-2024 Lawrence Livermore National Security LLC -# Copyright (c) 2018-2024 TotalEnergies -# Copyright (c) 2018-2024 The Board of Trustees of the Leland Stanford Junior University -# Copyright (c) 2023-2024 Chevron -# Copyright (c) 2019- GEOS/GEOSX Contributors -# Copyright (c) 2019- INRIA project-team Makutu -# All rights reserved -# -# See top level LICENSE, COPYRIGHT, CONTRIBUTORS, NOTICE, and 
ACKNOWLEDGEMENTS files for details. -# ------------------------------------------------------------------------------------------------------------ - -import os -import vtk -from vtk.util import numpy_support as VN - -from .VtkFieldSpecifications import VTKCellSpecifications, VTKPointSpecifications -from ..model.utils.vtkUtils import cGlobalIds - - - -class VTKMesh: - """ - VTK format Mesh. Now handling .vtu .vts .pvts .pvtu - - Attributes - ---------- - meshfile : str - Mesh filename - vtktype : str - Format of the VTK mesh - bounds : tuple of int - Real bounds of the mesh (xmin, xmax, ymin, ymax, zmin, zmax) - numberOfPoints : int - Total number of points of the mesh - numberOfCells : int - Total number of cells of the mesh - isSet : bool - Whether or not the mesh properties have been set - hasLocator : bool - Whether or not the mesh cell locator has been initialized - """ - def __init__(self, meshfile): - """ - Parameters - ---------- - meshfile : str - Mesh filename - """ - self.meshfile = meshfile - self.vtktype = os.path.splitext(self.meshfile)[-1][1:] - - self.bounds = None - self.numberOfPoints = None - self.numberOfCells = None - - self.isSet = False - self.hasLocator = False - - - def getReader(self): - """Return the appropriate reader given the VTK format - - Returns - -------- - vtk.vtkXMLReader - Appropriate reader given the format of the VTK mesh file. 
- """ - - if self.vtktype == "vtu": - return vtk.vtkXMLUnstructuredGridReader() - elif self.vtktype == "vts": - return vtk.vtkXMLStructuredGridReader() - elif self.vtktype == "pvtu": - return vtk.vtkXMLPUnstructuredGridReader() - elif self.vtktype == "pvts": - return vtk.vtkXMLPStructuredGridReader() - else: - print("This VTK format is not handled.") - return None - - - def read(self): - """Read information from the VTK file - - Returns - -------- - vtk.vtkDataObject - General representation of VTK mesh data - """ - reader = self.getReader() - reader.SetFileName(self.meshfile) - reader.Update() - - return reader.GetOutput() - - - def setMeshProperties(self): - """Read and set as attributes the bounds, number of points and cells""" - data = self.read() - - self.bounds = data.GetBounds() - self.numberOfPoints = data.GetNumberOfPoints() - self.numberOfCells = data.GetNumberOfCells() - - self.isSet = True - - - def getBounds(self): - """ - Get the bounds of the mesh in the format: - (xmin, xmax, ymin, ymax, zmin, zmax) - - Returns - ------- - tuple or None - Bounds of the mesh - """ - return self.bounds - - - def getNumberOfPoints(self): - """ - Get the total number of points of the mesh - - Returns - ------- - int - Number of points - """ - return self.numberOfPoints - - - def getNumberOfCells(self): - """ - Get the total number of cells of the mesh - - Returns - ------- - int - Number of cells - """ - return self.numberOfCells - - - def getCellData(self): - """Read the cell data - - Returns - -------- - VTKCellSpecifications - Cell data information - """ - data = self.read() - - return VTKCellSpecifications(data.GetCellData()) - - - def getPointData(self): - """Read the point data - - Returns - -------- - VTKPointSpecifications - Point data information - """ - data = self.read() - - return VTKPointSpecifications(data.GetPointData()) - - - def getArray(self, name, dtype="cell", copy=False, sorted=False): - """ - Return a cell or point data array. 
If the file is a pvtu, the array is sorted with global ids - - Parameters - ----------- - name : str - Name of the vtk cell/point data array - dtype : str - Type of vtk data \ - `cell` or `point` - copy : bool - Return a copy of the requested array - Default is False - sorted : bool - Return the array sorted with respect to GlobalPointIds or GlobalCellIds - Default is False - - Returns - -------- - array : numpy array - Requested array - """ - assert dtype.lower() in ("cell", "point") - - if dtype.lower() == "cell": - fdata = self.getCellData() - else: - fdata = self.getPointData() - - if copy: - array = fdata.getCopyArray(name, sorted=sorted) - else: - array = fdata.getArray(name, sorted=sorted) - - return array - - - def extractMesh(self, center, srootname, dist=[None, None, None], comm=None, export=True): - """ - Extract a rectangular submesh such that for each axis we have the subax: [center-dist, center+dist] - - Parameters - --------- - center : 3d float - Requested center of the subbox - srootname : str - Submesh root filename - dist : 3d float - Distance to the center in each direction - comm : MPI.COMM_WORLD - MPI communicator - - Returns - ------- - VTKMesh - Submesh extracted - """ - assert self.vtktype in ("vtu", "pvtu", "vts", "pvts") - vtktype = self.vtktype[-3:] - sfilename = ".".join((srootname, vtktype)) - - if comm is None or comm.Get_rank() == 0: - if not self.isSet: - self.setMeshProperties() - minpos = [] - maxpos = [] - - for i in range(3): - xmin, d = self.getSubAx(center[i], dist[i], ax=i+1) - minpos.append(xmin) - maxpos.append(xmin+d) - - data = self.read() - submesh = VTKSubMesh(sfilename, data, minpos, maxpos, create=export) - - else: - submesh = VTKMesh(sfilename) - - # if file creation, wait for rank 0 to finish - if export: - info = "Done" - comm.bcast(info, root=0) - - return submesh - - - def getSubAx(self, center, dist, ax): - """ - Return the min and max positions in the mesh given the center, distance and ax considered. 
If the 2*distance if greater than the bounds, the min/max is the corresponding mesh bound. - - Parameters - ---------- - center : float - Central position considered - dist : float - Max distance requested - ax : int - Ax to consider (1, 2, 3) - - Returns - ------- - min, max : float - Min and Max positions - """ - assert(type(ax) == int) - - bounds = [self.bounds[(ax-1)*2], self.bounds[ax*2-1]] - - if dist is not None: - dist = abs(dist) - ox = max(bounds[0], center-dist) - x = min(bounds[1]-ox, 2*dist) - else: - ox = bounds[0] - x = bounds[1] - - return ox, x - - - def getNumberOfBlocks(self): - """Return the number of blocks of a mesh.""" - if self.vtktype in ["pvtu", "pvts"]: - with open(self.meshfile) as ff: - nb = 0 - for line in ff: - m = line.split() - if m[0] == ' high are set to high - comm : MPI_COMM - MPI communicators - """ - root = 0 - rank = comm.Get_rank() - size = comm.Get_size() - - x = None - if rank == root: - with h5py.File( filename, 'r' ) as f: - x = f["velocity"][:] - - imin = np.where( x < low )[0] - imax = np.where( x > high )[0] - x[imin] = low - x[imax] = high - - if self.model == "1/c2": - x = np.sqrt(1/x) - elif self.model == "1/c": - x = 1/x - elif self.model == "c": - pass - else: - raise ValueError("Not implemented") - - startModel = self.bcastField( x, comm ) - - self.updateVelocityModel( startModel ) - - - def updateVelocityModel(self, vel, **kwargs): - """ - Update velocity value in GEOS - - Parameters - ---------- - vel : array - Values for velocity field - """ - super().updateVelocityModel(vel, velocityName="acousticVelocity", **kwargs) - - - def setConstantVelocityModel(self, vel, velFieldName="acousticVelocity", **kwargs): - """ - Update velocity value in GEOS, using a constant value. 
- - Parameters - ---------- - vel : float - Value for velocity field - """ - prefix = self._getPrefixPath(**kwargs) - - velocity = self.solver.get_wrapper(prefix+velFieldName).value() - velocity.set_access_level(pygeosx.pylvarray.MODIFIABLE) - - velocity.to_numpy()[:] = vel - - - def getVelocityModel(self, filterGhost=False, **kwargs): - """ - Get the velocity values - - Parameters - ----------- - filterGhost : bool - Filter the ghost ranks - - Returns - ------- - velocity : numpy array - Velocity values - """ - velocity = super().getVelocityModel(velocityName="acousticVelocity", filterGhost=filterGhost, **kwargs) - - return velocity - - - def updateDensityModel(self, density, **kwargs): - """ - Update density values in GEOS - - Parameters - ----------- - density : array - New values for the density - """ - super().updateDensityModel( density=density, densityName="acousticDensity", **kwargs ) - - - def getGradient(self, filterGhost=False, **kwargs): - """Get the local rank gradient value - - Returns - -------- - partialGradient : Numpy Array - Array containing the element id list for the local rank - """ - partialGradient = self.getField("partialGradient", **kwargs) - - if filterGhost: - partialGradient = self.filterGhostRank(partialGradient, **kwargs) - - return partialGradient - - - def computePartialGradient( self, shotId, minDepth, comm, gradDirectory="partialGradient", **kwargs ): - """Compute the partial Gradient - - Parameters - ----------- - shotId : string - Number of the shot as string - minDepth : float - Depth at which gradient values are kept, otherwise it is set to 0. 
- NOTE : this is done in this routine to avoid storage \ - of elementCenter coordinates in the .hdf5 \ - but might be problem for WolfeConditions later on \ - if minDepth is too large - comm : MPI_COMM - MPI communicators - gradDirectory : str, optional - Partial gradient directory \ - Default is `partialGradient` - """ - rank = comm.Get_rank() - size = comm.Get_size() - root = 0 - - # Get local gradient - grad = self.getGradient( **kwargs ) - if self.model == "1/c2": - x = self.getVelocityModel( **kwargs ) - grad = -( x * x * x / 2 ) * grad - elif self.model == "1/c": - x = self.getVelocityModel( filterGhost=True, **kwargs ) - grad = - x * x * grad - elif self.model == "c": - pass - else: - raise ValueError("Not implemented") - - grad = grad.astype( np.float64 ) - - zind = np.where(self.getElementCenter(**kwargs)[:,2] < minDepth)[0] - grad[zind] = 0.0 - - # Gather global gradient - gradFull, ntot = self.gatherField( field=grad, comm=comm, root=root ) - - if rank == root: - os.makedirs(gradDirectory, exist_ok=True) - - with h5py.File(f"{gradDirectory}/partialGradient_"+shotId+".hdf5", 'w') as h5p: - - h5p.create_dataset("partialGradient", data=np.zeros(ntot), chunks=True, maxshape=(ntot,)) - h5p["partialGradient"][:] = self.dtWaveField * gradFull - - shutil.move(f"{gradDirectory}/partialGradient_"+shotId+".hdf5", f"{gradDirectory}/partialGradient_ready_"+shotId+".hdf5") - - comm.Barrier() - - - def getPressureAtReceivers(self): - """ - Get the pressure values at receivers coordinates - - Returns - ------ - numpy Array : Array containing the pressure values at all time step at all receivers coordinates - """ - pressureAtReceivers = self.solver.get_wrapper("pressureNp1AtReceivers").value() - - return pressureAtReceivers.to_numpy() - - - def getFullPressureAtReceivers(self, comm): - """Return all pressures at receivers values on all ranks - Note that for a too large 2d array this may not work. 
- - Parameters: - ----------- - comm : MPI_COMM - MPI communicators - """ - rank = comm.Get_rank() - - allPressure = comm.gather(self.getPressureAtReceivers(), root=0) - pressure = np.zeros(self.getPressureAtReceivers().shape) - - if rank == 0: - for p in allPressure: - for i in range(p.shape[1]): - if any(p[1:, i])==True: - pressure[:, i] = p[:, i] - - pressure = comm.bcast(pressure, root=0) - - return pressure - - - def resetWaveField(self, **kwargs): - """Reinitialize all pressure values on the Wavefield to zero in GEOSX""" - - self.geosx.get_wrapper("Solvers/"+self.name+"/indexSeismoTrace").value()[0] = 0 - meshName = self._getMeshName() - discretization = self._getDiscretization() - - nodeManagerPath = f"domain/MeshBodies/{meshName}/meshLevels/{discretization}/nodeManager/" - - - if self.type == "AcousticSEM": - for ts in ("nm1", "n", "np1"): - pressure = self.geosx.get_wrapper(nodeManagerPath + f"pressure_{ts}").value() - pressure.set_access_level(pygeosx.pylvarray.MODIFIABLE) - - pressure.to_numpy()[:] = 0.0 - - elif self.type == "AcousticFirstOrderSEM": - pressure_np1 = self.geosx.get_wrapper(nodeManagerPath + "pressure_np1").value() - pressure_np1.set_access_level(pygeosx.pylvarray.MODIFIABLE) - - pressure_np1.to_numpy()[:] = 0.0 - - prefix = self._getPrefixPath(**kwargs) - for component in ("x", "y", "z"): - velocity = self.geosx.get_wrapper(prefix + f"velocity_{component}").value() - velocity.set_access_level(pygeosx.pylvarray.MODIFIABLE) - - velocity.to_numpy()[:] = 0.0 - - - def resetPressureAtReceivers(self): - """Reinitialize pressure values at receivers to 0 - """ - pressure = self.solver.get_wrapper("pressureNp1AtReceivers").value() - pressure.set_access_level(pygeosx.pylvarray.MODIFIABLE) - - pressure.to_numpy()[:] = 0.0 - - - def getWaveField(self): - return self.getPressureAtReceivers()[:,:-1] - - - def getFullWaveFieldAtReceivers(self, comm): - return self.getFullPressureAtReceivers(comm)[:,:-1] diff --git 
a/pygeos-tools/src/geos/pygeos_tools/utilities/solvers/ElasticSolver.py b/pygeos-tools/src/geos/pygeos_tools/utilities/solvers/ElasticSolver.py deleted file mode 100644 index 34acbc7..0000000 --- a/pygeos-tools/src/geos/pygeos_tools/utilities/solvers/ElasticSolver.py +++ /dev/null @@ -1,319 +0,0 @@ -# ------------------------------------------------------------------------------------------------------------ -# SPDX-License-Identifier: LGPL-2.1-only -# -# Copyright (c) 2016-2024 Lawrence Livermore National Security LLC -# Copyright (c) 2018-2024 TotalEnergies -# Copyright (c) 2018-2024 The Board of Trustees of the Leland Stanford Junior University -# Copyright (c) 2023-2024 Chevron -# Copyright (c) 2019- GEOS/GEOSX Contributors -# Copyright (c) 2019- INRIA project-team Makutu -# All rights reserved -# -# See top level LICENSE, COPYRIGHT, CONTRIBUTORS, NOTICE, and ACKNOWLEDGEMENTS files for details. -# ------------------------------------------------------------------------------------------------------------ - -import numpy as np -import ctypes as ct - -import pygeosx - -from .WaveSolver import WaveSolver - - -class ElasticSolver(WaveSolver): - """ - ElasticSolver Object containing all methods to run ElasticSEM simulation with GEOSX - - Attributes - ----------- - dt : float - Time step for simulation - minTime : float - Min time to consider - maxTime : float - End time to consider - dtSeismo : float - Time step to save pressure for seismic trace - minTimeSim : float - Starting time of simulation - maxTimeSim : float - End Time of simulation - dtWaveField : float - Time step to save fields - sourceType : str - Type of source - sourceFreq : float - Frequency of the source - name : str - Solver name - type : str - Type of solver - Default is None - geosxArgs : GeosxArgs - Object containing GEOSX launching options - geosx : pygeosx instance - - alreadyInitialized : bool - Flag indicating if the initial conditions have been applied yet - firstGeosxInit : bool - Flag for 
initialize or reinitialize - collectionTargets : list - Output targets for geosx - hdf5Targets : list - HDF5 output targets for geosx - vtkTargets : list - VTK output targets for geosx - """ - def __init__(self, - dt=None, - minTime=0, - maxTime=None, - dtSeismo=None, - dtWaveField=None, - sourceType=None, - sourceFreq=None, - **kwargs): - """ - Parameters - ---------- - dt : float - Time step for simulation - minTime : float - Starting time of simulation - Default is 0 - maxTime : float - End Time of simulation - dtSeismo : float - Time step to save pressure for seismic trace - dtWaveField : float - Time step to save fields - sourceType : str - Type of source - Default is None - sourceFreq : float - Frequency of the source - Default is None - kwargs : keyword args - geosx_argv : list - GEOSX arguments or command line as a splitted line - """ - super().__init__(dt=dt, - minTime=minTime, - maxTime=maxTime, - dtSeismo=dtSeismo, - dtWaveField=dtWaveField, - sourceType=sourceType, - sourceFreq=sourceFreq, - **kwargs) - - def initialize(self, rank=0, xml=None): - super().initialize(rank, xml) - try: - useDAS = self.xml.getAttribute(parentElement=self._getType(), attributeTag="useDAS") - - except AttributeError: - useDAS = None - - if useDAS == "none": - try: - linearGEO = bool(self.xml.getAttribute(self._getType(), "linearDASGeometry")) - except AttributeError: - linearGEO = False - - if linearGEO is True: - self.useDAS = True - - - def __repr__(self): - string_list = [] - string_list.append("Solver type : " + type(self).__name__ + "\n") - string_list.append("dt : " + str(self.dt) + "\n") - string_list.append("maxTime : " + str(self.maxTime) + "\n") - string_list.append("dtSeismo : " + str(self.dtSeismo) + "\n") - string_list.append("Outputs : " + str(self.hdf5Targets) + "\n" + str(self.vtkTargets) + "\n") - rep = "" - for string in string_list: - rep += string - - return rep - - - def updateVelocityModel(self, vel, component, **kwargs): - """ - Update velocity value in 
GEOS - - Parameters - ---------- - vel : float/array - Value(s) for velocity field - component : str - Vs or Vp - """ - assert component.lower() in ("vs", "vp"), "Only Vs or Vp component accepted" - super().updateVelocityModel(vel, velocityName="elasticVelocity"+component.title(), **kwargs) - - - def getVelocityModel(self, component, filterGhost=False, **kwargs): - """ - Get the velocity values - - Parameters - ----------- - component : str - Vs or Vp - filterGhost : bool - Filter the ghost ranks - - Returns - ------- - velocity : numpy array - Array containing the velocity values - """ - assert component.lower() in ("vs", "vp"), "Only Vs or Vp component accepted" - - velocity = super().getVelocityModel(velocityName="elasticVelocity"+component.title(), filterGhost=filterGhost, **kwargs) - - return velocity - - - def getDensityModel(self, filterGhost=False, **kwargs): - """ - Get the density values - - Parameters - ----------- - filterGhost : bool - Filter the ghost ranks - - Returns - -------- - density : numpy array - Array containing the density values - """ - density = self.getField("elasticDensity", filterGhost=filterGhost, **kwargs) - - return density - - - def updateDensityModel(self, density, **kwargs): - """ - Update density values in GEOS - - Parameters - ----------- - density : array - New values for the density - """ - super().updateDensityModel( density=density, densityName="elasticDensity", **kwargs ) - - - def getDASSignalAtReceivers(self): - """ - Get the DAS signal values at receivers coordinates - - Returns - -------- - dassignal : numpy array - Array containing the DAS signal values at all time step at all receivers coordinates - """ - if self.type != "ElasticSEM": - raise TypeError(f"DAS signal not implemented for solver of type {self.type}.") - else: - dassignal = self.solver.get_wrapper(f"dasSignalNp1AtReceivers").value().to_numpy() - - return dassignal - - - def getDisplacementAtReceivers(self, component="X"): - """ - Get the displacement 
values at receivers coordinates for a given direction - - Returns - -------- - displacement : numpy array - Array containing the displacements values at all time step at all receivers coordinates - """ - assert component.upper() in ("X", "Y", "Z") - if self.type == "ElasticFirstOrderSEM": - displacement = self.solver.get_wrapper(f"displacement{component.lower()}Np1AtReceivers").value().to_numpy() - elif self.type == "ElasticSEM": - displacement = self.solver.get_wrapper(f"displacement{component.upper()}Np1AtReceivers").value().to_numpy() - - return displacement - - - def getAllDisplacementAtReceivers(self): - """ - Get the displacement for the x, y and z directions at all time step and all receivers coordinates - - Returns - -------- - displacementX : numpy array - Component X of the displacement - displacementY : numpy array - Component Y of the displacement - displacementZ : numpy array - Component Z of the displacement - """ - displacementX = self.getDisplacementAtReceivers("X") - displacementY = self.getDisplacementAtReceivers("Y") - displacementZ = self.getDisplacementAtReceivers("Z") - - return displacementX, displacementY, displacementZ - - - def resetWaveField(self, **kwargs): - """Reinitialize all displacement values on the Wavefield to zero in GEOSX""" - - self.geosx.get_wrapper("Solvers/"+self.name+"/indexSeismoTrace").value()[0] = 0 - meshName = self._getMeshName() - discretization = self._getDiscretization() - - nodeManagerPath = f"domain/MeshBodies/{meshName}/meshLevels/{discretization}/nodeManager/" - - if self.type == "ElasticSEM": - for component in ("x", "y", "z"): - for ts in ("nm1", "n", "np1"): - displacement = self.geosx.get_wrapper(nodeManagerPath+f"displacement{component}_{ts}").value() - displacement.set_access_level(pygeosx.pylvarray.MODIFIABLE) - - displacement.to_numpy()[:] = 0.0 - - elif self.type == "ElasticFirstOrderSEM": - component = ("x", "y", "z") - for c in component: - displacement_np1 = 
self.geosx.get_wrapper(nodeManagerPath+f"displacement{c}_np1").value() - displacement_np1.set_access_level(pygeosx.pylvarray.MODIFIABLE) - - displacement_np1.to_numpy()[:] = 0.0 - - prefix = self._getPrefixPath(**kwargs) - for i, c in enumerate(component): - for j in range(i, len(component)): - cc = c + component[j] - - sigma = self.solver.get_wrapper(prefix+f"stresstensor{cc}").value() - sigma.set_access_level(pygeosx.pylvarray.MODIFIABLE) - - sigma.to_numpy()[:] = 0.0 - - - def resetDisplacementAtReceivers(self): - """Reinitialize displacement values at receivers to 0 - """ - for component in ("X", "Y", "Z"): - displacement = self.solver.get_wrapper(f"displacement{component}Np1AtReceivers").value() - displacement.set_access_level(pygeosx.pylvarray.MODIFIABLE) - - displacement.to_numpy()[:] = 0.0 - - - def getWaveField( self ): - if self.useDAS: - return self.getDASSignalAtReceivers() - else: - return self.getAllDisplacementAtReceivers() - - - def getFullWaveFieldAtReceivers(self, comm): - print( "This method is not implemented yet" ) diff --git a/pygeos-tools/src/geos/pygeos_tools/utilities/solvers/GeomechanicsSolver.py b/pygeos-tools/src/geos/pygeos_tools/utilities/solvers/GeomechanicsSolver.py deleted file mode 100644 index ebf16fe..0000000 --- a/pygeos-tools/src/geos/pygeos_tools/utilities/solvers/GeomechanicsSolver.py +++ /dev/null @@ -1,104 +0,0 @@ -# ------------------------------------------------------------------------------------------------------------ -# SPDX-License-Identifier: LGPL-2.1-only -# -# Copyright (c) 2016-2024 Lawrence Livermore National Security LLC -# Copyright (c) 2018-2024 TotalEnergies -# Copyright (c) 2018-2024 The Board of Trustees of the Leland Stanford Junior University -# Copyright (c) 2023-2024 Chevron -# Copyright (c) 2019- GEOS/GEOSX Contributors -# Copyright (c) 2019- INRIA project-team Makutu -# All rights reserved -# -# See top level LICENSE, COPYRIGHT, CONTRIBUTORS, NOTICE, and ACKNOWLEDGEMENTS files for details. 
-# ------------------------------------------------------------------------------------------------------------ - -from .Solver import Solver - - -class GeomechanicsSolver(Solver): - """ - Geomechanics solver object containing methods to run geomechanics simulations in GEOS - - Attributes - ----------- - xml : XML - XML object containing parameters for GEOSX initialization - initialDt : float - Time step for the simulation - maxTime : float - End time of simulation - name : str - Solver name - type : str - Type of solver - Default is None - geosxArgs : GeosxArgs - Object containing GEOSX launching options - geosx : pygeosx.Group - pygeosx initialize instance - alreadyInitialized : bool - Flag indicating if the initial conditions have been applied yet - firstGeosxInit : bool - Flag for initialize or reinitialize - collectionTargets : list - Output targets for geosx - hdf5Targets : list - HDF5 output targets for geosx - vtkTargets : list - VTK output targets for geosx - """ - def __init__(self, - dt=None, - maxTime=None, - **kwargs): - """ - Parameters - ----------- - initialDt : float - Initial time step \ - Default is None - maxTime : float - End time of simulation \ - Default is None - """ - super().__init__(**kwargs) - - self.dt = dt - self.maxTime = maxTime - - - def _setTimeVariables(self): - """Set the time variables attributes""" - # Set up variables from xml - events = self.xml.events - - if self.maxTime is None: - self.maxTime = float(events["maxTime"]) - else: - self.updateTimeVariable("maxTime") - - if self.dt is None: - for event in events["PeriodicEvent"]: - if isinstance(event, dict): - poroName = "poromechanicsSolver" - if event["target"] == "/Solvers/" + poroName: - self.dt = float(event['forceDt']) - else: - if event == "target" and events["PeriodicEvent"]["target"] == "/Solvers/" + self.name: - self.dt = float(events["PeriodicEvent"]["forceDt"]) - else: - self.updateTimeVariable("dt") - - - def updateTimeVariable(self, variable): - """Overwrite a 
time variable in GEOS""" - if variable == "maxTime": - assert hasattr(self, "maxTime") - self.geosx.get_wrapper("Events/maxTime").value()[0] = self.maxTime - elif variable == "dt": - assert hasattr(self, "dt") - self.geosx.get_wrapper("Events/solverApplications/forceDt").value()[0] - - - def initialize( self, rank=0, xml=None ): - super().initialize( rank, xml, stype="SinglePhasePoromechanics" ) diff --git a/pygeos-tools/src/geos/pygeos_tools/utilities/solvers/ReservoirSolver.py b/pygeos-tools/src/geos/pygeos_tools/utilities/solvers/ReservoirSolver.py deleted file mode 100644 index 48f7a43..0000000 --- a/pygeos-tools/src/geos/pygeos_tools/utilities/solvers/ReservoirSolver.py +++ /dev/null @@ -1,216 +0,0 @@ -# ------------------------------------------------------------------------------------------------------------ -# SPDX-License-Identifier: LGPL-2.1-only -# -# Copyright (c) 2016-2024 Lawrence Livermore National Security LLC -# Copyright (c) 2018-2024 TotalEnergies -# Copyright (c) 2018-2024 The Board of Trustees of the Leland Stanford Junior University -# Copyright (c) 2023-2024 Chevron -# Copyright (c) 2019- GEOS/GEOSX Contributors -# Copyright (c) 2019- INRIA project-team Makutu -# All rights reserved -# -# See top level LICENSE, COPYRIGHT, CONTRIBUTORS, NOTICE, and ACKNOWLEDGEMENTS files for details. 
-# ------------------------------------------------------------------------------------------------------------ - -import numpy as np - -from .Solver import Solver - - -class ReservoirSolver(Solver): - """ - Reservoir solver object containing methods to run reservoir simulations in GEOS - - Attributes - ----------- - xml : XML - XML object containing parameters for GEOSX initialization - initialDt : float - Time step for the simulation - maxTime : float - End time of simulation - name : str - Solver name - type : str - Type of solver - Default is None - geosxArgs : GeosxArgs - Object containing GEOSX launching options - geosx : pygeosx.Group - pygeosx initialize instance - alreadyInitialized : bool - Flag indicating if the initial conditions have been applied yet - firstGeosxInit : bool - Flag for initialize or reinitialize - collectionTargets : list - Output targets for geosx - hdf5Targets : list - HDF5 output targets for geosx - vtkTargets : list - VTK output targets for geosx - """ - def __init__(self, - initialDt=None, - maxTime=None, - **kwargs): - """ - Parameters - ----------- - initialDt : float - Initial time step \ - Default is None - maxTime : float - End time of simulation \ - Default is None - """ - super().__init__(**kwargs) - - self.initialDt = initialDt - self.maxTime = maxTime - - self.isCoupled = kwargs.get("coupled", False) - self.dtSeismic4D = None - - - def _setTimeVariables(self): - """Set the time variables attributes""" - # Set up variables from xml - events = self.xml.events - outputs = self.xml.outputs - - if self.isCoupled: - if self.dtSeismic4D is None: - for event in events["PeriodicEvent"]: - if event["name"] == "seismic4D": - self.dtSeismic4D = float(event["timeFrequency"]) - else: - self.updateTimeVariable("dtSeismic4D") - - if self.maxTime is None: - self.maxTime = float(events["maxTime"]) - else: - self.updateTimeVariable("maxTime") - - fvm = self.xml.solvers[self.type] - if self.initialDt is None: - for k, v in fvm.items(): - if k 
== "initialDt": - self.initialDt = np.array(v, dtype=float) - else: - self.updateTimeVariable("initialDt") - - - def updateTimeVariable(self, variable): - """Overwrite a time variable in GEOS""" - if variable == "maxTime": - assert hasattr(self, "maxTime") - self.geosx.get_wrapper("Events/maxTime").value()[0] = self.maxTime - elif variable == "initialDt": - assert hasattr(self, "initialDt") - self.geosx.get_wrapper(f"/Solvers/{self.name}/initialDt").value()[0] = self.initialDt - elif variable == "dtSeismic4D": - assert hasattr(self, "dtSeismic4D") - self.geosx.get_wrapper("Events/seismic4D/timeFrequency").value()[0] = self.dtSeismic4D - - - def execute(self, time): - """ - Do one solver iteration - - Parameters - ---------- - time : float - Current time of simulation - dt : float - Timestep - """ - self.solver.execute(time, self.initialDt) - - - def getTimeStep(self): - """ - Get the value of `initialDt` variable from GEOS - - Returns - -------- - float - initialDt value - """ - return self.solver.get_wrapper("initialDt").value()[0] - - - def updateTimeStep(self): - """Update the attribute value with the one from GEOS""" - self.initialDt = self.getTimeStep() - - - def getPressure(self, **kwargs): - """ - Get the local pressure - - Returns - -------- - pressure : numpy array - Local pressure - """ - pressure = self.getField("pressure", **kwargs) - - return pressure - - - def getDeltaPressure(self, **kwargs): - """ - Get the local delta pressure - - Returns - -------- - deltaPressure : numpy array - Local delta pressure - """ - deltaPressure = self.getField("deltaPressure", **kwargs) - - return deltaPressure - - - def getPhaseVolumeFractionGas(self, **kwargs): - """ - Get the local gas phase volume fraction - - Returns - -------- - phaseVolumeFractionGas : numpy array - Local gas phase volume fraction - """ - phaseVolumeFraction = self.getField("phaseVolumeFraction", **kwargs) - phaseVolumeFractionGas = np.ascontiguousarray(phaseVolumeFraction[:,0]) - - return 
phaseVolumeFractionGas - - - def getPhaseVolumeFractionWater(self, **kwargs): - """ - Get the local water phase volume fraction - - Returns - -------- - phaseVolumeFractionWater : numpy array - Local water phase volume fraction - """ - phaseVolumeFraction = self.getField("phaseVolumeFraction", **kwargs) - phaseVolumeFractionWater = np.ascontiguousarray(phaseVolumeFraction[:,1]) - - return phaseVolumeFractionWater - - - def getRockPorosity(self, **kwargs): - """ - Get the local rock porosity - - Returns - -------- - rockPorosity : numpy array - Local rock porosity - """ - rockPorosity = self.getField("rockPorosity_referencePorosity", **kwargs) - - return rockPorosity diff --git a/pygeos-tools/src/geos/pygeos_tools/utilities/solvers/Solver.py b/pygeos-tools/src/geos/pygeos_tools/utilities/solvers/Solver.py deleted file mode 100644 index a2afd4d..0000000 --- a/pygeos-tools/src/geos/pygeos_tools/utilities/solvers/Solver.py +++ /dev/null @@ -1,697 +0,0 @@ -# ------------------------------------------------------------------------------------------------------------ -# SPDX-License-Identifier: LGPL-2.1-only -# -# Copyright (c) 2016-2024 Lawrence Livermore National Security LLC -# Copyright (c) 2018-2024 TotalEnergies -# Copyright (c) 2018-2024 The Board of Trustees of the Leland Stanford Junior University -# Copyright (c) 2023-2024 Chevron -# Copyright (c) 2019- GEOS/GEOSX Contributors -# Copyright (c) 2019- INRIA project-team Makutu -# All rights reserved -# -# See top level LICENSE, COPYRIGHT, CONTRIBUTORS, NOTICE, and ACKNOWLEDGEMENTS files for details. 
-# ------------------------------------------------------------------------------------------------------------ - -import os -import sys -import numpy as np - -import pygeosx -from ..input.Xml import XML -from ..input.GeosxArgs import GeosxArgs - -from mpi4py import MPI - -class Solver: - """ - Solver class containing the main methods of a GEOS solver - """ - - def __init__(self, **kwargs): - self.alreadyInitialized = False - self.type = None - - argv = kwargs.get("geosx_argv", sys.argv) - self.geosxArgs = GeosxArgs(argv) - self.xml = None - - - def initialize(self, rank=0, xml=None, **kwargs): - """Initialization or reinitialization of GEOSX - - Parameters - ---------- - rank : int - Process rank - xml : XML - XML object containing parameters for GEOSX initialization. - Only required if not set in the __init__ OR if different from it - """ - if xml: - self.updateXml(xml) - else: - if self.xml is None: - try: - self.xml = XML(self.geosxArgs.options["xml"]) - except: - raise ValueError("You need to provide a xml input file") - - - if not self.alreadyInitialized: - # if GEOS is UNINITIALIZED - if self.getGEOSState() == 0: - self.geosx = pygeosx.initialize(rank, self.geosxArgs.getCommandLine()) - self.alreadyInitialized = True - - # elif GEOS is INITIALIZED OR READY TO RUN - elif self.getGEOSState() in (1, 2): - self.geosx = pygeosx.reinit(self.geosxArgs.getCommandLine()) - self.alreadyInitialized = True - - # else if COMPLETED state - else: - print(f"The current state of GEOS does not allow for initialization\nCurrent state: {self.getGEOSState()}") - - self.name = self._getName() - stype = kwargs.get( "stype", None ) - self._setSolverGroup( stype ) - - # Check all time variables are set/ set them from xml - self._setTimeVariables() - - # Set the outputs collections/targets - self.__setOutputs() - - - def _setSolverGroup( self, stype=None ): - if stype is None: - if self.name is not None: - if isinstance( self.name, str ): - self.solver = self.geosx.get_group( 
"/Solvers/" + self.name ) - else: - name = self._getName( stype=stype ) - self.solver = self.geosx.get_group( "/Solvers/" + name ) - - - def getGEOSState(self): - """ - Return the current GEOS state - - Returns - --------- - int - GEOS state - 0 : UNINITIALIZED - 1 : INITIALIZED - 2 : READY TO RUN - 3 : COMPLETED - """ - return pygeosx.getState() - - - def _setTimeVariables(self): - """Initialize the time variables. Specific to each solver""" - pass - - - def __setOutputs(self): - if hasattr(self.xml, "outputs"): - outputs = self.xml.outputs - self.__setOutputTargets(outputs) - - self.collections = [] - self.hdf5Outputs = [] - self.vtkOutputs = [] - - if not outputs == None: - # Set the collections - for target in self.collectionTargets: - self.collections.append(self.geosx.get_group(target)) - - for target in self.hdf5Targets: - self.hdf5Outputs.append(self.geosx.get_group(target)) - - for target in self.vtkTargets: - self.vtkOutputs.append(self.geosx.get_group(target)) - - - def __setOutputTargets(self, outputs): - """ - Set the list of outputs targets - - Parameters - ----------- - outputs : dict - Dictionnary extracted from the xml input file - """ - self.collectionTargets = [] - self.hdf5Targets = [] - self.vtkTargets = [] - - if outputs: - if isinstance(list(outputs.values())[0], list): - if "TimeHistory" in outputs.keys(): - for hdf5 in outputs["TimeHistory"]: - self.collectionTargets.append(hdf5['sources'].strip("{} ")) - self.hdf5Targets.append("Outputs/"+hdf5['name']) - - if "VTK" in outputs.keys(): - for vtk in outputs["VTK"]: - self.vtkTargets.append("Outputs/"+vtk['name']) - - else: - if "TimeHistory" in list(outputs.keys()): - hdf5 = outputs["TimeHistory"] - self.collectionTargets.append(hdf5['sources'].strip("{} ")) - self.hdf5Targets.append("Outputs/"+hdf5['name']) - - if "VTK" in list(outputs.keys()): - vtk = outputs["VTK"] - self.vtkTargets.append("Outputs/"+vtk['name']) - - - def _getType(self): - """ - Set the type of solver given in the xml 
'Solvers' block - - Raises - ------- - ValueError : if no solver is provided in the XML - """ - if self.xml is not None: - typesOfSolvers = self.xml.getSolverType() - - if len( typesOfSolvers ) == 1: - self.type = typesOfSolvers[0] - elif len( typesOfSolvers ) > 1: - self.type = typesOfSolvers - else: - raise ValueError("You must provide a Solver in the XML input file") - - - def _getName(self, stype=None): - """ - Get the solver 'name' attribute from the xml - - Returns - ------- - str or list of str - Solver name from the xml \ - List of solvers name if several solvers in xml - """ - if self.xml is not None: - if stype is None: - if self.type is None: - self._getType() - stype = self.type - - # Check only one type of solver - if isinstance(stype, str): - return self.xml.solvers[stype]["name"] - - elif isinstance( stype, list ): - return [ self.xml.solvers[solvertype]["name"] for solvertype in stype ] - - - def _getMeshName(self): - """ - Get the mesh 'name' attribute from the xml - - Returns - ------- - str - Mesh name from the xml - """ - if self.xml is not None: - meshes = [m for m in self.xml.mesh] - if len(meshes) <= 1: - return self.xml.mesh[meshes[0]]["name"] - - - def _getDiscretization(self): - """Get discretization from the XML - - Returns - -------- - discretization : str - Name of the discretization method - """ - if self.xml is not None: - if self.type is None: - self._getType() - - if isinstance(self.type, str): - return self.xml.solvers[self.type]["discretization"] - elif isinstance(self.type, list): - return [self.xml.solvers[solverType]["discretization"] for solverType in self.type] - - - def _getTargetRegion(self): - """ - Get the target region name from the xml - - Returns - ------- - str - target region from the xml - """ - if self.xml is not None: - if self.type is None: - self._getType() - - if isinstance(self.type, str): - targetRegionRaw = self.xml.solvers[self.type]["targetRegions"] - targetRegion = targetRegionRaw.strip("{ }").split(",") 
- - if len(targetRegion) <= 1: - return targetRegion[0] - - - def _getCellBlock(self): - """ - Get the cell blocks names from the xml - - Returns - ------- - str - cell blocks from the xml - """ - if self.xml is not None: - cellElementRegion = self.xml.elementRegions["CellElementRegion"][0] - cellBlocks = cellElementRegion["cellBlocks"].strip("{ }").split(",") - - if len(cellBlocks) <= 1: - return cellBlocks[0] - - - def reinitSolver(self): - """Reinitialize Solver""" - self.solver.reinit() - - - def applyInitialConditions(self): - """Apply the initial conditions after GEOS (re)initialization""" - if self.getGEOSState() == 1: - pygeosx.apply_initial_conditions() - - - - def finalize(self): - """Terminate GEOSX""" - pygeosx._finalize() - - - def updateXml(self, xml): - """ - Update XML - - Parameters - ----------- - xml : XML - XML object corresponding to GEOSX input - """ - self.xml = xml - - if self.geosxArgs.updateArg("xml", xml.filename): - self.alreadyInitialized = False - - - def updateHdf5OutputsName(self, directory, filenames, reinit=False): - """ - Overwrite GEOSX hdf5 Outputs paths that have been read in the XML. - - Parameters - ---------- - list_of_output : list of str - List of requested output paths - reinit : bool - Perform reinitialization or not. Must be set to True if called after applyInitialConditions() - """ - - if not len(self.hdf5Outputs): - raise ValueError("No HDF5 Outputs specified in XML.") - else: - for i in range(len(filenames)): - os.makedirs(directory, exist_ok=True) - - self.hdf5Outputs[i].setOutputName(os.path.join(directory, filenames[i])) - if reinit: - self.hdf5Outputs[i].reinit() - - - def updateVtkOutputsName(self, directory): - """ - Overwrite GEOSX vtk Outputs paths that have been read in the XML. - - Parameters - ---------- - list_of_output : list of str - List of vtk output paths - reinit : bool - Perform reinitialization or not. 
Must be set to True if called after applyInitialConditions() - """ - if not len(self.vtkOutputs): - pass - else: - self.vtkOutputs[0].setOutputDir(directory) - - - def execute(self, time): - """ - Do one solver iteration - - Parameters - ---------- - time : float - Current time of simulation - """ - - self.solver.execute(time, self.dt) - - - def cleanup(self, time): - """ - Finalize simulation. Also triggers write of leftover seismogram data - - Parameters - ---------- - time : float - Current time of simulation - """ - self.solver.cleanup(time) - - - def outputVtk(self, time): - """ - Trigger the VTK output - - Parameters - ---------- - time : float - Current time of simulation - """ - for vtkOutput in self.vtkOutputs: - vtkOutput.output(time, self.dt) - - - def _getPrefixPath(self, targetRegion=None, meshName=None, cellBlock=None): - """ - Return the prefix path to get wrappers or fields in GEOS - - Parameters - ----------- - targetRegion : str, optional - Name of the target Region \ - Default value is taken from the xml - meshName : str, optional - Name of the mesh \ - Default value is taken from the xml - cellBlock : str, optional - Name of the cell blocks \ - Default value is taken from the xml - - Returns - ------- - prefix : str - Prefix path - - Raises - ------- - AssertionError : if the variables 'targetRegion', 'meshName' \ - or `cellBlock` have multiple or no values - """ - if targetRegion is None: - targetRegion = self._getTargetRegion() - if meshName is None: - meshName = self._getMeshName() - if cellBlock is None: - cellBlock = self._getCellBlock() - - discretization=self._getDiscretization() - if discretization is None: - discretization="Level0" - assert None not in (targetRegion, meshName, cellBlock, discretization), "No values or multiple values found for `targetRegion`, `meshName` and `cellBlock` arguments" - - prefix = os.path.join("/domain/MeshBodies", meshName, "meshLevels", discretization, "ElementRegions/elementRegionsGroup", targetRegion, 
"elementSubRegions", cellBlock, "") - return prefix - - - def getField(self, fieldName, **kwargs): - """ - Get the requested field as numpy array - - Parameters - ----------- - fieldName : str - Name of the field in GEOSX - - Returns - ------- - field : np.array - Field requested - """ - prefix = self._getPrefixPath(**kwargs) - field = self.solver.get_wrapper(prefix+fieldName).value() - - return field.to_numpy() - - - def getElementCenter(self, filterGhost=False, **kwargs): - """ - Get element center position as numpy array - - Returns - ------- - elementCenter : array-like - Element center coordinates - """ - elementCenter = self.getField("elementCenter", **kwargs) - - if filterGhost: - elementCenter = self.filterGhostRank(elementCenter, **kwargs) - - return elementCenter - - - def getElementCenterZ(self, **kwargs): - """ - Get the z coordinate of the element center - - Returns - ------- - elementCenterZ : array-like - Element center z coordinates - """ - elementCenter = self.getField("elementCenter", **kwargs) - elementCenterZ = np.ascontiguousarray(elementCenter[:,2]) - - return elementCenterZ - - - def getGhostRank(self, **kwargs): - """ - Get the local ghost ranks - - Returns - ------- - ghostRank : array-like - Local ghost ranks - """ - ghostRank = self.getField("ghostRank", **kwargs) - - return ghostRank - - - def getLocalToGlobalMap(self, filterGhost=False, **kwargs): - """ - Get the local rank element id list - - Returns - ------- - Numpy Array : Array containing the element id list for the local rank - """ - localToGlobalMap = self.getField("localToGlobalMap", **kwargs) - - if filterGhost: - localToGlobalMap = self.filterGhostRank(localToGlobalMap, **kwargs) - - return localToGlobalMap - - - def gatherField(self, field, comm, root=0, **kwargs): - """ - Gather a full GEOS field from all local ranks - - Parameters - ----------- - field : numpy array - Local field - comm : MPI.COMM_WORLD - MPI communicator - root : int - MPI rank used for the gather \ - 
Default is rank 0 - """ - assert isinstance(root, int) - assert root < comm.Get_size() - - rank = comm.Get_rank() - - ghostRank = self.getGhostRank(**kwargs) - localToGlobalMap = self.getLocalToGlobalMap(**kwargs) - - # Prepare buffer - nlocalElements = ghostRank.shape[0] - nmax = np.zeros( 1 ) - nmax[0] = np.max( localToGlobalMap ) # max global number of elements - - comm.Barrier() - comm.Allreduce( MPI.IN_PLACE, nmax, op=MPI.MAX ) - ntot = round( nmax[0] + 1 ) - - if rank != root: - fullField = None - nrcv = nlocalElements - comm.send(nrcv, dest=root, tag=1) - comm.Send(field, dest=root, tag=2) - comm.Send(ghostRank, dest=root, tag=3) - comm.Send(localToGlobalMap, dest=root, tag=4) - - else: - fullField = np.full( (ntot), fill_value=np.nan ) - jj = np.where( ghostRank < 0 )[0] - fullField[localToGlobalMap[jj]] = field[jj] - - for r in range( comm.Get_size() ): - if r != root: - nrcv = comm.recv(source=r, tag=1) - - fieldRcv = np.zeros(nrcv, dtype=np.float64) - ghostRankRcv = np.zeros(nrcv, dtype=np.int32) - localToGlobalMapRcv = np.zeros(nrcv, dtype=np.int64) - - comm.Recv(fieldRcv, source=r, tag=2) - comm.Recv(ghostRankRcv, source=r, tag=3) - comm.Recv(localToGlobalMapRcv, source=r, tag=4) - - jj = np.where ( ghostRankRcv < 0 )[0] - - fullField[ localToGlobalMapRcv[jj]] = fieldRcv[jj] - comm.Barrier() - return fullField, ntot - - - def bcastField( self, fullField, comm, root=0, **kwargs ): - """ - Broadcast a field to local ranks with GEOS local to global map - - Parameters - ----------- - fullField : numpy array - Full field - comm : MPI.COMM_WORLD - MPI communicator - root : int - MPI rank used for the gather \ - Default is rank 0 - - Returns - -------- - field : numpy array - Local field - """ - rank = comm.Get_rank() - size = comm.Get_size() - - ghostRank = self.getGhostRank( **kwargs ) - localToGlobalMap = self.getLocalToGlobalMap( **kwargs ) - nlocalElements = ghostRank.shape[0] - - field = np.zeros( nlocalElements ) - - if rank == root: - jj = 
np.where(ghostRank < 0)[0] - field[jj] = fullField[localToGlobalMap[jj]] - - for r in range( size ): - if r != root: - nrcv = comm.recv( source=r, tag=1 ) - fieldRcv = np.zeros( nrcv, dtype=np.float64 ) - ghostRankRcv = np.zeros( nrcv, dtype=np.int32 ) - localToGlobalMapRcv = np.zeros( nrcv, dtype=np.int64 ) - - comm.Recv( ghostRankRcv, r, 3 ) - comm.Recv( localToGlobalMapRcv, r, 4 ) - - jj = np.where(ghostRankRcv < 0)[0] - fieldRcv[jj] = fullField[localToGlobalMapRcv[jj]] - - comm.Send( fieldRcv, dest=r, tag=100+r ) - - else: - nrcv = nlocalElements - comm.send( nrcv, root, 1 ) - comm.Send( ghostRank, root, 3 ) - comm.Send( localToGlobalMap, root, 4 ) - - comm.Recv( field, source=root, tag=100+rank ) - - return field - - - def filterGhostRank(self, field, **kwargs): - """ - Filter the ghost rank from a GEOS field - - Parameters - ----------- - field : numpy array - Field to filter - - Returns - ------- - field : numpy array - Filtered field - """ - ghostRank = self.getGhostRank(**kwargs) - ind = np.where(ghostRank<0)[0] - - return field[ind] - - - def getWrapper(self, path): - """ - Get the GEOS wrapper - - Parameters - ----------- - path : str - GEOS path - - Returns - -------- - Requested wrapper - """ - if hasattr(self, "solver"): - wrapper = self.solver.get_wrapper(path) - - return wrapper - - - def getGroup(self, path): - """ - Get the GEOS group - - Parameters - ----------- - path : str - GEOS path - - Returns - -------- - Group of the path requested - """ - if hasattr(self, "solver"): - group = self.solver.get_group(path) - - return group diff --git a/pygeos-tools/src/geos/pygeos_tools/utilities/solvers/WaveSolver.py b/pygeos-tools/src/geos/pygeos_tools/utilities/solvers/WaveSolver.py deleted file mode 100644 index 34b1d85..0000000 --- a/pygeos-tools/src/geos/pygeos_tools/utilities/solvers/WaveSolver.py +++ /dev/null @@ -1,520 +0,0 @@ -# ------------------------------------------------------------------------------------------------------------ -# 
SPDX-License-Identifier: LGPL-2.1-only -# -# Copyright (c) 2016-2024 Lawrence Livermore National Security LLC -# Copyright (c) 2018-2024 TotalEnergies -# Copyright (c) 2018-2024 The Board of Trustees of the Leland Stanford Junior University -# Copyright (c) 2023-2024 Chevron -# Copyright (c) 2019- GEOS/GEOSX Contributors -# Copyright (c) 2019- INRIA project-team Makutu -# All rights reserved -# -# See top level LICENSE, COPYRIGHT, CONTRIBUTORS, NOTICE, and ACKNOWLEDGEMENTS files for details. -# ------------------------------------------------------------------------------------------------------------ - -from scipy.fftpack import fftfreq, ifft, fft -import numpy as np -import pygeosx - -from .Solver import Solver - -class WaveSolver(Solver): - """ - WaveSolver Object containing methods useful for simulation using wave solvers in GEOS - - Attributes - ----------- - dt : float - Time step for simulation - minTime : float - Min time to consider - maxTime : float - End time to consider - dtSeismo : float - Time step to save pressure for seismic trace - minTimeSim : float - Starting time of simulation - maxTimeSim : float - End Time of simulation - dtWaveField : float - Time step to save fields - sourceType : str - Type of source - sourceFreq : float - Frequency of the source - name : str - Solver name - type : str - Type of solver - Default is None - geosxArgs : GeosxArgs - Object containing GEOSX launching options - geosx : pygeosx instance - - alreadyInitialized : bool - Flag indicating if the initial conditions have been applied yet - firstGeosxInit : bool - Flag for initialize or reinitialize - collectionTargets : list - Output targets for geosx - hdf5Targets : list - HDF5 output targets for geosx - vtkTargets : list - VTK output targets for geosx - """ - def __init__(self, - dt=None, - minTime=0, - maxTime=None, - dtSeismo=None, - dtWaveField=None, - sourceType=None, - sourceFreq=None, - **kwargs): - """ - Parameters - ---------- - dt : float - Time step for 
simulation - minTime : float - Starting time of simulation - Default is 0 - maxTime : float - End Time of simulation - dtSeismo : float - Time step to save pressure for seismic trace - dtWaveField : float - Time step to save fields - sourceType : str - Type of source - Default is None - sourceFreq : float - Frequency of the source - Default is None - kwargs : keyword args - geosx_argv : list - GEOSX arguments or command line as a splitted line - """ - super().__init__(**kwargs) - - self.name = None - - self.dt = dt - self.minTime = minTime - self.maxTime = maxTime - - self.minTimeSim = minTime - self.maxTimeSim = maxTime - - self.dtSeismo = dtSeismo - - self.sourceType = sourceType - self.sourceFreq = sourceFreq - - self.dtWaveField = dtWaveField - - - def __repr__(self): - string_list = [] - string_list.append("Solver type : " + type(self).__name__ + "\n") - string_list.append("dt : " + str(self.dt) + "\n") - string_list.append("maxTime : " + str(self.maxTime) + "\n") - string_list.append("dtSeismo : " + str(self.dtSeismo) + "\n") - string_list.append("Outputs : " + str(self.hdf5Targets) + "\n" + str(self.vtkTargets) + "\n") - rep = "" - for string in string_list: - rep += string - - return rep - - - def _setTimeVariables(self): - """Initialize the time variables""" - if hasattr(self.xml, "events"): - events = self.xml.events - if self.dt is None: - for event in events["PeriodicEvent"]: - if isinstance(event, dict): - if event["target"] == "/Solvers/" + self.name: - self.dt = float(event['forceDt']) - else: - if event == "target" and events["PeriodicEvent"]["target"] == "/Solvers/" + self.name: - self.dt = float(events["PeriodicEvent"]["forceDt"]) - else: - self.updateTimeVariable("dt") - - if self.maxTime is None: - self.maxTime = float(events["maxTime"]) - self.maxTimeSim = self.maxTime - else: - self.updateTimeVariable("maxTime") - - if self.dtSeismo is None: - if self.type is None: - self._getType() - - solverdict = self.xml.solvers[self.type] - for k, v in 
solverdict.items(): - if k == "dtSeismoTrace": - self.dtSeismo = float(v) - else: - self.updateTimeVariable("dtSeismo") - - assert None not in (self.dt, self.dtSeismo, self.maxTime) - - - def _setSourceProperties(self): - """Set the frequency and type of source""" - if self.sourceFreq is None: - if self.type is None: - self._getType() - solverdict = self.xml.solvers[self.type] - for k, v in solverdict.items(): - if k == "timeSourceFrequency": - self.sourceFreq = v - - if self.sourceType is None: - if hasattr(self.xml, "events"): - events = self.xml.events - try: - for event in events["PeriodicEvent"]: - if isinstance(event, dict): - if event["target"] == "/Solvers/" + self.name: - self.sourceType = "ricker" + event['rickerOrder'] - else: - if event == "target" and events["PeriodicEvent"]["target"] == "/Solvers/" + self.name: - self.sourceType = "ricker" + events["PeriodicEvent"]["rickerOrder"] - except: - self.sourceType = "ricker2" - - - def initialize(self, rank=0, xml=None): - """ - Initialization or reinitialization of GEOSX - - Parameters - ---------- - rank : int - Process rank - xml : XML - XML object containing parameters for GEOSX initialization. 
- Only required if not set in the __init__ OR if different from it - """ - super().initialize(rank, xml) - self._setSourceProperties() - - - def updateTimeStep(self, dt): - """ - Update the solver time step - - Parameters - ---------- - dt : double - Time step - """ - self.dt = dt - - - def updateTimeVariable(self, timeVariable): - """ - Overwrite a GEOS time variable - - Parameters - ---------- - timeVariable : str - Name of the time variable to update - """ - if timeVariable == "maxTime": - assert hasattr(self, "maxTime") - self.geosx.get_wrapper("Events/maxTime").value()[0] = self.maxTime - - elif timeVariable == "minTime": - assert hasattr(self, "minTime") - self.geosx.get_wrapper("Events/minTime").value()[0] = self.minTime - - elif timeVariable == "dt": - assert hasattr(self, "dt") - self.geosx.get_wrapper("Events/solverApplications/forceDt").value()[0] = self.dt - - elif timeVariable == "dtSeismo": - assert hasattr(self, "dtSeismo") - self.geosx.get_wrapper("/Solvers/"+self.name+"/dtSeismoTrace").value()[0] = self.dtSeismo - - - def updateSourceFrequency(self, freq): - """ - Overwrite GEOSX source frequency - - Parameters - ---------- - freq : float - Frequency of the source in Hz - """ - self.geosx.get_wrapper("/Solvers/"+self.name+"/timeSourceFrequency").value()[0] = freq - self.sourceFreq = freq - - - - def updateSourceAndReceivers(self, sourcesCoords=[], receiversCoords=[]): - """ - Update sources and receivers positions in GEOS - - Parameters - ---------- - sourcesCoords : list - List of coordinates for the sources - receiversCoords : list - List of coordinates for the receivers - """ - src_pos_geosx = self.solver.get_wrapper( "sourceCoordinates" ).value() - src_pos_geosx.set_access_level( pygeosx.pylvarray.RESIZEABLE ) - - rcv_pos_geosx = self.solver.get_wrapper( "receiverCoordinates" ).value() - rcv_pos_geosx.set_access_level( pygeosx.pylvarray.RESIZEABLE ) - - src_pos_geosx.resize_all(( len( sourcesCoords ), 3 )) - if len( sourcesCoords ) == 0: - 
src_pos_geosx.to_numpy()[:] = np.zeros(( 0, 3 )) - else: - src_pos_geosx.to_numpy()[:] = sourcesCoords[:] - - - rcv_pos_geosx.resize_all(( len( receiversCoords ), 3 )) - if len( receiversCoords ) == 0: - rcv_pos_geosx.to_numpy()[:] = np.zeros(( 0, 3 )) - else: - rcv_pos_geosx.to_numpy()[:] = receiversCoords[:] - - self.solver.reinit() - - - def evaluateSource(self): - """ - Evaluate source and update on GEOS - Only ricker order {0 - 4} accepted - """ - sourceTypes = ("ricker0", "ricker1", "ricker2", "ricker3", "ricker4") - assert self.sourceType in sourceTypes, f"Only {sourceTypes} are allowed" - - f0 = self.sourceFreq - delay = 1.0 / f0 - alpha = - ( f0 * np.pi )**2 - - nsamples = int( round( ( self.maxTime - self.minTime ) / self.dt )) + 1 - sourceValue = np.zeros(( nsamples, 1 )) - - order = int( self.sourceType[-1] ) - sgn = ( -1 )**( order + 1 ) - - time = self.minTime - for nt in range(nsamples): - - if self.minTime <= - 1.0 / f0: - tmin = - 2.9 / f0 - tmax = 2.9 / f0 - time_d = time - - else: - time_d = time - delay - tmin = 0.0 - tmax = 2.9 / f0 - - if (time > tmin and time < tmax) or ( self.minTime < - 1 / f0 and time == tmin ): - gaussian = np.exp( alpha * time_d**2) - - if order == 0: - sourceValue[nt, 0] = sgn * gaussian - - elif order == 1: - sourceValue[nt, 0] = sgn * ( 2 * alpha * time_d ) * gaussian - - elif order == 2: - sourceValue[nt, 0] = sgn * ( 2 * alpha + 4 * alpha**2 * time_d**2 ) * gaussian - - elif order == 3: - sourceValue[nt, 0] = sgn * ( 12 * alpha**2 * time_d + 8 * alpha**3 * time_d**3 ) * gaussian - - elif order == 4: - sourceValue[nt, 0] = sgn * ( 12 * alpha**2 + 48 * alpha**3 * time_d**2 + 16 * alpha**4 * time_d**4 ) * gaussian - - time += self.dt - - self.updateSourceFrequency(self.sourceFreq) - self.updateSourceValue(sourceValue) - self.sourceValue = sourceValue - - - def updateSourceValue(self, value): - """ - Update the value of the source in GEOS - - Parameters - ---------- - value : array/list - List/array containing the value 
of the source at each time step - """ - src_value = self.solver.get_wrapper("sourceValue").value() - src_value.set_access_level(pygeosx.pylvarray.RESIZEABLE) - - src_value.resize_all(value.shape) - src_value.to_numpy()[:] = value[:] - - self.maxTimeSim = ( value.shape[0] - 1 ) * self.dt - self.geosx.get_wrapper("Events/minTime").value()[0] = self.minTime - self.sourceValue = value[:] - - - def filterSource(self, fmax): - """ - Filter the source value and give the value to GEOSX. Note that is can also modify the start and end time of simulation to avoid discontinuity. - - Parameters - ----------- - fmax : float/string - Max frequency of the source wanted. The source then have frequencies in the interval [0,fmax+1] - """ - if str(fmax) == "all": - return - - minTime = self.minTime - maxTime = self.maxTime - dt = self.dt - f0 = self.sourceFreq - - sourceValue = self.sourceValue - - pad = int(round(sourceValue.shape[0]/2)) - n = sourceValue.shape[0] + 2 * pad - - tf = fftfreq(n, dt) - y_fft = np.zeros((n,sourceValue.shape[1]), dtype="complex_") - y = np.zeros(y_fft.shape, dtype="complex_") - - for i in range(y_fft.shape[1]): - y_fft[pad:n-pad,i] = sourceValue[:,i] - y_fft[:,i] = fft(y_fft[:,i])# Perform fourier transform - - isup = np.where(tf>=fmax)[0] - imax = np.where(tf[isup]>=fmax+1)[0][0] - i1 = isup[0] - i2 = isup[imax] - - iinf = np.where(tf<=-fmax)[0] - imin = np.where(tf[iinf]<=-fmax-1)[0][-1] - - i3 = iinf[imin] - i4 = iinf[-1] - - for i in range(y_fft.shape[1]): - y_fft[i1:i2,i] = np.cos((isup[0:imax] - i1)/(i2-i1) * np.pi/2)**2 * y_fft[i1:i2,i] - y_fft[i3:i4,i] = np.cos((iinf[imin:-1] - i4)/(i3-i4) * np.pi/2)**2 * y_fft[i3:i4,i] - y_fft[i2:i3,i] = 0 - - for i in range(y.shape[1]): - y[:,i] = ifft(y_fft[:,i])# Perform inverse fourier transform - - it0 = int(round(abs(minTime/dt))) + pad - d = int(round(1/f0/dt)) - - i1 = max(it0 - 4*d, 0) - i2 = int(round(i1 + d/4)) - - i4 = min(n,n - pad + 4*d) - i3 = int(round(i4 - d/4)) - - for i in range(y.shape[1]): - 
y[i1:i2,i] = np.cos((np.arange(i1,i2) - i2)/(i2-i1) * np.pi/2)**2 * y[i1:i2,i] - y[i3:i4,i] = np.cos((np.arange(i3,i4) - i3)/(i4-i3) * np.pi/2)**2 * y[i3:i4,i] - y[max(i1-d,0):i1,i] = 0.0 - y[i4:min(i4+d,n),i] = 0.0 - - - t = np.arange(minTime-pad*dt, maxTime+pad*dt+dt/2, dt) - - self.updateSourceValue(np.real(y[max(i1-d,0):min(i4+d,n),:])) - self.minTimeSim = t[max(i1-d,0)] - self.maxTimeSim = t[min(i4+d,n-1)] - self.geosx.get_wrapper("Events/minTime").value()[0] = self.minTimeSim - self.sourceValue = np.real(y[max(i1-d,0):min(i4+d,n),:]) - - - def updateVelocityModel(self, vel, velocityName, **kwargs): - """ - Update velocity value in GEOS - - Parameters - ---------- - vel : float/array - Value(s) for velocity field - velocityName : str - Name of the velocity array in GEOS - """ - prefix = self._getPrefixPath(**kwargs) - - velocity = self.solver.get_wrapper(prefix+velocityName).value() - velocity.set_access_level(pygeosx.pylvarray.MODIFIABLE) - - velocity.to_numpy()[vel>0] = vel[vel>0] - - - def updateDensityModel( self, density, densityName, **kwargs ): - """ - Update density values in GEOS - - Parameters - ----------- - density : array - New values for the density - densityName : str - Name of density array in GEOS - """ - prefix = self._getPrefixPath( **kwargs ) - - d = self.solver.get_wrapper( prefix + densityName ).value() - d.set_access_level( pygeosx.pylvarray.MODIFIABLE ) - - d.to_numpy()[density>0] = density[density>0] - - - def outputWaveField(self, time): - """ - Trigger the wavefield output - - Parameters - ---------- - time : float - Current time of simulation - """ - self.collections[0].collect(time, self.dt) - self.hdf5Outputs[0].output(time, self.dt) - - - def getVelocityModel(self, velocityName, filterGhost=False, **kwargs): - """ - Get the velocity values - - Parameters - ----------- - velocityName : str - Name of velocity array in GEOS - filterGhost : bool - Filter the ghost ranks - - Returns - ------- - Numpy Array : Array containing the 
velocity values - """ - velocity = self.getField(velocityName, **kwargs) - - if filterGhost: - velocity = self.filterGhostRank(velocity, **kwargs) - - return velocity - - - def getWaveField(self): - pass - - def getWaveFieldAtReceivers(self, comm): - pass diff --git a/pygeos-tools/src/geos/pygeos_tools/utilities/solvers/__init__.py b/pygeos-tools/src/geos/pygeos_tools/utilities/solvers/__init__.py deleted file mode 100644 index be13e6d..0000000 --- a/pygeos-tools/src/geos/pygeos_tools/utilities/solvers/__init__.py +++ /dev/null @@ -1,30 +0,0 @@ -# ------------------------------------------------------------------------------------------------------------ -# SPDX-License-Identifier: LGPL-2.1-only -# -# Copyright (c) 2016-2024 Lawrence Livermore National Security LLC -# Copyright (c) 2018-2024 TotalEnergies -# Copyright (c) 2018-2024 The Board of Trustees of the Leland Stanford Junior University -# Copyright (c) 2023-2024 Chevron -# Copyright (c) 2019- GEOS/GEOSX Contributors -# Copyright (c) 2019- INRIA project-team Makutu -# All rights reserved -# -# See top level LICENSE, COPYRIGHT, CONTRIBUTORS, NOTICE, and ACKNOWLEDGEMENTS files for details. 
-# ------------------------------------------------------------------------------------------------------------ - -"""Solvers classes""" - -from .Solver import Solver -from .AcousticSolver import AcousticSolver -from .ReservoirSolver import ReservoirSolver -from .ElasticSolver import ElasticSolver -from .WaveSolver import WaveSolver -from .GeomechanicsSolver import GeomechanicsSolver - -from .utils.solverutils import ( - print_group, - print_with_indent, - printGeosx, - printSolver, - printGroup, -) diff --git a/pygeos-tools/src/geos/pygeos_tools/utilities/solvers/utils/solverutils.py b/pygeos-tools/src/geos/pygeos_tools/utilities/solvers/utils/solverutils.py deleted file mode 100644 index 5c0b518..0000000 --- a/pygeos-tools/src/geos/pygeos_tools/utilities/solvers/utils/solverutils.py +++ /dev/null @@ -1,43 +0,0 @@ -# ------------------------------------------------------------------------------------------------------------ -# SPDX-License-Identifier: LGPL-2.1-only -# -# Copyright (c) 2016-2024 Lawrence Livermore National Security LLC -# Copyright (c) 2018-2024 TotalEnergies -# Copyright (c) 2018-2024 The Board of Trustees of the Leland Stanford Junior University -# Copyright (c) 2023-2024 Chevron -# Copyright (c) 2019- GEOS/GEOSX Contributors -# Copyright (c) 2019- INRIA project-team Makutu -# All rights reserved -# -# See top level LICENSE, COPYRIGHT, CONTRIBUTORS, NOTICE, and ACKNOWLEDGEMENTS files for details. 
-# ------------------------------------------------------------------------------------------------------------ - -def print_group(group, indent=0): - print("{}{}".format(" " * indent, group)) - - indent += 4 - print("{}wrappers:".format(" " * indent)) - - for wrapper in group.wrappers(): - print("{}{}".format(" " * (indent + 4), wrapper)) - print_with_indent(str(wrapper.value(False)), indent + 8) - - print("{}groups:".format(" " * indent)) - - for subgroup in group.groups(): - print_group(subgroup, indent + 4) - - -def print_with_indent(msg, indent): - indent_str = " " * indent - print(indent_str + msg.replace("\n", "\n" + indent_str)) - - -def printGeosx(solver): - print_group(solver.geosx) - -def printSolver(solver): - print_group(solver.solver) - -def printGroup(solver, path): - print_group(solver.solver.get_group(path)) \ No newline at end of file From 4f70b93d323c775da87ece5ab7ee1e806b6f1cf1 Mon Sep 17 00:00:00 2001 From: alexbenedicto Date: Wed, 11 Dec 2024 17:51:15 -0800 Subject: [PATCH 09/10] Fix mapping method to perform copy of fields from input mesh to splitted mesh and fracture meshes --- .../mesh/doctor/checks/generate_fractures.py | 263 +++++++++--------- 1 file changed, 124 insertions(+), 139 deletions(-) diff --git a/geos-mesh/src/geos/mesh/doctor/checks/generate_fractures.py b/geos-mesh/src/geos/mesh/doctor/checks/generate_fractures.py index 9928d12..a9a5d7e 100644 --- a/geos-mesh/src/geos/mesh/doctor/checks/generate_fractures.py +++ b/geos-mesh/src/geos/mesh/doctor/checks/generate_fractures.py @@ -1,9 +1,9 @@ import logging import networkx -import numpy from collections import defaultdict from dataclasses import dataclass from enum import Enum +from numpy import empty, ones, zeros from tqdm import tqdm from typing import Collection, Iterable, Mapping, Optional, Sequence from vtk import vtkDataArray @@ -48,6 +48,7 @@ class Result: class FractureInfo: node_to_cells: Mapping[ int, Iterable[ int ] ] # For each _fracture_ node, gives all the cells 
that use this node. face_nodes: Iterable[ Collection[ int ] ] # For each fracture face, returns the nodes of this face + face_cell_id: Iterable[ int ] # For each fracture face, returns the corresponding id of the cell in the mesh def build_node_to_cells( mesh: vtkUnstructuredGrid, @@ -100,14 +101,16 @@ def __build_fracture_info_from_fields( mesh: vtkUnstructuredGrid, f: Sequence[ i face_nodes_hashes.add( fnh ) face_nodes.append( fn ) node_to_cells: dict[ int, Iterable[ int ] ] = build_node_to_cells( mesh, face_nodes ) + face_cell_id: list = list() # no cell of the mesh corresponds to that face when fracture policy is 'field' - return FractureInfo( node_to_cells=node_to_cells, face_nodes=face_nodes ) + return FractureInfo( node_to_cells=node_to_cells, face_nodes=face_nodes, face_cell_id=face_cell_id ) def __build_fracture_info_from_internal_surfaces( mesh: vtkUnstructuredGrid, f: Sequence[ int ], field_values: frozenset[ int ] ) -> FractureInfo: node_to_cells: dict[ int, list[ int ] ] = defaultdict( list ) face_nodes: list[ Collection[ int ] ] = list() + face_cell_id: list[ int ] = list() for cell_id in tqdm( range( mesh.GetNumberOfCells() ), desc="Computing the face to nodes mapping" ): cell = mesh.GetCell( cell_id ) if cell.GetCellDimension() == 2: @@ -118,6 +121,7 @@ def __build_fracture_info_from_internal_surfaces( mesh: vtkUnstructuredGrid, f: node_to_cells[ point_id ] = list() nodes.append( point_id ) face_nodes.append( tuple( nodes ) ) + face_cell_id.append( cell_id ) for cell_id in tqdm( range( mesh.GetNumberOfCells() ), desc="Computing the node to cells mapping" ): cell = mesh.GetCell( cell_id ) @@ -126,7 +130,7 @@ def __build_fracture_info_from_internal_surfaces( mesh: vtkUnstructuredGrid, f: if cell.GetPointId( v ) in node_to_cells: node_to_cells[ cell.GetPointId( v ) ].append( cell_id ) - return FractureInfo( node_to_cells=node_to_cells, face_nodes=face_nodes ) + return FractureInfo( node_to_cells=node_to_cells, face_nodes=face_nodes, 
face_cell_id=face_cell_id ) def build_fracture_info( mesh: vtkUnstructuredGrid, @@ -235,150 +239,110 @@ def __call__( self, index: int ) -> int: return result -def truncated_coordinates_with_id( mesh: vtkUnstructuredGrid, decimals: int = 3 ) -> dict[ Coordinates3D, int ]: - """Creates a mapping of truncated points coordinates with the their point id for every point of a mesh. - - Args: - mesh (vtkUnstructuredGrid): An unstructured mesh. - decimals (int, optional): Number of decimals to keep for truncation. Defaults to 3. - - Returns: - dict[ Coordinates3D, int ]: { coords0: pt_id0, ..., coordsN: pt_idN } - """ - points: vtkPoints = mesh.GetPoints() - coords_with_id: dict[ Coordinates3D, int ] = dict() - for point_id in range( points.GetNumberOfPoints() ): - pt_coords = points.GetPoint( point_id ) - truncated_pt_coords = tuple( [ round( coord, decimals ) for coord in pt_coords ] ) - coords_with_id[ truncated_pt_coords ] = point_id - return coords_with_id - - -def link_new_cells_with_old_cells_id( old_mesh: vtkUnstructuredGrid, new_mesh: vtkUnstructuredGrid ) -> IDMapping: - """After mapping each truncated point coordinates to a list of its points ids for the old and new mesh, - it is possible to determine the link between old and new cell id by coordinates position. - - Args: - old_mesh (vtkUnstructuredGrid): An unstructured mesh before splitting. - new_mesh (vtkUnstructuredGrid): An unstructured mesh after splitting the old_mesh. - - Returns: - IDMapping: { new_cell_id: old_cell_id, ... } +def __copy_fields_splitted_mesh( old_mesh: vtkUnstructuredGrid, splitted_mesh: vtkUnstructuredGrid, + added_points_with_old_id: list[ tuple[ int ] ] ) -> None: """ - truncated_coords_with_id_old: dict[ Coordinates3D, int ] = truncated_coordinates_with_id( old_mesh ) - truncated_coords_with_id_new: dict[ Coordinates3D, int ] = truncated_coordinates_with_id( new_mesh ) - # Every new_mesh by convention in this workflow is a mesh extracted from the old_mesh. 
- # So the number of elements is lower or equal in new mesh than in old mesh so we'd rather iterate over it - new_pts_to_old_pts_id: IDMapping = dict() - for coords, new_pt_id in truncated_coords_with_id_new.items(): - old_pt_id: int = truncated_coords_with_id_old[ coords ] # We can do that because new_mesh is from old_mesh - new_pts_to_old_pts_id[ new_pt_id ] = old_pt_id - # Now we have a link between point ids from the new_mesh to the old_mesh - # So we can now link the cells of new_mesh to the ones of old_mesh - new_cells_with_old_cells_id: IDMapping = dict() - for new_cell_id in range( new_mesh.GetNumberOfCells() ): - new_cell_pt_ids: tuple[ int ] = tuple( vtk_iter( new_mesh.GetCell( new_cell_id ).GetPointIds() ) ) - old_cell_pt_ids: tuple[ int ] = tuple( [ new_pts_to_old_pts_id[ new_pt_id ] for new_pt_id in new_cell_pt_ids ] ) - for old_cell_id in range( old_mesh.GetNumberOfCells() ): - pt_ids: tuple[ int ] = tuple( vtk_iter( old_mesh.GetCell( old_cell_id ).GetPointIds() ) ) - if pt_ids == old_cell_pt_ids: # the old cell was identified with the new cell - new_cells_with_old_cells_id[ new_cell_id ] = old_cell_id - break - return new_cells_with_old_cells_id - - -def __copy_cell_data( old_mesh: vtkUnstructuredGrid, new_mesh: vtkUnstructuredGrid, is_fracture_mesh: bool = False ): - """Copy the cell data arrays from the old mesh to the new mesh. - If the new mesh is a fracture_mesh, the fracture_mesh has less cells than the old mesh. - Therefore, copying the entire old cell arrays to the new one will assignate invalid - indexing of the values of these arrays. - So here, we consider the new indexing of cells to add an array with the valid size of elements - and correct indexes. - - Args: - old_mesh (vtkUnstructuredGrid): An unstructured mesh before splitting. - new_mesh (vtkUnstructuredGrid): An unstructured mesh after splitting the old_mesh. - is_fracture_mesh (bool, optional): True if dealing with a new_mesh being the fracture mesh. - Defaults to False. 
+ Copies the fields from the old mesh to the new one. + Point data will be duplicated for collocated nodes. + :param old_mesh: The mesh before the split. + :param new_mesh: The mesh after the split. Will receive the fields in place. + :return: None """ - # Copying the cell data. - # The cells are the same, just their nodes support have changed. - if is_fracture_mesh: - new_to_old_cells: IDMapping = link_new_cells_with_old_cells_id( old_mesh, new_mesh ) + # Copying the cell data. The cells are the same, just their nodes support have changed. input_cell_data = old_mesh.GetCellData() for i in range( input_cell_data.GetNumberOfArrays() ): input_array: vtkDataArray = input_cell_data.GetArray( i ) logging.info( f"Copying cell field \"{input_array.GetName()}\"." ) - if is_fracture_mesh: - array_name: str = input_array.GetName() - old_values: numpy.array = vtk_to_numpy( input_array ) - useful_values: list = list() - for old_cell_id in new_to_old_cells.values(): - useful_values.append( old_values[ old_cell_id ] ) - useful_input_array = numpy_to_vtk( numpy.array( useful_values ) ) - useful_input_array.SetName( array_name ) - new_mesh.GetCellData().AddArray( useful_input_array ) - else: - new_mesh.GetCellData().AddArray( input_array ) - - -# TODO consider the fracture_mesh case ? -def __copy_field_data( old_mesh: vtkUnstructuredGrid, new_mesh: vtkUnstructuredGrid ): - """Copy the field data arrays from the old mesh to the new mesh. - Does not consider the fracture_mesh case. + tmp = input_array.NewInstance() + tmp.DeepCopy( input_array ) + splitted_mesh.GetCellData().AddArray( input_array ) - Args: - old_mesh (vtkUnstructuredGrid): An unstructured mesh before splitting. - new_mesh (vtkUnstructuredGrid): An unstructured mesh after splitting the old_mesh. - """ # Copying field data. This data is a priori not related to geometry. 
input_field_data = old_mesh.GetFieldData() for i in range( input_field_data.GetNumberOfArrays() ): input_array = input_field_data.GetArray( i ) logging.info( f"Copying field data \"{input_array.GetName()}\"." ) - new_mesh.GetFieldData().AddArray( input_array ) - - -# TODO consider the fracture_mesh case ? -def __copy_point_data( old_mesh: vtkUnstructuredGrid, new_mesh: vtkUnstructuredGrid ): - """Copy the point data arrays from the old mesh to the new mesh. - Does not consider the fracture_mesh case. + tmp = input_array.NewInstance() + tmp.DeepCopy( input_array ) + splitted_mesh.GetFieldData().AddArray( input_array ) - Args: - old_mesh (vtkUnstructuredGrid): An unstructured mesh before splitting. - new_mesh (vtkUnstructuredGrid): An unstructured mesh after splitting the old_mesh. - """ - # Copying the point data. + # Copying copy data. Need to take into account the new points. input_point_data = old_mesh.GetPointData() + new_number_points: int = splitted_mesh.GetNumberOfPoints() for i in range( input_point_data.GetNumberOfArrays() ): - input_array = input_point_data.GetArray( i ) - logging.info( f"Copying point field \"{input_array.GetName()}\"." ) - tmp = input_array.NewInstance() - tmp.DeepCopy( input_array ) - new_mesh.GetPointData().AddArray( tmp ) + old_points_array = vtk_to_numpy( input_point_data.GetArray( i ) ) + name: str = input_point_data.GetArrayName( i ) + logging.info( f"Copying point data \"{name}\"." 
) + old_nrows: int = old_points_array.shape[ 0 ] + old_ncols: int = 1 if len( old_points_array.shape ) == 1 else old_points_array.shape[ 1 ] + # Reshape old_points_array if it is 1-dimensional + if len( old_points_array.shape ) == 1: + old_points_array = old_points_array.reshape( ( old_nrows, 1 ) ) + new_points_array = empty( ( new_number_points, old_ncols ) ) + new_points_array[ :old_nrows, : ] = old_points_array + for new_and_old_id in added_points_with_old_id: + new_points_array[ new_and_old_id[ 0 ], : ] = old_points_array[ new_and_old_id[ 1 ], : ] + # Reshape the VTK array to match the original dimensions + if old_ncols > 1: + vtk_array = numpy_to_vtk( new_points_array.flatten() ) + vtk_array.SetNumberOfComponents( old_ncols ) + vtk_array.SetNumberOfTuples( new_number_points ) + else: + vtk_array = numpy_to_vtk( new_points_array ) + vtk_array.SetName( name ) + splitted_mesh.GetPointData().AddArray( vtk_array ) -def __copy_fields( old_mesh: vtkUnstructuredGrid, new_mesh: vtkUnstructuredGrid, is_fracture_mesh: bool = False ): +def __copy_fields_fracture_mesh( old_mesh: vtkUnstructuredGrid, fracture_mesh: vtkUnstructuredGrid, + face_cell_id: list[ int ], node_3d_to_node_2d: IDMapping ) -> None: """ - Copies the fields from the old mesh to the new one. - Point data will be duplicated for collocated nodes. + Copies the fields from the old mesh to the new fracture when using internal_surfaces policy. :param old_mesh: The mesh before the split. - :param new_mesh: The mesh after the split. Will receive the fields in place. - :param collocated_nodes: New index to old index. - :param is_fracture_mesh: True when copying fields for a fracture mesh, False instead. + :param fracture: The fracture mesh generated from the fracture_info. :return: None """ - # Mesh cannot contains global ids before splitting. - if has_invalid_field( old_mesh, [ "GLOBAL_IDS_POINTS", "GLOBAL_IDS_CELLS" ] ): - err_msg: str = ( - "The mesh cannot contain global ids for neither cells nor points. 
The correct procedure is to split" + - " the mesh and then generate global ids for new split meshes. Dying ..." ) - logging.error( err_msg ) - raise ValueError( err_msg ) + # No copy of field data will be done with the fracture mesh because may lose its relevance compared to the splitted. + # Copying the cell data. The interesting cells are the ones stored in face_cell_id. + new_number_cells: int = fracture_mesh.GetNumberOfCells() + input_cell_data = old_mesh.GetCellData() + for i in range( input_cell_data.GetNumberOfArrays() ): + old_cells_array = vtk_to_numpy( input_cell_data.GetArray( i ) ) + old_nrows: int = old_cells_array.shape[ 0 ] + if len( old_cells_array.shape ) == 1: + old_cells_array = old_cells_array.reshape( ( old_nrows, 1 ) ) + name: str = input_cell_data.GetArrayName( i ) + logging.info( f"Copying cell data \"{name}\"." ) + new_array = old_cells_array[ face_cell_id, : ] + # Reshape the VTK array to match the original dimensions + old_ncols: int = 1 if len( old_cells_array.shape ) == 1 else old_cells_array.shape[ 1 ] + if old_ncols > 1: + vtk_array = numpy_to_vtk( new_array.flatten() ) + vtk_array.SetNumberOfComponents( old_ncols ) + vtk_array.SetNumberOfTuples( new_number_cells ) + else: + vtk_array = numpy_to_vtk( new_array ) + vtk_array.SetName( name ) + fracture_mesh.GetCellData().AddArray( vtk_array ) - __copy_cell_data( old_mesh, new_mesh, is_fracture_mesh ) - __copy_field_data( old_mesh, new_mesh ) - __copy_point_data( old_mesh, new_mesh ) + new_number_points: int = fracture_mesh.GetNumberOfPoints() + input_point_data = old_mesh.GetPointData() + for i in range( input_point_data.GetNumberOfArrays() ): + old_points_array = vtk_to_numpy( input_point_data.GetArray( i ) ) + old_nrows = old_points_array.shape[ 0 ] + if len( old_points_array.shape ) == 1: + old_points_array = old_points_array.reshape( ( old_nrows, 1 ) ) + name = input_point_data.GetArrayName( i ) + logging.info( f"Copying point data \"{name}\"." 
) + new_array = old_points_array[ list( node_3d_to_node_2d.keys() ), : ] + old_ncols = 1 if len( old_points_array.shape ) == 1 else old_points_array.shape[ 1 ] + if old_ncols > 1: + vtk_array = numpy_to_vtk( new_array.flatten() ) + vtk_array.SetNumberOfComponents( old_ncols ) + vtk_array.SetNumberOfTuples( new_number_points ) + else: + vtk_array = numpy_to_vtk( new_array ) + vtk_array.SetName( name ) + fracture_mesh.GetPointData().AddArray( vtk_array ) def __perform_split( old_mesh: vtkUnstructuredGrid, cell_to_node_mapping: Mapping[ int, @@ -390,17 +354,19 @@ def __perform_split( old_mesh: vtkUnstructuredGrid, cell_to_node_mapping: Mappin :return: The main 3d mesh split at the fracture location. """ added_points: set[ int ] = set() + added_points_with_old_id: list[ tuple[ int ] ] = list() for node_mapping in cell_to_node_mapping.values(): for i, o in node_mapping.items(): if i != o: added_points.add( o ) + added_points_with_old_id.append( ( o, i ) ) num_new_points: int = old_mesh.GetNumberOfPoints() + len( added_points ) # Creating the new points for the new mesh. old_points: vtkPoints = old_mesh.GetPoints() new_points = vtkPoints() new_points.SetNumberOfPoints( num_new_points ) - collocated_nodes = numpy.ones( num_new_points, dtype=int ) * -1 + collocated_nodes = ones( num_new_points, dtype=int ) * -1 # Copying old points into the new container. 
for p in range( old_points.GetNumberOfPoints() ): new_points.SetPoint( p, old_points.GetPoint( p ) ) @@ -451,7 +417,7 @@ def __perform_split( old_mesh: vtkUnstructuredGrid, cell_to_node_mapping: Mappin cell_point_ids.SetId( i, new_point_id ) new_mesh.InsertNextCell( cell_type, cell_point_ids ) - __copy_fields( old_mesh, new_mesh ) + __copy_fields_splitted_mesh( old_mesh, new_mesh, added_points_with_old_id ) return new_mesh @@ -468,22 +434,30 @@ def __generate_fracture_mesh( old_mesh: vtkUnstructuredGrid, fracture_info: Frac logging.info( "Generating the meshes" ) mesh_points: vtkPoints = old_mesh.GetPoints() - is_node_duplicated = numpy.zeros( mesh_points.GetNumberOfPoints(), dtype=bool ) # defaults to False + is_node_duplicated = zeros( mesh_points.GetNumberOfPoints(), dtype=bool ) # defaults to False for node_mapping in cell_to_node_mapping.values(): for i, o in node_mapping.items(): if not is_node_duplicated[ i ]: is_node_duplicated[ i ] = i != o # Some elements can have all their nodes not duplicated. - # In this case, it's mandatory not get rid of this element - # because the neighboring 3d elements won't follow. + # In this case, it's mandatory not get rid of this element because the neighboring 3d elements won't follow. 
face_nodes: list[ Collection[ int ] ] = list() discarded_face_nodes: set[ Iterable[ int ] ] = set() - for ns in fracture_info.face_nodes: - if any( map( is_node_duplicated.__getitem__, ns ) ): - face_nodes.append( ns ) - else: - discarded_face_nodes.add( ns ) + if fracture_info.face_cell_id != list(): # The fracture policy is 'internal_surfaces' + face_cell_id: list[ int ] = list() + for ns, f_id in zip( fracture_info.face_nodes, fracture_info.face_cell_id ): + if any( map( is_node_duplicated.__getitem__, ns ) ): + face_nodes.append( ns ) + face_cell_id.append( f_id ) + else: + discarded_face_nodes.add( ns ) + else: # The fracture policy is 'field' + for ns in fracture_info.face_nodes: + if any( map( is_node_duplicated.__getitem__, ns ) ): + face_nodes.append( ns ) + else: + discarded_face_nodes.add( ns ) if discarded_face_nodes: # tmp = list() @@ -497,7 +471,7 @@ def __generate_fracture_mesh( old_mesh: vtkUnstructuredGrid, fracture_info: Frac logging.info( f"The faces made of nodes [{msg}] were/was discarded" + "from the fracture mesh because none of their/its nodes were duplicated." ) - fracture_nodes_tmp = numpy.ones( mesh_points.GetNumberOfPoints(), dtype=int ) * -1 + fracture_nodes_tmp = ones( mesh_points.GetNumberOfPoints(), dtype=int ) * -1 for ns in face_nodes: for n in ns: fracture_nodes_tmp[ n ] = n @@ -505,12 +479,14 @@ def __generate_fracture_mesh( old_mesh: vtkUnstructuredGrid, fracture_info: Frac num_points: int = len( fracture_nodes ) points = vtkPoints() points.SetNumberOfPoints( num_points ) - node_3d_to_node_2d: IDMapping = {} # Building the node mapping, from 3d mesh nodes to 2d fracture nodes. + node_3d_to_node_2d: IDMapping = dict() # Building the node mapping, from 3d mesh nodes to 2d fracture nodes. for i, n in enumerate( fracture_nodes ): coords: Coordinates3D = mesh_points.GetPoint( n ) points.SetPoint( i, coords ) node_3d_to_node_2d[ n ] = i + # The polygons are constructed in the same order as the faces defined in the fracture_info. 
Therefore, + # fracture_info.face_cell_id can be used to link old cells to fracture cells for copy with internal_surfaces. polygons = vtkCellArray() for ns in face_nodes: polygon = vtkPolygon() @@ -528,7 +504,7 @@ def __generate_fracture_mesh( old_mesh: vtkUnstructuredGrid, fracture_info: Frac assert set( buckets.keys() ) == set( range( num_points ) ) max_collocated_nodes: int = max( map( len, buckets.values() ) ) if buckets.values() else 0 - collocated_nodes = numpy.ones( ( num_points, max_collocated_nodes ), dtype=int ) * -1 + collocated_nodes = ones( ( num_points, max_collocated_nodes ), dtype=int ) * -1 for i, bucket in buckets.items(): for j, val in enumerate( bucket ): collocated_nodes[ i, j ] = val @@ -541,7 +517,10 @@ def __generate_fracture_mesh( old_mesh: vtkUnstructuredGrid, fracture_info: Frac fracture_mesh.SetCells( [ VTK_POLYGON ] * polygons.GetNumberOfCells(), polygons ) fracture_mesh.GetPointData().AddArray( array ) - __copy_fields( old_mesh, fracture_mesh, True ) + # The copy of fields from the old mesh to the fracture is only available when using the internal_surfaces policy + # because the FractureInfo is linked to 2D elements from the old_mesh + if fracture_info.face_cell_id != list(): + __copy_fields_fracture_mesh( old_mesh, fracture_mesh, face_cell_id, node_3d_to_node_2d ) return fracture_mesh @@ -577,6 +556,12 @@ def __check( mesh, options: Options ) -> Result: def check( vtk_input_file: str, options: Options ) -> Result: try: mesh = read_mesh( vtk_input_file ) + # Mesh cannot contain global ids before splitting. + if has_invalid_field( mesh, [ "GLOBAL_IDS_POINTS", "GLOBAL_IDS_CELLS" ] ): + err_msg: str = ( "The mesh cannot contain global ids for neither cells nor points. The correct procedure " + + " is to split the mesh and then generate global ids for new split meshes." 
) + logging.error( err_msg ) + raise ValueError( err_msg ) return __check( mesh, options ) except BaseException as e: logging.error( e ) From 1e919018c811407bb25c742e0a0132a7e9114144 Mon Sep 17 00:00:00 2001 From: alexbenedicto Date: Wed, 11 Dec 2024 17:52:05 -0800 Subject: [PATCH 10/10] Fix to allow specification of the data mode of the fracture meshes --- .../doctor/parsing/generate_fractures_parsing.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/geos-mesh/src/geos/mesh/doctor/parsing/generate_fractures_parsing.py b/geos-mesh/src/geos/mesh/doctor/parsing/generate_fractures_parsing.py index 619a4a8..0a549aa 100644 --- a/geos-mesh/src/geos/mesh/doctor/parsing/generate_fractures_parsing.py +++ b/geos-mesh/src/geos/mesh/doctor/parsing/generate_fractures_parsing.py @@ -14,6 +14,8 @@ __FRACTURES_OUTPUT_DIR = "fractures_output_dir" __FRACTURES_DATA_MODE = "fractures_data_mode" +__FRACTURES_DATA_MODE_VALUES = "binary", "ascii" +__FRACTURES_DATA_MODE_DEFAULT = __FRACTURES_DATA_MODE_VALUES[ 0 ] def convert_to_fracture_policy( s: str ) -> FracturePolicy: @@ -62,6 +64,8 @@ def fill_subparser( subparsers ) -> None: p.add_argument( '--' + __FRACTURES_DATA_MODE, type=str, + metavar=", ".join( __FRACTURES_DATA_MODE_VALUES ), + default=__FRACTURES_DATA_MODE_DEFAULT, help=f'[string]: For ".vtu" output format, the data mode can be binary or ascii. Defaults to binary.' 
) @@ -84,7 +88,7 @@ def convert( parsed_options ) -> Options: ] fracture_names: list[ str ] = [ "fracture_" + frac.replace( ",", "_" ) + ".vtu" for frac in per_fracture ] fractures_output_dir: str = parsed_options[ __FRACTURES_OUTPUT_DIR ] - fractures_data_mode: str = parsed_options[ __FRACTURES_DATA_MODE ] + fractures_data_mode: str = parsed_options[ __FRACTURES_DATA_MODE ] == __FRACTURES_DATA_MODE_DEFAULT all_fractures_VtkOutput: list[ VtkOutput ] = build_all_fractures_VtkOutput( fractures_output_dir, fractures_data_mode, mesh_vtk_output, fracture_names ) @@ -110,13 +114,13 @@ def are_values_parsable( values: str ) -> bool: return True -def build_all_fractures_VtkOutput( fracture_output_dir: str, fractures_data_mode: str, mesh_vtk_output: VtkOutput, +def build_all_fractures_VtkOutput( fracture_output_dir: str, fractures_data_mode: bool, mesh_vtk_output: VtkOutput, fracture_names: list[ str ] ) -> list[ VtkOutput ]: if not os.path.exists( fracture_output_dir ): - raise ValueError( f"The --{__FRACTURES_OUTPUT_DIR} given directory does not exist." ) + raise ValueError( f"The --{__FRACTURES_OUTPUT_DIR} given directory '{fracture_output_dir}' does not exist." ) if not os.access( fracture_output_dir, os.W_OK ): - raise ValueError( f"The --{__FRACTURES_OUTPUT_DIR} given directory is not writable." ) + raise ValueError( f"The --{__FRACTURES_OUTPUT_DIR} given directory '{fracture_output_dir}' is not writable." ) output_name = os.path.basename( mesh_vtk_output.output ) splitted_name_without_expension: list[ str ] = output_name.split( "." )[ :-1 ]