diff --git a/backend_py/primary/primary/routers/flow_network/router.py b/backend_py/primary/primary/routers/flow_network/router.py
index e6fe2be19..c5ea30265 100644
--- a/backend_py/primary/primary/routers/flow_network/router.py
+++ b/backend_py/primary/primary/routers/flow_network/router.py
@@ -45,7 +45,7 @@ async def get_realization_flow_network(
         summary_access=summary_access,
         realization=realization,
         summary_frequency=summary_frequency,
-        node_types=unique_node_types,
+        selected_node_types=unique_node_types,
         flow_network_mode=NetworkModeOptions.SINGLE_REAL,
     )
 
@@ -57,7 +57,7 @@ async def get_realization_flow_network(
         dated_networks,
         edge_metadata,
         node_metadata,
-    ) = await network_assembler.create_dated_networks_and_metadata_lists()
+    ) = network_assembler.create_dated_networks_and_metadata_lists()
 
     create_data_time_ms = timer.lap_ms()
     LOGGER.info(
diff --git a/backend_py/primary/primary/services/flow_network_assembler/_assembler_performance_times.py b/backend_py/primary/primary/services/flow_network_assembler/_assembler_performance_times.py
new file mode 100644
index 000000000..68e0f5bbc
--- /dev/null
+++ b/backend_py/primary/primary/services/flow_network_assembler/_assembler_performance_times.py
@@ -0,0 +1,42 @@
+import logging
+from dataclasses import dataclass
+
+LOGGER = logging.getLogger(__name__)
+
+
+@dataclass
+# Dataclass needs to save a bunch of timestamps. Many attributes is okay here, as splitting it would be more cumbersome
+# pylint: disable-next=too-many-instance-attributes
+class PerformanceTimes:
+    """Simple utility class to store performance timer results for different internal method calls"""
+
+    init_sumo_data: int = 0
+    init_summary_vector_list: int = 0
+    fetch_grouptree_df: int = 0
+    init_grouptree_df_model: int = 0
+    create_filtered_dataframe: int = 0
+    init_summary_vector_data_table: int = 0
+    create_node_classifications: int = 0
+    create_network_summary_vectors_info: int = 0
+
+    # Unused for logging for now, but available if needed
+    build_and_verify_vectors_of_interest: int = 0
+    create_well_node_classifications: int = 0
+
+    def log_sumo_download_times(self) -> None:
+        # Log download from Sumo times
+        LOGGER.info(
+            f"Total time to fetch data from Sumo: {self.init_sumo_data + self.init_summary_vector_data_table}ms, "
+            f"Get summary vector list in: {self.init_summary_vector_list}ms, "
+            f"Get group tree table in: {self.fetch_grouptree_df}ms, "
+            f"Get summary vectors in: {self.init_summary_vector_data_table}ms"
+        )
+
+    def log_structure_init_times(self) -> None:
+        # Log initialization of data structures times
+        LOGGER.info(
+            f"Initialize GroupTreeModel in: {self.init_grouptree_df_model}ms, "
+            f"Create filtered dataframe in: {self.create_filtered_dataframe}ms, "
+            f"Create node classifications in: {self.create_node_classifications}ms, "
+            f"Create group tree summary vectors info in: {self.create_network_summary_vectors_info}ms"
+        )
diff --git a/backend_py/primary/primary/services/flow_network_assembler/_utils.py b/backend_py/primary/primary/services/flow_network_assembler/_utils.py
index 0538b11b5..4c40f9efc 100644
--- a/backend_py/primary/primary/services/flow_network_assembler/_utils.py
+++ b/backend_py/primary/primary/services/flow_network_assembler/_utils.py
@@ -1,4 +1,6 @@
 import logging
+import pandas as pd
+import numpy as np
 
 from primary.services.sumo_access.group_tree_types import TreeType
 from .flow_network_types import (
@@ -9,11 +11,18 @@
     NodeSummaryVectorsInfo,
     EdgeOrNode,
     DataType,
+    NodeType,
 )
 
 LOGGER = logging.getLogger(__name__)
 
 
+NODE_TYPE_ENUM_TO_STRING_MAPPING = {
+    NodeType.INJ: "Injector",
+    NodeType.PROD: "Producer",
+    NodeType.OTHER: "Other",
+}
+
 FIELD_DATATYPE_VECTOR_MAP = {
     DataType.OILRATE: "FOPR",
@@ -255,3 +264,33 @@ def _compute_node_datatypes_for_name_and_keyword(
         datatypes.append(DataType.GASINJRATE)
 
     return datatypes
+
+
+def is_valid_node_type(node_classification: NodeClassification, valid_node_types: set[NodeType]) -> bool:
+    """Returns True if the node classification is a valid node type"""
+    if node_classification.IS_PROD and NodeType.PROD in valid_node_types:
+        return True
+    if node_classification.IS_INJ and NodeType.INJ in valid_node_types:
+        return True
+    if node_classification.IS_OTHER and NodeType.OTHER in valid_node_types:
+        return True
+    return False
+
+
+def create_edge_label_list_from_vfp_table_column(vfp_table_column: pd.Series) -> list[str]:
+    """
+    Creates an edge label list based on the column named "VFP_TABLE".
+
+    If the VFP_TABLE column is not present, the function will raise a ValueError.
+    """
+    if vfp_table_column.empty:
+        raise ValueError("VFP_TABLE column is empty.")
+
+    edge_labels: list[str] = []
+    for vfp_nb in vfp_table_column:
+        if vfp_nb in [None, 9999] or np.isnan(vfp_nb):
+            edge_labels.append("")
+        else:
+            edge_labels.append(f"VFP {int(vfp_nb)}")
+
+    return edge_labels
diff --git a/backend_py/primary/primary/services/flow_network_assembler/flow_network_assembler.py b/backend_py/primary/primary/services/flow_network_assembler/flow_network_assembler.py
index 550207cd9..0b3a7e318 100644
--- a/backend_py/primary/primary/services/flow_network_assembler/flow_network_assembler.py
+++ b/backend_py/primary/primary/services/flow_network_assembler/flow_network_assembler.py
@@ -11,16 +11,18 @@
 from fastapi import HTTPException
 
 from webviz_pkg.core_utils.perf_timer import PerfTimer
-
-from primary.services.sumo_access.summary_access import Frequency, SummaryAccess
+from primary.services.service_exceptions import NoDataError, Service
+from primary.services.sumo_access.summary_access import Frequency, SummaryAccess, VectorMetadata
 from primary.services.sumo_access.group_tree_access import GroupTreeAccess
 from primary.services.sumo_access.group_tree_types import TreeType
 
 from . import _utils
+from ._assembler_performance_times import PerformanceTimes
 from ._group_tree_dataframe_model import (
     GroupTreeDataframeModel,
 )
+
 from .flow_network_types import (
     DataType,
     DatedFlowNetwork,
@@ -40,44 +42,6 @@
 LOGGER = logging.getLogger(__name__)
 
 
-@dataclass
-# Dataclass needs to save a bunch of timestamps. Many attributes is okay here, as splitting it would be more cumbersome
-# pylint: disable-next=too-many-instance-attributes
-class PerformanceTimes:
-    """Simple utility class to store performance timer results for different internal method calls"""
-
-    init_sumo_data: int = 0
-    init_summary_vector_list: int = 0
-    fetch_grouptree_df: int = 0
-    init_grouptree_df_model: int = 0
-    create_filtered_dataframe: int = 0
-    init_summary_vector_data_table: int = 0
-    create_node_classifications: int = 0
-    create_network_summary_vectors_info: int = 0
-
-    # Unused for logging for now, but available if needed
-    build_and_verify_vectors_of_interest: int = 0
-    create_well_node_classifications: int = 0
-
-    def log_sumo_download_times(self) -> None:
-        # Log download from Sumo times
-        LOGGER.info(
-            f"Total time to fetch data from Sumo: {self.init_sumo_data + self.init_summary_vector_data_table}ms, "
-            f"Get summary vector list in: {self.init_summary_vector_list}ms, "
-            f"Get group tree table in: {self.fetch_grouptree_df}ms, "
-            f"Get summary vectors in: {self.init_summary_vector_data_table}ms"
-        )
-
-    def log_structure_init_times(self) -> None:
-        # Log initialization of data structures times
-        LOGGER.info(
-            f"Initialize GroupTreeModel in: {self.init_grouptree_df_model}ms, "
-            f"Create filtered dataframe in: {self.create_filtered_dataframe}ms, "
-            f"Create node classifications in: {self.create_node_classifications}ms, "
-            f"Create group tree summary vectors info in: {self.create_network_summary_vectors_info}ms"
-        )
-
-
 @dataclass
 class FlatNetworkNodeData:
     """
@@ -107,7 +71,7 @@ def __init__(
         summary_access: SummaryAccess,
         realization: int,
         summary_frequency: Frequency,
-        node_types: set[NodeType],
+        selected_node_types: set[NodeType],
         flow_network_mode: NetworkModeOptions,
         terminal_node: str = "FIELD",
         tree_type: TreeType = TreeType.GRUPTREE,
@@ -126,11 +90,12 @@ def __init__(
         self._excl_well_startswith = excl_well_startswith
         self._excl_well_endswith = excl_well_endswith
         self._summary_resampling_frequency = summary_frequency
-        self._node_types = node_types
+        self._selected_node_types = selected_node_types
 
         self._group_tree_df_model: Optional[GroupTreeDataframeModel] = None
         self._filtered_group_tree_df: Optional[pd.DataFrame] = None
         self._all_vectors: Optional[set[str]] = None
+        self._vector_metadata_by_keyword: dict[str, list[VectorMetadata]] = {}
         self._smry_table_sorted_by_date: pa.Table | None = None
         self._node_static_working_data: dict[str, StaticNodeWorkingData] | None = None
 
@@ -142,6 +107,43 @@ def __init__(
             HAS_GAS_INJ=False, HAS_WATER_INJ=False, TERMINAL_NODE=terminal_node
         )
 
+    @property
+    def _group_tree_df_model_safe(self) -> GroupTreeDataframeModel:
+        if self._group_tree_df_model is None:
+            raise ValueError("Grouptree dataframe model has not been initialized")
+        return self._group_tree_df_model
+
+    @property
+    def _all_vectors_safe(self) -> set[str]:
+        if self._all_vectors is None:
+            raise ValueError("List of summary vectors has not been initialized")
+        return self._all_vectors
+
+    @property
+    def _filtered_group_tree_df_safe(self) -> pd.DataFrame:
+        if self._filtered_group_tree_df is None:
+            raise ValueError("Filtered group-tree dataframe has not been initialized")
+        return self._filtered_group_tree_df
+
+    @property
+    def _edge_data_types(self) -> list[DataType]:
+        # ! Using a list to keep the datatypes in the same order every run
+        data_types: list[DataType] = []
+
+        if NodeType.PROD in self._selected_node_types:
+            data_types.extend([DataType.OILRATE, DataType.GASRATE, DataType.WATERRATE])
+        if NodeType.INJ in self._selected_node_types and self._network_classification.HAS_WATER_INJ:
+            data_types.append(DataType.WATERINJRATE)
+        if NodeType.INJ in self._selected_node_types and self._network_classification.HAS_GAS_INJ:
+            data_types.append(DataType.GASINJRATE)
+
+        return data_types
+
+    @property
+    def _node_data_types(self) -> list[DataType]:
+        # ! Using a list to keep the datatypes in the same order every run
+        return [DataType.PRESSURE, DataType.BHP, DataType.WMCTL]
+
     def _verify_that_sumvecs_exists(self, check_sumvecs: set[str]) -> None:
         """
         Takes in a list of summary vectors and checks if they are present among the assemblers available vectors.
@@ -195,24 +197,6 @@ def _validate_assembler_config(self) -> None:
         if self._realization is None:
             raise ValueError("FlowNetworkAssembler missing realization")
 
-    @property
-    def _group_tree_df_model_safe(self) -> GroupTreeDataframeModel:
-        if self._group_tree_df_model is None:
-            raise ValueError("Grouptree dataframe model has not been initialized")
-        return self._group_tree_df_model
-
-    @property
-    def _all_vectors_safe(self) -> set[str]:
-        if self._all_vectors is None:
-            raise ValueError("List of summary vectors has not been initialized")
-        return self._all_vectors
-
-    @property
-    def _filtered_group_tree_df_safe(self) -> pd.DataFrame:
-        if self._filtered_group_tree_df is None:
-            raise ValueError("Filtered group-tree dataframe has not been initialized")
-        return self._filtered_group_tree_df
-
     async def fetch_and_initialize_async(self) -> None:
         """
         Fetches the group tree and summary data from Sumo, and initializes the data structures needed to build a single realization
@@ -246,11 +230,24 @@ async def fetch_and_initialize_async(self) -> None:
         # Get summary vectors for all data simultaneously to obtain one request from Sumo
         # Many summary vectors might not be needed, but will be filtered out later on. This is the most efficient way to get the data
        # NOTE: "WSTAT" vectors are enumerated well state indicator, thus interpolated values might create issues (should be resolved by resampling-code)
-        single_realization_vectors_table, _ = await self._summary_access.get_single_real_vectors_table_async(
+        (
+            single_realization_vectors_table,
+            vector_metadata,
+        ) = await self._summary_access.get_single_real_vectors_table_async(
             vector_names=list(vectors_of_interest),
             resampling_frequency=self._summary_resampling_frequency,
             realization=self._realization,
         )
+
+        # Store vector metadata entries in a dict for easy lookup later
+        vector_metadata_by_keyword: dict[str, list[VectorMetadata]] = {}
+        for vec_meta in vector_metadata:
+            entries = vector_metadata_by_keyword.get(vec_meta.keyword, [])
+            entries.append(vec_meta)
+            vector_metadata_by_keyword[vec_meta.keyword] = entries
+
+        self._vector_metadata_by_keyword = vector_metadata_by_keyword
+
         self._performance_times.init_summary_vector_data_table = timer.lap_ms()
 
         # Create list of column names in the table once (for performance)
@@ -277,7 +274,7 @@ async def fetch_and_initialize_async(self) -> None:
         self._performance_times.log_sumo_download_times()
         self._performance_times.log_structure_init_times()
 
-    async def create_dated_networks_and_metadata_lists(
+    def create_dated_networks_and_metadata_lists(
         self,
     ) -> Tuple[list[DatedFlowNetwork], list[FlowNetworkMetadata], list[FlowNetworkMetadata]]:
         """
@@ -295,52 +292,52 @@ async def create_dated_networks_and_metadata_lists(
         if self._node_static_working_data is None:
             raise ValueError("Static working data for nodes has not been initialized")
 
+        edge_data_types = self._edge_data_types
+        node_data_types = self._node_data_types
+        data_types_of_interest: set[DataType] | None = (set(node_data_types) | set(edge_data_types)) or None
+
         dated_network_list = _create_dated_networks(
             self._smry_table_sorted_by_date,
             self._filtered_group_tree_df_safe,
             self._node_static_working_data,
-            self._node_types,
+            self._selected_node_types,
             self._network_classification.TERMINAL_NODE,
+            data_types_of_interest,
         )
 
         return (
             dated_network_list,
-            self._get_edge_options(self._node_types),
-            [
-                FlowNetworkMetadata(key=datatype.value, label=_utils.get_label_for_datatype(datatype))
-                for datatype in [DataType.PRESSURE, DataType.BHP, DataType.WMCTL]
-            ],
+            self._assemble_metadata_for_data_types(edge_data_types),
+            self._assemble_metadata_for_data_types(node_data_types),
         )
 
-    def _get_edge_options(self, node_types: set[NodeType]) -> list[FlowNetworkMetadata]:
-        """Returns a list with edge node options for the dropdown
-        menu in the Flow Network module.
-
-        The output list has the format:
-        [
-            {"name": DataType.OILRATE.value, "label": "Oil Rate"},
-            {"name": DataType.GASRATE.value, "label": "Gas Rate"},
-        ]
-        """
+    def _assemble_metadata_for_data_types(self, data_types: list[DataType]) -> list[FlowNetworkMetadata]:
+        """Returns a list with metadata for a set of data types"""
         options: list[FlowNetworkMetadata] = []
-        if NodeType.PROD in node_types:
-            for rate in [DataType.OILRATE, DataType.GASRATE, DataType.WATERRATE]:
-                options.append(FlowNetworkMetadata(key=rate.value, label=_utils.get_label_for_datatype(rate)))
-        if NodeType.INJ in node_types and self._network_classification.HAS_WATER_INJ:
-            options.append(
-                FlowNetworkMetadata(
-                    key=DataType.WATERINJRATE.value, label=_utils.get_label_for_datatype(DataType.WATERINJRATE)
-                )
-            )
-        if NodeType.INJ in node_types and self._network_classification.HAS_GAS_INJ:
-            options.append(
-                FlowNetworkMetadata(
-                    key=DataType.GASINJRATE.value, label=_utils.get_label_for_datatype(DataType.GASINJRATE)
-                )
+
+        for data_type in data_types:
+            vector_metadata = self._get_vector_metadata_for_data_type(data_type)
+
+            network_metadata = FlowNetworkMetadata(
+                key=data_type.value,
+                label=_utils.get_label_for_datatype(data_type),
+                unit=vector_metadata.unit,
             )
-        if options:
-            return options
-        return [FlowNetworkMetadata(key=DataType.OILRATE.value, label=_utils.get_label_for_datatype(DataType.OILRATE))]
+
+            options.append(network_metadata)
+
+        return options
+
+    def _get_vector_metadata_for_data_type(self, data_type: DataType) -> VectorMetadata:
+        # ! Assumes that unit is equivalent for field, group and well vectors.
+        data_vector = _utils.WELL_DATATYPE_VECTOR_MAP[data_type]
+
+        vector_meta_list = self._vector_metadata_by_keyword.get(data_vector, [])
+
+        if len(vector_meta_list) < 1:
+            raise ValueError(f"Vector metadata missing for vector {data_vector}")
+
+        return vector_meta_list[0]
 
     def _verify_neccessary_injection_vectors(self, vectors_of_interest: set[str]) -> None:
         # Has any water injection or gas injection vectors among vectors of interest
@@ -716,8 +713,9 @@ def _create_dated_networks(
     smry_sorted_by_date: pa.Table,
     group_tree_df: pd.DataFrame,
     node_static_working_data_dict: dict[str, StaticNodeWorkingData],
-    valid_node_types: set[NodeType],
+    selected_node_types: set[NodeType],
     terminal_node: str,
+    data_types_of_interest: set[DataType] | None,
 ) -> list[DatedFlowNetwork]:
     """
     Create a list of static flow networks with summary data, based on the group trees and resampled summary data.
@@ -731,8 +729,9 @@ def _create_dated_networks(
     - `smry_sorted_by_date`. pa.Table - Summary data table sorted by date. Expected columns: [DATE, summary_vector_1, ... , summary_vector_n]
     - `group_tree_df`: Dataframe with group tree for dates - expected columns: [KEYWORD, CHILD, PARENT], optional column: [VFP_TABLE]
     - `node_static_working_data_dict`: Dictionary with node name as key and its static work data for building flow networks
-    - `valid_node_types`: Set of node types to include from the group tree
+    - `selected_node_types`: Set of node types to include from the group tree
     - `terminal_node`: Name of the terminal node in the group tree
+    - `data_types_of_interest`: Set of data types to include for edges and nodes. If None, all data types are included.
 
     `Returns`:
     A list of dated networks with recursive node structure and summary data for each node in the tree.
@@ -785,8 +784,9 @@ def _create_dated_networks(
             smry_in_datespan_sorted_by_date,
             len(dates),
             node_static_working_data_dict,
-            valid_node_types,
+            selected_node_types,
             terminal_node,
+            data_types_of_interest,
         )
 
         dated_networks.append(DatedFlowNetwork(dates=formatted_dates, network=network))
@@ -812,8 +812,9 @@ def _create_dated_network(
     smry_for_grouptree_sorted_by_date: pa.Table,
     number_of_dates_in_smry: int,
     node_static_working_data_dict: dict[str, StaticNodeWorkingData],
-    valid_node_types: set[NodeType],
+    selected_node_types: set[NodeType],
     terminal_node: str,
+    data_types_of_interest: set[DataType] | None,
 ) -> NetworkNode:
     """
     Create a static flowm network with summary data for a set of dates.
@@ -823,12 +824,13 @@ def _create_dated_network(
     `Arguments`:
     - `grouptree_at_date`: Dataframe with group tree for one date - expected columns: [KEYWORD, CHILD, PARENT, EDGE_LABEL]
     - `grouptree_date`: Timestamp - Date of the group tree
-    - smry_for_grouptree_sorted_by_date: Summary data for time span defined from the group tree at date to the next group tree date. The summary data is
+    - `smry_for_grouptree_sorted_by_date`: Summary data for time span defined from the group tree at date to the next group tree date. The summary data is
     sorted by date, which implies unique dates, ordered by date. Thereby each node or edge is a column in the summary dataframe.
-    - number_of_dates_in_smry: Number of unique dates in the summary data df. To be used for filling missing data - i.e. num rows of smry_sorted_by_date
-    - node_static_working_data_dict: Dictionary with node name as key and its static work data for building networks
-    - valid_node_types: Set of valid node types for the group tree
-    - terminal_node: Name of the terminal node in the group tree
+    - `number_of_dates_in_smry`: Number of unique dates in the summary data df. To be used for filling missing data - i.e. num rows of smry_sorted_by_date
+    - `node_static_working_data_dict`: Dictionary with node name as key and its static work data for building networks
+    - `selected_node_types`: Set of selected node types for the group tree
+    - `terminal_node`: Name of the terminal node in the group tree
+    - `data_types_of_interest`: Set of data types to include for edges and nodes. If None, all data types are included.
 
     `Returns`:
     A dated flow network with a recursive node structure, with summary data for the each date added to each node.
@@ -838,11 +840,18 @@ def _create_dated_network(
     nodes_dict = _create_flat_network_nodes_map(
         grouptree_at_date,
         node_static_working_data_dict,
-        valid_node_types,
+        selected_node_types,
         smry_for_grouptree_sorted_by_date,
         number_of_dates_in_smry,
+        data_types_of_interest,
     )
 
+    if not nodes_dict:
+        raise NoDataError(
+            f"No nodes found in the group tree for the selected node types: {[_utils.NODE_TYPE_ENUM_TO_STRING_MAPPING[elm] for elm in selected_node_types]}",
+            Service.GENERAL,
+        )
+
     terminal_node_elm = nodes_dict.get(terminal_node)
 
     if terminal_node_elm is None:
@@ -865,9 +874,10 @@ def _create_flat_network_nodes_map(
     grouptree_at_date: pd.DataFrame,
     node_static_working_data_dict: dict[str, StaticNodeWorkingData],
-    valid_node_types: set[NodeType],
+    selected_node_types: set[NodeType],
     smry_for_grouptree_sorted_by_date: pa.Table,
     number_of_dates_in_smry: int,
+    data_types_of_interest: set[DataType] | None,
 ) -> dict[str, FlatNetworkNodeData]:
     """
     Creates a map with node names and their respective flat network node data.
@@ -890,7 +900,7 @@ def _create_flat_network_nodes_map(
     # Create edge label for nodes
     edge_labels = [""] * len(node_names)
     if "VFP_TABLE" in grouptree_at_date.columns:
-        edge_labels = _create_edge_label_list_from_vfp_table_column(grouptree_at_date["VFP_TABLE"])
+        edge_labels = _utils.create_edge_label_list_from_vfp_table_column(grouptree_at_date["VFP_TABLE"])
 
     # Iterate over every row in the grouptree dataframe to create the network nodes
     for node_name, parent_name, node_keyword, edge_label in zip(node_names, parent_names, keywords, edge_labels):
@@ -901,7 +911,7 @@ def _create_flat_network_nodes_map(
         if node_static_working_data is None:
             raise ValueError(f"No summary vector info found for node {node_name}")
 
-        if not _is_valid_node_type(node_static_working_data.node_classification, valid_node_types):
+        if not _utils.is_valid_node_type(node_static_working_data.node_classification, selected_node_types):
             continue
 
         network_node = _create_network_node(
@@ -912,6 +922,7 @@ def _create_flat_network_nodes_map(
             smry_columns_set,
             smry_for_grouptree_sorted_by_date,
             number_of_dates_in_smry,
+            data_types_of_interest,
         )
 
         nodes_dict[node_name] = FlatNetworkNodeData(parent_name=parent_name, node_without_children=network_node)
@@ -919,36 +930,6 @@ def _create_flat_network_nodes_map(
     return nodes_dict
 
 
-def _is_valid_node_type(node_classification: NodeClassification, valid_node_types: set[NodeType]) -> bool:
-    """Returns True if the node classification is a valid node type"""
-    if node_classification.IS_PROD and NodeType.PROD in valid_node_types:
-        return True
-    if node_classification.IS_INJ and NodeType.INJ in valid_node_types:
-        return True
-    if node_classification.IS_OTHER and NodeType.OTHER in valid_node_types:
-        return True
-    return False
-
-
-def _create_edge_label_list_from_vfp_table_column(vfp_table_column: pd.Series) -> list[str]:
-    """
-    Creates an edge label list based on the column named "VFP_TABLE".
-
-    If the VFP_TABLE column is not present, the function will raise a ValueError.
-    """
-    if vfp_table_column.empty:
-        raise ValueError("VFP_TABLE column is empty.")
-
-    edge_labels: list[str] = []
-    for vfp_nb in vfp_table_column:
-        if vfp_nb in [None, 9999] or np.isnan(vfp_nb):
-            edge_labels.append("")
-        else:
-            edge_labels.append(f"VFP {int(vfp_nb)}")
-
-    return edge_labels
-
-
 def _create_network_node(
     node_name: str,
     keyword: str,
@@ -957,6 +938,7 @@ def _create_network_node(
     smry_columns_set: set,
     smry_for_grouptree_sorted_by_date: pa.Table,
     number_of_dates_in_smry: int,
+    data_types_of_interest: set[DataType] | None,
 ) -> NetworkNode:
 
     # Find working data for the node
@@ -972,6 +954,9 @@ def _create_network_node(
     for sumvec, info in summary_vector_info.items():
         datatype = info.DATATYPE
 
+        if data_types_of_interest is not None and datatype not in data_types_of_interest:
+            continue
+
         if sumvec in smry_columns_set:
             data = smry_for_grouptree_sorted_by_date[sumvec].to_numpy().round(2)
         else:
diff --git a/backend_py/primary/primary/services/flow_network_assembler/flow_network_types.py b/backend_py/primary/primary/services/flow_network_assembler/flow_network_types.py
index a4001a558..5200b48b9 100644
--- a/backend_py/primary/primary/services/flow_network_assembler/flow_network_types.py
+++ b/backend_py/primary/primary/services/flow_network_assembler/flow_network_types.py
@@ -140,3 +140,4 @@ class DatedFlowNetwork:
 class FlowNetworkMetadata:
     key: str
     label: str
+    unit: str | None = None
diff --git a/frontend/package-lock.json b/frontend/package-lock.json
index 6dafa8805..a675cf8cd 100644
--- a/frontend/package-lock.json
+++ b/frontend/package-lock.json
@@ -18,7 +18,7 @@
         "@tanstack/react-query": "^5.63",
         "@tanstack/react-query-devtools": "^5.63",
         "@types/geojson": "^7946.0.14",
-        "@webviz/group-tree-plot": "^1.1.14",
+        "@webviz/group-tree-plot": "^1.4.0",
         "@webviz/subsurface-viewer": "^1.1.1",
         "@webviz/well-completions-plot": "^1.5.11",
         "@webviz/well-log-viewer": "^1.12.7",
@@ -6740,12 +6740,13 @@
       }
     },
     "node_modules/@webviz/group-tree-plot": {
-      "version": "1.1.14",
-      "resolved": "https://registry.npmjs.org/@webviz/group-tree-plot/-/group-tree-plot-1.1.14.tgz",
-      "integrity": "sha512-3N5lhuQWn/lBvg0jQSN31Jp7hPjMhIygTYkCLGpFKGTZgPUB6RvNLKnl31EZJhFsQf9xhjJg/XfI5WifrTs+qw==",
+      "version": "1.4.0",
+      "resolved": "https://registry.npmjs.org/@webviz/group-tree-plot/-/group-tree-plot-1.4.0.tgz",
+      "integrity": "sha512-tewQ2xAkX4swHlFut7gLmrOzA2md1r73Sv3jtut7RFBkTEbKiwX5el+4m2F86mRvfc5vSiqruPZM0mSqMd2Qlw==",
       "dependencies": {
         "d3": "^7.8.2",
-        "lodash": "^4.17.21"
+        "lodash": "^4.17.21",
+        "motion": "^11.18.0"
       },
       "peerDependencies": {
         "react": "^17 || ^18",
@@ -10171,6 +10172,32 @@
         "url": "https://github.com/sponsors/rawify"
       }
     },
+    "node_modules/framer-motion": {
+      "version": "11.18.2",
+      "resolved": "https://registry.npmjs.org/framer-motion/-/framer-motion-11.18.2.tgz",
+      "integrity": "sha512-5F5Och7wrvtLVElIpclDT0CBzMVg3dL22B64aZwHtsIY8RB4mXICLrkajK4G9R+ieSAGcgrLeae2SeUTg2pr6w==",
+      "dependencies": {
+        "motion-dom": "^11.18.1",
+        "motion-utils": "^11.18.1",
+        "tslib": "^2.4.0"
+      },
+      "peerDependencies": {
+        "@emotion/is-prop-valid": "*",
+        "react": "^18.0.0 || ^19.0.0",
+        "react-dom": "^18.0.0 || ^19.0.0"
+      },
+      "peerDependenciesMeta": {
+        "@emotion/is-prop-valid": {
+          "optional": true
+        },
+        "react": {
+          "optional": true
+        },
+        "react-dom": {
+          "optional": true
+        }
+      }
+    },
     "node_modules/from2": {
       "version": "2.3.0",
       "resolved": "https://registry.npmjs.org/from2/-/from2-2.3.0.tgz",
@@ -12474,6 +12501,44 @@
         "node": ">=0.4.0"
       }
     },
+    "node_modules/motion": {
+      "version": "11.18.2",
+ "resolved": "https://registry.npmjs.org/motion/-/motion-11.18.2.tgz", + "integrity": "sha512-JLjvFDuFr42NFtcVoMAyC2sEjnpA8xpy6qWPyzQvCloznAyQ8FIXioxWfHiLtgYhoVpfUqSWpn1h9++skj9+Wg==", + "dependencies": { + "framer-motion": "^11.18.2", + "tslib": "^2.4.0" + }, + "peerDependencies": { + "@emotion/is-prop-valid": "*", + "react": "^18.0.0 || ^19.0.0", + "react-dom": "^18.0.0 || ^19.0.0" + }, + "peerDependenciesMeta": { + "@emotion/is-prop-valid": { + "optional": true + }, + "react": { + "optional": true + }, + "react-dom": { + "optional": true + } + } + }, + "node_modules/motion-dom": { + "version": "11.18.1", + "resolved": "https://registry.npmjs.org/motion-dom/-/motion-dom-11.18.1.tgz", + "integrity": "sha512-g76KvA001z+atjfxczdRtw/RXOM3OMSdd1f4DL77qCTF/+avrRJiawSG4yDibEQ215sr9kpinSlX2pCTJ9zbhw==", + "dependencies": { + "motion-utils": "^11.18.1" + } + }, + "node_modules/motion-utils": { + "version": "11.18.1", + "resolved": "https://registry.npmjs.org/motion-utils/-/motion-utils-11.18.1.tgz", + "integrity": "sha512-49Kt+HKjtbJKLtgO/LKj9Ld+6vw9BjH5d9sc40R/kVyH8GLAXgT42M2NnuPcJNuA3s9ZfZBUcwIgpmZWGEE+hA==" + }, "node_modules/mouse-change": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/mouse-change/-/mouse-change-1.4.0.tgz", diff --git a/frontend/package.json b/frontend/package.json index b9b76e889..e354107c2 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -29,7 +29,7 @@ "@tanstack/react-query": "^5.63", "@tanstack/react-query-devtools": "^5.63", "@types/geojson": "^7946.0.14", - "@webviz/group-tree-plot": "^1.1.14", + "@webviz/group-tree-plot": "^1.4.0", "@webviz/subsurface-viewer": "^1.1.1", "@webviz/well-completions-plot": "^1.5.11", "@webviz/well-log-viewer": "^1.12.7", diff --git a/frontend/src/api/autogen/types.gen.ts b/frontend/src/api/autogen/types.gen.ts index 7c8f31b8a..0f0cdb99f 100644 --- a/frontend/src/api/autogen/types.gen.ts +++ b/frontend/src/api/autogen/types.gen.ts @@ -189,6 +189,7 @@ export type FlowNetworkData_api = { export type FlowNetworkMetadata_api = { key: string; label: string; + unit: string | null; }; export enum FlowRateType_api { diff --git a/frontend/src/modules/FlowNetwork/settings/settings.tsx b/frontend/src/modules/FlowNetwork/settings/settings.tsx index 5bb3ce55e..5b61a7ad3 100644 --- a/frontend/src/modules/FlowNetwork/settings/settings.tsx +++ b/frontend/src/modules/FlowNetwork/settings/settings.tsx @@ -191,6 +191,8 @@ export function Settings({ workbenchSession, settingsContext }: ModuleSettingsPr