
Commit

[Group Tree / Flow Network] Refactor - Change use of "Group tree" to "Flow network" (#813)

Co-authored-by: jorgenherje <[email protected]>
Anders2303 and jorgenherje authored Dec 4, 2024
1 parent 20542c9 commit 7e96080
Showing 27 changed files with 1,550 additions and 1,254 deletions.
4 changes: 2 additions & 2 deletions backend_py/primary/primary/main.py
@@ -17,7 +17,7 @@
from primary.routers.general import router as general_router
from primary.routers.graph.router import router as graph_router
from primary.routers.grid3d.router import router as grid3d_router
from primary.routers.group_tree.router import router as group_tree_router
from primary.routers.flow_network.router import router as flow_network_router
from primary.routers.inplace_volumetrics.router import router as inplace_volumetrics_router
from primary.routers.observations.router import router as observations_router
from primary.routers.parameters.router import router as parameters_router
@@ -77,7 +77,7 @@ def custom_generate_unique_id(route: APIRoute) -> str:
app.include_router(surface_router, prefix="/surface", tags=["surface"])
app.include_router(parameters_router, prefix="/parameters", tags=["parameters"])
app.include_router(grid3d_router, prefix="/grid3d", tags=["grid3d"])
app.include_router(group_tree_router, prefix="/group_tree", tags=["group_tree"])
app.include_router(flow_network_router, prefix="/flow_network", tags=["flow_network"])
app.include_router(pvt_router, prefix="/pvt", tags=["pvt"])
app.include_router(well_completions_router, prefix="/well_completions", tags=["well_completions"])
app.include_router(well_router, prefix="/well", tags=["well"])
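In practice, the prefix change in main.py means the data is now served under /flow_network instead of /group_tree. A minimal client sketch of the new route (host, port and parameter values are hypothetical placeholders; authentication and the query parameters collapsed in the router diff further down are omitted):

import httpx

# Hypothetical local backend URL; all values below are placeholders only.
response = httpx.get(
    "http://localhost:5000/flow_network/realization_flow_network/",
    params={
        "case_uuid": "11111111-2222-3333-4444-555555555555",
        "resampling_frequency": "YEARLY",
        "node_type_set": ["prod", "inj", "other"],
        # ...plus the remaining parameters hidden in the collapsed part of the diff
    },
)
response.raise_for_status()
flow_network_data = response.json()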
Empty file.
backend_py/primary/primary/routers/flow_network/router.py
@@ -1,23 +1,23 @@
import logging
from fastapi import APIRouter, Depends, Query
from webviz_pkg.core_utils.perf_timer import PerfTimer
from primary.auth.auth_helper import AuthHelper
from primary.services.group_tree_assembler.group_tree_assembler import GroupTreeAssembler

from primary.services.flow_network_assembler.flow_network_assembler import FlowNetworkAssembler
from primary.services.flow_network_assembler.flow_network_types import NetworkModeOptions, NodeType
from primary.services.sumo_access.group_tree_access import GroupTreeAccess
from primary.services.sumo_access.group_tree_types import TreeModeOptions, NodeType
from primary.services.sumo_access.summary_access import Frequency, SummaryAccess
from primary.services.utils.authenticated_user import AuthenticatedUser

from . import schemas

from webviz_pkg.core_utils.perf_timer import PerfTimer
import logging

LOGGER = logging.getLogger(__name__)

router = APIRouter()


@router.get("/realization_group_tree_data/")
async def get_realization_group_tree_data(
@router.get("/realization_flow_network/")
async def get_realization_flow_network(
# fmt:off
authenticated_user: AuthenticatedUser = Depends(AuthHelper.get_authenticated_user),
case_uuid: str = Query(description="Sumo case uuid"),
@@ -26,7 +26,7 @@ async def get_realization_group_tree_data(
resampling_frequency: schemas.Frequency = Query(description="Resampling frequency"),
node_type_set: set[schemas.NodeType] = Query(description="Node types"),
# fmt:on
) -> schemas.GroupTreeData:
) -> schemas.FlowNetworkData:
timer = PerfTimer()

group_tree_access = await GroupTreeAccess.from_case_uuid_async(
@@ -38,33 +38,33 @@ async def get_realization_group_tree_data(
summary_frequency = Frequency.YEARLY

# Convert to NodeType enum in group_tree_types
unique_node_types = set([NodeType(elm.value) for elm in node_type_set])
unique_node_types = {NodeType(elm.value) for elm in node_type_set}

group_tree_data = GroupTreeAssembler(
network_assembler = FlowNetworkAssembler(
group_tree_access=group_tree_access,
summary_access=summary_access,
realization=realization,
summary_frequency=summary_frequency,
node_types=unique_node_types,
group_tree_mode=TreeModeOptions.SINGLE_REAL,
flow_network_mode=NetworkModeOptions.SINGLE_REAL,
)

timer.lap_ms()
await group_tree_data.fetch_and_initialize_async()
await network_assembler.fetch_and_initialize_async()
initialize_time_ms = timer.lap_ms()

(
dated_trees,
dated_networks,
edge_metadata,
node_metadata,
) = await group_tree_data.create_dated_trees_and_metadata_lists()
) = await network_assembler.create_dated_networks_and_metadata_lists()
create_data_time_ms = timer.lap_ms()

LOGGER.info(
f"Group tree data for single realization fetched and processed in: {timer.elapsed_ms()}ms "
f"(initialize={initialize_time_ms}ms, create group tree={create_data_time_ms}ms)"
)

return schemas.GroupTreeData(
edge_metadata_list=edge_metadata, node_metadata_list=node_metadata, dated_trees=dated_trees
return schemas.FlowNetworkData(
edgeMetadataList=edge_metadata, nodeMetadataList=node_metadata, datedNetworks=dated_networks
)
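As the set-comprehension change above shows, the endpoint bridges the API-level node-type enum to the service-level enum by round-tripping through the shared string values. A standalone sketch of that pattern (the enum classes here are illustrative stand-ins, not the project's actual schemas.NodeType / flow_network_types.NodeType):

from enum import StrEnum

class ApiNodeType(StrEnum):      # stand-in for schemas.NodeType
    PROD = "prod"
    INJ = "inj"
    OTHER = "other"

class ServiceNodeType(StrEnum):  # stand-in for flow_network_types.NodeType
    PROD = "prod"
    INJ = "inj"
    OTHER = "other"

requested = {ApiNodeType.PROD, ApiNodeType.INJ}

# Same conversion as in the endpoint: construct the service enum from the string value.
unique_node_types = {ServiceNodeType(elm.value) for elm in requested}
assert unique_node_types == {ServiceNodeType.PROD, ServiceNodeType.INJ}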
36 changes: 36 additions & 0 deletions backend_py/primary/primary/routers/flow_network/schemas.py
@@ -0,0 +1,36 @@
from enum import Enum, StrEnum

from pydantic import BaseModel, ConfigDict
from primary.services.flow_network_assembler.flow_network_types import DatedFlowNetwork, FlowNetworkMetadata


class Frequency(str, Enum):
DAILY = "DAILY"
WEEKLY = "WEEKLY"
MONTHLY = "MONTHLY"
QUARTERLY = "QUARTERLY"
YEARLY = "YEARLY"


class StatOption(str, Enum):
MEAN = "MEAN"
P10 = "P10"
P90 = "P90"
P50 = "P50"
MIN = "MIN"
MAX = "MAX"


# ! Copy of the flow network service NodeType enum
class NodeType(StrEnum):
PROD = "prod"
INJ = "inj"
OTHER = "other"


class FlowNetworkData(BaseModel):
model_config = ConfigDict(revalidate_instances="always")

edgeMetadataList: list[FlowNetworkMetadata]
nodeMetadataList: list[FlowNetworkMetadata]
datedNetworks: list[DatedFlowNetwork]
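The new response model opts into revalidate_instances="always", which makes Pydantic re-validate nested model instances rather than trust them as-is when FlowNetworkData is constructed. A minimal, self-contained sketch of that behaviour with simplified models (not the actual FlowNetworkMetadata / DatedFlowNetwork types):

from pydantic import BaseModel, ConfigDict, ValidationError

class Item(BaseModel):
    name: str

class Container(BaseModel):
    model_config = ConfigDict(revalidate_instances="always")
    items: list[Item]

good = Item(name="edge_1")
bad = Item.model_construct(name=123)  # model_construct skips validation on purpose

Container(items=[good])  # passes
try:
    Container(items=[bad])  # re-validation rejects the invalid nested instance
except ValidationError as err:
    print(err)

With the default revalidate_instances="never", the second call would be accepted without re-checking the nested instance.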
34 changes: 0 additions & 34 deletions backend_py/primary/primary/routers/group_tree/schemas.py

This file was deleted.

Empty file.
@@ -2,54 +2,7 @@

import pandas as pd

from primary.services.sumo_access.group_tree_types import DataType, TreeType

GROUP_TREE_FIELD_DATATYPE_TO_VECTOR_MAP = {
DataType.OILRATE: "FOPR",
DataType.GASRATE: "FGPR",
DataType.WATERRATE: "FWPR",
DataType.WATERINJRATE: "FWIR",
DataType.GASINJRATE: "FGIR",
DataType.PRESSURE: "GPR",
}

TREE_TYPE_DATATYPE_TO_GROUP_VECTOR_DATATYPE_MAP = {
"GRUPTREE": {
DataType.OILRATE: "GOPR",
DataType.GASRATE: "GGPR",
DataType.WATERRATE: "GWPR",
DataType.WATERINJRATE: "GWIR",
DataType.GASINJRATE: "GGIR",
DataType.PRESSURE: "GPR",
},
# BRANPROP can not be used for injection, but the nodes
# might also be GNETINJE and could therefore have injection.
"BRANPROP": {
DataType.OILRATE: "GOPRNB",
DataType.GASRATE: "GGPRNB",
DataType.WATERRATE: "GWPRNB",
DataType.WATERINJRATE: "GWIR",
DataType.GASINJRATE: "GGIR",
DataType.PRESSURE: "GPR",
},
}

GROUPTREE_DATATYPE_TO_WELL_VECTOR_DATATYPE_MAP = {
DataType.OILRATE: "WOPR",
DataType.GASRATE: "WGPR",
DataType.WATERRATE: "WWPR",
DataType.WATERINJRATE: "WWIR",
DataType.GASINJRATE: "WGIR",
DataType.PRESSURE: "WTHP",
DataType.BHP: "WBHP",
DataType.WMCTL: "WMCTL",
}

FIELD_VECTORS_OF_INTEREST: List[str] = list(GROUP_TREE_FIELD_DATATYPE_TO_VECTOR_MAP.values())
WELLS_VECTOR_DATATYPES_OF_INTEREST: List[str] = list(GROUPTREE_DATATYPE_TO_WELL_VECTOR_DATATYPE_MAP.values())
GROUP_VECTOR_DATATYPES_OF_INTEREST = [
v for kw in ["GRUPTREE", "BRANPROP"] for v in TREE_TYPE_DATATYPE_TO_GROUP_VECTOR_DATATYPE_MAP[kw].values()
]
from primary.services.sumo_access.group_tree_types import TreeType


class GroupTreeDataframeModel:
@@ -77,7 +30,6 @@ class GroupTreeDataframeModel:

_grouptree_wells: List[str] = []
_grouptree_groups: List[str] = []
_grouptree_wstat_vectors: List[str] = []

def __init__(
self,
@@ -132,7 +84,6 @@ def __init__(

self._grouptree_wells = list(group_tree_wells)
self._grouptree_groups = list(group_tree_groups)
self._grouptree_wstat_vectors = [f"WSTAT:{well}" for well in self._grouptree_wells]

@property
def dataframe(self) -> pd.DataFrame:
@@ -157,15 +108,6 @@ def group_tree_wells(self) -> List[str]:
def group_tree_groups(self) -> List[str]:
return self._grouptree_groups

@property
def wstat_vectors(self) -> List[str]:
"""
Returns the well state indicator vectors for all wells in the group tree
The vectors are of the form "WSTAT:{well_name}"
"""
return self._grouptree_wstat_vectors

def create_filtered_dataframe(
self,
terminal_node: Optional[str] = None,
@@ -214,40 +156,6 @@ def filter_wells(dframe: pd.DataFrame, well_name_criteria: Callable) -> pd.DataFrame:

return df.copy()

def create_vector_of_interest_list(self) -> List[str]:
"""
Create a list of vectors based on the possible combinations of vector datatypes and vector nodes
for a group tree
This implies vectors for field, group and well.
Only returns the candidates which exist among the valid vectors
"""

# Find all summary vectors with group tree wells
group_tree_well_vector_candidates = _create_vector_candidates(
WELLS_VECTOR_DATATYPES_OF_INTEREST, self._grouptree_wells
)

# Find all summary vectors with group tree groups
group_tree_group_vector_candidates = _create_vector_candidates(
GROUP_VECTOR_DATATYPES_OF_INTEREST, self._grouptree_groups
)

# Find all summary vectors with field vectors
group_tree_field_vectors_candidates = FIELD_VECTORS_OF_INTEREST

all_vectors_of_interest = (
group_tree_well_vector_candidates
+ group_tree_group_vector_candidates
+ group_tree_field_vectors_candidates
+ self._grouptree_wstat_vectors
)

# Ensure non duplicated vectors
unique_vectors_of_interst = list(set(all_vectors_of_interest))
return unique_vectors_of_interst

def _create_branch_node_list(self, terminal_node: str) -> List[str]:
"""
This function lists all nodes in a branch of the group tree starting from the terminal node.
@@ -263,25 +171,11 @@ def _create_branch_node_list(self, terminal_node: str) -> List[str]:
current_parents = [terminal_node]
while len(current_parents) > 0:
# Find all indexes matching the current parents
children_indices = set([i for i, x in enumerate(parents_array) if x in current_parents])
children_indices = {i for i, x in enumerate(parents_array) if x in current_parents}

# Find all children of the current parents
children = nodes_array[list(children_indices)]
branch_node_set.update(children)
current_parents = children

return list(branch_node_set)


def _create_vector_candidates(vector_datatype_candidates: List[str], vector_node_candidates: List[str]) -> List[str]:
"""Create a list of vectors based on the list of vector datatype candidates and vector node candidates
A vector is then given by "{vector_datatype}:{vector_node}"
E.g. "WOPT:WELL1"
"""
result: List[str] = []
for datatype in vector_datatype_candidates:
for node in vector_node_candidates:
result.append(f"{datatype}:{node}")
return result
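For reference, the removed helper simply built the cross product of datatype and node names, as its docstring's "WOPT:WELL1" example suggests. An equivalent one-liner with illustrative inputs:

datatypes = ["WOPR", "WGPR"]   # vector datatype candidates
nodes = ["WELL1", "WELL2"]     # vector node candidates

candidates = [f"{datatype}:{node}" for datatype in datatypes for node in nodes]
# -> ['WOPR:WELL1', 'WOPR:WELL2', 'WGPR:WELL1', 'WGPR:WELL2']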