diff --git a/backend/src/backend/auth/auth_helper.py b/backend/src/backend/auth/auth_helper.py
index 57fb1e969..041e4be4a 100644
--- a/backend/src/backend/auth/auth_helper.py
+++ b/backend/src/backend/auth/auth_helper.py
@@ -101,7 +101,6 @@ async def _authorized_callback_route(self, request: Request) -> Response:
def get_authenticated_user(
request_with_session: Request,
) -> Optional[AuthenticatedUser]:
-
timer = PerfTimer()
# We may already have created and stored the AuthenticatedUser object on the request
@@ -173,6 +172,9 @@ def get_authenticated_user(
# print("-------------------------------------------------")
smda_token = token_dict.get("access_token") if token_dict else None
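+        # Acquire a separate token scoped for the Microsoft Graph API so the backend can fetch the user's profile info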
+ token_dict = cca.acquire_token_silent(scopes=config.GRAPH_SCOPES, account=accounts[0])
+ graph_token = token_dict.get("access_token") if token_dict else None
+
# print(f" get tokens {timer.lap_ms():.1f}ms")
_save_token_cache_in_session(request_with_session, token_cache)
@@ -187,10 +189,13 @@ def get_authenticated_user(
authenticated_user = AuthenticatedUser(
user_id=user_id,
username=user_name,
- sumo_access_token=sumo_token,
- smda_access_token=smda_token,
- pdm_access_token=None,
- ssdl_access_token=None,
+ access_tokens={
+ "graph_access_token": graph_token,
+ "sumo_access_token": sumo_token,
+ "smda_access_token": smda_token,
+ "pdm_access_token": None,
+ "ssdl_access_token": None,
+ },
)
request_with_session.state.authenticated_user_obj = authenticated_user
@@ -203,7 +208,6 @@ def get_authenticated_user(
def _create_msal_confidential_client_app(
token_cache: msal.TokenCache,
) -> msal.ConfidentialClientApplication:
-
authority = f"https://login.microsoftonline.com/{config.TENANT_ID}"
return msal.ConfidentialClientApplication(
client_id=config.CLIENT_ID,
@@ -217,7 +221,6 @@ def _create_msal_confidential_client_app(
# Note that this function will NOT return the token itself, but rather a dict
# that typically has an "access_token" key
def _get_token_dict_from_session_token_cache(request_with_session: Request, scopes: List[str]) -> Optional[dict]:
-
token_cache = _load_token_cache_from_session(request_with_session)
cca = _create_msal_confidential_client_app(token_cache)
diff --git a/backend/src/backend/primary/main.py b/backend/src/backend/primary/main.py
index 9386e9b9b..36c29c12e 100644
--- a/backend/src/backend/primary/main.py
+++ b/backend/src/backend/primary/main.py
@@ -17,7 +17,7 @@
from .routers.correlations.router import router as correlations_router
from .routers.grid.router import router as grid_router
from .routers.pvt.router import router as pvt_router
-from .routers.well_completion.router import router as well_completion_router
+from .routers.well_completions.router import router as well_completions_router
from .routers.well.router import router as well_router
from .routers.surface_polygons.router import router as surface_polygons_router
@@ -53,7 +53,7 @@ def custom_generate_unique_id(route: APIRoute) -> str:
app.include_router(correlations_router, prefix="/correlations", tags=["correlations"])
app.include_router(grid_router, prefix="/grid", tags=["grid"])
app.include_router(pvt_router, prefix="/pvt", tags=["pvt"])
-app.include_router(well_completion_router, prefix="/well_completion", tags=["well_completion"])
+app.include_router(well_completions_router, prefix="/well_completions", tags=["well_completions"])
app.include_router(well_router, prefix="/well", tags=["well"])
app.include_router(surface_polygons_router, prefix="/surface_polygons", tags=["surface_polygons"])
diff --git a/backend/src/backend/primary/routers/general.py b/backend/src/backend/primary/routers/general.py
index 304c0d3ee..29e9c7f9f 100644
--- a/backend/src/backend/primary/routers/general.py
+++ b/backend/src/backend/primary/routers/general.py
@@ -1,19 +1,24 @@
+import asyncio
import datetime
import logging
+import httpx
import starsessions
from starlette.responses import StreamingResponse
-from fastapi import APIRouter, HTTPException, Request, status, Depends
+from fastapi import APIRouter, HTTPException, Request, status, Depends, Query
from pydantic import BaseModel
from src.backend.auth.auth_helper import AuthHelper, AuthenticatedUser
from src.backend.primary.user_session_proxy import proxy_to_user_session
+from src.services.graph_access.graph_access import GraphApiAccess
LOGGER = logging.getLogger(__name__)
class UserInfo(BaseModel):
username: str
+ display_name: str | None
+ avatar_b64str: str | None
has_sumo_access: bool
has_smda_access: bool
@@ -34,7 +39,12 @@ def alive_protected() -> str:
@router.get("/logged_in_user", response_model=UserInfo)
-async def logged_in_user(request: Request) -> UserInfo:
+async def logged_in_user(
+ request: Request,
+ includeGraphApiInfo: bool = Query(
+ False, description="Set to true to include user avatar and display name from Microsoft Graph API"
+ ),
+) -> UserInfo:
print("entering logged_in_user route")
await starsessions.load_session(request)
@@ -47,10 +57,29 @@ async def logged_in_user(request: Request) -> UserInfo:
user_info = UserInfo(
username=authenticated_user.get_username(),
+ avatar_b64str=None,
+ display_name=None,
has_sumo_access=authenticated_user.has_sumo_access_token(),
has_smda_access=authenticated_user.has_smda_access_token(),
)
+ if authenticated_user.has_graph_access_token() and includeGraphApiInfo:
+ graph_api_access = GraphApiAccess(authenticated_user.get_graph_access_token())
+ try:
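+            # Fetch the avatar and the Graph user info concurrently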
+ avatar_b64str_future = asyncio.create_task(graph_api_access.get_user_profile_photo())
+ graph_user_info_future = asyncio.create_task(graph_api_access.get_user_info())
+
+ avatar_b64str = await avatar_b64str_future
+ graph_user_info = await graph_user_info_future
+
+ user_info.avatar_b64str = avatar_b64str
+ if graph_user_info is not None:
+ user_info.display_name = graph_user_info.get("displayName", None)
+ except httpx.HTTPError as e:
+ print("Error while fetching user avatar and info from Microsoft Graph API (HTTP error):\n", e)
+ except httpx.InvalidURL as e:
+ print("Error while fetching user avatar and info from Microsoft Graph API (Invalid URL):\n", e)
+
return user_info
diff --git a/backend/src/backend/primary/routers/well_completion/router.py b/backend/src/backend/primary/routers/well_completion/router.py
deleted file mode 100644
index f9044792a..000000000
--- a/backend/src/backend/primary/routers/well_completion/router.py
+++ /dev/null
@@ -1,30 +0,0 @@
-from typing import Optional
-
-from fastapi import APIRouter, Depends, Query
-
-from src.backend.auth.auth_helper import AuthHelper
-from src.services.utils.authenticated_user import AuthenticatedUser
-
-from src.services.sumo_access.well_completion_access import WellCompletionAccess
-from src.services.utils.well_completion_utils import WellCompletionDataModel
-
-from . import schemas
-
-router = APIRouter()
-
-
-@router.get("/well_completion_data/")
-def get_well_completion_data(
- # fmt:off
- authenticated_user: AuthenticatedUser = Depends(AuthHelper.get_authenticated_user),
- case_uuid: str = Query(description="Sumo case uuid"),
- ensemble_name: str = Query(description="Ensemble name"),
- realization: Optional[int] = Query(None, description="Optional realization to include. If not specified, all realizations will be returned."),
- # fmt:on
-) -> schemas.WellCompletionData:
- access = WellCompletionAccess(authenticated_user.get_sumo_access_token(), case_uuid, ensemble_name)
-
- well_completion_df = access.get_well_completion_data(realization=realization)
- well_completion_data_model = WellCompletionDataModel(well_completion_df)
-
- return schemas.WellCompletionData(json_data=well_completion_data_model.create_well_completion_dataset())
diff --git a/backend/src/backend/primary/routers/well_completion/schemas.py b/backend/src/backend/primary/routers/well_completion/schemas.py
deleted file mode 100644
index e04e65187..000000000
--- a/backend/src/backend/primary/routers/well_completion/schemas.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from pydantic import BaseModel
-
-from src.services.utils.well_completion_utils import WellCompletionDataSet
-
-
-class WellCompletionData(BaseModel):
- json_data: WellCompletionDataSet
diff --git a/backend/src/backend/primary/routers/well_completions/router.py b/backend/src/backend/primary/routers/well_completions/router.py
new file mode 100644
index 000000000..de3bb50a6
--- /dev/null
+++ b/backend/src/backend/primary/routers/well_completions/router.py
@@ -0,0 +1,29 @@
+from typing import Optional
+
+from fastapi import APIRouter, Depends, HTTPException, Query
+
+from src.backend.auth.auth_helper import AuthHelper
+from src.services.utils.authenticated_user import AuthenticatedUser
+
+from src.services.sumo_access.well_completions_access import WellCompletionsAccess
+from src.services.sumo_access.well_completions_types import WellCompletionsData
+
+router = APIRouter()
+
+
+@router.get("/well_completions_data/")
+def get_well_completions_data(
+ # fmt:off
+ authenticated_user: AuthenticatedUser = Depends(AuthHelper.get_authenticated_user),
+ case_uuid: str = Query(description="Sumo case uuid"),
+ ensemble_name: str = Query(description="Ensemble name"),
+ realization: Optional[int] = Query(None, description="Optional realization to include. If not specified, all realizations will be returned."),
+ # fmt:on
+) -> WellCompletionsData:
+ access = WellCompletionsAccess(authenticated_user.get_sumo_access_token(), case_uuid, ensemble_name)
+ well_completions_data = access.get_well_completions_data(realization=realization)
+
+ if not well_completions_data:
+ raise HTTPException(status_code=404, detail="Well completions data not found")
+
+ return well_completions_data
diff --git a/backend/src/services/graph_access/graph_access.py b/backend/src/services/graph_access/graph_access.py
new file mode 100644
index 000000000..fe3b92c6e
--- /dev/null
+++ b/backend/src/services/graph_access/graph_access.py
@@ -0,0 +1,39 @@
+import base64
+from typing import Mapping
+
+# Using the same http client as sumo
+import httpx
+
+
+class GraphApiAccess:
+ def __init__(self, access_token: str):
+ self._access_token = access_token
+
+ def _make_headers(self) -> Mapping[str, str]:
+ return {"Authorization": f"Bearer {self._access_token}"}
+
+ async def _request(self, url: str) -> httpx.Response:
+ async with httpx.AsyncClient() as client:
+ response = await client.get(
+ url,
+ headers=self._make_headers(),
+ )
+ return response
+
+ async def get_user_profile_photo(self) -> str | None:
+ print("entering get_user_profile_photo")
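+        # Graph returns the profile photo as raw bytes; any non-200 response (e.g. no photo set) yields None below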
+ response = await self._request("https://graph.microsoft.com/v1.0/me/photo/$value")
+
+ if response.status_code == 200:
+ return base64.b64encode(response.content).decode("utf-8")
+ else:
+ return None
+
+ async def get_user_info(self) -> Mapping[str, str] | None:
+ print("entering get_user_info")
+ response = await self._request("https://graph.microsoft.com/v1.0/me")
+
+ if response.status_code == 200:
+ return response.json()
+ else:
+ return None
diff --git a/backend/src/services/sumo_access/well_completion_access.py b/backend/src/services/sumo_access/well_completion_access.py
deleted file mode 100644
index d226d0ee5..000000000
--- a/backend/src/services/sumo_access/well_completion_access.py
+++ /dev/null
@@ -1,70 +0,0 @@
-from typing import Optional
-
-import pandas as pd
-
-from fmu.sumo.explorer.explorer import CaseCollection, Case, SumoClient
-from ._helpers import create_sumo_client_instance
-
-
-class WellCompletionAccess:
- """
- Class for accessing and retrieving well completion data
- """
-
- def __init__(self, access_token: str, case_uuid: str, iteration_name: str) -> None:
- sumo_client: SumoClient = create_sumo_client_instance(access_token)
- case_collection = CaseCollection(sumo_client).filter(uuid=case_uuid)
- if len(case_collection) > 1:
- raise ValueError(f"Multiple sumo cases found {case_uuid=}")
- if len(case_collection) < 1:
- raise ValueError(f"No sumo cases found {case_uuid=}")
-
- self._case: Case = case_collection[0]
- self._iteration_name = iteration_name
- self._tagname = str("wellcompletiondata") # Should tagname be hard coded?
-
- def get_well_completion_data(self, realization: Optional[int]) -> pd.DataFrame:
- """Get well completion data for case and iteration"""
-
- # With single realization, return the table including additional column REAL
- if realization is not None:
- well_completion_tables = self._case.tables.filter(
- tagname=self._tagname, realization=realization, iteration=self._iteration_name
- )
- well_completion_df = well_completion_tables[0].to_pandas if len(well_completion_tables) > 0 else None
- if well_completion_df is None:
- return {}
-
- well_completion_df["REAL"] = realization
- return well_completion_df
-
- # With multiple realizations, retrieve each column and concatenate
- # Expect one table with aggregated OP/SH and one with aggregate KH data
- well_completion_tables = self._case.tables.filter(
- tagname=self._tagname, aggregation="collection", iteration=self._iteration_name
- )
-
- # Improve code (iterate over tables and concatenate) - concat gives issue? See jupyter-notebook
- if len(well_completion_tables) < 2:
- return {}
-
- first_df = well_completion_tables[0].to_pandas
- second_df = well_completion_tables[1].to_pandas
-
- expected_columns = set(["WELL", "DATE", "ZONE", "REAL"])
- if not set(first_df.columns).issuperset(expected_columns) or not set(second_df.columns).issuperset(
- expected_columns
- ):
- raise ValueError(
- f"Expected df columns to be superset of columns: {expected_columns} - got: {first_df.columns} and {second_df.columns}"
- )
-
- if "OP/SH" in first_df.columns and "KH" in second_df.columns:
- first_df["KH"] = second_df["KH"]
- return first_df
-
- if "OP/SH" in second_df.columns and "KH" in first_df.columns:
- second_df["KH"] = first_df["KH"]
- return second_df
-
- raise ValueError('Expected columns "OP/SH" and "KH" not found in tables')
diff --git a/backend/src/services/sumo_access/well_completions_access.py b/backend/src/services/sumo_access/well_completions_access.py
new file mode 100644
index 000000000..276e03140
--- /dev/null
+++ b/backend/src/services/sumo_access/well_completions_access.py
@@ -0,0 +1,277 @@
+import itertools
+from typing import Dict, Iterator, List, Optional, Set, Tuple
+
+import pandas as pd
+
+from fmu.sumo.explorer.explorer import CaseCollection, Case, SumoClient
+from ._helpers import create_sumo_client_instance
+
+from .well_completions_types import (
+ Completions,
+ WellCompletionsAttributeType,
+ WellCompletionsWell,
+ WellCompletionsData,
+ WellCompletionsZone,
+ WellCompletionsUnitInfo,
+ WellCompletionsUnits,
+)
+
+
+class WellCompletionsAccess:
+ """
+ Class for accessing and retrieving well completions data
+ """
+
+ def __init__(self, access_token: str, case_uuid: str, iteration_name: str) -> None:
+ sumo_client: SumoClient = create_sumo_client_instance(access_token)
+ case_collection = CaseCollection(sumo_client).filter(uuid=case_uuid)
+ if len(case_collection) > 1:
+ raise ValueError(f"Multiple sumo cases found {case_uuid=}")
+ if len(case_collection) < 1:
+ raise ValueError(f"No sumo cases found {case_uuid=}")
+
+ self._case: Case = case_collection[0]
+ self._iteration_name = iteration_name
+ self._tagname = str("wellcompletiondata") # Should tagname be hard coded?
+
+ def get_well_completions_data(self, realization: Optional[int]) -> Optional[WellCompletionsData]:
+ """Get well completions data for case and iteration"""
+
+ # With single realization, filter on realization
+ if realization is not None:
+ well_completions_tables = self._case.tables.filter(
+ tagname=self._tagname, realization=realization, iteration=self._iteration_name
+ )
+ well_completions_df = well_completions_tables[0].to_pandas if len(well_completions_tables) > 0 else None
+ if well_completions_df is None:
+ return None
+
+ return WellCompletionDataConverter(well_completions_df).create_data()
+
+ # With multiple realizations, expect one table with aggregated OP/SH and one with aggregate KH data
+ well_completions_tables = self._case.tables.filter(
+ tagname=self._tagname, aggregation="collection", iteration=self._iteration_name
+ )
+
+ # As of now, two tables are expected - one with OP/SH and one with KH
+ if len(well_completions_tables) < 2:
+ return None
+
+ expected_common_columns = set(["WELL", "DATE", "ZONE", "REAL"])
+ first_df = well_completions_tables[0].to_pandas
+ second_df = well_completions_tables[1].to_pandas
+
+ # Validate columns and ensure equal column content in both tables
+ self._validate_common_dataframe_columns(expected_common_columns, first_df, second_df)
+
+ # Assign "KH" column to the dataframe with missing column
+ if "OP/SH" in first_df.columns and "KH" in second_df.columns:
+ first_df["KH"] = second_df["KH"]
+ return WellCompletionDataConverter(first_df).create_data()
+ if "OP/SH" in second_df.columns and "KH" in first_df.columns:
+ second_df["KH"] = first_df["KH"]
+ return WellCompletionDataConverter(second_df).create_data()
+
+ raise ValueError('Expected columns "OP/SH" and "KH" not found in tables')
+
+ def _validate_common_dataframe_columns(
+ self, common_column_names: Set[str], first_df: pd.DataFrame, second_df: pd.DataFrame
+ ) -> None:
+ """
+ Validates that the two dataframes contains same common columns and that the columns have the same content,
+ raises value error if not matching.
+ """
+ # Ensure expected columns are present
+ if not common_column_names.issubset(first_df.columns):
+ raise ValueError(f"Expected columns of first table: {common_column_names} - got: {first_df.columns}")
+ if not common_column_names.issubset(second_df.columns):
+ raise ValueError(f"Expected columns of second table: {common_column_names} - got: {second_df.columns}")
+
+ # Verify equal columns in both tables
+ for column_name in common_column_names:
+ if not (first_df[column_name] == second_df[column_name]).all():
+ raise ValueError(f'Expected equal column content, "{column_name}", in first and second dataframe')
+
+
+class WellCompletionDataConverter:
+ """
+ Class for converter into WellCompletionData type from a pandas dataframe with well completions data
+
+ Accessor retrieves well completions data from Sumo as table data. This converter class handles
+ the pandas dataframe and provides a data structure for API to consume.
+ """
+
+ def __init__(self, well_completions_df: pd.DataFrame) -> None:
+ # NOTE: Which level of verification?
+ # - Only columns names?
+ # - Verify dtype of columns?
+ # - Verify dimension of columns - only 2D df?
+
+ # Based on realization filtering in Accessor, the "REAL" column is optional - not expected
+ expected_columns = set(["WELL", "DATE", "ZONE", "OP/SH", "KH"])
+
+ if not expected_columns.issubset(well_completions_df.columns):
+ raise ValueError(f"Expected columns: {expected_columns} - got: {well_completions_df.columns}")
+
+ self._well_completions_df = well_completions_df
+
+ # NOTE: Metadata should be provided by Sumo?
+ # _kh_unit = (
+ # kh_metadata.unit
+ # if kh_metadata is not None and kh_metadata.unit is not None
+ # else ""
+ # )
+ self._kh_unit = "mDm" # NOTE: How to find metadata?
+ self._kh_decimal_places = 2
+ self._datemap = {dte: i for i, dte in enumerate(sorted(self._well_completions_df["DATE"].unique()))}
+ self._zones = list(sorted(self._well_completions_df["ZONE"].unique()))
+
+ self._well_completions_df["TIMESTEP"] = self._well_completions_df["DATE"].map(self._datemap)
+
+ # NOTE:
+ # - How to handle well attributes? Should be provided by Sumo?
+ # - How to handle theme colors?
+ self._well_attributes: Dict[
+ str, Dict[str, WellCompletionsAttributeType]
+ ] = {} # Each well has dict of attributes
+ self._theme_colors = ["#6EA35A", "#EDAF4C", "#CA413D"] # Hard coded
+
+ def _dummy_stratigraphy(self) -> List[WellCompletionsZone]:
+ """
+ Returns a default stratigraphy for TESTING, should be provided by Sumo
+ """
+ return [
+ WellCompletionsZone(
+ name="TopVolantis_BaseVolantis",
+ color="#6EA35A",
+ subzones=[
+ WellCompletionsZone(name="Valysar", color="#6EA35A"),
+ WellCompletionsZone(name="Therys", color="#EDAF4C"),
+ WellCompletionsZone(name="Volon", color="#CA413D"),
+ ],
+ ),
+ ]
+
+ def create_data(self) -> WellCompletionsData:
+ """Creates well completions dataset for front-end"""
+
+ return WellCompletionsData(
+ version="1.1.0",
+ units=WellCompletionsUnits(
+ kh=WellCompletionsUnitInfo(unit=self._kh_unit, decimalPlaces=self._kh_decimal_places)
+ ),
+ stratigraphy=self._extract_stratigraphy(self._dummy_stratigraphy(), self._zones),
+ timeSteps=[pd.to_datetime(str(dte)).strftime("%Y-%m-%d") for dte in self._datemap.keys()],
+ wells=self._extract_wells(),
+ )
+
+ def _extract_wells(self) -> List[WellCompletionsWell]:
+ """Generates the wells part of the dataset to front-end"""
+ # Optional "REAL" column, i.e. no column implies only one realization
+ no_real = self._well_completions_df["REAL"].nunique() if "REAL" in self._well_completions_df.columns else 1
+
+ well_list = []
+ for well_name, well_group in self._well_completions_df.groupby("WELL"):
+ well_data = self._extract_well(well_group, well_name, no_real)
+ well_data.attributes = self._well_attributes[well_name] if well_name in self._well_attributes else {}
+ well_list.append(well_data)
+ return well_list
+
+ def _extract_well(self, well_group: pd.DataFrame, well_name: str, no_real: int) -> WellCompletionsWell:
+ """Extract completions events and kh values for a single well"""
+ well: WellCompletionsWell = WellCompletionsWell(name=well_name, attributes={}, completions={})
+
+ completions: Dict[str, Completions] = {}
+ for (zone, timestep), group_df in well_group.groupby(["ZONE", "TIMESTEP"]):
+ data = group_df["OP/SH"].value_counts()
+ if zone not in completions:
+ completions[zone] = Completions(t=[], open=[], shut=[], kh_mean=[], kh_min=[], kh_max=[])
+
+ zone_completions = completions[zone]
+ zone_completions.t.append(int(timestep))
+ zone_completions.open.append(float(data["OPEN"] / no_real if "OPEN" in data else 0))
+ zone_completions.shut.append(float(data["SHUT"] / no_real if "SHUT" in data else 0))
+ zone_completions.kh_mean.append(round(float(group_df["KH"].mean()), 2))
+ zone_completions.kh_min.append(round(float(group_df["KH"].min()), 2))
+ zone_completions.kh_max.append(round(float(group_df["KH"].max()), 2))
+
+ well.completions = completions
+ return well
+
+ def _extract_stratigraphy(
+ self, stratigraphy: Optional[List[WellCompletionsZone]], zones: List[str]
+ ) -> List[WellCompletionsZone]:
+ """Returns the stratigraphy part of the dataset to front-end"""
+ color_iterator = itertools.cycle(self._theme_colors)
+
+ # If no stratigraphy file is found then the stratigraphy is
+ # created from the unique zones in the well completions data input.
+ # They will then probably not come in the correct order.
+ if stratigraphy is None:
+ return [WellCompletionsZone(name=zone, color=next(color_iterator)) for zone in zones]
+
+ # If stratigraphy is not None the following is done:
+ stratigraphy, remaining_valid_zones = self._filter_valid_nodes(stratigraphy, zones)
+
+ if remaining_valid_zones:
+ raise ValueError(
+ "The following zones are defined in the well completions data, "
+ f"but not in the stratigraphy: {remaining_valid_zones}"
+ )
+
+ return self._add_colors_to_stratigraphy(stratigraphy, color_iterator)
+
+ def _add_colors_to_stratigraphy(
+ self,
+ stratigraphy: List[WellCompletionsZone],
+ color_iterator: Iterator,
+ zone_color_mapping: Optional[Dict[str, str]] = None,
+ ) -> List[WellCompletionsZone]:
+ """Add colors to the stratigraphy tree. The function will recursively parse the tree.
+
+ There are tree sources of color:
+ 1. The color is given in the stratigraphy list, in which case nothing is done to the node
+ 2. The color is the optional the zone->color map
+ 3. If none of the above applies, the color will be taken from the theme color iterable for \
+ the leaves. For other levels, a dummy color grey is used
+ """
+ for zone in stratigraphy:
+ if zone.color == "":
+ if zone_color_mapping is not None and zone.name in zone_color_mapping:
+ zone.color = zone_color_mapping[zone.name]
+ elif zone.subzones is None:
+ zone = next(color_iterator) # theme colors only applied on leaves
+ else:
+ zone.color = "#808080" # grey
+ if zone.subzones is not None:
+ zone.subzones = self._add_colors_to_stratigraphy(
+ zone.subzones,
+ color_iterator,
+ zone_color_mapping=zone_color_mapping,
+ )
+ return stratigraphy
+
+ def _filter_valid_nodes(
+ self, stratigraphy: List[WellCompletionsZone], valid_zone_names: List[str]
+ ) -> Tuple[List[WellCompletionsZone], List[str]]:
+ """Returns the stratigraphy tree with only valid nodes.
+ A node is considered valid if it self or one of it's subzones are in the
+ valid zone names list (passed from the lyr file)
+
+ The function recursively parses the tree to add valid nodes.
+ """
+
+ output = []
+ remaining_valid_zones = valid_zone_names
+ for zone in stratigraphy:
+ if zone.subzones is not None:
+ zone.subzones, remaining_valid_zones = self._filter_valid_nodes(zone.subzones, remaining_valid_zones)
+ if zone.name in remaining_valid_zones:
+ output.append(zone)
+ remaining_valid_zones = [
+ elm for elm in remaining_valid_zones if elm != zone.name
+ ] # remove zone name from valid zones if it is found in the stratigraphy
+ elif zone.subzones is not None:
+ output.append(zone)
+
+ return output, remaining_valid_zones
diff --git a/backend/src/services/sumo_access/well_completions_types.py b/backend/src/services/sumo_access/well_completions_types.py
new file mode 100644
index 000000000..4e23be989
--- /dev/null
+++ b/backend/src/services/sumo_access/well_completions_types.py
@@ -0,0 +1,45 @@
+from typing import Dict, List, Optional, Union
+from pydantic import BaseModel
+
+
+WellCompletionsAttributeType = Union[str, int, bool]
+
+
+class Completions(BaseModel):
+ t: List[int]
+ open: List[float]
+ shut: List[float]
+ kh_mean: List[float]
+ kh_min: List[float]
+ kh_max: List[float]
+
+
+class WellCompletionsWell(BaseModel):
+ name: str
+ attributes: Dict[str, WellCompletionsAttributeType]
+ completions: Dict[str, Completions]
+
+
+class WellCompletionsZone(BaseModel):
+ name: str
+ color: str
+ subzones: Optional[List["WellCompletionsZone"]] = None
+
+
+class WellCompletionsUnitInfo(BaseModel):
+ unit: str
+ decimalPlaces: int
+
+
+class WellCompletionsUnits(BaseModel):
+ kh: WellCompletionsUnitInfo
+
+
+class WellCompletionsData(BaseModel):
+ """Type definition for well completions data"""
+
+ version: str
+ units: WellCompletionsUnits
+ stratigraphy: List[WellCompletionsZone]
+ timeSteps: List[str]
+ wells: List[WellCompletionsWell]
diff --git a/backend/src/services/types/well_completion_types.py b/backend/src/services/types/well_completion_types.py
deleted file mode 100644
index 0df76ca7a..000000000
--- a/backend/src/services/types/well_completion_types.py
+++ /dev/null
@@ -1,48 +0,0 @@
-from pydantic import BaseModel
-from typing import Dict, List, Optional, Union
-
-
-WellCompletionAttributeType = Union[str, int, bool]
-
-
-class Completions(BaseModel):
- t: List[int]
- open: List[float]
- shut: List[float]
- kh_mean: List[float]
- kh_min: List[float]
- kh_max: List[float]
-
-
-class WellCompletionWellInfo(BaseModel):
- name: str
- attributes: Dict[str, WellCompletionAttributeType]
-
-
-class WellCompletionWell(WellCompletionWellInfo):
- completions: Dict[str, Completions]
-
-
-class WellCompletionZone(BaseModel):
- name: str
- color: str
- subzones: Optional[List["WellCompletionZone"]] = None
-
-
-class WellCompletionUnitInfo(BaseModel):
- unit: str
- decimalPlaces: int
-
-
-class WellCompletionUnits(BaseModel):
- kh: WellCompletionUnitInfo
-
-
-class WellCompletionDataSet(BaseModel):
- """Type definition for well completion data set"""
-
- version: str
- units: WellCompletionUnits
- stratigraphy: List[WellCompletionZone]
- timeSteps: List[str]
- wells: List[WellCompletionWell]
diff --git a/backend/src/services/utils/authenticated_user.py b/backend/src/services/utils/authenticated_user.py
index 97f3ed7df..3b28aa515 100644
--- a/backend/src/services/utils/authenticated_user.py
+++ b/backend/src/services/utils/authenticated_user.py
@@ -1,6 +1,14 @@
# pylint: disable=bare-except
-from typing import Any, Optional
+from typing import Any, Optional, TypedDict
+
+
+class AccessTokens(TypedDict):
+ graph_access_token: Optional[str]
+ sumo_access_token: Optional[str]
+ smda_access_token: Optional[str]
+ pdm_access_token: Optional[str]
+ ssdl_access_token: Optional[str]
class AuthenticatedUser:
@@ -8,17 +16,15 @@ def __init__(
self,
user_id: str,
username: str,
- sumo_access_token: Optional[str],
- smda_access_token: Optional[str],
- pdm_access_token: Optional[str],
- ssdl_access_token: Optional[str],
+ access_tokens: AccessTokens,
) -> None:
self._user_id = user_id
self._username = username
- self._sumo_access_token = sumo_access_token
- self._smda_access_token = smda_access_token
- self._pdm_access_token = pdm_access_token
- self._ssdl_access_token = ssdl_access_token
+ self._graph_access_token = access_tokens.get("graph_access_token")
+ self._sumo_access_token = access_tokens.get("sumo_access_token")
+ self._smda_access_token = access_tokens.get("smda_access_token")
+ self._pdm_access_token = access_tokens.get("pdm_access_token")
+ self._ssdl_access_token = access_tokens.get("ssdl_access_token")
def __hash__(self) -> int:
return hash(self._user_id)
@@ -29,6 +35,19 @@ def __eq__(self, other: Any) -> bool:
def get_username(self) -> str:
return self._username
+ def get_graph_access_token(self) -> str:
+ if isinstance(self._graph_access_token, str) and self._graph_access_token:
+ return self._graph_access_token
+
+ raise ValueError("User has no graph access token")
+
+ def has_graph_access_token(self) -> bool:
+ try:
+ self.get_graph_access_token()
+ return True
+ except ValueError:
+ return False
+
def get_sumo_access_token(self) -> str:
if isinstance(self._sumo_access_token, str) and len(self._sumo_access_token) > 0:
return self._sumo_access_token
diff --git a/backend/src/services/utils/well_completion_utils.py b/backend/src/services/utils/well_completion_utils.py
deleted file mode 100644
index 7f51e26e4..000000000
--- a/backend/src/services/utils/well_completion_utils.py
+++ /dev/null
@@ -1,187 +0,0 @@
-import itertools
-from typing import Dict, Iterator, List, Optional, Tuple
-
-import pandas as pd
-
-from src.services.types.well_completion_types import (
- Completions,
- WellCompletionAttributeType,
- WellCompletionWell,
- WellCompletionDataSet,
- WellCompletionZone,
- WellCompletionUnitInfo,
- WellCompletionUnits,
-)
-
-
-class WellCompletionDataModel:
- def __init__(self, well_completion_data: pd.DataFrame) -> None:
- # NOTE: Which level of verification?
- # - Only columns names?
- # - Verify dtype of columns?
- # - Verify dimension of columns - only 2D df?
-
- expected_columns = set(["WELL", "DATE", "ZONE", "REAL", "OP/SH", "KH"])
- if expected_columns != set(well_completion_data.columns):
- raise ValueError(f"Expected columns: {expected_columns} - got: {well_completion_data.columns}")
-
- self._well_completion_df = well_completion_data
-
- # NOTE: Metadata should be provided by Sumo?
- # _kh_unit = (
- # kh_metadata.unit
- # if kh_metadata is not None and kh_metadata.unit is not None
- # else ""
- # )
- self._kh_unit = "mDm" # NOTE: How to find metadata?
- self._kh_decimal_places = 2
- self._datemap = {dte: i for i, dte in enumerate(sorted(self._well_completion_df["DATE"].unique()))}
- self._zones = list(sorted(self._well_completion_df["ZONE"].unique()))
-
- self._well_completion_df["TIMESTEP"] = self._well_completion_df["DATE"].map(self._datemap)
-
- # NOTE:
- # - How to handle well attributes? Should be provided by Sumo?
- # - How to handle theme colors?
- self._well_attributes: Dict[
- str, Dict[str, WellCompletionAttributeType]
- ] = {} # Each well has dict of attributes
- self._theme_colors = ["#6EA35A", "#EDAF4C", "#CA413D"] # Hard coded
-
- def _dummy_stratigraphy(self) -> List[WellCompletionZone]:
- """
- Returns a default stratigraphy for TESTING, should be provided by Sumo
- """
- return [
- WellCompletionZone(
- name="TopVolantis_BaseVolantis",
- color="#6EA35A",
- subzones=[
- WellCompletionZone(name="Valysar", color="#6EA35A"),
- WellCompletionZone(name="Therys", color="#EDAF4C"),
- WellCompletionZone(name="Volon", color="#CA413D"),
- ],
- ),
- ]
-
- def create_well_completion_dataset(self) -> WellCompletionDataSet:
- """Creates well completion dataset for front-end"""
-
- return WellCompletionDataSet(
- version="1.1.0",
- units=WellCompletionUnits(
- kh=WellCompletionUnitInfo(unit=self._kh_unit, decimalPlaces=self._kh_decimal_places)
- ),
- stratigraphy=self._extract_stratigraphy(self._dummy_stratigraphy(), self._zones),
- timeSteps=[pd.to_datetime(str(dte)).strftime("%Y-%m-%d") for dte in self._datemap.keys()],
- wells=self._extract_wells(),
- )
-
- def _extract_wells(self) -> List[WellCompletionWell]:
- """Generates the wells part of the dataset to front-end"""
- well_list = []
- no_real = self._well_completion_df["REAL"].nunique()
- for well_name, well_group in self._well_completion_df.groupby("WELL"):
- well_data = self._extract_well(well_group, well_name, no_real)
- well_data.attributes = self._well_attributes[well_name] if well_name in self._well_attributes else {}
- well_list.append(well_data)
- return well_list
-
- def _extract_well(self, well_group: pd.DataFrame, well_name: str, no_real: int) -> WellCompletionWell:
- """Extract completion events and kh values for a single well"""
- well: WellCompletionWell = WellCompletionWell(name=well_name, attributes={}, completions={})
-
- completions: Dict[str, Completions] = {}
- for (zone, timestep), group_df in well_group.groupby(["ZONE", "TIMESTEP"]):
- data = group_df["OP/SH"].value_counts()
- if zone not in completions:
- completions[zone] = Completions(t=[], open=[], shut=[], kh_mean=[], kh_min=[], kh_max=[])
-
- zone_completions = completions[zone]
- zone_completions.t.append(int(timestep))
- zone_completions.open.append(float(data["OPEN"] / no_real if "OPEN" in data else 0))
- zone_completions.shut.append(float(data["SHUT"] / no_real if "SHUT" in data else 0))
- zone_completions.kh_mean.append(round(float(group_df["KH"].mean()), 2))
- zone_completions.kh_min.append(round(float(group_df["KH"].min()), 2))
- zone_completions.kh_max.append(round(float(group_df["KH"].max()), 2))
-
- well.completions = completions
- return well
-
- def _extract_stratigraphy(
- self, stratigraphy: Optional[List[WellCompletionZone]], zones: List[str]
- ) -> List[WellCompletionZone]:
- """Returns the stratigraphy part of the dataset to front-end"""
- color_iterator = itertools.cycle(self._theme_colors)
-
- # If no stratigraphy file is found then the stratigraphy is
- # created from the unique zones in the wellcompletiondata input.
- # They will then probably not come in the correct order.
- if stratigraphy is None:
- return [WellCompletionZone(name=zone, color=next(color_iterator)) for zone in zones]
-
- # If stratigraphy is not None the following is done:
- stratigraphy, remaining_valid_zones = self._filter_valid_nodes(stratigraphy, zones)
-
- if remaining_valid_zones:
- raise ValueError(
- "The following zones are defined in the well completion data, "
- f"but not in the stratigraphy: {remaining_valid_zones}"
- )
-
- return self._add_colors_to_stratigraphy(stratigraphy, color_iterator)
-
- def _add_colors_to_stratigraphy(
- self,
- stratigraphy: List[WellCompletionZone],
- color_iterator: Iterator,
- zone_color_mapping: Optional[Dict[str, str]] = None,
- ) -> List[WellCompletionZone]:
- """Add colors to the stratigraphy tree. The function will recursively parse the tree.
-
- There are tree sources of color:
- 1. The color is given in the stratigraphy list, in which case nothing is done to the node
- 2. The color is the optional the zone->color map
- 3. If none of the above applies, the color will be taken from the theme color iterable for \
- the leaves. For other levels, a dummy color grey is used
- """
- for zone in stratigraphy:
- if zone.color == "":
- if zone_color_mapping is not None and zone.name in zone_color_mapping:
- zone.color = zone_color_mapping[zone.name]
- elif zone.subzones is None:
- zone = next(color_iterator) # theme colors only applied on leaves
- else:
- zone.color = "#808080" # grey
- if zone.subzones is not None:
- zone.subzones = self._add_colors_to_stratigraphy(
- zone.subzones,
- color_iterator,
- zone_color_mapping=zone_color_mapping,
- )
- return stratigraphy
-
- def _filter_valid_nodes(
- self, stratigraphy: List[WellCompletionZone], valid_zone_names: List[str]
- ) -> Tuple[List[WellCompletionZone], List[str]]:
- """Returns the stratigraphy tree with only valid nodes.
- A node is considered valid if it self or one of it's subzones are in the
- valid zone names list (passed from the lyr file)
-
- The function recursively parses the tree to add valid nodes.
- """
-
- output = []
- remaining_valid_zones = valid_zone_names
- for zone in stratigraphy:
- if zone.subzones is not None:
- zone.subzones, remaining_valid_zones = self._filter_valid_nodes(zone.subzones, remaining_valid_zones)
- if zone.name in remaining_valid_zones:
- output.append(zone)
- remaining_valid_zones = [
- elm for elm in remaining_valid_zones if elm != zone.name
- ] # remove zone name from valid zones if it is found in the stratigraphy
- elif zone.subzones is not None:
- output.append(zone)
-
- return output, remaining_valid_zones
diff --git a/frontend/package-lock.json b/frontend/package-lock.json
index 9dc9be156..06fad83bd 100644
--- a/frontend/package-lock.json
+++ b/frontend/package-lock.json
@@ -9,8 +9,8 @@
"version": "0.0.0",
"dependencies": {
"@headlessui/react": "^1.7.8",
- "@heroicons/react": "^2.0.14",
"@mui/base": "^5.0.0-beta.3",
+ "@mui/icons-material": "^5.14.9",
"@tanstack/react-query": "^4.24.10",
"@tanstack/react-query-devtools": "^4.24.12",
"@webviz/subsurface-viewer": "^0.0.2-alpha.9",
@@ -647,9 +647,9 @@
}
},
"node_modules/@babel/runtime": {
- "version": "7.22.11",
- "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.22.11.tgz",
- "integrity": "sha512-ee7jVNlWN09+KftVOu9n7S8gQzD/Z6hN/I8VBRXW4P1+Xe7kJGXMwu8vds4aGIMHZnNbdpSWCfZZtinytpcAvA==",
+ "version": "7.22.15",
+ "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.22.15.tgz",
+ "integrity": "sha512-T0O+aa+4w0u06iNmapipJXMV4HoUir03hpx3/YqXXhu9xim3w+dVphjFWl1OH8NbZHw5Lbm9k45drDkgq2VNNA==",
"dependencies": {
"regenerator-runtime": "^0.14.0"
},
@@ -1408,14 +1408,6 @@
"react-dom": "^16 || ^17 || ^18"
}
},
- "node_modules/@heroicons/react": {
- "version": "2.0.18",
- "resolved": "https://registry.npmjs.org/@heroicons/react/-/react-2.0.18.tgz",
- "integrity": "sha512-7TyMjRrZZMBPa+/5Y8lN0iyvUU/01PeMGX2+RE7cQWpEUIcb4QotzUObFkJDejj/HUH4qjP/eQ0gzzKs2f+6Yw==",
- "peerDependencies": {
- "react": ">= 16"
- }
- },
"node_modules/@humanwhocodes/config-array": {
"version": "0.11.10",
"resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.10.tgz",
@@ -2791,12 +2783,11 @@
}
},
"node_modules/@mui/icons-material": {
- "version": "5.14.7",
- "resolved": "https://registry.npmjs.org/@mui/icons-material/-/icons-material-5.14.7.tgz",
- "integrity": "sha512-mWp4DwMa8c1Gx9yOEtPgxM4b+e6hAbtZyzfSubdBwrnEE6G5D2rbAJ5MB+If6kfI48JaYaJ5j8+zAdmZLuZc0A==",
- "peer": true,
+ "version": "5.14.9",
+ "resolved": "https://registry.npmjs.org/@mui/icons-material/-/icons-material-5.14.9.tgz",
+ "integrity": "sha512-xTRQbDsogsJo7tY5Og8R9zbuG2q+KIPVIM6JQoKxtJlz9DPOw1u0T2fGrvwD+XAOVifQf6epNMcGCDLfJAz4Nw==",
"dependencies": {
- "@babel/runtime": "^7.22.10"
+ "@babel/runtime": "^7.22.15"
},
"engines": {
"node": ">=12.0.0"
diff --git a/frontend/package.json b/frontend/package.json
index 968995756..dc3cd32be 100644
--- a/frontend/package.json
+++ b/frontend/package.json
@@ -16,8 +16,8 @@
},
"dependencies": {
"@headlessui/react": "^1.7.8",
- "@heroicons/react": "^2.0.14",
"@mui/base": "^5.0.0-beta.3",
+ "@mui/icons-material": "^5.14.9",
"@tanstack/react-query": "^4.24.10",
"@tanstack/react-query-devtools": "^4.24.12",
"@webviz/subsurface-viewer": "^0.0.2-alpha.9",
diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx
index 4a0e9b82f..5f7e01cd2 100644
--- a/frontend/src/App.tsx
+++ b/frontend/src/App.tsx
@@ -1,7 +1,6 @@
import React from "react";
import { DrawerContent, LayoutElement, Workbench } from "@framework/Workbench";
-import { LoginDialog } from "@framework/internal/components/LoginDialog";
import { NavBar } from "@framework/internal/components/NavBar";
import { SettingsContentPanels } from "@framework/internal/components/SettingsContentPanels";
import { useQueryClient } from "@tanstack/react-query";
@@ -41,13 +40,10 @@ function App() {
}, []);
return (
- <>
-
-
-
-
-
- >
+
+
+
+
);
}
diff --git a/frontend/src/GlobalErrorBoundary.tsx b/frontend/src/GlobalErrorBoundary.tsx
index 5c1adc268..446b666f1 100644
--- a/frontend/src/GlobalErrorBoundary.tsx
+++ b/frontend/src/GlobalErrorBoundary.tsx
@@ -1,9 +1,9 @@
import React from "react";
-import { BugAntIcon, Square2StackIcon } from "@heroicons/react/20/solid";
import { Button } from "@lib/components/Button";
import { IconButton } from "@lib/components/IconButton";
import { resolveClassNames } from "@lib/utils/resolveClassNames";
+import { BugReport, ContentCopy } from "@mui/icons-material";
type Props = {
children?: React.ReactNode;
@@ -68,7 +68,7 @@ export class GlobalErrorBoundary extends React.Component {
{freshStartUrl.toString()}
-
+
{
this.state.error?.stack ?? ""
)
}
- startIcon={}
+ startIcon={}
>
Report issue
diff --git a/frontend/src/api/ApiService.ts b/frontend/src/api/ApiService.ts
index 44c7fe0f9..d0fab3543 100644
--- a/frontend/src/api/ApiService.ts
+++ b/frontend/src/api/ApiService.ts
@@ -15,7 +15,7 @@ import { SurfaceService } from './services/SurfaceService';
import { SurfacePolygonsService } from './services/SurfacePolygonsService';
import { TimeseriesService } from './services/TimeseriesService';
import { WellService } from './services/WellService';
-import { WellCompletionService } from './services/WellCompletionService';
+import { WellCompletionsService } from './services/WellCompletionsService';
type HttpRequestConstructor = new (config: OpenAPIConfig) => BaseHttpRequest;
@@ -31,7 +31,7 @@ export class ApiService {
public readonly surfacePolygons: SurfacePolygonsService;
public readonly timeseries: TimeseriesService;
public readonly well: WellService;
- public readonly wellCompletion: WellCompletionService;
+ public readonly wellCompletions: WellCompletionsService;
public readonly request: BaseHttpRequest;
@@ -58,7 +58,7 @@ export class ApiService {
this.surfacePolygons = new SurfacePolygonsService(this.request);
this.timeseries = new TimeseriesService(this.request);
this.well = new WellService(this.request);
- this.wellCompletion = new WellCompletionService(this.request);
+ this.wellCompletions = new WellCompletionsService(this.request);
}
}
diff --git a/frontend/src/api/index.ts b/frontend/src/api/index.ts
index 1923f75da..174ba475c 100644
--- a/frontend/src/api/index.ts
+++ b/frontend/src/api/index.ts
@@ -47,12 +47,11 @@ export type { VectorStatisticData as VectorStatisticData_api } from './models/Ve
export type { VectorStatisticSensitivityData as VectorStatisticSensitivityData_api } from './models/VectorStatisticSensitivityData';
export type { WellBoreHeader as WellBoreHeader_api } from './models/WellBoreHeader';
export type { WellBoreTrajectory as WellBoreTrajectory_api } from './models/WellBoreTrajectory';
-export type { WellCompletionData as WellCompletionData_api } from './models/WellCompletionData';
-export type { WellCompletionDataSet as WellCompletionDataSet_api } from './models/WellCompletionDataSet';
-export type { WellCompletionUnitInfo as WellCompletionUnitInfo_api } from './models/WellCompletionUnitInfo';
-export type { WellCompletionUnits as WellCompletionUnits_api } from './models/WellCompletionUnits';
-export type { WellCompletionWell as WellCompletionWell_api } from './models/WellCompletionWell';
-export type { WellCompletionZone as WellCompletionZone_api } from './models/WellCompletionZone';
+export type { WellCompletionsData as WellCompletionsData_api } from './models/WellCompletionsData';
+export type { WellCompletionsUnitInfo as WellCompletionsUnitInfo_api } from './models/WellCompletionsUnitInfo';
+export type { WellCompletionsUnits as WellCompletionsUnits_api } from './models/WellCompletionsUnits';
+export type { WellCompletionsWell as WellCompletionsWell_api } from './models/WellCompletionsWell';
+export type { WellCompletionsZone as WellCompletionsZone_api } from './models/WellCompletionsZone';
export { DefaultService } from './services/DefaultService';
export { ExploreService } from './services/ExploreService';
@@ -64,4 +63,4 @@ export { SurfaceService } from './services/SurfaceService';
export { SurfacePolygonsService } from './services/SurfacePolygonsService';
export { TimeseriesService } from './services/TimeseriesService';
export { WellService } from './services/WellService';
-export { WellCompletionService } from './services/WellCompletionService';
+export { WellCompletionsService } from './services/WellCompletionsService';
diff --git a/frontend/src/api/models/UserInfo.ts b/frontend/src/api/models/UserInfo.ts
index 1d80b9237..efffcd292 100644
--- a/frontend/src/api/models/UserInfo.ts
+++ b/frontend/src/api/models/UserInfo.ts
@@ -4,6 +4,8 @@
export type UserInfo = {
username: string;
+ display_name: (string | null);
+ avatar_b64str: (string | null);
has_sumo_access: boolean;
has_smda_access: boolean;
};
diff --git a/frontend/src/api/models/WellCompletionData.ts b/frontend/src/api/models/WellCompletionData.ts
deleted file mode 100644
index 9d9d24caf..000000000
--- a/frontend/src/api/models/WellCompletionData.ts
+++ /dev/null
@@ -1,10 +0,0 @@
-/* istanbul ignore file */
-/* tslint:disable */
-/* eslint-disable */
-
-import type { WellCompletionDataSet } from './WellCompletionDataSet';
-
-export type WellCompletionData = {
- json_data: WellCompletionDataSet;
-};
-
diff --git a/frontend/src/api/models/WellCompletionDataSet.ts b/frontend/src/api/models/WellCompletionDataSet.ts
deleted file mode 100644
index add799c58..000000000
--- a/frontend/src/api/models/WellCompletionDataSet.ts
+++ /dev/null
@@ -1,19 +0,0 @@
-/* istanbul ignore file */
-/* tslint:disable */
-/* eslint-disable */
-
-import type { WellCompletionUnits } from './WellCompletionUnits';
-import type { WellCompletionWell } from './WellCompletionWell';
-import type { WellCompletionZone } from './WellCompletionZone';
-
-/**
- * Type definition for well completion data set
- */
-export type WellCompletionDataSet = {
- version: string;
- units: WellCompletionUnits;
-    stratigraphy: Array<WellCompletionZone>;
-    timeSteps: Array<string>;
-    wells: Array<WellCompletionWell>;
-};
-
diff --git a/frontend/src/api/models/WellCompletionUnits.ts b/frontend/src/api/models/WellCompletionUnits.ts
deleted file mode 100644
index 9aa8c07cd..000000000
--- a/frontend/src/api/models/WellCompletionUnits.ts
+++ /dev/null
@@ -1,10 +0,0 @@
-/* istanbul ignore file */
-/* tslint:disable */
-/* eslint-disable */
-
-import type { WellCompletionUnitInfo } from './WellCompletionUnitInfo';
-
-export type WellCompletionUnits = {
- kh: WellCompletionUnitInfo;
-};
-
diff --git a/frontend/src/api/models/WellCompletionsData.ts b/frontend/src/api/models/WellCompletionsData.ts
new file mode 100644
index 000000000..272ec94d2
--- /dev/null
+++ b/frontend/src/api/models/WellCompletionsData.ts
@@ -0,0 +1,19 @@
+/* istanbul ignore file */
+/* tslint:disable */
+/* eslint-disable */
+
+import type { WellCompletionsUnits } from './WellCompletionsUnits';
+import type { WellCompletionsWell } from './WellCompletionsWell';
+import type { WellCompletionsZone } from './WellCompletionsZone';
+
+/**
+ * Type definition for well completions data
+ */
+export type WellCompletionsData = {
+ version: string;
+ units: WellCompletionsUnits;
+    stratigraphy: Array<WellCompletionsZone>;
+    timeSteps: Array<string>;
+    wells: Array<WellCompletionsWell>;
+};
+
diff --git a/frontend/src/api/models/WellCompletionUnitInfo.ts b/frontend/src/api/models/WellCompletionsUnitInfo.ts
similarity index 74%
rename from frontend/src/api/models/WellCompletionUnitInfo.ts
rename to frontend/src/api/models/WellCompletionsUnitInfo.ts
index b3b6e2512..8eafff910 100644
--- a/frontend/src/api/models/WellCompletionUnitInfo.ts
+++ b/frontend/src/api/models/WellCompletionsUnitInfo.ts
@@ -2,7 +2,7 @@
/* tslint:disable */
/* eslint-disable */
-export type WellCompletionUnitInfo = {
+export type WellCompletionsUnitInfo = {
unit: string;
decimalPlaces: number;
};
diff --git a/frontend/src/api/models/WellCompletionsUnits.ts b/frontend/src/api/models/WellCompletionsUnits.ts
new file mode 100644
index 000000000..26ea69dfa
--- /dev/null
+++ b/frontend/src/api/models/WellCompletionsUnits.ts
@@ -0,0 +1,10 @@
+/* istanbul ignore file */
+/* tslint:disable */
+/* eslint-disable */
+
+import type { WellCompletionsUnitInfo } from './WellCompletionsUnitInfo';
+
+export type WellCompletionsUnits = {
+ kh: WellCompletionsUnitInfo;
+};
+
diff --git a/frontend/src/api/models/WellCompletionWell.ts b/frontend/src/api/models/WellCompletionsWell.ts
similarity index 87%
rename from frontend/src/api/models/WellCompletionWell.ts
rename to frontend/src/api/models/WellCompletionsWell.ts
index d0ee31077..d9afb14c2 100644
--- a/frontend/src/api/models/WellCompletionWell.ts
+++ b/frontend/src/api/models/WellCompletionsWell.ts
@@ -4,7 +4,7 @@
import type { Completions } from './Completions';
-export type WellCompletionWell = {
+export type WellCompletionsWell = {
name: string;
    attributes: Record<string, (string | number | boolean)>;
    completions: Record<string, Completions>;
diff --git a/frontend/src/api/models/WellCompletionZone.ts b/frontend/src/api/models/WellCompletionsZone.ts
similarity index 56%
rename from frontend/src/api/models/WellCompletionZone.ts
rename to frontend/src/api/models/WellCompletionsZone.ts
index 15202c456..02047e005 100644
--- a/frontend/src/api/models/WellCompletionZone.ts
+++ b/frontend/src/api/models/WellCompletionsZone.ts
@@ -2,9 +2,9 @@
/* tslint:disable */
/* eslint-disable */
-export type WellCompletionZone = {
+export type WellCompletionsZone = {
name: string;
color: string;
-    subzones: (Array<WellCompletionZone> | null);
+    subzones: (Array<WellCompletionsZone> | null);
};
diff --git a/frontend/src/api/services/DefaultService.ts b/frontend/src/api/services/DefaultService.ts
index 6cc8012e0..71efe87fa 100644
--- a/frontend/src/api/services/DefaultService.ts
+++ b/frontend/src/api/services/DefaultService.ts
@@ -69,13 +69,22 @@ export class DefaultService {
/**
* Logged In User
+ * @param includeGraphApiInfo Set to true to include user avatar and display name from Microsoft Graph API
* @returns UserInfo Successful Response
* @throws ApiError
*/
-    public loggedInUser(): CancelablePromise<UserInfo> {
+ public loggedInUser(
+ includeGraphApiInfo: boolean = false,
+    ): CancelablePromise<UserInfo> {
return this.httpRequest.request({
method: 'GET',
url: '/logged_in_user',
+ query: {
+ 'includeGraphApiInfo': includeGraphApiInfo,
+ },
+ errors: {
+ 422: `Validation Error`,
+ },
});
}
diff --git a/frontend/src/api/services/WellCompletionService.ts b/frontend/src/api/services/WellCompletionsService.ts
similarity index 72%
rename from frontend/src/api/services/WellCompletionService.ts
rename to frontend/src/api/services/WellCompletionsService.ts
index 99c919b59..23bf1ecf6 100644
--- a/frontend/src/api/services/WellCompletionService.ts
+++ b/frontend/src/api/services/WellCompletionsService.ts
@@ -1,31 +1,31 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
-import type { WellCompletionData } from '../models/WellCompletionData';
+import type { WellCompletionsData } from '../models/WellCompletionsData';
import type { CancelablePromise } from '../core/CancelablePromise';
import type { BaseHttpRequest } from '../core/BaseHttpRequest';
-export class WellCompletionService {
+export class WellCompletionsService {
constructor(public readonly httpRequest: BaseHttpRequest) {}
/**
- * Get Well Completion Data
+ * Get Well Completions Data
* @param caseUuid Sumo case uuid
* @param ensembleName Ensemble name
* @param realization Optional realization to include. If not specified, all realizations will be returned.
- * @returns WellCompletionData Successful Response
+ * @returns WellCompletionsData Successful Response
* @throws ApiError
*/
- public getWellCompletionData(
+ public getWellCompletionsData(
caseUuid: string,
ensembleName: string,
realization?: (number | null),
-    ): CancelablePromise<WellCompletionData> {
+    ): CancelablePromise<WellCompletionsData> {
return this.httpRequest.request({
method: 'GET',
- url: '/well_completion/well_completion_data/',
+ url: '/well_completions/well_completions_data/',
query: {
'case_uuid': caseUuid,
'ensemble_name': ensembleName,
diff --git a/frontend/src/framework/Module.tsx b/frontend/src/framework/Module.tsx
index 770447afe..9d825f3b1 100644
--- a/frontend/src/framework/Module.tsx
+++ b/frontend/src/framework/Module.tsx
@@ -44,13 +44,15 @@ export class Module {
private _syncableSettingKeys: SyncSettingKey[];
private _channelsDef: BroadcastChannelsDef;
private _drawPreviewFunc: DrawPreviewFunc | null;
+ private _description: string | null;
constructor(
name: string,
defaultTitle: string,
syncableSettingKeys: SyncSettingKey[] = [],
broadcastChannelsDef: BroadcastChannelsDef = {},
- drawPreviewFunc: DrawPreviewFunc | null = null
+ drawPreviewFunc: DrawPreviewFunc | null = null,
+ description: string | null = null
) {
this._name = name;
this._defaultTitle = defaultTitle;
@@ -63,6 +65,7 @@ export class Module {
this._syncableSettingKeys = syncableSettingKeys;
this._channelsDef = broadcastChannelsDef;
this._drawPreviewFunc = drawPreviewFunc;
+ this._description = description;
}
getDrawPreviewFunc(): DrawPreviewFunc | null {
@@ -73,14 +76,18 @@ export class Module {
return this._importState;
}
- getName() {
+ getName(): string {
return this._name;
}
- getDefaultTitle() {
+ getDefaultTitle(): string {
return this._defaultTitle;
}
+ getDescription(): string | null {
+ return this._description;
+ }
+
setWorkbench(workbench: Workbench): void {
this._workbench = workbench;
}
diff --git a/frontend/src/framework/ModuleRegistry.ts b/frontend/src/framework/ModuleRegistry.ts
index b3c470b0d..92e3edc22 100644
--- a/frontend/src/framework/ModuleRegistry.ts
+++ b/frontend/src/framework/ModuleRegistry.ts
@@ -11,6 +11,7 @@ export type RegisterModuleOptions = {
syncableSettingKeys?: SyncSettingKey[];
broadcastChannelsDef?: BroadcastChannelsDef;
preview?: DrawPreviewFunc;
+ description?: string;
};
export class ModuleNotFoundError extends Error {
@@ -38,7 +39,8 @@ export class ModuleRegistry {
options.defaultTitle,
options.syncableSettingKeys,
options.broadcastChannelsDef,
- options.preview || null
+    Class for converting a pandas dataframe with well completions data into the WellCompletionsData type
+
+    The accessor retrieves well completions data from Sumo as table data. This converter class handles
+    the pandas dataframe and provides a data structure for the API to consume.
return module;
diff --git a/frontend/src/framework/internal/components/Content/private-components/ViewWrapper/private-components/crashView.tsx b/frontend/src/framework/internal/components/Content/private-components/ViewWrapper/private-components/crashView.tsx
index 179ed3ad5..51d067680 100644
--- a/frontend/src/framework/internal/components/Content/private-components/ViewWrapper/private-components/crashView.tsx
+        # - Only column names?
@@ -1,8 +1,8 @@
import React from "react";
-import { ArrowPathIcon, DocumentMagnifyingGlassIcon, FaceFrownIcon, MegaphoneIcon } from "@heroicons/react/20/solid";
import { Button } from "@lib/components/Button";
import { Dialog } from "@lib/components/Dialog";
+import { BugReport, Info, MoodBad, Refresh } from "@mui/icons-material";
export type FormattedErrorProps = {
moduleName: string;
@@ -79,24 +79,20 @@ export const CrashView: React.FC = (props) => {
return (
-
+
{props.error.message}
The above error made your module instance crash. Unfortunately, this means that its state is lost. You
can try to reset the instance to its initial state in order to start over.
- }
- >
+ }>
Reset to initial state
- }>
+ }>
Show error details
- }>
+ }>
Report error
diff --git a/frontend/src/framework/internal/components/Content/private-components/ViewWrapper/private-components/header.tsx b/frontend/src/framework/internal/components/Content/private-components/ViewWrapper/private-components/header.tsx
index c9acc4517..eb0f42009 100644
--- a/frontend/src/framework/internal/components/Content/private-components/ViewWrapper/private-components/header.tsx
+++ b/frontend/src/framework/internal/components/Content/private-components/ViewWrapper/private-components/header.tsx
@@ -2,8 +2,8 @@ import React from "react";
import { ModuleInstance } from "@framework/ModuleInstance";
import { SyncSettingKey, SyncSettingsMeta } from "@framework/SyncSettings";
-import { XMarkIcon } from "@heroicons/react/20/solid";
import { isDevMode } from "@lib/utils/devMode";
+import { Close } from "@mui/icons-material";
export type HeaderProps = {
moduleInstance: ModuleInstance;
@@ -38,6 +38,10 @@ export const Header: React.FC = (props) => {
return unsubscribeFunc;
}, []);
+ function handlePointerUp(e: React.PointerEvent) {
+ e.stopPropagation();
+ }
+
return (