diff --git a/backend_py/primary/primary/routers/well/router.py b/backend_py/primary/primary/routers/well/router.py
index 2ae3e34bf..499925316 100644
--- a/backend_py/primary/primary/routers/well/router.py
+++ b/backend_py/primary/primary/routers/well/router.py
@@ -12,7 +12,6 @@
 from primary.services.utils.authenticated_user import AuthenticatedUser
 from primary.auth.auth_helper import AuthHelper
 from primary.services.sumo_access.case_inspector import CaseInspector
-from primary.services.smda_access.types import WellBoreHeader, WellBoreTrajectory
 from primary.services.ssdl_access.well_access import WellAccess as SsdlWellAccess
@@ -36,7 +35,7 @@ async def get_drilled_wellbore_headers(
     case_inspector = await CaseInspector.from_case_uuid_async(authenticated_user.get_sumo_access_token(), case_uuid)
     field_identifier = (await case_inspector.get_field_identifiers_async())[0]
-    well_access: Union[WellAccess, mocked_drogon_smda_access.WellAccess]
+    well_access: Union[SmdaWellAccess, MockedSmdaWellAccess]
     if field_identifier == "DROGON":
         # Handle DROGON
         well_access = MockedSmdaWellAccess(authenticated_user.get_smda_access_token())
@@ -57,7 +56,7 @@ async def get_field_well_trajectories(
     """Get well trajectories for field"""
     case_inspector = await CaseInspector.from_case_uuid_async(authenticated_user.get_sumo_access_token(), case_uuid)
     field_identifier = (await case_inspector.get_field_identifiers_async())[0]
-    well_access: Union[WellAccess, mocked_drogon_smda_access.WellAccess]
+    well_access: Union[SmdaWellAccess, MockedSmdaWellAccess]
     if field_identifier == "DROGON":
         # Handle DROGON
         well_access = MockedSmdaWellAccess(authenticated_user.get_smda_access_token())
diff --git a/backend_py/primary/primary/services/sumo_access/seismic_access.py b/backend_py/primary/primary/services/sumo_access/seismic_access.py
index e2ee81178..ec8bc2450 100644
--- a/backend_py/primary/primary/services/sumo_access/seismic_access.py
+++ b/backend_py/primary/primary/services/sumo_access/seismic_access.py
@@ -1,14 +1,14 @@
 import logging
-
+
 from typing import List
-
+
 from fmu.sumo.explorer import TimeFilter, TimeType
 from fmu.sumo.explorer.objects import Case
 from fmu.sumo.explorer.objects.cube_collection import CubeCollection
 from ._helpers import create_sumo_client, create_sumo_case_async
 from .seismic_types import SeismicCubeMeta, VdsHandle
-
+
 LOGGER = logging.getLogger(__name__)
@@ -25,31 +25,30 @@ async def from_case_uuid_async(cls, access_token: str, case_uuid: str, iteration
         return SeismicAccess(case=case, case_uuid=case_uuid, iteration_name=iteration_name)

     async def get_seismic_cube_meta_list_async(self) -> List[SeismicCubeMeta]:
-
+
         seismic_cube_collection: CubeCollection = self._case.cubes.filter(iteration=self._iteration_name, realization=0)
         seismic_cube_meta_list: List[SeismicCubeMeta] = []
-        # Todo: Handle observed cube per realization
-
-        # Get metadata for simulated cube for the the first valid realization
-        realizations = self.get_realizations()
-        if not realizations:
-            raise NoDataError(
-                f"No valid realizations found for case {self._case_uuid,}, iteration {self._iteration_name}",
-                Service.SUMO,
+        async for cube in seismic_cube_collection:
+            t_start = cube["data"].get("time", {}).get("t0", {}).get("value", None)
+            t_end = cube["data"].get("time", {}).get("t1", {}).get("value", None)
+
+            if not t_start and not t_end:
+                raise ValueError(f"Cube {cube['data']['tagname']} has no time information")
+
+            if t_start and not t_end:
+                iso_string_or_time_interval = t_start
+
+            else:
+                iso_string_or_time_interval = f"{t_start}/{t_end}"
+
+            seismic_meta = SeismicCubeMeta(
+                seismic_attribute=cube["data"].get("tagname"),
+                iso_date_or_interval=iso_string_or_time_interval,
+                is_observation=cube["data"]["is_observation"],
+                is_depth=cube["data"]["vertical_domain"] == "depth",
             )
-        seismic_cube_realization_collection: CubeCollection = self._case.cubes.filter(
-            iteration=self._iteration_name, realization=self.get_realizations()[0]
-        )
-        async for cube in seismic_cube_realization_collection:
-            seismic_meta = get_seismic_cube_meta(cube, False)
-            seismic_cube_meta_list.append(seismic_meta)
-
-        # Get metadata for observed cubes on case level (preprocessed)
-        seismic_cube_preprocessed_collection: CubeCollection = self._case.cubes.filter(stage="case")
-        async for cube in seismic_cube_preprocessed_collection:
-            seismic_meta = get_seismic_cube_meta(cube, True)
             seismic_cube_meta_list.append(seismic_meta)
         return seismic_cube_meta_list
-
+
     async def get_vds_handle_async(
         self,
         seismic_attribute: str,
@@ -75,61 +74,34 @@ async def get_vds_handle_async(
             end=timestamp_arr[1],
             exact=True,
         )
-        if observed:
-            cube_collection: CubeCollection = self._case.cubes.filter(
-                tagname=seismic_attribute, time=time_filter, is_observation=observed, stage="case"
-            )
-        else:
-            cube_collection: CubeCollection = self._case.cubes.filter(
-                tagname=seismic_attribute,
-                realization=realization,
-                iteration=self._iteration_name,
-                time=time_filter,
-                is_observation=False,  # Does not work for observed. Only handles observed on case level?
-            )
+
+        cube_collection: CubeCollection = self._case.cubes.filter(
+            tagname=seismic_attribute,
+            realization=realization,
+            iteration=self._iteration_name,
+            time=time_filter,
+            # is_observation=observed,  # Does not work for observed. Only handles observed on case level?
+        )
+        # Filter on observed
         cubes = []
         async for cube in cube_collection:
             if cube["data"]["is_observation"] == observed:
                 cubes.append(cube)
                 break
-
+
         if not cubes:
             raise ValueError(f"Cube {seismic_attribute} not found in case {self._case_uuid}")
         if len(cubes) > 1:
             raise ValueError(f"Multiple cubes found for {seismic_attribute} in case {self._case_uuid}")
         cube = cubes[0]
-
+
         return VdsHandle(
             sas_token=cube.sas,
             vds_url=clean_vds_url(cube.url),
         )
-
-
+
+
 def clean_vds_url(vds_url: str) -> str:
     """clean vds url"""
-    return vds_url.replace(":443", "")
-
-
-def get_seismic_cube_meta(sumo_cube_meta: Cube, is_observation: bool) -> SeismicCubeMeta:
-    t_start = sumo_cube_meta["data"].get("time", {}).get("t0", {}).get("value", None)
-    t_end = sumo_cube_meta["data"].get("time", {}).get("t1", {}).get("value", None)
-
-    if not t_start and not t_end:
-        raise ValueError(f"Cube {sumo_cube_meta['data']['tagname']} has no time information")
-
-    if t_start and not t_end:
-        iso_string_or_time_interval = t_start
-
-    else:
-        iso_string_or_time_interval = f"{t_start}/{t_end}"
-
-    seismic_meta = SeismicCubeMeta(
-        seismic_attribute=sumo_cube_meta["data"].get("tagname"),
-        iso_date_or_interval=iso_string_or_time_interval,
-        # is_observation=sumo_cube_meta["data"]["is_observation"],
-        is_observation=is_observation,
-        is_depth=sumo_cube_meta["data"]["vertical_domain"] == "depth",
-    )
-    return seismic_meta
-
\ No newline at end of file
+    return vds_url.replace(":443", "")
\ No newline at end of file