Skip to content

Commit

Permalink
Bug fixes
Browse files Browse the repository at this point in the history
  • Loading branch information
rubenthoms committed Jun 11, 2024
1 parent 1165715 commit e7d3ffa
Show file tree
Hide file tree
Showing 21 changed files with 437 additions and 391 deletions.
5 changes: 2 additions & 3 deletions backend_py/primary/primary/routers/well/router.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,6 @@
from primary.services.utils.authenticated_user import AuthenticatedUser
from primary.auth.auth_helper import AuthHelper
from primary.services.sumo_access.case_inspector import CaseInspector
from primary.services.smda_access.types import WellBoreHeader, WellBoreTrajectory

from primary.services.ssdl_access.well_access import WellAccess as SsdlWellAccess

Expand All @@ -36,7 +35,7 @@ async def get_drilled_wellbore_headers(

case_inspector = await CaseInspector.from_case_uuid_async(authenticated_user.get_sumo_access_token(), case_uuid)
field_identifier = (await case_inspector.get_field_identifiers_async())[0]
well_access: Union[WellAccess, mocked_drogon_smda_access.WellAccess]
well_access: Union[SmdaWellAccess, MockedSmdaWellAccess]
if field_identifier == "DROGON":
# Handle DROGON
well_access = MockedSmdaWellAccess(authenticated_user.get_smda_access_token())
Expand All @@ -57,7 +56,7 @@ async def get_field_well_trajectories(
"""Get well trajectories for field"""
case_inspector = await CaseInspector.from_case_uuid_async(authenticated_user.get_sumo_access_token(), case_uuid)
field_identifier = (await case_inspector.get_field_identifiers_async())[0]
well_access: Union[WellAccess, mocked_drogon_smda_access.WellAccess]
well_access: Union[SmdaWellAccess, MockedSmdaWellAccess]
if field_identifier == "DROGON":
# Handle DROGON
well_access = MockedSmdaWellAccess(authenticated_user.get_smda_access_token())
Expand Down
102 changes: 37 additions & 65 deletions backend_py/primary/primary/services/sumo_access/seismic_access.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,14 @@
import logging

from typing import List

from fmu.sumo.explorer import TimeFilter, TimeType
from fmu.sumo.explorer.objects import Case
from fmu.sumo.explorer.objects.cube_collection import CubeCollection

from ._helpers import create_sumo_client, create_sumo_case_async
from .seismic_types import SeismicCubeMeta, VdsHandle

LOGGER = logging.getLogger(__name__)


Expand All @@ -25,31 +25,30 @@ async def from_case_uuid_async(cls, access_token: str, case_uuid: str, iteration
return SeismicAccess(case=case, case_uuid=case_uuid, iteration_name=iteration_name)

async def get_seismic_cube_meta_list_async(self) -> List[SeismicCubeMeta]:

seismic_cube_collection: CubeCollection = self._case.cubes.filter(iteration=self._iteration_name, realization=0)
seismic_cube_meta_list: List[SeismicCubeMeta] = []
# Todo: Handle observed cube per realization

# Get metadata for simulated cube for the first valid realization
realizations = self.get_realizations()
if not realizations:
raise NoDataError(
f"No valid realizations found for case {self._case_uuid,}, iteration {self._iteration_name}",
Service.SUMO,
async for cube in seismic_cube_collection:
t_start = cube["data"].get("time", {}).get("t0", {}).get("value", None)
t_end = cube["data"].get("time", {}).get("t1", {}).get("value", None)

if not t_start and not t_end:
raise ValueError(f"Cube {cube['data']['tagname']} has no time information")

if t_start and not t_end:
iso_string_or_time_interval = t_start

else:
iso_string_or_time_interval = f"{t_start}/{t_end}"

seismic_meta = SeismicCubeMeta(
seismic_attribute=cube["data"].get("tagname"),
iso_date_or_interval=iso_string_or_time_interval,
is_observation=cube["data"]["is_observation"],
is_depth=cube["data"]["vertical_domain"] == "depth",
)
seismic_cube_realization_collection: CubeCollection = self._case.cubes.filter(
iteration=self._iteration_name, realization=self.get_realizations()[0]
)
async for cube in seismic_cube_realization_collection:
seismic_meta = get_seismic_cube_meta(cube, False)
seismic_cube_meta_list.append(seismic_meta)

# Get metadata for observed cubes on case level (preprocessed)
seismic_cube_preprocessed_collection: CubeCollection = self._case.cubes.filter(stage="case")
async for cube in seismic_cube_preprocessed_collection:
seismic_meta = get_seismic_cube_meta(cube, True)
seismic_cube_meta_list.append(seismic_meta)
return seismic_cube_meta_list

async def get_vds_handle_async(
self,
seismic_attribute: str,
Expand All @@ -75,61 +74,34 @@ async def get_vds_handle_async(
end=timestamp_arr[1],
exact=True,
)
if observed:
cube_collection: CubeCollection = self._case.cubes.filter(
tagname=seismic_attribute, time=time_filter, is_observation=observed, stage="case"
)
else:
cube_collection: CubeCollection = self._case.cubes.filter(
tagname=seismic_attribute,
realization=realization,
iteration=self._iteration_name,
time=time_filter,
is_observation=False, # Does not work for observed. Only handles observed on case level?
)

cube_collection: CubeCollection = self._case.cubes.filter(
tagname=seismic_attribute,
realization=realization,
iteration=self._iteration_name,
time=time_filter,
# is_observation=observed, # Does not work for observed. Only handles observed on case level?
)

# Filter on observed
cubes = []
async for cube in cube_collection:
if cube["data"]["is_observation"] == observed:
cubes.append(cube)
break

if not cubes:
raise ValueError(f"Cube {seismic_attribute} not found in case {self._case_uuid}")
if len(cubes) > 1:
raise ValueError(f"Multiple cubes found for {seismic_attribute} in case {self._case_uuid}")
cube = cubes[0]

return VdsHandle(
sas_token=cube.sas,
vds_url=clean_vds_url(cube.url),
)


def clean_vds_url(vds_url: str) -> str:
    """Strip the explicit HTTPS port (":443") from a VDS URL and return it."""
    cleaned = vds_url.replace(":443", "")
    return cleaned


def get_seismic_cube_meta(sumo_cube_meta: Cube, is_observation: bool) -> SeismicCubeMeta:
    """Build a SeismicCubeMeta from a Sumo cube object.

    The time label is the t0 value when only a single timestamp is present,
    otherwise the "t0/t1" interval string. The observation flag is taken from
    the caller rather than the cube metadata.

    Raises:
        ValueError: if the cube metadata carries neither a t0 nor a t1 value.
    """
    time_info = sumo_cube_meta["data"].get("time", {})
    t0_value = time_info.get("t0", {}).get("value", None)
    t1_value = time_info.get("t1", {}).get("value", None)

    if not t0_value and not t1_value:
        raise ValueError(f"Cube {sumo_cube_meta['data']['tagname']} has no time information")

    if t0_value and not t1_value:
        iso_date_or_interval = t0_value
    else:
        iso_date_or_interval = f"{t0_value}/{t1_value}"

    return SeismicCubeMeta(
        seismic_attribute=sumo_cube_meta["data"].get("tagname"),
        iso_date_or_interval=iso_date_or_interval,
        is_observation=is_observation,
        is_depth=sumo_cube_meta["data"]["vertical_domain"] == "depth",
    )

return vds_url.replace(":443", "")
12 changes: 11 additions & 1 deletion frontend/src/framework/GlobalAtoms.ts
Original file line number Diff line number Diff line change
Expand Up @@ -21,4 +21,14 @@ export const EnsembleRealizationFilterFunctionAtom = atom<EnsembleRealizationFil
realizationFilterSet.getRealizationFilterForEnsembleIdent(ensembleIdent).getFilteredRealizations();
});

export const RealizationFilterSetAtom = atomWithCompare<RealizationFilterSet | null>(null, isEqual);
export const RealizationFilterSetAtom = atomWithCompare<RealizationFilterSet | null>(null, (a, b) => {
    // Two nulls are equal; a single null is unequal; otherwise defer to the
    // filter set's own structural comparison.
    if (a === null || b === null) {
        return a === b;
    }

    return a.isEqual(b);
});
19 changes: 19 additions & 0 deletions frontend/src/framework/RealizationFilterSet.ts
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
import { isEqual } from "lodash";

import { EnsembleIdent } from "./EnsembleIdent";
import { EnsembleSet } from "./EnsembleSet";
import { RealizationFilter } from "./RealizationFilter";
Expand Down Expand Up @@ -44,4 +46,21 @@ export class RealizationFilterSet {

return filter;
}

isEqual(other: RealizationFilterSet): boolean {
if (
this._ensembleIdentStringRealizationFilterMap.size !== other._ensembleIdentStringRealizationFilterMap.size
) {
return false;
}

for (const [ensembleIdentString, realizationFilter] of this._ensembleIdentStringRealizationFilterMap) {
const otherRealizationFilter = other._ensembleIdentStringRealizationFilterMap.get(ensembleIdentString);
if (!otherRealizationFilter || isEqual(realizationFilter, otherRealizationFilter)) {
return false;
}
}

return true;
}
}
4 changes: 2 additions & 2 deletions frontend/src/framework/WorkbenchSession.ts
Original file line number Diff line number Diff line change
Expand Up @@ -104,11 +104,11 @@ export function useEnsembleRealizationFilterFunc(
);
}

const unsubFunc = workbenchSession.subscribe(
const unsubscribeFunc = workbenchSession.subscribe(
WorkbenchSessionEvent.RealizationFilterSetChanged,
handleEnsembleRealizationFilterSetChanged
);
return unsubFunc;
return unsubscribeFunc;
},
[workbenchSession]
);
Expand Down
Loading

0 comments on commit e7d3ffa

Please sign in to comment.