From 21879f5fcfb84c8238e3a205451fc81ad659c444 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B8rgen=20Herje?= <82032112+jorgenherje@users.noreply.github.com> Date: Wed, 18 Sep 2024 09:06:41 +0200 Subject: [PATCH 1/3] Inplace volumetric matrix plot and table modules, with new inplace endpoints back-end (#684) Co-authored-by: Hans Kallekleiv <16436291+HansKallekleiv@users.noreply.github.com> Co-authored-by: Ruben Thoms --- backend_py/primary/poetry.lock | 46 +- .../routers/inplace_volumetrics/converters.py | 177 +++++ .../routers/inplace_volumetrics/router.py | 215 +++--- .../routers/inplace_volumetrics/schemas.py | 158 +++++ .../_conversion/_conversion.py | 234 +++++++ .../inplace_volumetrics_assembler/_utils.py | 524 ++++++++++++++ .../inplace_volumetrics_assembler.py | 520 ++++++++++++++ .../sumo_access/inplace_volumetrics_access.py | 377 +++++----- .../sumo_access/inplace_volumetrics_types.py | 189 +++++ backend_py/primary/pyproject.toml | 1 + .../test_create_row_filtered_volumetric_df.py | 137 ++++ ...cal_df_to_statistical_result_table_data.py | 86 +++ ...te_calculated_volume_column_expressions.py | 70 ++ ...ed_statistical_result_table_data_polars.py | 104 +++ ...ce_volumetric_table_data_from_result_df.py | 62 ++ ..._per_group_summed_realization_volume_df.py | 98 +++ ...test_create_property_column_expressions.py | 82 +++ ...est_create_volumetric_df_per_fluid_zone.py | 52 ++ ...create_volumetric_summed_fluid_zones_df.py | 57 ++ .../utils/test_inplace_volumetrics_utils.py | 317 +++++++++ frontend/src/api/index.ts | 18 +- .../models/Body_get_realizations_response.ts | 10 - ...t_aggregated_per_realization_table_data.ts | 12 + ...t_get_aggregated_statistical_table_data.ts | 12 + ...icsCategoricalMetaData.ts => FluidZone.ts} | 10 +- .../InplaceStatisticalVolumetricTableData.ts | 17 + ...calVolumetricTableDataPerFluidSelection.ts | 14 + .../api/models/InplaceVolumetricResultName.ts | 25 + .../api/models/InplaceVolumetricStatistic.ts | 15 + .../api/models/InplaceVolumetricTableData.ts | 17 + ...aceVolumetricTableDataPerFluidSelection.ts | 14 + .../models/InplaceVolumetricsIdentifier.ts | 10 + .../InplaceVolumetricsIdentifierWithValues.ts | 14 + .../InplaceVolumetricsTableDefinition.ts | 17 + .../models/InplaceVolumetricsTableMetaData.ts | 11 - .../src/api/models/RepeatedTableColumnData.ts | 18 + frontend/src/api/models/TableColumnData.ts | 14 + .../api/models/TableColumnStatisticalData.ts | 14 + .../api/services/InplaceVolumetricsService.ts | 106 ++- frontend/src/assets/volumeDefinitions.ts | 29 + frontend/src/framework/GlobalAtoms.ts | 23 + frontend/src/framework/SyncSettings.ts | 4 + frontend/src/framework/WorkbenchServices.ts | 6 + .../EnsembleDropdown/ensembleDropdown.tsx | 2 +- .../EnsembleSelect/ensembleSelect.tsx | 2 +- .../ModuleInstanceLog/moduleInstanceLog.tsx | 2 +- .../types/inplaceVolumetricsFilter.ts | 9 + .../src/lib/components/Dropdown/dropdown.tsx | 79 ++- .../PendingWrapper/pendingWrapper.tsx | 4 +- frontend/src/lib/components/Select/select.tsx | 260 ++++--- frontend/src/lib/components/Table/table.tsx | 645 ++++++++++++++---- .../src/lib/components/TagPicker/index.ts | 2 + .../lib/components/TagPicker/tagPicker.tsx | 340 +++++++++ frontend/src/lib/utils/fixupUserSelection.ts | 15 + frontend/src/main.css | 9 + .../src/modules/DistributionPlot/view.tsx | 2 +- .../modules/InplaceVolumetrics/interfaces.ts | 34 - .../modules/InplaceVolumetrics/loadModule.tsx | 10 - .../modules/InplaceVolumetrics/queryHooks.tsx | 63 -- .../settings/atoms/baseAtoms.ts | 11 - 
.../InplaceVolumetrics/settings/settings.tsx | 222 ------ .../src/modules/InplaceVolumetrics/view.tsx | 122 ---- .../channelDefs.ts | 6 +- .../InplaceVolumetricsPlot/interfaces.ts | 55 ++ .../InplaceVolumetricsPlot/loadModule.tsx | 15 + .../InplaceVolumetricsPlot/preview.svg | 135 ++++ .../InplaceVolumetricsPlot/preview.tsx | 7 + .../registerModule.ts | 15 +- .../settings/atoms/baseAtoms.ts | 21 + .../settings/atoms/derivedAtoms.ts | 230 +++++++ .../settings/atoms/queryAtoms.ts | 54 ++ .../settings/settings.tsx | 177 +++++ .../settings/utils/plotDimensionUtils.ts | 111 +++ .../InplaceVolumetricsPlot/typesAndEnums.ts | 17 + .../view/atoms/baseAtoms.ts | 25 + .../view/atoms/derivedAtoms.ts | 65 ++ .../view/atoms/interfaceEffects.ts | 53 ++ .../view/atoms/queryAtoms.ts | 52 ++ .../hooks/useMakeViewStatusWriterMessages.ts | 30 + .../view/hooks/usePlotBuilder.ts | 100 +++ .../view/hooks/usePublishToDataChannels.ts | 90 +++ .../view/utils/convergenceCalculation.ts | 37 + .../view/utils/plotComponentUtils.ts | 348 ++++++++++ .../InplaceVolumetricsPlot/view/view.tsx | 78 +++ .../InplaceVolumetricsTable/interfaces.ts | 47 ++ .../InplaceVolumetricsTable/loadModule.tsx | 15 + .../InplaceVolumetricsTable/preview.svg | 47 ++ .../InplaceVolumetricsTable/preview.tsx | 7 + .../InplaceVolumetricsTable/registerModule.ts | 21 + .../settings/atoms/baseAtoms.ts | 23 + .../settings/atoms/derivedAtoms.ts | 164 +++++ .../settings/atoms/queryAtoms.ts | 54 ++ .../settings/settings.tsx | 176 +++++ .../view/atoms/baseAtoms.ts | 20 + .../view/atoms/derivedAtoms.ts | 83 +++ .../view/atoms/interfaceEffects.ts | 43 ++ .../view/atoms/queryAtoms.ts | 77 +++ .../hooks/useMakeViewStatusWriterMessages.ts | 74 ++ .../view/hooks/useTableBuilder.ts | 46 ++ .../view/utils/tableComponentUtils.ts | 224 ++++++ .../InplaceVolumetricsTable/view/view.tsx | 99 +++ frontend/src/modules/MyModule2/view.tsx | 49 +- .../view/atoms/derivedAtoms.ts | 15 - .../view/atoms/queryAtoms.ts | 6 +- frontend/src/modules/SubsurfaceMap/view.tsx | 1 + frontend/src/modules/_shared/Figure.tsx | 64 +- .../InplaceVolumetrics/PlotBuilder.tsx | 212 ++++++ .../_shared/InplaceVolumetrics/Table.ts | 195 ++++++ .../InplaceVolumetrics/TableCollection.ts | 49 ++ .../TableDefinitionsAccessor.ts | 225 ++++++ .../fixupUserSelectedIdentifierValues.ts | 50 ++ .../_shared/InplaceVolumetrics/queryHooks.ts | 232 +++++++ .../InplaceVolumetrics/sortResultNames.ts | 25 + .../_shared/InplaceVolumetrics/tableUtils.ts | 236 +++++++ .../_shared/InplaceVolumetrics/types.ts | 73 ++ .../volumetricStringUtils.ts | 28 + .../index.ts | 1 + .../inplaceVolumetricsFilterComponent.tsx | 290 ++++++++ .../src/modules/_shared/ensembleNameUtils.ts | 5 +- .../utils => _shared}/histogram.tsx | 25 +- .../_shared/utils/numberSuffixFormatting.ts | 16 + frontend/src/modules/registerAllModules.ts | 3 +- frontend/tests/ct/select.test.tsx | 48 +- 123 files changed, 9473 insertions(+), 1085 deletions(-) create mode 100644 backend_py/primary/primary/routers/inplace_volumetrics/converters.py create mode 100644 backend_py/primary/primary/routers/inplace_volumetrics/schemas.py create mode 100644 backend_py/primary/primary/services/inplace_volumetrics_assembler/_conversion/_conversion.py create mode 100644 backend_py/primary/primary/services/inplace_volumetrics_assembler/_utils.py create mode 100644 backend_py/primary/primary/services/inplace_volumetrics_assembler/inplace_volumetrics_assembler.py create mode 100644 backend_py/primary/primary/services/sumo_access/inplace_volumetrics_types.py create mode 100644 
backend_py/primary/tests/unit/inplace_volumetrics_assembler/test_create_row_filtered_volumetric_df.py create mode 100644 backend_py/primary/tests/unit/inplace_volumetrics_assembler/utils/test_convert_statistical_df_to_statistical_result_table_data.py create mode 100644 backend_py/primary/tests/unit/inplace_volumetrics_assembler/utils/test_create_calculated_volume_column_expressions.py create mode 100644 backend_py/primary/tests/unit/inplace_volumetrics_assembler/utils/test_create_grouped_statistical_result_table_data_polars.py create mode 100644 backend_py/primary/tests/unit/inplace_volumetrics_assembler/utils/test_create_inplace_volumetric_table_data_from_result_df.py create mode 100644 backend_py/primary/tests/unit/inplace_volumetrics_assembler/utils/test_create_per_group_summed_realization_volume_df.py create mode 100644 backend_py/primary/tests/unit/inplace_volumetrics_assembler/utils/test_create_property_column_expressions.py create mode 100644 backend_py/primary/tests/unit/inplace_volumetrics_assembler/utils/test_create_volumetric_df_per_fluid_zone.py create mode 100644 backend_py/primary/tests/unit/inplace_volumetrics_assembler/utils/test_create_volumetric_summed_fluid_zones_df.py create mode 100644 backend_py/primary/tests/unit/inplace_volumetrics_assembler/utils/test_inplace_volumetrics_utils.py delete mode 100644 frontend/src/api/models/Body_get_realizations_response.ts create mode 100644 frontend/src/api/models/Body_post_get_aggregated_per_realization_table_data.ts create mode 100644 frontend/src/api/models/Body_post_get_aggregated_statistical_table_data.ts rename frontend/src/api/models/{InplaceVolumetricsCategoricalMetaData.ts => FluidZone.ts} (52%) create mode 100644 frontend/src/api/models/InplaceStatisticalVolumetricTableData.ts create mode 100644 frontend/src/api/models/InplaceStatisticalVolumetricTableDataPerFluidSelection.ts create mode 100644 frontend/src/api/models/InplaceVolumetricResultName.ts create mode 100644 frontend/src/api/models/InplaceVolumetricStatistic.ts create mode 100644 frontend/src/api/models/InplaceVolumetricTableData.ts create mode 100644 frontend/src/api/models/InplaceVolumetricTableDataPerFluidSelection.ts create mode 100644 frontend/src/api/models/InplaceVolumetricsIdentifier.ts create mode 100644 frontend/src/api/models/InplaceVolumetricsIdentifierWithValues.ts create mode 100644 frontend/src/api/models/InplaceVolumetricsTableDefinition.ts delete mode 100644 frontend/src/api/models/InplaceVolumetricsTableMetaData.ts create mode 100644 frontend/src/api/models/RepeatedTableColumnData.ts create mode 100644 frontend/src/api/models/TableColumnData.ts create mode 100644 frontend/src/api/models/TableColumnStatisticalData.ts create mode 100644 frontend/src/assets/volumeDefinitions.ts create mode 100644 frontend/src/framework/types/inplaceVolumetricsFilter.ts create mode 100644 frontend/src/lib/components/TagPicker/index.ts create mode 100644 frontend/src/lib/components/TagPicker/tagPicker.tsx create mode 100644 frontend/src/lib/utils/fixupUserSelection.ts delete mode 100644 frontend/src/modules/InplaceVolumetrics/interfaces.ts delete mode 100644 frontend/src/modules/InplaceVolumetrics/loadModule.tsx delete mode 100644 frontend/src/modules/InplaceVolumetrics/queryHooks.tsx delete mode 100644 frontend/src/modules/InplaceVolumetrics/settings/atoms/baseAtoms.ts delete mode 100644 frontend/src/modules/InplaceVolumetrics/settings/settings.tsx delete mode 100644 frontend/src/modules/InplaceVolumetrics/view.tsx rename frontend/src/modules/{InplaceVolumetrics => 
InplaceVolumetricsPlot}/channelDefs.ts (57%) create mode 100644 frontend/src/modules/InplaceVolumetricsPlot/interfaces.ts create mode 100644 frontend/src/modules/InplaceVolumetricsPlot/loadModule.tsx create mode 100644 frontend/src/modules/InplaceVolumetricsPlot/preview.svg create mode 100644 frontend/src/modules/InplaceVolumetricsPlot/preview.tsx rename frontend/src/modules/{InplaceVolumetrics => InplaceVolumetricsPlot}/registerModule.ts (54%) create mode 100644 frontend/src/modules/InplaceVolumetricsPlot/settings/atoms/baseAtoms.ts create mode 100644 frontend/src/modules/InplaceVolumetricsPlot/settings/atoms/derivedAtoms.ts create mode 100644 frontend/src/modules/InplaceVolumetricsPlot/settings/atoms/queryAtoms.ts create mode 100644 frontend/src/modules/InplaceVolumetricsPlot/settings/settings.tsx create mode 100644 frontend/src/modules/InplaceVolumetricsPlot/settings/utils/plotDimensionUtils.ts create mode 100644 frontend/src/modules/InplaceVolumetricsPlot/typesAndEnums.ts create mode 100644 frontend/src/modules/InplaceVolumetricsPlot/view/atoms/baseAtoms.ts create mode 100644 frontend/src/modules/InplaceVolumetricsPlot/view/atoms/derivedAtoms.ts create mode 100644 frontend/src/modules/InplaceVolumetricsPlot/view/atoms/interfaceEffects.ts create mode 100644 frontend/src/modules/InplaceVolumetricsPlot/view/atoms/queryAtoms.ts create mode 100644 frontend/src/modules/InplaceVolumetricsPlot/view/hooks/useMakeViewStatusWriterMessages.ts create mode 100644 frontend/src/modules/InplaceVolumetricsPlot/view/hooks/usePlotBuilder.ts create mode 100644 frontend/src/modules/InplaceVolumetricsPlot/view/hooks/usePublishToDataChannels.ts create mode 100644 frontend/src/modules/InplaceVolumetricsPlot/view/utils/convergenceCalculation.ts create mode 100644 frontend/src/modules/InplaceVolumetricsPlot/view/utils/plotComponentUtils.ts create mode 100644 frontend/src/modules/InplaceVolumetricsPlot/view/view.tsx create mode 100644 frontend/src/modules/InplaceVolumetricsTable/interfaces.ts create mode 100644 frontend/src/modules/InplaceVolumetricsTable/loadModule.tsx create mode 100644 frontend/src/modules/InplaceVolumetricsTable/preview.svg create mode 100644 frontend/src/modules/InplaceVolumetricsTable/preview.tsx create mode 100644 frontend/src/modules/InplaceVolumetricsTable/registerModule.ts create mode 100644 frontend/src/modules/InplaceVolumetricsTable/settings/atoms/baseAtoms.ts create mode 100644 frontend/src/modules/InplaceVolumetricsTable/settings/atoms/derivedAtoms.ts create mode 100644 frontend/src/modules/InplaceVolumetricsTable/settings/atoms/queryAtoms.ts create mode 100644 frontend/src/modules/InplaceVolumetricsTable/settings/settings.tsx create mode 100644 frontend/src/modules/InplaceVolumetricsTable/view/atoms/baseAtoms.ts create mode 100644 frontend/src/modules/InplaceVolumetricsTable/view/atoms/derivedAtoms.ts create mode 100644 frontend/src/modules/InplaceVolumetricsTable/view/atoms/interfaceEffects.ts create mode 100644 frontend/src/modules/InplaceVolumetricsTable/view/atoms/queryAtoms.ts create mode 100644 frontend/src/modules/InplaceVolumetricsTable/view/hooks/useMakeViewStatusWriterMessages.ts create mode 100644 frontend/src/modules/InplaceVolumetricsTable/view/hooks/useTableBuilder.ts create mode 100644 frontend/src/modules/InplaceVolumetricsTable/view/utils/tableComponentUtils.ts create mode 100644 frontend/src/modules/InplaceVolumetricsTable/view/view.tsx create mode 100644 frontend/src/modules/_shared/InplaceVolumetrics/PlotBuilder.tsx create mode 100644 
frontend/src/modules/_shared/InplaceVolumetrics/Table.ts create mode 100644 frontend/src/modules/_shared/InplaceVolumetrics/TableCollection.ts create mode 100644 frontend/src/modules/_shared/InplaceVolumetrics/TableDefinitionsAccessor.ts create mode 100644 frontend/src/modules/_shared/InplaceVolumetrics/fixupUserSelectedIdentifierValues.ts create mode 100644 frontend/src/modules/_shared/InplaceVolumetrics/queryHooks.ts create mode 100644 frontend/src/modules/_shared/InplaceVolumetrics/sortResultNames.ts create mode 100644 frontend/src/modules/_shared/InplaceVolumetrics/tableUtils.ts create mode 100644 frontend/src/modules/_shared/InplaceVolumetrics/types.ts create mode 100644 frontend/src/modules/_shared/InplaceVolumetrics/volumetricStringUtils.ts create mode 100644 frontend/src/modules/_shared/components/InplaceVolumetricsFilterComponent/index.ts create mode 100644 frontend/src/modules/_shared/components/InplaceVolumetricsFilterComponent/inplaceVolumetricsFilterComponent.tsx rename frontend/src/modules/{DistributionPlot/utils => _shared}/histogram.tsx (76%) create mode 100644 frontend/src/modules/_shared/utils/numberSuffixFormatting.ts diff --git a/backend_py/primary/poetry.lock b/backend_py/primary/poetry.lock index fda2cdab3..05846eed7 100644 --- a/backend_py/primary/poetry.lock +++ b/backend_py/primary/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "annotated-types" @@ -2323,6 +2323,47 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "polars" +version = "1.6.0" +description = "Blazingly fast DataFrame library" +optional = false +python-versions = ">=3.8" +files = [ + {file = "polars-1.6.0-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:6d1665c23e3574ebd47a26a5d7b619e6e73e53718c3b0bfd7d08b6a0a4ae7daa"}, + {file = "polars-1.6.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d7f3abf085adf034720b358119c4c8e144bcc2d96010b7e7d0afa11b80da383c"}, + {file = "polars-1.6.0-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a166adb429f8ee099c9d803e7470a80c76368437a8b272c67cef9eef6d5e9da1"}, + {file = "polars-1.6.0-cp38-abi3-manylinux_2_24_aarch64.whl", hash = "sha256:1c811b772c9476f7f0bb4445a8387d2ab6d86f5e79140b1bfba914a32788d261"}, + {file = "polars-1.6.0-cp38-abi3-win_amd64.whl", hash = "sha256:ffae15ffa80fda5cc3af44a340b565bcf7f2ab6d7854d3f967baf505710c78e2"}, + {file = "polars-1.6.0.tar.gz", hash = "sha256:d7e8d5e577883a9755bc3be92ecbf6f20bced68267bdb8bdb440120e905cc19c"}, +] + +[package.extras] +adbc = ["adbc-driver-manager[dbapi]", "adbc-driver-sqlite[dbapi]"] +all = ["polars[async,cloudpickle,database,deltalake,excel,fsspec,graph,iceberg,numpy,pandas,plot,pyarrow,pydantic,style,timezone]"] +async = ["gevent"] +calamine = ["fastexcel (>=0.9)"] +cloudpickle = ["cloudpickle"] +connectorx = ["connectorx (>=0.3.2)"] +database = ["nest-asyncio", "polars[adbc,connectorx,sqlalchemy]"] +deltalake = ["deltalake (>=0.15.0)"] +excel = ["polars[calamine,openpyxl,xlsx2csv,xlsxwriter]"] +fsspec = ["fsspec"] +gpu = ["cudf-polars-cu12"] +graph = ["matplotlib"] +iceberg = ["pyiceberg (>=0.5.0)"] +numpy = ["numpy (>=1.16.0)"] +openpyxl = ["openpyxl (>=3.0.0)"] +pandas = ["pandas", "polars[pyarrow]"] +plot = ["altair (>=5.4.0)"] +pyarrow = ["pyarrow (>=7.0.0)"] +pydantic = ["pydantic"] +sqlalchemy = ["polars[pandas]", "sqlalchemy"] +style = 
["great-tables (>=0.8.0)"] +timezone = ["backports-zoneinfo", "tzdata"] +xlsx2csv = ["xlsx2csv (>=0.8.0)"] +xlsxwriter = ["xlsxwriter"] + [[package]] name = "portalocker" version = "2.7.0" @@ -2733,7 +2774,6 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -3513,4 +3553,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "9e4c2116a592ae012bb3be220132f9465b018c80ebfa3b66c963f3c944965fcb" +content-hash = "d1d506c6d9e81b2679b2e300c32f18e5f8109e69cb924f18a641c4522f070a0f" diff --git a/backend_py/primary/primary/routers/inplace_volumetrics/converters.py b/backend_py/primary/primary/routers/inplace_volumetrics/converters.py new file mode 100644 index 000000000..02b33f57c --- /dev/null +++ b/backend_py/primary/primary/routers/inplace_volumetrics/converters.py @@ -0,0 +1,177 @@ +from primary.services.sumo_access.inplace_volumetrics_types import ( + FluidZone, + Statistic, + InplaceVolumetricsIdentifier, + InplaceVolumetricsIdentifierWithValues, + InplaceVolumetricsTableDefinition, + InplaceVolumetricTableDataPerFluidSelection, + InplaceStatisticalVolumetricTableDataPerFluidSelection, +) + +from . 
import schemas + + +def convert_schema_to_identifiers_with_values( + identifiers_with_values: list[schemas.InplaceVolumetricsIdentifierWithValues], +) -> list[InplaceVolumetricsIdentifierWithValues]: + converted = [] + for identifier_with_values in identifiers_with_values: + identifier = _convert_schema_to_identifier(identifier_with_values.identifier) + values = identifier_with_values.values + converted.append(InplaceVolumetricsIdentifierWithValues(identifier, values)) + return converted + + +def convert_schema_to_fluid_zones(fluid_zones: list[schemas.FluidZone]) -> list[FluidZone]: + """Converts the fluid zones from the API format to the sumo service format""" + return [FluidZone(fluid_zone.value) for fluid_zone in fluid_zones] + + +def convert_schema_to_identifiers( + identifiers: list[schemas.InplaceVolumetricsIdentifier] | None, +) -> list[InplaceVolumetricsIdentifier] | None: + """Converts the identifiers from the API format to the sumo service format""" + if identifiers is None: + return None + + return [_convert_schema_to_identifier(identifier) for identifier in identifiers] + + +def _convert_schema_to_identifier(identifier: schemas.InplaceVolumetricsIdentifier) -> InplaceVolumetricsIdentifier: + """Converts the identifier from the API format to the sumo service format""" + return InplaceVolumetricsIdentifier(identifier.value) + + +def _convert_fluid_zones_to_schema(fluid_zones: list[FluidZone]) -> list[schemas.FluidZone]: + """Converts the fluid zones from the sumo service to the API format""" + return [schemas.FluidZone(fluid_zone.value) for fluid_zone in fluid_zones] + + +def _convert_result_names_to_schema(result_names: list[str]) -> list[schemas.InplaceVolumetricResultName]: + """Converts the result names from the sumo service to the API format""" + return [schemas.InplaceVolumetricResultName(result_name) for result_name in result_names] + + +def _convert_identifier_string_to_schema(identifier_string: str) -> schemas.InplaceVolumetricsIdentifier: + """Converts the identifier string from the sumo service to the API format""" + return schemas.InplaceVolumetricsIdentifier(identifier_string) + + +def to_api_table_definitions( + table_definitions: list[InplaceVolumetricsTableDefinition], +) -> list[schemas.InplaceVolumetricsTableDefinition]: + """Converts the table definitions from the sumo service to the API format""" + return [ + schemas.InplaceVolumetricsTableDefinition( + tableName=table_definition.table_name, + fluidZones=_convert_fluid_zones_to_schema(table_definition.fluid_zones), + resultNames=_convert_result_names_to_schema(table_definition.result_names), + identifiersWithValues=[ + schemas.InplaceVolumetricsIdentifierWithValues( + identifier=_convert_identifier_string_to_schema(identifier_with_values.identifier), + values=identifier_with_values.values, + ) + for identifier_with_values in table_definition.identifiers_with_values + ], + ) + for table_definition in table_definitions + ] + + +def convert_table_data_per_fluid_selection_to_schema( + table_per_fluid_selection: InplaceVolumetricTableDataPerFluidSelection, +) -> schemas.InplaceVolumetricTableDataPerFluidSelection: + """Converts the table data from the sumo service to the schema format""" + + tables: list[schemas.InplaceVolumetricTableData] = [] + + for table in table_per_fluid_selection.table_data_per_fluid_selection: + selector_columns = [ + schemas.RepeatedTableColumnData( + columnName=column.column_name, + uniqueValues=column.unique_values, + indices=column.indices, + ) + for column in table.selector_columns + ] 
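+        # Note on the selector columns built above: RepeatedTableColumnData dictionary-encodes a column,
+        # storing the unique values once plus one index per row. E.g. the column values ["A", "A", "B"]
+        # (illustrative values) become uniqueValues=["A", "B"] and indices=[0, 0, 1].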
+ + result_columns = [ + schemas.TableColumnData(columnName=column.column_name, columnValues=column.values) + for column in table.result_columns + ] + + tables.append( + schemas.InplaceVolumetricTableData( + fluidSelectionName=table.fluid_selection_name, + selectorColumns=selector_columns, + resultColumns=result_columns, + ) + ) + + return schemas.InplaceVolumetricTableDataPerFluidSelection(tableDataPerFluidSelection=tables) + + +def convert_statistical_table_data_per_fluid_selection_to_schema( + table_data_per_fluid_selection: InplaceStatisticalVolumetricTableDataPerFluidSelection, +) -> schemas.InplaceStatisticalVolumetricTableDataPerFluidSelection: + """Converts the table data from the sumo service to the schema format""" + + tables: list[schemas.InplaceStatisticalVolumetricTableData] = [] + + for table in table_data_per_fluid_selection.table_data_per_fluid_selection: + selector_columns = [ + schemas.RepeatedTableColumnData( + columnName=column.column_name, + uniqueValues=column.unique_values, + indices=column.indices, + ) + for column in table.selector_columns + ] + + result_columns_statistics = [ + schemas.TableColumnStatisticalData( + columnName=column.column_name, + statisticValues=_convert_statistic_values_dict_to_schema(column.statistic_values), + ) + for column in table.result_column_statistics + ] + + tables.append( + schemas.InplaceStatisticalVolumetricTableData( + fluidSelectionName=table.fluid_selection_name, + selectorColumns=selector_columns, + resultColumnStatistics=result_columns_statistics, + ) + ) + + return schemas.InplaceStatisticalVolumetricTableDataPerFluidSelection(tableDataPerFluidSelection=tables) + + +def _convert_statistic_values_dict_to_schema( + statistic_values: dict[Statistic, list[float]], +) -> dict[schemas.InplaceVolumetricStatistic, list[float]]: + """Converts the statistic values dictionary from the service layer format to API format""" + return { + _convert_statistic_enum_to_inplace_volumetric_statistic_enum(statistic): values + for statistic, values in statistic_values.items() + } + + +def _convert_statistic_enum_to_inplace_volumetric_statistic_enum( + statistic: Statistic, +) -> schemas.InplaceVolumetricStatistic: + """Converts the statistic enum from the service layer format to API enum""" + if statistic == Statistic.MEAN: + return schemas.InplaceVolumetricStatistic.MEAN + if statistic == Statistic.STD_DEV: + return schemas.InplaceVolumetricStatistic.STD_DEV + if statistic == Statistic.MIN: + return schemas.InplaceVolumetricStatistic.MIN + if statistic == Statistic.MAX: + return schemas.InplaceVolumetricStatistic.MAX + if statistic == Statistic.P10: + return schemas.InplaceVolumetricStatistic.P10 + if statistic == Statistic.P90: + return schemas.InplaceVolumetricStatistic.P90 + + raise ValueError(f"Unknown statistic value: {statistic.value}") diff --git a/backend_py/primary/primary/routers/inplace_volumetrics/router.py b/backend_py/primary/primary/routers/inplace_volumetrics/router.py index 5cee21e5c..fd4879b87 100644 --- a/backend_py/primary/primary/routers/inplace_volumetrics/router.py +++ b/backend_py/primary/primary/routers/inplace_volumetrics/router.py @@ -1,97 +1,150 @@ -from typing import List, Optional, Sequence -from fastapi import APIRouter, Depends, Query, HTTPException +import logging +from typing import Annotated -from primary.services.sumo_access.inplace_volumetrics_access import ( - InplaceVolumetricsAccess, - InplaceVolumetricsTableMetaData, - InplaceVolumetricsCategoricalMetaData, -) +from fastapi import APIRouter, Depends, Query, 
Body, Response -from primary.services.sumo_access.generic_types import EnsembleScalarResponse +from primary.services.inplace_volumetrics_assembler.inplace_volumetrics_assembler import ( + InplaceVolumetricsAssembler, +) +from primary.services.sumo_access.inplace_volumetrics_access import InplaceVolumetricsAccess from primary.services.utils.authenticated_user import AuthenticatedUser - from primary.auth.auth_helper import AuthHelper +from primary.utils.response_perf_metrics import ResponsePerfMetrics +from . import schemas +from . import converters + +LOGGER = logging.getLogger(__name__) router = APIRouter() -@router.get("/table_names_and_descriptions/", tags=["inplace_volumetrics"]) -async def get_table_names_and_descriptions( - # fmt:off - authenticated_user: AuthenticatedUser = Depends(AuthHelper.get_authenticated_user), - case_uuid: str = Query(description="Sumo case uuid"), - ensemble_name: str = Query(description="Ensemble name"), - # fmt:on -) -> List[InplaceVolumetricsTableMetaData]: - """Get all volumetric tables for a given ensemble.""" +@router.get("/table_definitions/", tags=["inplace_volumetrics"]) +async def get_table_definitions( + authenticated_user: Annotated[AuthenticatedUser, Depends(AuthHelper.get_authenticated_user)], + case_uuid: Annotated[str, Query(description="Sumo case uuid")], + ensemble_name: Annotated[str, Query(description="Ensemble name")], +) -> list[schemas.InplaceVolumetricsTableDefinition]: + """Get the volumetric tables definitions for a given ensemble.""" + access = await InplaceVolumetricsAccess.from_case_uuid_async( + authenticated_user.get_sumo_access_token(), case_uuid, ensemble_name + ) + assembler = InplaceVolumetricsAssembler(access) + tables = await assembler.get_volumetric_table_metadata_async() + return converters.to_api_table_definitions(tables) + + +@router.post("/get_aggregated_per_realization_table_data/", tags=["inplace_volumetrics"]) +# pylint: disable=too-many-arguments +async def post_get_aggregated_per_realization_table_data( + response: Response, + authenticated_user: Annotated[AuthenticatedUser, Depends(AuthHelper.get_authenticated_user)], + case_uuid: Annotated[str, Query(description="Sumo case uuid")], + ensemble_name: Annotated[str, Query(description="Ensemble name")], + table_name: Annotated[str, Query(description="Table name")], + result_names: Annotated[list[str], Query(description="The name of the volumetric results")], + fluid_zones: Annotated[list[schemas.FluidZone], Query(description="The fluid zones to aggregate by")], + identifiers_with_values: Annotated[ + list[schemas.InplaceVolumetricsIdentifierWithValues], + Body(embed=True, description="Selected identifiers and wanted values"), + ], + accumulate_fluid_zones: Annotated[bool, Query(description="Whether to accumulate fluid zones")], + group_by_identifiers: Annotated[ + list[schemas.InplaceVolumetricsIdentifier] | None, Query(description="The identifiers to group table data by") + ] = None, + realizations: Annotated[ + list[int] | None, + Query( + description="Optional list of realizations to include. If not specified, all realizations will be returned." + ), + ] = None, +) -> schemas.InplaceVolumetricTableDataPerFluidSelection: + """ + Get aggregated volumetric data for a given table with data per realization based on requested results and categories/index filter. + + Note: This endpoint is a post endpoint because the list of identifiers with values can be quite large and may exceed the query string limit. 
+ As the endpoint is post, the identifiers with values object is kept for convenience. + """ + perf_metrics = ResponsePerfMetrics(response) access = await InplaceVolumetricsAccess.from_case_uuid_async( authenticated_user.get_sumo_access_token(), case_uuid, ensemble_name ) - table_names = await access.get_table_names_and_metadata() - if len(table_names) == 0: - raise HTTPException(status_code=404, detail="No volumetric tables found") - - return table_names - - -@router.post("/realizations_response/", tags=["inplace_volumetrics"]) -async def get_realizations_response( - # fmt:off - authenticated_user: AuthenticatedUser = Depends(AuthHelper.get_authenticated_user), - case_uuid: str = Query(description="Sumo case uuid"), - ensemble_name: str = Query(description="Ensemble name"), - table_name: str = Query(description="Table name"), - response_name:str = Query(description="Response name"), - categorical_filter:Optional[List[InplaceVolumetricsCategoricalMetaData]] = None, - realizations: Optional[Sequence[int]] = None, - # fmt:on -) -> EnsembleScalarResponse: - """Get response for a given table and index filter.""" + + perf_metrics.record_lap("get-access") + + assembler = InplaceVolumetricsAssembler(access) + + data = await assembler.create_accumulated_by_selection_per_realization_volumetric_table_data_async( + table_name=table_name, + result_names=set(result_names), + fluid_zones=converters.convert_schema_to_fluid_zones(fluid_zones), + group_by_identifiers=converters.convert_schema_to_identifiers(group_by_identifiers), + realizations=realizations, + identifiers_with_values=converters.convert_schema_to_identifiers_with_values(identifiers_with_values), + accumulate_fluid_zones=accumulate_fluid_zones, + ) + + perf_metrics.record_lap("calculate-accumulated-data") + + LOGGER.info(f"Got aggregated volumetric data in: {perf_metrics.to_string()}") + + return converters.convert_table_data_per_fluid_selection_to_schema(data) + + +@router.post("/get_aggregated_statistical_table_data/", tags=["inplace_volumetrics"]) +# pylint: disable=too-many-arguments +async def post_get_aggregated_statistical_table_data( + response: Response, + authenticated_user: Annotated[AuthenticatedUser, Depends(AuthHelper.get_authenticated_user)], + case_uuid: Annotated[str, Query(description="Sumo case uuid")], + ensemble_name: Annotated[str, Query(description="Ensemble name")], + table_name: Annotated[str, Query(description="Table name")], + result_names: Annotated[list[str], Query(description="The name of the volumetric results")], + fluid_zones: Annotated[list[schemas.FluidZone], Query(description="The fluid zones to aggregate by")], + identifiers_with_values: Annotated[ + list[schemas.InplaceVolumetricsIdentifierWithValues], + Body(embed=True, description="Selected identifiers and wanted values"), + ], + accumulate_fluid_zones: Annotated[bool, Query(description="Whether to accumulate fluid zones")], + group_by_identifiers: Annotated[ + list[schemas.InplaceVolumetricsIdentifier] | None, Query(description="The identifiers to group table data by") + ] = None, + realizations: Annotated[ + list[int] | None, + Query( + description="Optional list of realizations to include. If not specified, all realizations will be returned." + ), + ] = None, +) -> schemas.InplaceStatisticalVolumetricTableDataPerFluidSelection: + """ + Get statistical volumetric data across selected realizations for a given table based on requested results and categories/index filter. 
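+
+    Illustrative request body shape (identifier and values below are examples only):
+    {"identifiers_with_values": [{"identifier": "ZONE", "values": ["A", "B"]}]}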
+ + Note: This endpoint is a post endpoint because the list of identifiers with values can be quite large and may exceed the query string limit. + As the endpoint is post, the identifiers with values object is kept for convenience. + """ + perf_metrics = ResponsePerfMetrics(response) + access = await InplaceVolumetricsAccess.from_case_uuid_async( authenticated_user.get_sumo_access_token(), case_uuid, ensemble_name ) - response = access.get_response(table_name, response_name, categorical_filter, realizations) - return response - - -# class StatisticFunction(str, Enum): -# MEAN = "MEAN" -# MIN = "MIN" -# MAX = "MAX" -# P10 = "P10" -# P90 = "P90" -# P50 = "P50" - - -# class StatisticValueObject(BaseModel): -# statistic_function: StatisticFunction -# values: List[float] - - -# class InplaceVolumetricsStatisticResponse(BaseModel): -# realizations: List[int] -# value_objects: List[StatisticValueObject] -# # unit: str -# # is_rate: bool - - -# @router.get("/statistic_response/", tags=["inplace_volumetrics"]) -# def get_statistic_response( -# # fmt:off -# authenticated_user: AuthenticatedUser = Depends(AuthHelper.get_authenticated_user), -# case_uuid: str = Query(description="Sumo case uuid"), -# ensemble_name: str = Query(description="Ensemble name"), -# table_name: str = Query(description="Table name"), -# response_name:str = Query(description="Response name"), -# statistic_functions: Optional[Sequence[StatisticFunction]] = Query(None, description="Optional list of statistics to calculate. If not specified, all statistics will be calculated."), -# realizations: Optional[Sequence[int]] = Query(None,description="Realizations"), -# # fmt:on -# ) -> List[InplaceVolumetricsStatisticResponse]: -# """Get statistical response for a given table and index filter.""" -# access = InplaceVolumetricsAccess(authenticated_user.get_sumo_access_token(), case_uuid, ensemble_name) -# response = access.get_response(table_name, realizations, response_name) # , index_filter) -# # service_stat_funcs_to_compute = _to_service_statistic_functions(statistic_functions) -# # statistics = compute_inplace_statistics(response, response_name, service_stat_funcs_to_compute) -# return response + + perf_metrics.record_lap("get-access") + + assembler = InplaceVolumetricsAssembler(access) + + data = await assembler.create_accumulated_by_selection_statistical_volumetric_table_data_async( + table_name=table_name, + result_names=set(result_names), + fluid_zones=converters.convert_schema_to_fluid_zones(fluid_zones), + group_by_identifiers=converters.convert_schema_to_identifiers(group_by_identifiers), + realizations=realizations, + identifiers_with_values=converters.convert_schema_to_identifiers_with_values(identifiers_with_values), + accumulate_fluid_zones=accumulate_fluid_zones, + ) + + perf_metrics.record_lap("calculate-accumulated-data") + + LOGGER.info(f"Got aggregated volumetric data in: {perf_metrics.to_string()}") + + return converters.convert_statistical_table_data_per_fluid_selection_to_schema(data) diff --git a/backend_py/primary/primary/routers/inplace_volumetrics/schemas.py b/backend_py/primary/primary/routers/inplace_volumetrics/schemas.py new file mode 100644 index 000000000..9a8e39976 --- /dev/null +++ b/backend_py/primary/primary/routers/inplace_volumetrics/schemas.py @@ -0,0 +1,158 @@ +from enum import Enum, StrEnum + +from pydantic import BaseModel + + +class InplaceVolumetricsIdentifier(str, Enum): + ZONE = "ZONE" + REGION = "REGION" + FACIES = "FACIES" + LICENSE = "LICENSE" + + +class 
InplaceVolumetricsIdentifierWithValues(BaseModel): + """Unique values for an index column in a volumetric table + All values should ideally be strings, but it is common to see integers, especially for REGION""" + + identifier: InplaceVolumetricsIdentifier + values: list[str | int] + + +class InplaceVolumetricStatistic(StrEnum): + """ + Definition of possible statistics for a result column in an inplace volumetrics table + """ + + MEAN = "mean" + STD_DEV = "stddev" + MAX = "max" + MIN = "min" + P10 = "p10" + P90 = "p90" + + +class FluidZone(StrEnum): + OIL = "Oil" + GAS = "Gas" + WATER = "Water" + + +class InplaceVolumetricResultName(str, Enum): + """Allowed volumetric response names""" + + BULK = "BULK" + NET = "NET" + PORO = "PORO" + PORO_NET = "PORO_NET" + PORV = "PORV" + HCPV = "HCPV" + STOIIP = "STOIIP" + GIIP = "GIIP" + NTG = "NTG" + ASSOCIATEDGAS = "ASSOCIATEDGAS" + ASSOCIATEDOIL = "ASSOCIATEDOIL" + BO = "BO" + BG = "BG" + SW = "SW" + STOIIP_TOTAL = "STOIIP_TOTAL" + GIIP_TOTAL = "GIIP_TOTAL" + + +class InplaceVolumetricsTableDefinition(BaseModel): + """Definition of a volumetric table""" + + tableName: str + fluidZones: list[FluidZone] + resultNames: list[InplaceVolumetricResultName] + identifiersWithValues: list[InplaceVolumetricsIdentifierWithValues] + + +class InplaceVolumetricDataEntry(BaseModel): + result_values: list[float] + index_values: list[str | int] + + +class InplaceVolumetricData(BaseModel): + vol_table_name: str + result_name: str + realizations: list[int] + index_names: list[str] + entries: list[InplaceVolumetricDataEntry] + + +class RepeatedTableColumnData(BaseModel): + """ + Data for a single column in a volumetric table + + Length of index list should be equal to the number of rows in the table + + - unique_values: List of unique values in the column + - indices: List of indices, in unique_values list, for each row in the table + """ + + columnName: str + uniqueValues: list[str | int] + indices: list[int] + + +class TableColumnData(BaseModel): + """ + Data for a single column in a volumetric table + + Length of column values should be equal to the number of rows in the table + """ + + columnName: str + columnValues: list[float] + + +class TableColumnStatisticalData(BaseModel): + """ + Statistical data for a single result column in a volumetric table + + Length of column values should be equal to the number of rows in the table + """ + + columnName: str + statisticValues: dict[InplaceVolumetricStatistic, list[float]] + + +class InplaceVolumetricTableData(BaseModel): + """Volumetric data for a single table + + Contains data for a single fluid zone, e.g. Oil, Gas, Water, or sum of fluid zones + """ + + fluidSelectionName: str # Oil, Gas, Water or "Oil + Gas", etc. + selectorColumns: list[RepeatedTableColumnData] # Index columns and realizations + resultColumns: list[TableColumnData] + + +class InplaceStatisticalVolumetricTableData(BaseModel): + """ + Statistical volumetric data for single volume table + + Contains data for a single fluid zone, e.g. Oil, Gas, Water, or sum of fluid zones + """ + + fluidSelectionName: str # Oil, Gas, Water or "Oil + Gas", etc. + selectorColumns: list[RepeatedTableColumnData] # Index columns and realizations + resultColumnStatistics: list[TableColumnStatisticalData] + + +class InplaceVolumetricTableDataPerFluidSelection(BaseModel): + """Volumetric data for a single table per fluid selection + + Fluid selection can be single fluid zones, e.g. 
Oil, Gas, Water, or sum of fluid zones - Oil + Gas + Water + """ + + tableDataPerFluidSelection: list[InplaceVolumetricTableData] + + +class InplaceStatisticalVolumetricTableDataPerFluidSelection(BaseModel): + """Statistical volumetric data for a single table per fluid selection + + Fluid selection can be single fluid zones, e.g. Oil, Gas, Water, or sum of fluid zones - Oil + Gas + Water + """ + + tableDataPerFluidSelection: list[InplaceStatisticalVolumetricTableData] diff --git a/backend_py/primary/primary/services/inplace_volumetrics_assembler/_conversion/_conversion.py b/backend_py/primary/primary/services/inplace_volumetrics_assembler/_conversion/_conversion.py new file mode 100644 index 000000000..da682dd63 --- /dev/null +++ b/backend_py/primary/primary/services/inplace_volumetrics_assembler/_conversion/_conversion.py @@ -0,0 +1,234 @@ +from typing import Iterable + +import re + +from primary.services.sumo_access.inplace_volumetrics_types import ( + CalculatedVolume, + FluidZone, + FluidSelection, + Property, + InplaceVolumetricsIdentifier, +) +from primary.services.sumo_access.inplace_volumetrics_access import ALLOWED_RAW_VOLUMETRIC_COLUMNS + +""" +This file contains helper functions for conversion between different data types used in the Inplace Volumetrics provider + +The table data from Sumo retrieves raw_volumetric_columns with suffixes for fluid zones, e.g. "STOIIP_OIL", "STOIIP_GAS", "STOIIP_WATER" + +Conversion is made back and forth: + +- Raw volumetric columns are converted into volume names without suffixes, and a list of available fluid zones. +Based on the list of volume names, the available properties are determined. The list of volume names and properties equals the results. + +- A list of results is converted back to a list of volume names and properties. The volume names needed to calculate a property are found, +and a complete list of volume names can be combined with the list of fluid zones to get a list of raw volumetric columns.
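+
+Illustrative example (column names picked for illustration only):
+- Raw columns ["STOIIP_OIL", "HCPV_OIL", "GIIP_GAS", "HCPV_GAS"] give volume names ["STOIIP", "HCPV", "GIIP"],
+fluid zones [Oil, Gas] and the derivable properties ["BO", "BG"], i.e. results = ["STOIIP", "HCPV", "GIIP", "BO", "BG"].
+- Requesting the results ["STOIIP", "BO"] requires the volume names ["STOIIP", "HCPV"], which combined with the
+fluid zones [Oil, Gas] give the candidate raw columns ["STOIIP_OIL", "STOIIP_GAS", "HCPV_OIL", "HCPV_GAS"],
+filtered against ALLOWED_RAW_VOLUMETRIC_COLUMNS.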
+ + +Terms: +- Front-end: `results` = volume_names + properties (w/o suffixes) +- Back-end: + - all_volume_names = volume_names + get_required_volume_names_from_properties(properties) + - `raw_volumetric_column_names` = create_list_of_raw_volumetric_column_names(all_volume_names, fluid_zones) + +""" + + +def get_identifier_from_string(identifier_str: str) -> InplaceVolumetricsIdentifier | None: + """ + Function to convert string to InplaceVolumetricsIdentifier + """ + if identifier_str in InplaceVolumetricsIdentifier.__members__: + return InplaceVolumetricsIdentifier(identifier_str) + + return None + + +def create_fluid_selection_name(fluid_selection: FluidSelection, fluid_zones: list[FluidZone]) -> str: + if fluid_selection != FluidSelection.ACCUMULATED: + return fluid_selection.value + + return "+".join([fluid_zone.value for fluid_zone in fluid_zones]) + + +def get_fluid_zone_from_selection(fluid_selection: FluidSelection) -> FluidZone | None: + # Check if the value is among FluidZone options + if fluid_selection in FluidZone.__members__.values(): + return FluidZone(fluid_selection) + else: + return None + + +def convert_fluid_zone_to_fluid_selection(fluid_zone: FluidZone) -> FluidSelection: + return FluidSelection(fluid_zone) + + +def get_calculated_volumes_among_result_names(result_names: Iterable[str]) -> list[str]: + """ + Function to get calculated volumes among result names + """ + possible_calculated_volumes = set() + for calculated_volume in result_names: + if calculated_volume in CalculatedVolume.__members__: + possible_calculated_volumes.add(calculated_volume) + + return list(possible_calculated_volumes) + + +def get_required_volume_names_from_calculated_volumes(calculated_volumes: Iterable[str]) -> list[str]: + """ + Function to convert calculated volumes to list of required volume names + + NOTE: This function lists all volume names needed, but fluid zone is not considered + """ + volume_names = set() + if "STOIIP_TOTAL" in calculated_volumes: + volume_names.update(["STOIIP", "ASSOCIATEDOIL"]) + if "GIIP_TOTAL" in calculated_volumes: + volume_names.update(["GIIP", "ASSOCIATEDGAS"]) + + return list(volume_names) + + +def get_properties_among_result_names(result_names: Iterable[str]) -> list[str]: + """ + Function to get properties among result names + """ + + properties = set() + for result_name in result_names: + if result_name in Property.__members__: + properties.add(result_name) + + return list(properties) + + +def get_required_volume_names_from_properties(properties: Iterable[str]) -> list[str]: + """ + Function to convert properties to list of required volume names + """ + + volume_names = set() + for property in properties: + volume_names.update(get_required_volume_names_from_property(property)) + + return list(volume_names) + + +def get_required_volume_names_from_property(property: str) -> list[str]: + """ + Function to convert property to list of required volume names + """ + + if property == "NTG": + return ["BULK", "NET"] + if property == "PORO": + return ["BULK", "PORV"] + if property == "PORO_NET": + return ["PORV", "NET"] + if property == "SW": + return ["HCPV", "PORV"] + if property == "BO": + return ["HCPV", "STOIIP"] + if property == "BG": + return ["HCPV", "GIIP"] + else: + raise ValueError(f"Unhandled property: {property}") + + +def get_available_properties_from_volume_names(volume_names: Iterable[str]) -> list[str]: + """ + Function to get available properties from volume names + """ + + properties = set() + if set(["BULK", "NET"]).issubset(volume_names): + 
properties.add(Property.NTG.value) + if set(["PORV", "BULK"]).issubset(volume_names): + properties.add(Property.PORO.value) + if set(["PORV", "NET"]).issubset(volume_names): + properties.add(Property.PORO_NET.value) + if set(["HCPV", "PORV"]).issubset(volume_names): + properties.add(Property.SW.value) + if set(["HCPV", "STOIIP"]).issubset(volume_names): + properties.add(Property.BO.value) + if set(["HCPV", "GIIP"]).issubset(volume_names): + properties.add(Property.BG.value) + + return list(properties) + + +def get_volume_names_from_raw_volumetric_column_names(raw_volumetric_column_names: Iterable[str]) -> list[str]: + """ + Function to get volume names from volumetric column names + + Raw volumetric columns have suffixes for fluid zones, e.g. "STOIIP_OIL", "STOIIP_GAS", "STOIIP_WATER" + """ + + volume_names = set() + + # Clean volume names for suffixes + for column_name in raw_volumetric_column_names: + cleaned_name = re.sub(r"_(OIL|GAS|WATER)", "", column_name) + volume_names.add(cleaned_name) + + # Add total HC responses + if set(["STOIIP", "ASSOCIATEDOIL"]).issubset(volume_names): + volume_names.add("STOIIP_TOTAL") + if set(["GIIP", "ASSOCIATEDGAS"]).issubset(volume_names): + volume_names.add("GIIP_TOTAL") + + return list(volume_names) + + +def get_fluid_zones(raw_volumetric_column_names: Iterable[str]) -> list[FluidZone]: + """ + Function to get fluid zones from raw volumetric column names + """ + full_set = {FluidZone.OIL, FluidZone.GAS, FluidZone.WATER} + fluid_zones: set[FluidZone] = set() + for column_name in raw_volumetric_column_names: + if "_OIL" in column_name: + fluid_zones.add(FluidZone.OIL) + elif "_GAS" in column_name: + fluid_zones.add(FluidZone.GAS) + elif "_WATER" in column_name: + fluid_zones.add(FluidZone.WATER) + + if fluid_zones == full_set: + break + + return list(fluid_zones) + + +def create_raw_volumetric_columns_from_volume_names_and_fluid_zones( + volume_names: set[str], fluid_zones: list[FluidZone] +) -> list[str]: + """ + Function to create raw volumetric columns from volume names and fluid zones + """ + + volumetric_columns = [] + + for volume_name in volume_names: + columns = create_raw_volumetric_columns_from_volume_name_and_fluid_zones(volume_name, fluid_zones) + volumetric_columns.extend(columns) + + return volumetric_columns + + +def create_raw_volumetric_columns_from_volume_name_and_fluid_zones( + volume_name: str, fluid_zones: list[FluidZone] +) -> list[str]: + """ + Function to create raw volumetric columns from volume name and fluid zones + """ + + volumetric_columns = [] + + for fluid_zone in fluid_zones: + candidate_column = f"{volume_name}_{fluid_zone.value.upper()}" + if candidate_column in ALLOWED_RAW_VOLUMETRIC_COLUMNS: + volumetric_columns.append(candidate_column) + + return volumetric_columns diff --git a/backend_py/primary/primary/services/inplace_volumetrics_assembler/_utils.py b/backend_py/primary/primary/services/inplace_volumetrics_assembler/_utils.py new file mode 100644 index 000000000..4e3161e32 --- /dev/null +++ b/backend_py/primary/primary/services/inplace_volumetrics_assembler/_utils.py @@ -0,0 +1,524 @@ +from typing import Callable + +import numpy as np +import polars as pl + +from primary.services.sumo_access.inplace_volumetrics_types import ( + FluidZone, + InplaceVolumetricTableData, + InplaceVolumetricsIdentifier, + InplaceVolumetricResultName, + RepeatedTableColumnData, + Statistic, + TableColumnData, + TableColumnStatisticalData, +) + +from primary.services.sumo_access.inplace_volumetrics_access import 
InplaceVolumetricsAccess +from ..service_exceptions import Service, InvalidParameterError + +""" +This file contains general utility functions for the Inplace Volumetrics provider + +The methods can be used to calculate, aggregate and create data for the Inplace Volumetrics provider +""" + + +def get_valid_result_names_from_list(result_names: list[str]) -> list[str]: + """ + Get valid result names from list of result names + """ + valid_result_names = [] + for result_name in result_names: + if result_name in InplaceVolumetricResultName.__members__: + valid_result_names.append(result_name) + return valid_result_names + + +def create_per_group_summed_realization_volume_df( + volume_df: pl.DataFrame, + group_by_identifiers: list[InplaceVolumetricsIdentifier] | None, +) -> pl.DataFrame: + """ + Create volume DataFrame with sum per selected group. The sum volumes are grouped per realization, i.e. a column named "REAL" + should always be among the output columns. + + Note that selector columns are not aggregated, only the volume columns are aggregated. Thus the selector columns not among + group by identifiers is excluded from the output. + + After accumulating the sum, the properties can be calculated across realizations for each group. + """ + if "REAL" not in volume_df.columns: + raise ValueError("REAL column not found in volume DataFrame") + + # Group by each of the identifier (always accumulate by realization - i.e. max one value per realization) + columns_to_group_by_for_sum = ["REAL"] + if group_by_identifiers: + columns_to_group_by_for_sum = list({elm.value for elm in group_by_identifiers} | {"REAL"}) + + # Selector columns should not be aggregated + possible_selector_columns = InplaceVolumetricsAccess.get_possible_selector_columns() + + # Selector columns not in group by will be excluded, these should not be aggregated + per_group_summed_df = volume_df.group_by(columns_to_group_by_for_sum).agg( + [pl.sum("*").exclude(possible_selector_columns)] + ) + + return per_group_summed_df + + +def _get_statistical_function_expression(statistic: Statistic) -> Callable[[pl.Expr], pl.Expr] | None: + """ + Get statistical function Polars expression based on statistic enum + + Note: Inverted P10 and P90 according to oil industry standards + """ + statistical_function_expression_map: dict[Statistic, Callable[[pl.Expr], pl.Expr]] = { + Statistic.MEAN: lambda col: col.mean(), + Statistic.MIN: lambda col: col.min(), + Statistic.MAX: lambda col: col.max(), + Statistic.STD_DEV: lambda col: col.std(), + Statistic.P10: lambda col: col.quantile(0.9, "linear"), # Inverted P10 and P90 + Statistic.P90: lambda col: col.quantile(0.1, "linear"), # Inverted P10 and P90 + } + + return statistical_function_expression_map.get(statistic) + + +def _create_statistical_expression(statistic: Statistic, column_name: str, drop_nans: bool = True) -> pl.Expr: + """ + Generate the Polars expression for the given statistic. 
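+
+    E.g. _create_statistical_expression(Statistic.MEAN, "STOIIP") builds pl.col("STOIIP").drop_nans().mean()
+    aliased as f"STOIIP_{Statistic.MEAN}". Note the inverted oil-industry convention: Statistic.P10 maps to
+    quantile 0.9 and Statistic.P90 to quantile 0.1.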
+ """ + base_col = pl.col(column_name) + if drop_nans: + base_col = base_col.drop_nans() + stat_func_expr = _get_statistical_function_expression(statistic) + if stat_func_expr is None: + raise ValueError(f"Unsupported statistic: {statistic}") + return stat_func_expr(base_col).alias(f"{column_name}_{statistic}") + + +def _create_statistic_aggregation_expressions( + result_columns: list[str], statistics: list[Statistic], drop_nans: bool = True +) -> list[pl.Expr]: + """ + Create Polars expressions for aggregation of result columns + """ + expressions = [] + for column_name in result_columns: + for statistic in statistics: + expressions.append(_create_statistical_expression(statistic, column_name, drop_nans)) + return expressions + + +def _convert_statistical_df_to_statistical_result_table_data( + statistical_df: pl.DataFrame, + valid_result_names: list[str], + requested_statistics: list[Statistic], +) -> tuple[list[RepeatedTableColumnData], list[TableColumnStatisticalData]]: + """ + Convert statistical DataFrame to statistical result table data + + Expect the statistical DataFrame to have one unique column per requested statistic per result name, i.e. "result_name_mean", "result_name_stddev", etc. + """ + possible_selector_columns = InplaceVolumetricsAccess.get_possible_selector_columns() + + # Build selector columns from statistical table + selector_column_data_list: list[RepeatedTableColumnData] = [] + final_selector_columns = [name for name in possible_selector_columns if name in statistical_df.columns] + for column_name in final_selector_columns: + column = statistical_df[column_name] + selector_column_data_list.append(_create_repeated_table_column_data_from_polars_column(column_name, column)) + + # Fill statistics for each result + results_statistical_data_dict: dict[str, TableColumnStatisticalData] = {} + available_statistic_column_names = statistical_df.columns + for result_name in valid_result_names: + result_statistical_data = TableColumnStatisticalData(column_name=result_name, statistic_values={}) + for statistic in requested_statistics: + statistic_column_name = f"{result_name}_{statistic}" + if statistic_column_name not in available_statistic_column_names: + raise ValueError(f"Column {statistic_column_name} not found in statistical table") + + statistic_array = statistical_df[statistic_column_name].fill_null(np.nan) + result_statistical_data.statistic_values[statistic] = statistic_array.to_list() + + # Add result statistical data to dictionary + results_statistical_data_dict[result_name] = result_statistical_data + + # Create list of results statistical data from dictionary values + results_statistical_data_list: list[TableColumnStatisticalData] = list(results_statistical_data_dict.values()) + + # Validate length of columns + _validate_length_of_statistics_data_lists(selector_column_data_list, results_statistical_data_list) + + return (selector_column_data_list, results_statistical_data_list) + + +def create_grouped_statistical_result_table_data_polars( + result_df: pl.DataFrame, + group_by_identifiers: list[InplaceVolumetricsIdentifier] | None, +) -> tuple[list[RepeatedTableColumnData], list[TableColumnStatisticalData]]: + """ + Create result table data with statistics across column values based on group by identifiers selection. The + statistics are calculated across all values per grouping, thus the output will have one row per group. 
+ + To get statistics across all realizations, the input result df must be pre-processed to contain non-duplicate "REAL" values + per group when grouping with group_by_identifiers. + + The order of the arrays in the statistical data lists will match the order of the rows in the selector column data list. + + Statistics: Mean, stddev, min, max, p10, p90 + + Parameters: + - result_df: Dataframe with selector columns and result columns + - group_by_identifiers: list of identifiers to group by, should be equal to the group by used to pre-process the input result df + + Returns: + - Tuple with selector column data list and results statistical data list + """ + if group_by_identifiers == []: + raise InvalidParameterError("Group by identifiers must be a non-empty list or None", Service.GENERAL) + + possible_selector_columns = InplaceVolumetricsAccess.get_possible_selector_columns() + valid_selector_columns = [elm for elm in possible_selector_columns if elm in result_df.columns] + + # Find valid result names in df + valid_result_names = [elm for elm in result_df.columns if elm not in valid_selector_columns] + + # Define statistical aggregation expressions + requested_statistics = [ + Statistic.MEAN, + Statistic.STD_DEV, + Statistic.MIN, + Statistic.MAX, + Statistic.P10, + Statistic.P90, + ] + statistic_aggregation_expressions = _create_statistic_aggregation_expressions( + valid_result_names, requested_statistics + ) + + # Groupby and aggregate result df + # - Expect the result df to have one unique column per statistic per result name, i.e. "result_name_mean", "result_name_stddev", etc. + per_group_statistical_df: pl.DataFrame | None = None + if group_by_identifiers is None: + # If no grouping, aggregate entire df using expressions in select + # Only keep the result name columns and their statistics (i.e. keep no identifier columns) + per_group_statistical_df = result_df.select(statistic_aggregation_expressions) + else: + group_by_identifier_values = list(set([elm.value for elm in group_by_identifiers])) + # Perform aggregation per grouping + per_group_statistical_df = ( + result_df.select(group_by_identifier_values + valid_result_names) + .group_by(group_by_identifier_values) + .agg(statistic_aggregation_expressions) + ) + + # Convert statistical DataFrame to statistical result table data + selector_column_data_list, results_statistical_data_list = _convert_statistical_df_to_statistical_result_table_data( + per_group_statistical_df, valid_result_names, requested_statistics + ) + + return (selector_column_data_list, results_statistical_data_list) + + +def _validate_length_of_statistics_data_lists( + selector_column_data_list: list[RepeatedTableColumnData], + result_statistical_data_list: list[TableColumnStatisticalData], +) -> None: + """ + Verify that the lengths of the statistical data lists are equal. I.e. equal number of rows in each list. + + NOTE: Allows empty lists + """ + if len(selector_column_data_list) == 0 and len(result_statistical_data_list) == 0: + return + + expected_num_rows = 0 + if len(selector_column_data_list) != 0: + expected_num_rows = len(selector_column_data_list[0].indices) + else: + expected_num_rows = len(next(iter(result_statistical_data_list[0].statistic_values.values()))) + + for selector_column_data in selector_column_data_list: + num_rows = len(selector_column_data.indices) + if num_rows != expected_num_rows: + raise ValueError( + f"Length of {selector_column_data.column_name} column data list does not match expected number of rows: {expected_num_rows}. 
Got: {num_rows}" + ) + for result_statistical_data in result_statistical_data_list: + for statistic, statistic_values in result_statistical_data.statistic_values.items(): + if len(statistic_values) != expected_num_rows: + result_name = result_statistical_data.column_name + raise ValueError( + f"Number of {result_name} statistic {statistic.value} values does not match expected number of rows: {expected_num_rows}. Got: {len(statistic_values)}" + ) + + +def _create_repeated_table_column_data_from_polars_column( + column_name: str, column_values: pl.Series +) -> RepeatedTableColumnData: + """ + Create repeated table column data from column name and column values as Polars Series + + Note that the unique values are not sorted, but the indices vector is built to preserve order + in the input column values. + """ + + # unique() method might not preserve the order of the unique values + unique_values: list[str | int] = column_values.unique().to_list() + value_to_index_map = {value: index for index, value in enumerate(unique_values)} + indices: list[int] = [value_to_index_map[value] for value in column_values.to_list()] + + return RepeatedTableColumnData(column_name=column_name, unique_values=unique_values, indices=indices) + + +def create_inplace_volumetric_table_data_from_result_df( + result_df: pl.DataFrame, selection_name: str +) -> InplaceVolumetricTableData: + """ + Create Inplace Volumetric Table Data from result DataFrame, selection name and specified selector columns + """ + if result_df.is_empty(): + return InplaceVolumetricTableData(fluid_selection_name=selection_name, selector_columns=[], result_columns=[]) + + possible_selector_columns = InplaceVolumetricsAccess.get_possible_selector_columns() + existing_selector_columns = [name for name in result_df.columns if name in possible_selector_columns] + selector_column_data_list: list[RepeatedTableColumnData] = [] + for column_name in existing_selector_columns: + column = result_df[column_name] + selector_column_data_list.append(_create_repeated_table_column_data_from_polars_column(column_name, column)) + + existing_result_column_names = [name for name in result_df.columns if name not in existing_selector_columns] + result_column_data_list: list[TableColumnData] = [] + for column_name in existing_result_column_names: + result_column_data_list.append( + TableColumnData(column_name=column_name, values=result_df[column_name].to_list()) + ) + + return InplaceVolumetricTableData( + fluid_selection_name=selection_name, + selector_columns=selector_column_data_list, + result_columns=result_column_data_list, + ) + + +def create_volumetric_df_per_fluid_zone( + fluid_zones: list[FluidZone], + volumetric_df: pl.DataFrame, +) -> dict[FluidZone, pl.DataFrame]: + """ + Create a volumetric DataFrame per fluid zone + + Extracts the columns for each fluid zone and creates a new DataFrame for each fluid zone, with + the same identifier columns and REAL column as the original table. + + The fluid columns are stripped of the fluid zone suffix. 
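# Illustrative sketch (not part of the module above) of the unique_values/indices encoding used by
# RepeatedTableColumnData, assuming a small Polars Series. As noted in
# _create_repeated_table_column_data_from_polars_column, Series.unique() does not guarantee any particular
# order; the indices are built against whatever order it returns.
import polars as pl

column_values = pl.Series("ZONE", ["Valysar", "Therys", "Valysar", "Volon"])
unique_values = column_values.unique().to_list()  # e.g. ["Valysar", "Therys", "Volon"]
value_to_index_map = {value: index for index, value in enumerate(unique_values)}
indices = [value_to_index_map[value] for value in column_values.to_list()]  # e.g. [0, 1, 0, 2]
# RepeatedTableColumnData(column_name="ZONE", unique_values=unique_values, indices=indices)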
+
+    Returns:
+    dict[FluidZone, pl.DataFrame]: A dictionary with fluid zone as key and volumetric DataFrame as value
+
+
+    Example:
+    - Input:
+        - fluid_zone: [FluidZone.OIL, FluidZone.GAS]
+        - volumetric_df: pl.DataFrame
+            - volumetric_df.columns = ["REAL", "ZONE", "REGION", "FACIES", "STOIIP_OIL", "GIIP_GAS", "HCPV_OIL", "HCPV_GAS", "HCPV_WATER"]
+
+    - Output:
+        - df_dict: dict[FluidZone, pl.DataFrame]:
+            - df_dict[FluidZone.OIL]: volumetric_df_oil
+                - volumetric_df_oil.columns = ["REAL", "ZONE", "REGION", "FACIES", "STOIIP", "HCPV"]
+            - df_dict[FluidZone.GAS]: volumetric_df_gas
+                - volumetric_df_gas.columns = ["REAL", "ZONE", "REGION", "FACIES", "GIIP", "HCPV"]
+
+    """
+    column_names: list[str] = volumetric_df.columns
+
+    # Iterate over column_names to keep order of volumetric_df.columns
+    possible_selector_columns = InplaceVolumetricsAccess.get_possible_selector_columns()
+    selector_columns = [col for col in column_names if col in possible_selector_columns]
+
+    fluid_zone_to_df_map: dict[FluidZone, pl.DataFrame] = {}
+    for fluid_zone in fluid_zones:
+        fluid_zone_suffix = f"_{fluid_zone.value.upper()}"
+        fluid_columns = [name for name in column_names if name.endswith(fluid_zone_suffix)]
+
+        if not fluid_columns:
+            continue
+
+        # Mapping old column with suffix to new column without fluid zone suffix, e.g. "HCPV_OIL" -> "HCPV"
+        columns_rename_map: dict[str, str] = {col: col.removesuffix(fluid_zone_suffix) for col in fluid_columns}
+        fluid_zone_df = volumetric_df.select(selector_columns + fluid_columns).rename(columns_rename_map)
+
+        # Place DataFrame into fluid zone map
+        fluid_zone_to_df_map[fluid_zone] = fluid_zone_df
+    return fluid_zone_to_df_map
+
+
+def create_volumetric_summed_fluid_zones_df(
+    volumetric_df: pl.DataFrame,
+    fluid_zones: list[FluidZone],
+) -> pl.DataFrame:
+    """
+    Create a volumetric DataFrame summed across fluid zones
+
+    Extracts the columns for each fluid zone and creates a new DataFrame where, for each volume column, the values are summed across the fluid zones.
+
+    The fluid columns are stripped of the fluid zone suffix.
+
+    Example:
+    - Input:
+        - fluid_zone: [FluidZone.OIL, FluidZone.GAS]
+        - volumetric_df: pl.DataFrame
+            - volumetric_df.columns = ["REAL", "ZONE", "REGION", "FACIES", "STOIIP_OIL", "GIIP_GAS", "HCPV_OIL", "HCPV_GAS", "HCPV_WATER"]
+
+    - Output:
+        - volumetric_df_across_fluid_zones: pl.DataFrame
+            - volumetric_df_across_fluid_zones.columns = ["REAL", "ZONE", "REGION", "FACIES", "STOIIP", "GIIP", "HCPV"]
+    """
+
+    # Iterate over column_names to keep order of volumetric_df.columns
+    possible_selector_columns = InplaceVolumetricsAccess.get_possible_selector_columns()
+    valid_selector_columns = [col for col in volumetric_df.columns if col in possible_selector_columns]
+
+    # Get volume names among columns
+    volumetric_names_with_fluid_zone = [col for col in volumetric_df.columns if col not in valid_selector_columns]
+
+    # Extract set of volume names without fluid zone suffix
+    suffixes_to_remove = [f"_{fluid_zone.value.upper()}" for fluid_zone in fluid_zones]
+    volumetric_names = list(
+        {
+            name.removesuffix(suffix)  # Remove the suffix if it exists
+            for name in volumetric_names_with_fluid_zone
+            for suffix in suffixes_to_remove
+            if name.endswith(suffix)  # Only remove if the suffix is present
+        }
+    )
+
+    # Per volume name without fluid zone suffix, sum the columns with the same name
+    volume_name_sum_expressions: list[pl.Expr] = []
+    for volume_name in volumetric_names:
+        # Get volume columns with selected fluid zones
+        volume_columns_with_suffix = [
+            col
+            for col in volumetric_df.columns
+            if col.startswith(volume_name) and any(col.endswith(suffix) for suffix in suffixes_to_remove)
+        ]
+
+        if not volume_columns_with_suffix:
+            continue
+
+        # Sum columns with the same volume name
+        volume_name_sum_expression: pl.Expr = pl.col(volume_columns_with_suffix[0])
+        for col in volume_columns_with_suffix[1:]:
+            volume_name_sum_expression = volume_name_sum_expression + pl.col(col)
+
+        # Add sum expression to list
+        volume_name_sum_expressions.append(volume_name_sum_expression.alias(volume_name))
+
+    # Create df with selector columns and summed volume columns using expressions
+    column_names_and_expressions: list[str | pl.Expr] = valid_selector_columns + volume_name_sum_expressions
+    volumetric_across_fluid_zones_df = volumetric_df.select(column_names_and_expressions)
+
+    return volumetric_across_fluid_zones_df
+
+
+def _create_named_expression_with_nan_for_inf(expr: pl.Expr, name: str) -> pl.Expr:
+    """
+    Replace inf values with nan in a Polars expression and assign a new name
+
+    Returns: New expression with inf values replaced with nan and assigned a new name
+    """
+    return pl.when(expr.is_infinite()).then(np.nan).otherwise(expr).alias(name)
+
+
+def create_property_column_expressions(
+    volume_df_columns: list[str], properties: list[str], fluid_zone: FluidZone | None = None
+) -> list[pl.Expr]:
+    """
+    Create Polars expressions for property columns based on available volume columns.
+
+    If one of the volume names needed for a property is not found, the property expression is not provided.
+
+    Args:
+    - volume_df_columns (list[str]): list of column names of volume pl.DataFrame
+    - properties (list[str]): Name of the properties to calculate
+
+    Returns:
+    - list[pl.Expr]: list of Polars expressions for property columns
+
+    """
+    calculated_property_expressions: list[pl.Expr] = []
+
+    # NOTE: If one of the volume names needed for a property is not found, the property array is not calculated
+    # TODO: Consider "/"-operator vs pl.col().truediv() for division, e.g. pl.col("NET").truediv(pl.col("BULK"))
pl.col("NET").truediv(pl.col("BULK")) + if "BO" in properties and fluid_zone == FluidZone.OIL and set(["HCPV", "STOIIP"]).issubset(volume_df_columns): + expression = pl.col("HCPV") / pl.col("STOIIP") + calculated_property_expressions.append(_create_named_expression_with_nan_for_inf(expression, "BO")) + if "BG" in properties and fluid_zone == FluidZone.GAS and set(["HCPV", "GIIP"]).issubset(volume_df_columns): + expression = pl.col("HCPV") / pl.col("GIIP") + calculated_property_expressions.append(_create_named_expression_with_nan_for_inf(expression, "BG")) + if "NTG" in properties and set(["BULK", "NET"]).issubset(volume_df_columns): + ntg_expression = pl.col("NET") / pl.col("BULK") + calculated_property_expressions.append(_create_named_expression_with_nan_for_inf(ntg_expression, "NTG")) + if "PORO" in properties and set(["BULK", "PORV"]).issubset(volume_df_columns): + poro_expression = pl.col("PORV") / pl.col("BULK") + calculated_property_expressions.append(_create_named_expression_with_nan_for_inf(poro_expression, "PORO")) + if "PORO_NET" in properties and set(["PORV", "NET"]).issubset(volume_df_columns): + poro_net_expression = pl.col("PORV") / pl.col("NET") + calculated_property_expressions.append( + _create_named_expression_with_nan_for_inf(poro_net_expression, "PORO_NET") + ) + if "SW" in properties and set(["HCPV", "PORV"]).issubset(volume_df_columns): + # NOTE: HCPV/PORV = 0/0 = Nan -> 1 - Nan = Nan, if HCPV = 0 and PORV = 0 -> SW = 1 it must be handled + sw_expression = 1 - pl.col("HCPV") / pl.col("PORV") + calculated_property_expressions.append(_create_named_expression_with_nan_for_inf(sw_expression, "SW")) + + return calculated_property_expressions + + +def create_calculated_volume_column_expressions( + volume_df_columns: list[str], calculated_volumes: list[str], fluid_zone: FluidZone | None = None +) -> list[pl.Expr]: + """ + Create Polars expressions for calculated volume columns base available volume columns. 
+ + Args: + - volume_df_columns (list[str]): list of column names of volume pl.DataFrame + - calculated_volumes (list[str]): Name of the volume column to calculate + + Returns: + - list[pl.Expr]: list of Polars expressions for calculated volume columns + + """ + calculated_volume_expressions: list[pl.Expr] = [] + + # Handle STOIIP_TOTAL and GIIP_TOTAL + if "STOIIP_TOTAL" in calculated_volumes: + stoiip_total_expression: pl.Expr | None = None + if fluid_zone is None and set(["STOIIP", "ASSOCIATEDOIL"]).issubset(volume_df_columns): + stoiip_total_expression = pl.col("STOIIP") + pl.col("ASSOCIATEDOIL") + if fluid_zone == FluidZone.OIL and "STOIIP" in volume_df_columns: + stoiip_total_expression = pl.col("STOIIP") + if fluid_zone == FluidZone.GAS and "ASSOCIATEDOIL" in volume_df_columns: + stoiip_total_expression = pl.col("ASSOCIATEDOIL") + if stoiip_total_expression is not None: + calculated_volume_expressions.append( + _create_named_expression_with_nan_for_inf(stoiip_total_expression, "STOIIP_TOTAL") + ) + if "GIIP_TOTAL" in calculated_volumes: + giip_total_expression: pl.Expr | None = None + if fluid_zone is None and set(["GIIP", "ASSOCIATEDGAS"]).issubset(volume_df_columns): + giip_total_expression = pl.col("GIIP") + pl.col("ASSOCIATEDGAS") + if fluid_zone == FluidZone.GAS and "GIIP" in volume_df_columns: + giip_total_expression = pl.col("GIIP") + if fluid_zone == FluidZone.OIL and "ASSOCIATEDGAS" in volume_df_columns: + giip_total_expression = pl.col("ASSOCIATEDGAS") + if giip_total_expression is not None: + calculated_volume_expressions.append( + _create_named_expression_with_nan_for_inf(giip_total_expression, "GIIP_TOTAL") + ) + + return calculated_volume_expressions diff --git a/backend_py/primary/primary/services/inplace_volumetrics_assembler/inplace_volumetrics_assembler.py b/backend_py/primary/primary/services/inplace_volumetrics_assembler/inplace_volumetrics_assembler.py new file mode 100644 index 000000000..74e4645b6 --- /dev/null +++ b/backend_py/primary/primary/services/inplace_volumetrics_assembler/inplace_volumetrics_assembler.py @@ -0,0 +1,520 @@ +import asyncio + +import pyarrow as pa +import polars as pl + +from primary.services.sumo_access.inplace_volumetrics_access import ( + InplaceVolumetricsAccess, + IGNORED_IDENTIFIER_COLUMN_VALUES, +) +from primary.services.sumo_access.inplace_volumetrics_types import ( + CategorizedResultNames, + FluidZone, + FluidSelection, + InplaceVolumetricsIdentifier, + InplaceVolumetricsIdentifierWithValues, + InplaceVolumetricsTableDefinition, + InplaceStatisticalVolumetricTableData, + InplaceVolumetricsTableDefinition, + InplaceVolumetricTableData, + InplaceVolumetricTableDataPerFluidSelection, + InplaceStatisticalVolumetricTableDataPerFluidSelection, +) + +from ._conversion._conversion import ( + create_raw_volumetric_columns_from_volume_names_and_fluid_zones, + get_available_properties_from_volume_names, + get_calculated_volumes_among_result_names, + get_fluid_zones, + get_identifier_from_string, + get_properties_among_result_names, + get_required_volume_names_from_calculated_volumes, + get_required_volume_names_from_properties, + get_volume_names_from_raw_volumetric_column_names, + get_fluid_zone_from_selection, + create_fluid_selection_name, + convert_fluid_zone_to_fluid_selection, +) + +from ._utils import ( + create_calculated_volume_column_expressions, + create_property_column_expressions, + create_volumetric_summed_fluid_zones_df, + create_grouped_statistical_result_table_data_polars, + create_volumetric_df_per_fluid_zone, + 
create_per_group_summed_realization_volume_df, + create_inplace_volumetric_table_data_from_result_df, + get_valid_result_names_from_list, +) + +from ..service_exceptions import Service, InvalidParameterError + +import logging +from webviz_pkg.core_utils.perf_timer import PerfTimer + +LOGGER = logging.getLogger(__name__) + + +class InplaceVolumetricsAssembler: + """ + This class provides an interface for interacting with definitions used in front-end for assembling and providing + metadata and inplace volumetrics table data + + The class interacts with the InplaceVolumetricsAccess class to retrieve data from Sumo and assemble it into a format + that can be used in the front-end. It also performs validation of the data and aggregation methods where needed. + + The provider contains conversion from result names, properties and fluid zones into volumetric column names that can + be used to fetch data from Sumo. + + Front-end: results = volume_columns + properties + + Sumo: volumetric_column_names = results + fluid_zones + + + """ + + def __init__(self, inplace_volumetrics_access: InplaceVolumetricsAccess): + self._inplace_volumetrics_access = inplace_volumetrics_access + + async def get_volumetric_table_metadata_async(self) -> list[InplaceVolumetricsTableDefinition]: + vol_table_names = await self._inplace_volumetrics_access.get_inplace_volumetrics_table_names_async() + + async def get_named_inplace_volumetrics_table_async(table_name: str) -> dict[str, pa.Table]: + return { + table_name: await self._inplace_volumetrics_access.get_inplace_volumetrics_table_async( + table_name, column_names=None + ) + } + + tasks = [ + asyncio.create_task(get_named_inplace_volumetrics_table_async(vol_table_name)) + for vol_table_name in vol_table_names + ] + tables = await asyncio.gather(*tasks) + print(tables, len(tables)) + + tables_info: list[InplaceVolumetricsTableDefinition] = [] + for table_result in tables: + table_name, table = list(table_result.items())[0] + + non_volume_columns = self._inplace_volumetrics_access.get_possible_selector_columns() + + # Get raw volume names + raw_volumetric_column_names = [name for name in table.column_names if name not in non_volume_columns] + + fluid_zones = get_fluid_zones(raw_volumetric_column_names) + volume_names = get_volume_names_from_raw_volumetric_column_names(raw_volumetric_column_names) + available_property_names = get_available_properties_from_volume_names(volume_names) + result_names = volume_names + available_property_names + valid_result_names = get_valid_result_names_from_list(result_names) + + identifiers_with_values = [] + for identifier_name in self._inplace_volumetrics_access.get_possible_identifier_columns(): + identifier = get_identifier_from_string(identifier_name) + if identifier is not None and identifier_name in table.column_names: + identifier_values = table[identifier_name].unique().to_pylist() + filtered_identifier_values = [ + value for value in identifier_values if value not in IGNORED_IDENTIFIER_COLUMN_VALUES + ] + identifiers_with_values.append( + InplaceVolumetricsIdentifierWithValues( + identifier=identifier, + values=filtered_identifier_values, + ) + ) + tables_info.append( + InplaceVolumetricsTableDefinition( + table_name=table_name, + fluid_zones=fluid_zones, + result_names=valid_result_names, + identifiers_with_values=identifiers_with_values, + ) + ) + return tables_info + + async def create_accumulated_by_selection_per_realization_volumetric_table_data_async( + self, + table_name: str, + result_names: set[str], + fluid_zones: 
list[FluidZone], + identifiers_with_values: list[InplaceVolumetricsIdentifierWithValues], + group_by_identifiers: list[InplaceVolumetricsIdentifier] | None, + realizations: list[int] | None, + accumulate_fluid_zones: bool = False, + ) -> InplaceVolumetricTableDataPerFluidSelection: + if group_by_identifiers == []: + raise InvalidParameterError("Group by identifiers must be non-empty list or None", Service.GENERAL) + if realizations == []: + raise InvalidParameterError("Realizations must be non-empty list or None", Service.GENERAL) + + # Create volume df per fluid zone and retrieve volume names and valid properties among requested result names + ( + volume_df_per_fluid_selection, + categorized_requested_result_names, + ) = await self._get_volume_df_per_fluid_selection_and_categorized_result_names_async( + table_name, result_names, fluid_zones, realizations, identifiers_with_values, accumulate_fluid_zones + ) + + # Perform aggregation per result table + # - Aggregate by each requested group_by_identifier + table_data_per_fluid_selection: list[InplaceVolumetricTableData] = [] + for fluid_selection, volume_df in volume_df_per_fluid_selection.items(): + # Create per group summed realization values + per_group_summed_realization_df = create_per_group_summed_realization_volume_df( + volume_df, group_by_identifiers + ) + + # Create result df - requested volumes and calculated properties + per_realization_accumulated_result_df = InplaceVolumetricsAssembler._create_result_dataframe_polars( + per_group_summed_realization_df, categorized_requested_result_names, fluid_selection + ) + + fluid_selection_name = create_fluid_selection_name(fluid_selection, fluid_zones) + + table_data_per_fluid_selection.append( + create_inplace_volumetric_table_data_from_result_df( + per_realization_accumulated_result_df, fluid_selection_name + ) + ) + + return InplaceVolumetricTableDataPerFluidSelection( + table_data_per_fluid_selection=table_data_per_fluid_selection + ) + + async def create_accumulated_by_selection_statistical_volumetric_table_data_async( + self, + table_name: str, + result_names: set[str], + fluid_zones: list[FluidZone], + identifiers_with_values: list[InplaceVolumetricsIdentifierWithValues], + group_by_identifiers: list[InplaceVolumetricsIdentifier] | None, + realizations: list[int] | None, + accumulate_fluid_zones: bool = False, + ) -> InplaceStatisticalVolumetricTableDataPerFluidSelection: + if group_by_identifiers == []: + raise InvalidParameterError("Group by identifiers must be non-empty list or None", Service.GENERAL) + if realizations == []: + raise InvalidParameterError("Realizations must be non-empty list or None", Service.GENERAL) + + # Create volume df per fluid zone and retrieve volume names and valid properties among requested result names + ( + volume_df_per_fluid_selection, + categorized_requested_result_names, + ) = await self._get_volume_df_per_fluid_selection_and_categorized_result_names_async( + table_name, result_names, fluid_zones, realizations, identifiers_with_values, accumulate_fluid_zones + ) + + # Perform aggregation per result table + # - Aggregate by each requested group_by_identifier + statistical_table_data_per_fluid_selection: list[InplaceStatisticalVolumetricTableData] = [] + for fluid_selection, volume_df in volume_df_per_fluid_selection.items(): + # Create per group summed realization values + per_group_summed_realization_df = create_per_group_summed_realization_volume_df( + volume_df, group_by_identifiers + ) + + # Create result df - requested volumes and calculated 
properties + per_realization_accumulated_result_df = InplaceVolumetricsAssembler._create_result_dataframe_polars( + per_group_summed_realization_df, categorized_requested_result_names, fluid_selection + ) + + # Create statistical table data from df + selector_column_data_list, result_column_data_list = create_grouped_statistical_result_table_data_polars( + per_realization_accumulated_result_df, + group_by_identifiers, + ) + + fluid_selection_name = create_fluid_selection_name(fluid_selection, fluid_zones) + + statistical_table_data_per_fluid_selection.append( + InplaceStatisticalVolumetricTableData( + fluid_selection_name=fluid_selection_name, + selector_columns=selector_column_data_list, + result_column_statistics=result_column_data_list, + ) + ) + + return InplaceStatisticalVolumetricTableDataPerFluidSelection( + table_data_per_fluid_selection=statistical_table_data_per_fluid_selection + ) + + async def _get_volume_df_per_fluid_selection_and_categorized_result_names_async( + self, + table_name: str, + result_names: set[str], + fluid_zones: list[FluidZone], + realizations: list[int] | None, + identifiers_with_values: list[InplaceVolumetricsIdentifierWithValues], + accumulate_fluid_zones: bool, + ) -> tuple[dict[FluidSelection, pl.DataFrame], CategorizedResultNames]: + """ + Utility function to get volume table data as pl.DataFrame per fluid selection, and a list of volume names and properties among the requested result names. + + The function returns a dictionary with fluid selection as key and a volumetric DataFrame as value. The volumetric DataFrame contains the requested + volume names among result names, and all necessary volumes to calculate properties. + + Note: If accumulate_fluid_zones is True, the function will exclude BO and BG from valid properties. + + Calculation of volume names and properties, and creation of the results is handled outside this function. 
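# Rough illustration (not part of the module above) of how requested result names are categorized and how
# the set of volume columns to fetch from Sumo is derived. The concrete mappings (e.g. BO -> HCPV/STOIIP,
# STOIIP_TOTAL -> STOIIP + ASSOCIATEDOIL) live in the _conversion helpers imported above; the literal sets
# below are assumptions used only to show the idea.
requested_result_names = {"STOIIP", "BO", "STOIIP_TOTAL"}
properties = {"BO"}                                     # get_properties_among_result_names
calculated_volume_names = {"STOIIP_TOTAL"}              # get_calculated_volumes_among_result_names
required_for_properties = {"HCPV", "STOIIP"}            # get_required_volume_names_from_properties
required_for_calculated = {"STOIIP", "ASSOCIATEDOIL"}   # get_required_volume_names_from_calculated_volumes
volume_names = requested_result_names - properties - calculated_volume_names  # {"STOIIP"}
all_volume_names = volume_names | required_for_properties | required_for_calculated
# all_volume_names == {"STOIIP", "HCPV", "ASSOCIATEDOIL"} -> expanded per fluid zone into raw columns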
+ """ + # Check for empty identifier selections + has_empty_identifier_selection = any( + not identifier_with_values.values for identifier_with_values in identifiers_with_values + ) + if has_empty_identifier_selection: + raise InvalidParameterError( + "Each provided identifier column must have at least one selected value", Service.GENERAL + ) + + # Detect properties and find volume names needed to calculate properties + properties = get_properties_among_result_names(result_names) + required_volume_names_for_properties = get_required_volume_names_from_properties(properties) + + # Detect calculated volumes among result names and find volume names needed for calculation + calculated_volume_names = get_calculated_volumes_among_result_names(result_names) + required_volume_names_for_calculated_volumes = get_required_volume_names_from_calculated_volumes( + calculated_volume_names + ) + + # Extract volume names among result names + volume_names = list(set(result_names) - set(properties) - set(calculated_volume_names)) + + # Find all volume names needed from Sumo + all_volume_names = set( + volume_names + required_volume_names_for_properties + required_volume_names_for_calculated_volumes + ) + + # Get volume table per fluid selection - requested volumes and volumes needed for properties + volume_df_per_fluid_selection: dict[ + FluidSelection, pl.DataFrame + ] = await self._create_volume_df_per_fluid_selection( + table_name, all_volume_names, fluid_zones, realizations, identifiers_with_values, accumulate_fluid_zones + ) + + # If accumulate_fluid_zones is True, exclude BO and BG from valid properties + valid_properties = properties + if accumulate_fluid_zones: + valid_properties = [prop for prop in properties if prop not in ["BO", "BG"]] + + return volume_df_per_fluid_selection, CategorizedResultNames( + volume_names=volume_names, calculated_volume_names=calculated_volume_names, property_names=valid_properties + ) + + @staticmethod + def _create_result_dataframe_polars( + volume_df: pl.DataFrame, + categorized_requested_result_names: CategorizedResultNames, + fluid_selection: FluidSelection, + ) -> pl.DataFrame: + """ + Create a result dataframe from the volume table and requested properties + + If volume names needed for properties are not available in the volume dataframe, the function will skip the property + + The result dataframe contains the requested volume names and calculated properties + """ + # Convert fluid selection to fluid zone + fluid_zone: FluidZone | None = get_fluid_zone_from_selection(fluid_selection) + + # Find valid selector columns and volume names + possible_selector_columns = InplaceVolumetricsAccess.get_possible_selector_columns() + available_selector_columns = [col for col in possible_selector_columns if col in volume_df.columns] + requested_volume_names = categorized_requested_result_names.volume_names + available_requested_volume_names = [name for name in requested_volume_names if name in volume_df.columns] + + # Create calculated volume column expressions + requested_calculated_volume_names = categorized_requested_result_names.calculated_volume_names + calculated_volume_column_expressions: list[pl.Expr] = create_calculated_volume_column_expressions( + volume_df.columns, requested_calculated_volume_names, fluid_zone + ) + + # Create property column expressions + requested_properties = categorized_requested_result_names.property_names + property_column_expressions: list[pl.Expr] = create_property_column_expressions( + volume_df.columns, requested_properties, fluid_zone + ) 
+ + # Create result dataframe, select columns and calculate volumes + properties + column_names_and_expressions = ( + available_selector_columns + + available_requested_volume_names + + calculated_volume_column_expressions + + property_column_expressions + ) + result_df = volume_df.select(column_names_and_expressions) + + return result_df + + async def _create_volume_df_per_fluid_selection( + self, + table_name: str, + volume_names: set[str], + fluid_zones: list[FluidZone], + realizations: list[int] | None, + identifiers_with_values: list[InplaceVolumetricsIdentifierWithValues] = [], + accumulate_fluid_zones: bool = False, + ) -> dict[FluidSelection, pl.DataFrame]: + """ + This function creates a volumetric DataFrame per fluid selection + + The requested volume names are the set of result names and necessary result names to calculate properties. + Calculation of properties are handled outside this function. + + The dataframe is created by filtering the raw volumetric table based on the identifiers and realizations and then + accumulate the volumes across fluid zones. + + Input: + - table_name: str - Name of the table in Sumo + - volume_names: set[str] - All volume names needed from Sumo, including volume names needed for properties + - fluid_zones: list[FluidZone] - Fluid zones to create volumetric tables for + - realizations: list[int] - Realizations to include in the volumetric table + - identifiers_with_values: list[InplaceVolumetricsIdentifierWithValues] - Identifier values to filter the volumetric table, i.e. row filtering + - accumulate_fluid_zones: bool - Whether to accumulate the volumes across fluid zones + """ + + # Create the raw volumetric columns from all volume names and fluid zones + raw_volumetric_column_names = create_raw_volumetric_columns_from_volume_names_and_fluid_zones( + volume_names, fluid_zones + ) + + if not raw_volumetric_column_names: + # Combination of volume names and fluid zones did not result in any raw volumetric columns + return {} + + timer = PerfTimer() + # Get the raw volumetric table as DataFrame, filtered on identifiers and realizations + raw_volumetrics_df: pl.DataFrame = await self._get_inplace_volumetrics_table_as_polars_df_async( + table_name=table_name, volumetric_columns=set(raw_volumetric_column_names) + ) + row_filtered_raw_volumetrics_df = InplaceVolumetricsAssembler._create_row_filtered_volumetric_df( + table_name=table_name, + inplace_volumetrics_df=raw_volumetrics_df, + realizations=realizations, + identifiers_with_values=identifiers_with_values, + ) + + if row_filtered_raw_volumetrics_df is None: + # No data found for the given identifiers and realizations + return {} + + timer_create_raw_df = timer.lap_ms() + print(f"Time creating raw DataFrame: {timer_create_raw_df}ms") + + # Build a new table with one merged column per result and additional fluid zone column is created. + # I.e. where result column has values per fluid zone appended after each other. 
Num rows is then original num rows * num fluid zones + # E.g.: + # + # filtered_table.column_names = ["REAL", "ZONE", "REGION", "FACIES", "LICENSE", "STOIIP_OIL", "GIIP_GAS", "HCPV_OIL", "HCPV_GAS", "HCPV_WATER"] + # fluid_zones = [FluidZone.OIL, FluidZone.GAS, FluidZone.WATER] + # ["REAL", "ZONE", "REGION", "FACIES", "LICENSE", "STOIIP", "BO", "HCPV"] + volume_df_per_fluid_selection: dict[FluidSelection, pl.DataFrame] = {} + if accumulate_fluid_zones and len(fluid_zones) > 1: + # Build volume df summed across fluid zones + volumetric_summed_fluid_zones_df = create_volumetric_summed_fluid_zones_df( + row_filtered_raw_volumetrics_df, fluid_zones + ) + + volume_df_per_fluid_selection[FluidSelection.ACCUMULATED] = volumetric_summed_fluid_zones_df + return volume_df_per_fluid_selection + + # Handle each fluid zone separately + volume_df_per_fluid_zone: dict[FluidZone, pl.DataFrame] = create_volumetric_df_per_fluid_zone( + fluid_zones, row_filtered_raw_volumetrics_df + ) + + # Build volume df per fluid zone + for fluid_zone, volume_df in volume_df_per_fluid_zone.items(): + fluid_selection = convert_fluid_zone_to_fluid_selection(fluid_zone) + volume_df_per_fluid_selection[fluid_selection] = volume_df + + return volume_df_per_fluid_selection + + @staticmethod + def _create_row_filtered_volumetric_df( + table_name: str, + inplace_volumetrics_df: pl.DataFrame, + realizations: list[int] | None, + identifiers_with_values: list[InplaceVolumetricsIdentifierWithValues] = [], + ) -> pl.DataFrame | None: + """ + Create DataFrame filtered on identifier values and realizations + + The function filters the provided inplace volumetric DataFrame based on the identifiers and realizations provided. + If realizations is None, all realizations are included. + """ + if realizations is not None and len(realizations) == 0: + raise InvalidParameterError("Realizations must be a non-empty list or None", Service.GENERAL) + + column_names = inplace_volumetrics_df.columns + + # If any identifier column name is not found in the table, raise an error + for elm in identifiers_with_values: + identifier_column_name = elm.identifier.value + if identifier_column_name not in column_names: + raise ValueError(f"Identifier column name {identifier_column_name} not found in table {table_name}") + + timer = PerfTimer() + column_names = inplace_volumetrics_df.columns + + # Build mask for rows - default all rows + num_rows = inplace_volumetrics_df.height + mask = pl.Series([True] * num_rows) + + # Mask/filter out rows with ignored identifier values + for identifier_name in InplaceVolumetricsIdentifier: + if identifier_name.value in column_names: + ignored_identifier_values_mask = inplace_volumetrics_df[identifier_name.value].is_in( + IGNORED_IDENTIFIER_COLUMN_VALUES + ) + mask = mask & ~ignored_identifier_values_mask + + # Add mask for realizations + if realizations is not None: + # Check if every element in realizations exists in inplace_volumetrics_df["REAL"] + real_values_set = set(inplace_volumetrics_df["REAL"].to_list()) + missing_realizations_set = set(realizations) - real_values_set + + if missing_realizations_set: + raise ValueError( + f"Missing data error: The following realization values do not exist in 'REAL' column: {list(missing_realizations_set)}" + ) + + realization_mask = inplace_volumetrics_df["REAL"].is_in(realizations) + mask = mask & realization_mask + + # Add mask for each identifier filter + for identifier_with_values in identifiers_with_values: + if not identifier_with_values.values: + mask = pl.Series([False] * 
num_rows) + break + + identifier_column_name = identifier_with_values.identifier.value + identifier_mask = inplace_volumetrics_df[identifier_column_name].is_in(identifier_with_values.values) + mask = mask & identifier_mask + + filtered_df = inplace_volumetrics_df.filter(mask) + time_row_filtering = timer.lap_ms() + print(f"DATAFRAME row filtering (based on selectors): {time_row_filtering}ms") + + return filtered_df + + async def _get_inplace_volumetrics_table_as_polars_df_async( + self, table_name: str, volumetric_columns: set[str] + ) -> pl.DataFrame: + """ + Get the inplace volumetrics table as Polars DataFrame + """ + + # Get the inplace volumetrics table from collection in Sumo + # + # NOTE: + # Soft vs hard fail depends on detail level when building the volumetric columns from retrieved result names + fluid zones + # - Soft fail: get_inplace_volumetrics_table_no_throw_async() does not require matching volumetric column names + # - Hard fail: get_inplace_volumetrics_table_async() throws an exception if requested column names are not found + inplace_volumetrics_table: pa.Table = ( + await self._inplace_volumetrics_access.get_inplace_volumetrics_table_no_throw_async( + table_name=table_name, column_names=volumetric_columns + ) + ) + + return pl.DataFrame(inplace_volumetrics_table) diff --git a/backend_py/primary/primary/services/sumo_access/inplace_volumetrics_access.py b/backend_py/primary/primary/services/sumo_access/inplace_volumetrics_access.py index e95f13cb4..7735f8304 100644 --- a/backend_py/primary/primary/services/sumo_access/inplace_volumetrics_access.py +++ b/backend_py/primary/primary/services/sumo_access/inplace_volumetrics_access.py @@ -1,70 +1,48 @@ -import logging -from enum import Enum -from io import BytesIO -from typing import List, Optional, Sequence, Union +import asyncio +from typing import List, Optional -from concurrent.futures import ThreadPoolExecutor -import pandas as pd - -import pyarrow as pa -import pyarrow.compute as pc -import pyarrow.parquet as pq from fmu.sumo.explorer.objects import Case, TableCollection -from pydantic import ConfigDict, BaseModel - -from ._helpers import create_sumo_client, create_sumo_case_async -from .generic_types import EnsembleScalarResponse - -# from fmu.sumo.explorer.objects.table import AggregatedTable - - -LOGGER = logging.getLogger(__name__) - - -class PossibleInplaceVolumetricsCategoricalColumnNames(str, Enum): - ZONE = "ZONE" - REGION = "REGION" - FACIES = "FACIES" - LICENSE = "LICENSE" - - @classmethod - def has_value(cls, value: str) -> bool: - return value in cls._value2member_map_ - - -class PossibleInplaceVolumetricsNumericalColumnNames(str, Enum): - BULK_OIL = "BULK_OIL" - BULK_WATER = "BULK_WATER" - BULK_GAS = "BULK_GAS" - NET_OIL = "NET_OIL" - NET_WATER = "NET_WATER" - NET_GAS = "NET_GAS" - PORV_OIL = "PORV_OIL" - PORV_WATER = "PORV_WATER" - PORV_GAS = "PORV_GAS" - HCPV_OIL = "HCPV_OIL" - HCPV_GAS = "HCPV_GAS" - STOIIP_OIL = "STOIIP_OIL" - GIIP_GAS = "GIIP_GAS" - ASSOCIATEDGAS_OIL = "ASSOCIATEDGAS_OIL" - ASSOCIATEDOIL_GAS = "ASSOCIATEDOIL_GAS" - - @classmethod - def has_value(cls, value: str) -> bool: - return value in cls._value2member_map_ - -class InplaceVolumetricsCategoricalMetaData(BaseModel): - name: str - unique_values: List[Union[str, int, float]] - model_config = ConfigDict(from_attributes=True) +import pyarrow as pa +from webviz_pkg.core_utils.perf_timer import PerfTimer -class InplaceVolumetricsTableMetaData(BaseModel): - name: str - categorical_column_metadata: 
List[InplaceVolumetricsCategoricalMetaData] - numerical_column_names: List[str] - model_config = ConfigDict(from_attributes=True) +from ._helpers import create_sumo_client, create_sumo_case_async +from ..service_exceptions import ( + Service, + NoDataError, + InvalidDataError, +) + +# Index column values to ignore, i.e. remove from the volumetric tables +IGNORED_IDENTIFIER_COLUMN_VALUES = ["Totals"] + +# Allowed raw volumetric columns - from FMU Standard: +# Ref: https://github.com/equinor/fmu-dataio/blob/66e9683de5943d1b982c14ac926cf13007fc2bad/src/fmu/dataio/export/rms/volumetrics.py#L25-L47 +ALLOWED_RAW_VOLUMETRIC_COLUMNS = [ + "REAL", + "ZONE", + "REGION", + "LICENSE", + "FACIES", + "BULK_OIL", + "NET_OIL", + "PORV_OIL", + "HCPV_OIL", + "STOIIP_OIL", + "ASSOCIATEDGAS_OIL", + "BULK_GAS", + "NET_GAS", + "PORV_GAS", + "HCPV_GAS", + "GIIP_GAS", + "ASSOCIATEDOIL_GAS", + "BULK_TOTAL", + "NET_TOTAL", + "PORV_TOTAL", +] + +POSSIBLE_IDENTIFIER_COLUMNS = ["ZONE", "REGION", "FACIES", "LICENSE"] class InplaceVolumetricsAccess: @@ -81,117 +59,206 @@ async def from_case_uuid_async( case: Case = await create_sumo_case_async(client=sumo_client, case_uuid=case_uuid, want_keepalive_pit=False) return InplaceVolumetricsAccess(case=case, case_uuid=case_uuid, iteration_name=iteration_name) - async def get_table_names_and_metadata(self) -> List[InplaceVolumetricsTableMetaData]: - """Retrieve the available volumetric tables names and corresponding metadata for the case""" - vol_table_collections: TableCollection = self._case.tables.filter( - aggregation="collection", tagname="vol", iteration=self._iteration_name + @staticmethod + def get_possible_identifier_columns() -> List[str]: + return POSSIBLE_IDENTIFIER_COLUMNS + + @staticmethod + def get_possible_selector_columns() -> List[str]: + """ + The identifier columns and REAL column represent the selector columns of the volumetric table. + """ + return InplaceVolumetricsAccess.get_possible_identifier_columns() + ["REAL"] + + async def get_inplace_volumetrics_table_names_async(self) -> List[str]: + vol_table_collection = self._case.tables.filter( + aggregation="collection", + tagname=["vol", "volumes", "inplace"], + iteration=self._iteration_name, ) + table_names = await vol_table_collection.names_async + return table_names - vol_tables_metadata = [] - table_names = await vol_table_collections.names_async - for vol_table_name in table_names: - vol_table_collection: TableCollection = self._case.tables.filter( - aggregation="collection", - name=vol_table_name, - tagname="vol", - iteration=self._iteration_name, - ) - numerical_column_names = [ - col - for col in vol_table_collection.columns - if PossibleInplaceVolumetricsNumericalColumnNames.has_value(col) - ] - first_numerical_column_table = self.get_table(vol_table_name, numerical_column_names[0]) - categorical_column_metadata = [ - InplaceVolumetricsCategoricalMetaData( - name=col, - unique_values=pc.unique(first_numerical_column_table[col]).to_pylist(), - ) - for col in vol_table_collection.columns - if PossibleInplaceVolumetricsCategoricalColumnNames.has_value(col) - ] - vol_table_metadata = InplaceVolumetricsTableMetaData( - name=vol_table_name, - categorical_column_metadata=categorical_column_metadata, - numerical_column_names=numerical_column_names, - ) + async def get_inplace_volumetrics_table_no_throw_async( + self, table_name: str, column_names: Optional[set[str]] = None + ) -> Optional[pa.Table]: + """ + Get inplace volumetrics data for list of columns for given case and iteration as a pyarrow table. 
- vol_tables_metadata.append(vol_table_metadata) + The volumes are fetched from collection in Sumo and put together in a single table, i.e. a column per response. - return vol_tables_metadata + Note: This method does not throw an exception if requested column names are not found. - def get_table(self, table_name: str, column_name: str) -> pa.Table: - vol_table_collection: TableCollection = self._case.tables.filter( + Returns: + pa.Table with columns: ZONE, REGION, FACIES, REAL, and the available requested column names. + """ + # Get collection of tables per requested column + requested_columns = column_names if column_names is None else list(column_names) + vol_table_collection = self._case.tables.filter( aggregation="collection", name=table_name, - tagname="vol", + tagname=["vol", "volumes", "inplace"], iteration=self._iteration_name, - column=column_name, + column=requested_columns, + ) + + # Assemble tables into a single table + vol_table: pa.Table = await self._assemble_volumetrics_table_collection_into_single_table_async( + vol_table_collection=vol_table_collection, + table_name=table_name, + column_names=column_names, ) - if not vol_table_collection: - print(f"No aggregated volumetric tables found {self._case_uuid}, {table_name}, {column_name}") - print("Aggregating manually from realization tables...") - full_table = self.temporary_aggregate_from_realization_tables(table_name) - return full_table.select([column_name, "REAL", "FACIES", "ZONE", "REGION"]) - - if len(vol_table_collection) > 1: - raise ValueError(f"None or multiple volumetric tables found {self._case_uuid}, {table_name}, {column_name}") - vol_table = vol_table_collection[0] - byte_stream: BytesIO = vol_table.blob - table: pa.Table = pq.read_table(byte_stream) - return table - - def temporary_aggregate_from_realization_tables(self, table_name: str) -> pa.Table: - """Temporary function to aggregate from realization tables when no aggregated table is available - Assume Sumo will handle this in the future""" - vol_table_collection: TableCollection = self._case.tables.filter( - stage="realization", + + return vol_table + + async def get_inplace_volumetrics_table_async( + self, table_name: str, column_names: Optional[set[str]] = None + ) -> pa.Table: + """ + Get inplace volumetrics data for list of columns for given case and iteration as a pyarrow table. + + The volumes are fetched from collection in Sumo and put together in a single table, i.e. a column per response. + + Returns: + pa.Table with columns: ZONE, REGION, FACIES, REAL, and the requested column names. 
+ """ + + # Get collection of tables per requested column + requested_columns = column_names if column_names is None else list(column_names) + vol_table_collection = self._case.tables.filter( + aggregation="collection", name=table_name, - tagname="vol", + tagname=["vol", "volumes", "inplace"], iteration=self._iteration_name, + column=requested_columns, ) - if not vol_table_collection: - raise ValueError(f"No volumetric realization tables found {self._case_uuid}, {table_name}") - ### Using ThreadPoolExecutor to parallelize the download of the tables + # Expected columns + # - "REAL" is not an index in metadata, but is an expected column in the tables from collection + expected_repeated_collection_columns = set(self.get_possible_selector_columns()) - def worker(idx: int) -> pd.DataFrame: - vol_table = vol_table_collection[idx] - print(f"Downloading table: {table_name} for realization {vol_table.realization}") - byte_stream: BytesIO = vol_table.blob + # Find column names not among collection columns + collection_columns = await vol_table_collection.columns_async + remaining_collection_columns = set(collection_columns) - expected_repeated_collection_columns - table: pd.DataFrame = pd.read_csv(byte_stream) - table["REAL"] = vol_table.realization - return table + if column_names is not None and column_names != remaining_collection_columns: + missing_column_names = column_names - remaining_collection_columns + raise InvalidDataError( + f"Missing requested columns: {missing_column_names}, in the volumetric table collection {self._case_uuid}, {table_name}", + Service.SUMO, + ) + + # Assemble tables into a single table + vol_table: pa.Table = await self._assemble_volumetrics_table_collection_into_single_table_async( + vol_table_collection=vol_table_collection, + table_name=table_name, + column_names=column_names, + ) - with ThreadPoolExecutor() as executor: - tables = list(executor.map(worker, list(range(len(vol_table_collection))))) - tables = pd.concat(tables) - tables = pa.Table.from_pandas(tables) + # Validate the table columns + expected_table_columns = expected_repeated_collection_columns + if column_names: + expected_table_columns.update(column_names) + if not expected_table_columns.issubset(set(vol_table.column_names)): + missing_columns = expected_table_columns - set(vol_table.column_names) + raise InvalidDataError( + f"Missing columns: {missing_columns}, in the assembled volumetric table {self._case_uuid}, {table_name}", + Service.SUMO, + ) - return tables + return vol_table - def get_response( + async def _assemble_volumetrics_table_collection_into_single_table_async( self, + vol_table_collection: TableCollection, table_name: str, - column_name: str, - categorical_filters: Optional[List[InplaceVolumetricsCategoricalMetaData]] = None, - realizations: Optional[Sequence[int]] = None, - ) -> EnsembleScalarResponse: - """Retrieve the volumetric response for the given table name and column name""" - table = self.get_table(table_name, column_name) - if realizations is not None: - mask = pc.is_in(table["REAL"], value_set=pa.array(realizations)) - table = table.filter(mask) - if categorical_filters is not None: - for category in categorical_filters: - mask = pc.is_in(table[category.name], value_set=pa.array(category.unique_values)) - table = table.filter(mask) - print(table) - - summed_on_real_table = table.group_by("REAL").aggregate([(column_name, "sum")]).sort_by("REAL") - - return EnsembleScalarResponse( - realizations=summed_on_real_table["REAL"].to_pylist(), - 
values=summed_on_real_table[f"{column_name}_sum"].to_pylist(), + column_names: Optional[set[str]] = None, + ) -> pa.Table: + """ + Retrieve the inplace volumetrics tables from Sumo and assemble them into a single table. + + Index columns: ZONE, REGION, FACIES, REAL, LICENSE + Volume columns: column_names + + """ + timer = PerfTimer() + timer.lap_ms() + num_tables_in_collection = await vol_table_collection.length_async() + vol_table_columns = await vol_table_collection.columns_async + if num_tables_in_collection == 0: + raise NoDataError( + f"No inplace volumetrics tables found in case={self._case_uuid}, iteration={self._iteration_name}, table_name={table_name}, column_names={column_names}", + Service.SUMO, + ) + time_num_tables_and_collection_columns = timer.lap_ms() + + # Download tables in parallel + tasks = [asyncio.create_task(table.to_arrow_async()) for table in vol_table_collection] + arrow_tables: list[pa.Table] = await asyncio.gather(*tasks) + time_async_download_ms = timer.lap_ms() + + if len(arrow_tables) == 0: + raise NoDataError( + f"No inplace volumetrics tables found in case={self._case_uuid}, iteration={self._iteration_name}, table_name={table_name}, column_names={column_names}", + Service.SUMO, + ) + + # Expected selector columns + possible_selector_columns = set(self.get_possible_selector_columns()) + expected_selector_columns = possible_selector_columns.intersection(vol_table_columns) + + # Initialize volumetric table + volumes_table: pa.Table | None = None + + # Build table by adding response columns + for volume_table in arrow_tables: + # Find volumes among columns - expect only one volume column + volume_names_set = set(volume_table.column_names) - expected_selector_columns + + if column_names is None and len(volume_names_set) == 0: + # When no column names are specified, we skip tables with only selector columns and no volume columns + # E.g. if a selector columns is incorrectly added as a volume column - we skip the table + continue + + # When requesting volume columns, we expect one volume name per table in the collection + if len(volume_names_set) == 0: + raise InvalidDataError( + f"Table {table_name} has collection without volume column. 
Collection only has columns defined as selectors: {volume_table.column_names}", + Service.SUMO, + ) + if len(volume_names_set) != 1: + raise InvalidDataError( + f"Table {table_name} has collection with more than one column for volume: {volume_names_set}", + Service.SUMO, + ) + + volume_name = list(volume_names_set)[0] + if volume_name not in ALLOWED_RAW_VOLUMETRIC_COLUMNS: + # Skip invalid volume columns + continue + + # Initialize table with first valid volume column + if volumes_table is None: + volumes_table = volume_table + continue + + # Add volume column to table + volume_column = volume_table[volume_name] + volumes_table = volumes_table.append_column(volume_name, volume_column) + + time_build_single_table_ms = timer.lap_ms() + + if volumes_table is None: + raise NoDataError( + f"No valid inplace volumetrics tables found in case={self._case_uuid}, iteration={self._iteration_name}, table_name={table_name}, column_names={column_names}", + Service.SUMO, + ) + + print( + f"Access Volumetric collection tables: count tables and column names: {time_num_tables_and_collection_columns}ms, " + f"collection download: {time_async_download_ms}ms, " + f"assemble into single table: {time_build_single_table_ms}ms, " + f"Total time: {timer.elapsed_ms()}ms" ) + + return volumes_table diff --git a/backend_py/primary/primary/services/sumo_access/inplace_volumetrics_types.py b/backend_py/primary/primary/services/sumo_access/inplace_volumetrics_types.py new file mode 100644 index 000000000..a4071df31 --- /dev/null +++ b/backend_py/primary/primary/services/sumo_access/inplace_volumetrics_types.py @@ -0,0 +1,189 @@ +from enum import StrEnum +from dataclasses import dataclass +from typing import Dict, List, Union + + +# NOTE: +# - AccumulateByEach -> InplaceVolumetricsIndexNames +# - Later on: InplaceVolumetricsIndexNames -> InplaceVolumetricsIdentifier +# - response -> result(s) +# - results = volume (directly from SUMO columns w/o suffix) + property (calculated from volumes) + + +class InplaceVolumetricResultName(StrEnum): + """Allowed volumetric response names""" + + BULK = "BULK" + NET = "NET" + PORO = "PORO" + PORO_NET = "PORO_NET" + PORV = "PORV" + HCPV = "HCPV" + STOIIP = "STOIIP" + GIIP = "GIIP" + NTG = "NTG" + ASSOCIATEDGAS = "ASSOCIATEDGAS" + ASSOCIATEDOIL = "ASSOCIATEDOIL" + BO = "BO" + BG = "BG" + SW = "SW" + STOIIP_TOTAL = "STOIIP_TOTAL" + GIIP_TOTAL = "GIIP_TOTAL" + + +class InplaceVolumetricsIdentifier(StrEnum): + """ + Definition of valid index names for an inplace volumetrics table + """ + + ZONE = "ZONE" + REGION = "REGION" + FACIES = "FACIES" + LICENSE = "LICENSE" + + +class FluidZone(StrEnum): + OIL = "Oil" + GAS = "Gas" + WATER = "Water" + + +class FluidSelection(StrEnum): + OIL = "Oil" + GAS = "Gas" + WATER = "Water" + ACCUMULATED = "Accumulated" + + +class CalculatedVolume(StrEnum): + STOIIP_TOTAL = "STOIIP_TOTAL" + GIIP_TOTAL = "GIIP_TOTAL" + + +class Property(StrEnum): + NTG = "NTG" + PORO = "PORO" + PORO_NET = "PORO_NET" + SW = "SW" + BO = "BO" + BG = "BG" + + +class Statistic(StrEnum): + """ + Definition of possible statistics for a result column in an inplace volumetrics table + """ + + MEAN = "mean" + STD_DEV = "stddev" + MAX = "max" + MIN = "min" + P10 = "p10" + P90 = "p90" + + +@dataclass +class CategorizedResultNames: + """ + Class to hold categorized result names + + Attributes: + - volume_names: List[str] - Basic volume names among result names + - calculated_volume_names: List[str] - Calculated volume names among result names (STOIIP_TOTAL, GIIP_TOTAL) + - property_names: 
List[str] - Property names among result names + """ + + volume_names: List[str] + calculated_volume_names: List[str] + property_names: List[str] + + +@dataclass +class InplaceVolumetricsIdentifierWithValues: + """ + Unique values for an identifier column in an inplace volumetrics table + + NOTE: Ideally all values should be strings, but it is possible that some values are integers - especially for REGION + """ + + identifier: InplaceVolumetricsIdentifier + values: List[Union[str, int]] # List of values: str or int + + +@dataclass +class InplaceVolumetricsTableDefinition: + """Definition of a volumetric table""" + + table_name: str + identifiers_with_values: List[InplaceVolumetricsIdentifierWithValues] + result_names: List[str] + fluid_zones: List[FluidZone] + + +@dataclass +class RepeatedTableColumnData: + """Definition of a column with repeated column data""" + + column_name: str + unique_values: List[str | int] # ["Valysar", "Therys", "Volon"] + indices: List[int] # [0, 1, 1, 1, 2, 2, 2]. Length = number of rows in the table + + +@dataclass +class TableColumnData: + column_name: str + values: List[float] # Column values Length = number of rows in the table + + +@dataclass +class TableColumnStatisticalData: + column_name: str + statistic_values: Dict[Statistic, List[float]] # Statistics values Length = number of rows in the table + + +@dataclass +class InplaceVolumetricTableData: + """Volumetric data for a single table + + Contains data for a single fluid zone, e.g. Oil, Gas, Water, or sum of fluid zones + """ + + # fluid_zones: List[FluidZone] # Oil, Gas, Water or "Oil + Gas", etc. + fluid_selection_name: str # Oil, Gas, Water or "Oil + Gas", etc. + selector_columns: List[RepeatedTableColumnData] # Index columns and realizations + result_columns: List[TableColumnData] + + +@dataclass +class InplaceStatisticalVolumetricTableData: + """ + Statistical volumetric data for single volume table + + Contains data for a single fluid zone, e.g. Oil, Gas, Water, or sum of fluid zones + """ + + fluid_selection_name: str # Oil, Gas, Water or "Oil + Gas", etc. + selector_columns: List[RepeatedTableColumnData] # Index columns and realizations + result_column_statistics: List[TableColumnStatisticalData] + + +@dataclass +class InplaceVolumetricTableDataPerFluidSelection: + """ + Volumetric data for a single table per fluid selection + + Fluid selection can be single fluid zones, e.g. Oil, Gas, Water, or sum of fluid zones - Oil + Gas + Water + """ + + table_data_per_fluid_selection: List[InplaceVolumetricTableData] + + +@dataclass +class InplaceStatisticalVolumetricTableDataPerFluidSelection: + """ + Statistical volumetric data for a single table per fluid selection + + Fluid selection can be single fluid zones, e.g. 
Oil, Gas, Water, or sum of fluid zones - Oil + Gas + Water + """ + + table_data_per_fluid_selection: List[InplaceStatisticalVolumetricTableData] diff --git a/backend_py/primary/pyproject.toml b/backend_py/primary/pyproject.toml index f28c866de..240326c1e 100644 --- a/backend_py/primary/pyproject.toml +++ b/backend_py/primary/pyproject.toml @@ -28,6 +28,7 @@ pottery = "^3.0.0" xtgeo = "^3.8.0" core_utils = {path = "../libs/core_utils", develop = true} server_schemas = {path = "../libs/server_schemas", develop = true} +polars = "^1.6.0" [tool.poetry.group.dev.dependencies] black = "^22.12.0" diff --git a/backend_py/primary/tests/unit/inplace_volumetrics_assembler/test_create_row_filtered_volumetric_df.py b/backend_py/primary/tests/unit/inplace_volumetrics_assembler/test_create_row_filtered_volumetric_df.py new file mode 100644 index 000000000..6404f95a5 --- /dev/null +++ b/backend_py/primary/tests/unit/inplace_volumetrics_assembler/test_create_row_filtered_volumetric_df.py @@ -0,0 +1,137 @@ +import re +from typing import List +import pytest +import polars as pl + +from primary.services.inplace_volumetrics_assembler.inplace_volumetrics_assembler import InplaceVolumetricsAssembler +from primary.services.sumo_access.inplace_volumetrics_types import ( + InplaceVolumetricsIdentifier, + InplaceVolumetricsIdentifierWithValues, +) +from primary.services.sumo_access.inplace_volumetrics_access import IGNORED_IDENTIFIER_COLUMN_VALUES +from primary.services.service_exceptions import InvalidParameterError + + +@pytest.fixture +def inplace_volumetrics_df() -> pl.DataFrame: + return pl.DataFrame({"REAL": [1, 2, 3], "ZONE": ["A", "B", "C"], "VOLUME": [10, 20, 30]}) + + +def test_create_row_filtered_volumetric_df_no_realizations(inplace_volumetrics_df: pl.DataFrame) -> None: + empty_realizations_list: List[int] = [] + with pytest.raises(InvalidParameterError, match="Realizations must be a non-empty list or None"): + InplaceVolumetricsAssembler._create_row_filtered_volumetric_df( + table_name="test_table", inplace_volumetrics_df=inplace_volumetrics_df, realizations=empty_realizations_list + ) + + # assert result_df is None + + +def test_create_row_filtered_volumetric_df_no_data_found(inplace_volumetrics_df: pl.DataFrame) -> None: + with pytest.raises( + ValueError, + match=re.escape("Missing data error: The following realization values do not exist in 'REAL' column: [4, 5]"), + ): + InplaceVolumetricsAssembler._create_row_filtered_volumetric_df( + table_name="test_table", inplace_volumetrics_df=inplace_volumetrics_df, realizations=[4, 5] + ) + + +def test_create_row_filtered_volumetric_df_with_realizations(inplace_volumetrics_df: pl.DataFrame) -> None: + valid_realizations = [1, 2] + result_df = InplaceVolumetricsAssembler._create_row_filtered_volumetric_df( + table_name="test_table", inplace_volumetrics_df=inplace_volumetrics_df, realizations=valid_realizations + ) + + expected_df = pl.DataFrame({"REAL": [1, 2], "ZONE": ["A", "B"], "VOLUME": [10, 20]}) + + assert result_df is not None + assert result_df.sort("REAL").equals(expected_df) + + +def test_create_row_filtered_volumetric_df_with_identifiers(inplace_volumetrics_df: pl.DataFrame) -> None: + identifiers_with_values = [ + InplaceVolumetricsIdentifierWithValues(identifier=InplaceVolumetricsIdentifier("ZONE"), values=["A", "C"]) + ] + result_df = InplaceVolumetricsAssembler._create_row_filtered_volumetric_df( + table_name="test_table", + inplace_volumetrics_df=inplace_volumetrics_df, + realizations=None, + identifiers_with_values=identifiers_with_values, 
+ ) + + expected_df = pl.DataFrame({"REAL": [1, 3], "ZONE": ["A", "C"], "VOLUME": [10, 30]}) + + assert result_df is not None + assert result_df.sort("REAL").equals(expected_df) + + +def test_create_row_filtered_volumetric_df_missing_identifier_column(inplace_volumetrics_df: pl.DataFrame) -> None: + identifiers_with_values = [ + InplaceVolumetricsIdentifierWithValues(identifier=InplaceVolumetricsIdentifier("REGION"), values=["X", "Y"]) + ] + with pytest.raises(ValueError, match="Identifier column name REGION not found in table test_table"): + InplaceVolumetricsAssembler._create_row_filtered_volumetric_df( + table_name="test_table", + inplace_volumetrics_df=inplace_volumetrics_df, + realizations=None, + identifiers_with_values=identifiers_with_values, + ) + + +def test_create_row_filtered_volumetric_df_with_ignored_identifier_values() -> None: + # IGNORED_IDENTIFIER_COLUMN_VALUES = ["Totals"] + ignored_value = IGNORED_IDENTIFIER_COLUMN_VALUES[0] + + inplace_volumetrics_df = pl.DataFrame( + {"REAL": [1, 2, 3], "ZONE": ["A", "B", ignored_value], "VOLUME": [10, 20, 30]} + ) + + identifiers_with_values = [ + InplaceVolumetricsIdentifierWithValues( + identifier=InplaceVolumetricsIdentifier("ZONE"), values=["A", "B", ignored_value] + ) + ] + + result_df = InplaceVolumetricsAssembler._create_row_filtered_volumetric_df( + table_name="test_table", + inplace_volumetrics_df=inplace_volumetrics_df, + realizations=None, + identifiers_with_values=identifiers_with_values, + ) + + expected_df = pl.DataFrame({"REAL": [1, 2], "ZONE": ["A", "B"], "VOLUME": [10, 20]}) + + assert result_df is not None + assert result_df.sort("REAL").equals(expected_df) + + +def test_create_row_filtered_volumetric_df_with_realizations_and_identifiers() -> None: + inplace_volumetrics_df = pl.DataFrame( + { + "REAL": [1, 2, 3, 4], + "ZONE": ["A", "B", "C", "D"], + "REGION": ["X", "Y", "Z", "W"], + "VOLUME": [10, 20, 30, 40], + } + ) + + wanted_realizations = [1, 2, 3] + identifiers_with_values = [ + InplaceVolumetricsIdentifierWithValues(identifier=InplaceVolumetricsIdentifier("ZONE"), values=["A", "C", "D"]), + InplaceVolumetricsIdentifierWithValues( + identifier=InplaceVolumetricsIdentifier("REGION"), values=["X", "Y", "Z"] + ), + ] + + expected_df = pl.DataFrame({"REAL": [1, 3], "ZONE": ["A", "C"], "REGION": ["X", "Z"], "VOLUME": [10, 30]}) + + result_df = InplaceVolumetricsAssembler._create_row_filtered_volumetric_df( + table_name="test_table", + inplace_volumetrics_df=inplace_volumetrics_df, + realizations=wanted_realizations, + identifiers_with_values=identifiers_with_values, + ) + + assert result_df is not None + assert result_df.sort("REAL").equals(expected_df) diff --git a/backend_py/primary/tests/unit/inplace_volumetrics_assembler/utils/test_convert_statistical_df_to_statistical_result_table_data.py b/backend_py/primary/tests/unit/inplace_volumetrics_assembler/utils/test_convert_statistical_df_to_statistical_result_table_data.py new file mode 100644 index 000000000..ca156550e --- /dev/null +++ b/backend_py/primary/tests/unit/inplace_volumetrics_assembler/utils/test_convert_statistical_df_to_statistical_result_table_data.py @@ -0,0 +1,86 @@ +import pytest +import polars as pl + +from primary.services.inplace_volumetrics_assembler._utils import ( + _convert_statistical_df_to_statistical_result_table_data, +) +from primary.services.sumo_access.inplace_volumetrics_types import Statistic + + +def test_convert_statistical_df_to_statistical_result_table_data() -> None: + # Create a sample statistical DataFrame + data = { + 
"ZONE": ["A", "B", "A", "B"], + "REGION": ["X", "X", "Y", "Y"], + "result1_mean": [10.0, 20.0, 30.0, 40.0], + "result1_stddev": [1.0, 2.0, 3.0, 4.0], + "result1_min": [5.0, 15.0, 25.0, 35.0], + "result1_max": [15.0, 25.0, 35.0, 45.0], + "result1_p10": [8.0, 18.0, 28.0, 38.0], + "result1_p90": [12.0, 22.0, 32.0, 42.0], + } + statistical_df = pl.DataFrame(data) + + valid_result_names = ["result1"] + requested_statistics = [ + Statistic.MEAN, + Statistic.STD_DEV, + Statistic.MIN, + Statistic.MAX, + Statistic.P10, + Statistic.P90, + ] + + selector_column_data_list, results_statistical_data_list = _convert_statistical_df_to_statistical_result_table_data( + statistical_df, valid_result_names, requested_statistics + ) + + # Assertions (not control of order of the unique values, thus output has to be sorted) + assert len(selector_column_data_list) == 2 + assert selector_column_data_list[0].column_name == "ZONE" + assert sorted(selector_column_data_list[0].unique_values) == ["A", "B"] + assert sorted(selector_column_data_list[0].indices) == [0, 0, 1, 1] + + assert selector_column_data_list[1].column_name == "REGION" + assert sorted(selector_column_data_list[1].unique_values) == ["X", "Y"] + assert sorted(selector_column_data_list[1].indices) == [0, 0, 1, 1] + + assert len(results_statistical_data_list) == 1 + result_statistical_data = results_statistical_data_list[0] + assert result_statistical_data.column_name == "result1" + assert result_statistical_data.statistic_values[Statistic.MEAN] == [10.0, 20.0, 30.0, 40.0] + assert result_statistical_data.statistic_values[Statistic.STD_DEV] == [1.0, 2.0, 3.0, 4.0] + assert result_statistical_data.statistic_values[Statistic.MIN] == [5.0, 15.0, 25.0, 35.0] + assert result_statistical_data.statistic_values[Statistic.MAX] == [15.0, 25.0, 35.0, 45.0] + assert result_statistical_data.statistic_values[Statistic.P10] == [8.0, 18.0, 28.0, 38.0] + assert result_statistical_data.statistic_values[Statistic.P90] == [12.0, 22.0, 32.0, 42.0] + + +def test_convert_statistical_df_to_statistical_result_table_data_missing_column() -> None: + # Create a sample statistical DataFrame with a missing column + data = { + "selector1": ["A", "B", "A", "B"], + "selector2": [1, 2, 1, 2], + "result1_mean": [10.0, 20.0, 30.0, 40.0], + "result1_stddev": [1.0, 2.0, 3.0, 4.0], + "result1_min": [5.0, 15.0, 25.0, 35.0], + "result1_max": [15.0, 25.0, 35.0, 45.0], + "result1_p10": [8.0, 18.0, 28.0, 38.0], + # Missing result1_p90 column + } + statistical_df = pl.DataFrame(data) + + valid_result_names = ["result1"] + requested_statistics = [ + Statistic.MEAN, + Statistic.STD_DEV, + Statistic.MIN, + Statistic.MAX, + Statistic.P10, + Statistic.P90, + ] + + with pytest.raises(ValueError, match="Column result1_p90 not found in statistical table"): + _convert_statistical_df_to_statistical_result_table_data( + statistical_df, valid_result_names, requested_statistics + ) diff --git a/backend_py/primary/tests/unit/inplace_volumetrics_assembler/utils/test_create_calculated_volume_column_expressions.py b/backend_py/primary/tests/unit/inplace_volumetrics_assembler/utils/test_create_calculated_volume_column_expressions.py new file mode 100644 index 000000000..e9df5bfc1 --- /dev/null +++ b/backend_py/primary/tests/unit/inplace_volumetrics_assembler/utils/test_create_calculated_volume_column_expressions.py @@ -0,0 +1,70 @@ +import polars as pl +from primary.services.inplace_volumetrics_assembler._utils import ( + create_calculated_volume_column_expressions, + _create_named_expression_with_nan_for_inf, + 
FluidZone, +) + + +def test_create_calculated_volume_column_expressions_no_fluid_zone() -> None: + volume_df_columns = ["STOIIP", "ASSOCIATEDOIL", "GIIP", "ASSOCIATEDGAS"] + calculated_volumes = ["STOIIP_TOTAL", "GIIP_TOTAL"] + expressions = create_calculated_volume_column_expressions(volume_df_columns, calculated_volumes) + + first_expected_expression = _create_named_expression_with_nan_for_inf( + pl.col("STOIIP") + pl.col("ASSOCIATEDOIL"), "STOIIP_TOTAL" + ) + second_expected_expression = _create_named_expression_with_nan_for_inf( + pl.col("GIIP") + pl.col("ASSOCIATEDGAS"), "GIIP_TOTAL" + ) + + assert len(expressions) == 2 + assert str(expressions[0]) == str(first_expected_expression) + assert str(expressions[1]) == str(second_expected_expression) + + +def test_create_calculated_volume_column_expressions_oil_zone() -> None: + volume_df_columns = ["STOIIP", "ASSOCIATEDOIL", "GIIP", "ASSOCIATEDGAS"] + calculated_volumes = ["STOIIP_TOTAL", "GIIP_TOTAL"] + expressions = create_calculated_volume_column_expressions(volume_df_columns, calculated_volumes, FluidZone.OIL) + + first_expected_expression = _create_named_expression_with_nan_for_inf(pl.col("STOIIP"), "STOIIP_TOTAL") + second_expected_expression = _create_named_expression_with_nan_for_inf(pl.col("ASSOCIATEDGAS"), "GIIP_TOTAL") + + assert len(expressions) == 2 + assert str(expressions[0]) == str(first_expected_expression) + assert str(expressions[1]) == str(second_expected_expression) + + +def test_create_calculated_volume_column_expressions_gas_zone() -> None: + volume_df_columns = ["STOIIP", "ASSOCIATEDOIL", "GIIP", "ASSOCIATEDGAS"] + calculated_volumes = ["GIIP_TOTAL", "STOIIP_TOTAL"] + expressions = create_calculated_volume_column_expressions(volume_df_columns, calculated_volumes, FluidZone.GAS) + + first_expected_expression = _create_named_expression_with_nan_for_inf(pl.col("ASSOCIATEDOIL"), "STOIIP_TOTAL") + second_expected_expression = _create_named_expression_with_nan_for_inf(pl.col("GIIP"), "GIIP_TOTAL") + + assert len(expressions) == 2 + assert str(expressions[0]) == str(first_expected_expression) + assert str(expressions[1]) == str(second_expected_expression) + + +def test_create_calculated_volume_column_expressions_missing_columns() -> None: + volume_df_columns = ["STOIIP"] + calculated_volumes = ["STOIIP_TOTAL", "GIIP_TOTAL"] + expressions = create_calculated_volume_column_expressions(volume_df_columns, calculated_volumes) + + assert len(expressions) == 0 + + +def test_create_calculated_volume_column_expressions_partial_columns() -> None: + volume_df_columns = ["STOIIP", "ASSOCIATEDOIL", "GIIP"] # Missing ASSOCIATEDGAS when fluid_zone is None + calculated_volumes = ["STOIIP_TOTAL", "GIIP_TOTAL"] + expressions = create_calculated_volume_column_expressions(volume_df_columns, calculated_volumes) + + expected_expression = _create_named_expression_with_nan_for_inf( + pl.col("STOIIP") + pl.col("ASSOCIATEDOIL"), "STOIIP_TOTAL" + ) + + assert len(expressions) == 1 + assert str(expressions[0]) == str(expected_expression) diff --git a/backend_py/primary/tests/unit/inplace_volumetrics_assembler/utils/test_create_grouped_statistical_result_table_data_polars.py b/backend_py/primary/tests/unit/inplace_volumetrics_assembler/utils/test_create_grouped_statistical_result_table_data_polars.py new file mode 100644 index 000000000..3b332d7ec --- /dev/null +++ b/backend_py/primary/tests/unit/inplace_volumetrics_assembler/utils/test_create_grouped_statistical_result_table_data_polars.py @@ -0,0 +1,104 @@ +from typing import List +import pytest 
+import polars as pl + +from primary.services.inplace_volumetrics_assembler._utils import ( + create_grouped_statistical_result_table_data_polars, +) +from primary.services.sumo_access.inplace_volumetrics_types import ( + Statistic, + InplaceVolumetricsIdentifier, +) +from primary.services.service_exceptions import InvalidParameterError + + +def test_create_grouped_statistical_result_table_data_polars() -> None: + # Create a sample result DataFrame + data = { + "ZONE": ["A", "B", "A", "B"], + "REGION": ["X", "X", "Y", "Y"], + "REAL": [1, 2, 1, 2], + "result1": [10.0, 20.0, 30.0, 40.0], + } + result_df = pl.DataFrame(data) + + # Group by zone, i.e. get two unique selector values ["A", "B"] + group_by_identifiers = [InplaceVolumetricsIdentifier.ZONE] + + selector_column_data_list, results_statistical_data_list = create_grouped_statistical_result_table_data_polars( + result_df, group_by_identifiers + ) + + # Assertions (not control of order of the unique values, thus output has to be sorted) + assert len(selector_column_data_list) == 1 + assert selector_column_data_list[0].column_name == "ZONE" + assert sorted(selector_column_data_list[0].unique_values) == ["A", "B"] + assert sorted(selector_column_data_list[0].indices) == [0, 1] + + # Extract order of rows in the result data + first_index = selector_column_data_list[0].indices[0] + is_a_zone_first = selector_column_data_list[0].unique_values[first_index] == "A" + + # Expected statistical values + mean_values = [20.0, 30.0] if is_a_zone_first else [30.0, 20.0] + std_dev_values = [14.142135623730951, 14.142135623730951] + min_values = [10.0, 20.0] if is_a_zone_first else [20.0, 10.0] + max_values = [30.0, 40.0] if is_a_zone_first else [40.0, 30.0] + p10_values = [28.0, 38.0] if is_a_zone_first else [38.0, 28.0] + p90_values = [12.0, 22.0] if is_a_zone_first else [22.0, 12.0] + + assert len(results_statistical_data_list) == 1 + result_statistical_data = results_statistical_data_list[0] + assert result_statistical_data.column_name == "result1" + assert result_statistical_data.statistic_values[Statistic.MEAN] == mean_values + assert result_statistical_data.statistic_values[Statistic.STD_DEV] == std_dev_values + assert result_statistical_data.statistic_values[Statistic.MIN] == min_values + assert result_statistical_data.statistic_values[Statistic.MAX] == max_values + assert result_statistical_data.statistic_values[Statistic.P10] == p10_values + assert result_statistical_data.statistic_values[Statistic.P90] == p90_values + + +def test_create_grouped_statistical_result_table_data_polars_no_grouping() -> None: + # Create a sample result DataFrame + data = { + "ZONE": ["A", "A", "B", "B"], + "REGION": ["X", "X", "Y", "Y"], + "REAL": [1, 2, 1, 2], + "result1": [10.0, 20.0, 30.0, 40.0], + } + result_df = pl.DataFrame(data) + + group_by_identifiers = None + + selector_column_data_list, results_statistical_data_list = create_grouped_statistical_result_table_data_polars( + result_df, group_by_identifiers + ) + + # Assertions + assert len(selector_column_data_list) == 0 + + assert len(results_statistical_data_list) == 1 + result_statistical_data = results_statistical_data_list[0] + assert result_statistical_data.column_name == "result1" + assert result_statistical_data.statistic_values[Statistic.MEAN] == [25.0] + assert result_statistical_data.statistic_values[Statistic.STD_DEV] == [12.909944487358056] + assert result_statistical_data.statistic_values[Statistic.MIN] == [10.0] + assert result_statistical_data.statistic_values[Statistic.MAX] == [40.0] + 
assert result_statistical_data.statistic_values[Statistic.P10] == [37.0] + assert result_statistical_data.statistic_values[Statistic.P90] == [13.0] + + +def test_create_grouped_statistical_result_table_data_polars_empty_grouping_list() -> None: + # Create a sample result DataFrame + data = { + "ZONE": ["A", "A", "B", "B"], + "REGION": ["X", "X", "Y", "Y"], + "REAL": [1, 2, 1, 2], + "result1": [10.0, 20.0, 30.0, 40.0], + } + result_df = pl.DataFrame(data) + + empty_group_by_identifiers_list: List[InplaceVolumetricsIdentifier] = [] + + with pytest.raises(InvalidParameterError, match="Group by identifiers must be a non-empty list or None"): + create_grouped_statistical_result_table_data_polars(result_df, empty_group_by_identifiers_list) diff --git a/backend_py/primary/tests/unit/inplace_volumetrics_assembler/utils/test_create_inplace_volumetric_table_data_from_result_df.py b/backend_py/primary/tests/unit/inplace_volumetrics_assembler/utils/test_create_inplace_volumetric_table_data_from_result_df.py new file mode 100644 index 000000000..bc781897f --- /dev/null +++ b/backend_py/primary/tests/unit/inplace_volumetrics_assembler/utils/test_create_inplace_volumetric_table_data_from_result_df.py @@ -0,0 +1,62 @@ +import polars as pl + +from primary.services.inplace_volumetrics_assembler._utils import create_inplace_volumetric_table_data_from_result_df + + +def test_create_inplace_volumetric_table_data_from_result_df() -> None: + # Test case 1: Basic functionality + result_df = pl.DataFrame( + { + "REAL": [1, 2, 3], + "ZONE": ["A", "B", "C"], + "REGION": ["X", "Y", "Z"], + "FACIES": ["F1", "F2", "F3"], + "STOIIP": [100, 200, 300], + "GIIP": [400, 500, 600], + } + ) + selection_name = "test_selection" + + result = create_inplace_volumetric_table_data_from_result_df(result_df, selection_name) + + assert result.fluid_selection_name == selection_name + assert len(result.selector_columns) == 4 + assert len(result.result_columns) == 2 + + +def test_create_inplace_volumetric_table_data_from_result_df_no_selector_columns() -> None: + # Test case 2: No selector columns + result_df = pl.DataFrame({"STOIIP": [100, 200, 300], "GIIP": [400, 500, 600]}) + selection_name = "test_selection_no_selector" + + result = create_inplace_volumetric_table_data_from_result_df(result_df, selection_name) + + assert result.fluid_selection_name == selection_name + assert len(result.selector_columns) == 0 + assert len(result.result_columns) == 2 + + +def test_create_inplace_volumetric_table_data_from_result_df_no_result_columns() -> None: + # Test case 3: No result columns + result_df = pl.DataFrame( + {"REAL": [1, 2, 3], "ZONE": ["A", "B", "C"], "REGION": ["X", "Y", "Z"], "FACIES": ["F1", "F2", "F3"]} + ) + selection_name = "test_selection_no_result" + + result = create_inplace_volumetric_table_data_from_result_df(result_df, selection_name) + + assert result.fluid_selection_name == selection_name + assert len(result.selector_columns) == 4 + assert len(result.result_columns) == 0 + + +def test_create_inplace_volumetric_table_data_from_result_df_empty_df() -> None: + # Test case 4: Empty DataFrame + result_df = pl.DataFrame() + selection_name = "test_selection_empty" + + result = create_inplace_volumetric_table_data_from_result_df(result_df, selection_name) + + assert result.fluid_selection_name == selection_name + assert len(result.selector_columns) == 0 + assert len(result.result_columns) == 0 diff --git a/backend_py/primary/tests/unit/inplace_volumetrics_assembler/utils/test_create_per_group_summed_realization_volume_df.py 
b/backend_py/primary/tests/unit/inplace_volumetrics_assembler/utils/test_create_per_group_summed_realization_volume_df.py new file mode 100644 index 000000000..f3814e7fe --- /dev/null +++ b/backend_py/primary/tests/unit/inplace_volumetrics_assembler/utils/test_create_per_group_summed_realization_volume_df.py @@ -0,0 +1,98 @@ +from typing import List +import pytest +import polars as pl + +from primary.services.inplace_volumetrics_assembler._utils import create_per_group_summed_realization_volume_df +from primary.services.sumo_access.inplace_volumetrics_types import InplaceVolumetricsIdentifier + + +def test_create_per_group_summed_realization_volume_df() -> None: + # Create a sample DataFrame + volume_df = pl.DataFrame( + { + "REAL": [1, 1, 2, 2], + "ZONE": ["A", "A", "B", "B"], + "REGION": ["X", "X", "Y", "Y"], + "VOLUME1": [10, 20, 30, 40], + "VOLUME2": [100, 200, 300, 400], + } + ) + + # Define group by identifiers + group_by_identifiers = [ + InplaceVolumetricsIdentifier.ZONE, + InplaceVolumetricsIdentifier.REGION, + ] + + # Call the function + result_df = create_per_group_summed_realization_volume_df(volume_df, group_by_identifiers) + + # Expected result + expected_df = pl.DataFrame( + { + "REAL": [1, 2], + "ZONE": ["A", "B"], + "REGION": ["X", "Y"], + "VOLUME1": [30, 70], + "VOLUME2": [300, 700], + } + ) + + # Sort result_df and reorder columns + result_df = result_df.sort("REAL", "ZONE", "REGION").select(["REAL", "ZONE", "REGION", "VOLUME1", "VOLUME2"]) + + # Assert the result + assert result_df.equals(expected_df) + + +def test_create_per_group_summed_realization_volume_df_no_group_by_identifiers() -> None: + # Create a sample DataFrame + volume_df = pl.DataFrame( + { + "REAL": [1, 1, 2, 2], + "ZONE": ["A", "A", "B", "B"], + "REGION": ["X", "X", "Y", "Y"], + "VOLUME1": [10, 20, 30, 40], + "VOLUME2": [100, 200, 300, 400], + } + ) + + # Define empty group by identifiers + group_by_identifiers: List[InplaceVolumetricsIdentifier] = [] + + # Call the function + result_df = create_per_group_summed_realization_volume_df(volume_df, group_by_identifiers).sort("REAL") + + # Expected result + expected_df = pl.DataFrame( + { + "REAL": [1, 2], + "VOLUME1": [30, 70], + "VOLUME2": [300, 700], + } + ) + + # Assert the result + assert result_df.equals(expected_df) + + +def test_create_per_group_summed_realization_volume_df_missing_real_column() -> None: + # Create a sample DataFrame without the "REAL" column + volume_df = pl.DataFrame( + { + "ZONE": ["A", "A", "B", "B"], + "REGION": ["X", "X", "Y", "Y"], + "VOLUME1": [10, 20, 30, 40], + "VOLUME2": [100, 200, 300, 400], + } + ) + + # Define group by identifiers + group_by_identifiers = [ + InplaceVolumetricsIdentifier.ZONE, + InplaceVolumetricsIdentifier.REGION, + ] + + # Call the function and expect a ValueError + with pytest.raises(ValueError, match="REAL column not found in volume DataFrame"): + create_per_group_summed_realization_volume_df(volume_df, group_by_identifiers) diff --git a/backend_py/primary/tests/unit/inplace_volumetrics_assembler/utils/test_create_property_column_expressions.py b/backend_py/primary/tests/unit/inplace_volumetrics_assembler/utils/test_create_property_column_expressions.py new file mode 100644 index 000000000..d0890ea88 --- /dev/null +++ b/backend_py/primary/tests/unit/inplace_volumetrics_assembler/utils/test_create_property_column_expressions.py @@ -0,0 +1,82 @@ +from typing import List +import polars as pl + +from primary.services.inplace_volumetrics_assembler._utils import create_property_column_expressions +from 
primary.services.inplace_volumetrics_assembler._utils import _create_named_expression_with_nan_for_inf +from primary.services.inplace_volumetrics_assembler._utils import FluidZone + + +def test_create_property_column_expressions_bo() -> None: + volume_df_columns = ["HCPV", "STOIIP"] + properties = ["BO"] + fluid_zone = FluidZone.OIL + + created_expressions = create_property_column_expressions(volume_df_columns, properties, fluid_zone) + expected_expression = _create_named_expression_with_nan_for_inf(pl.col("HCPV") / pl.col("STOIIP"), "BO") + + assert len(created_expressions) == 1 + assert str(created_expressions[0]) == str(expected_expression) + + +def test_create_property_column_expressions_bg() -> None: + volume_df_columns = ["HCPV", "GIIP"] + properties = ["BG"] + fluid_zone = FluidZone.GAS + + created_expressions = create_property_column_expressions(volume_df_columns, properties, fluid_zone) + expected_expression = _create_named_expression_with_nan_for_inf(pl.col("HCPV") / pl.col("GIIP"), "BG") + + assert len(created_expressions) == 1 + assert str(created_expressions[0]) == str(expected_expression) + + +def test_create_property_column_expressions_ntg() -> None: + volume_df_columns = ["BULK", "NET"] + properties = ["NTG"] + + created_expressions = create_property_column_expressions(volume_df_columns, properties) + expected_expression = _create_named_expression_with_nan_for_inf(pl.col("NET") / pl.col("BULK"), "NTG") + + assert len(created_expressions) == 1 + assert str(created_expressions[0]) == str(expected_expression) + + +def test_create_property_column_expressions_poro_and_poro_net() -> None: + volume_df_columns = ["BULK", "PORV", "NET"] + properties = ["PORO", "PORO_NET"] + + created_expressions = create_property_column_expressions(volume_df_columns, properties) + expected_poro_expression = _create_named_expression_with_nan_for_inf(pl.col("PORV") / pl.col("BULK"), "PORO") + expected_poro_net_expression = _create_named_expression_with_nan_for_inf(pl.col("PORV") / pl.col("NET"), "PORO_NET") + + assert len(created_expressions) == 2 + assert str(created_expressions[0]) == str(expected_poro_expression) + assert str(created_expressions[1]) == str(expected_poro_net_expression) + + +def test_create_property_column_expressions_missing_columns() -> None: + volume_df_columns = ["HCPV", "PORV"] # Missing STOIIP for BO + properties = ["BO", "SW"] + fluid_zone = FluidZone.OIL + + created_expressions = create_property_column_expressions(volume_df_columns, properties, fluid_zone) + expected_expression = _create_named_expression_with_nan_for_inf(1 - pl.col("HCPV") / pl.col("PORV"), "SW") + assert len(created_expressions) == 1 + assert str(created_expressions[0]) == str(expected_expression) + + +def test_create_property_column_expressions_no_properties() -> None: + volume_df_columns = ["HCPV", "STOIIP"] + properties: List[str] = [] + fluid_zone = FluidZone.OIL + + expressions = create_property_column_expressions(volume_df_columns, properties, fluid_zone) + assert len(expressions) == 0 + + +def test_create_property_column_expressions_no_fluid_zone() -> None: + volume_df_columns = ["HCPV", "STOIIP"] + properties = ["BO"] + + expressions = create_property_column_expressions(volume_df_columns, properties) + assert len(expressions) == 0 diff --git a/backend_py/primary/tests/unit/inplace_volumetrics_assembler/utils/test_create_volumetric_df_per_fluid_zone.py b/backend_py/primary/tests/unit/inplace_volumetrics_assembler/utils/test_create_volumetric_df_per_fluid_zone.py new file mode 100644 index 
000000000..b7282cf02 --- /dev/null +++ b/backend_py/primary/tests/unit/inplace_volumetrics_assembler/utils/test_create_volumetric_df_per_fluid_zone.py @@ -0,0 +1,52 @@ +import pytest +import polars as pl + +from primary.services.inplace_volumetrics_assembler._utils import create_volumetric_df_per_fluid_zone +from primary.services.sumo_access.inplace_volumetrics_types import FluidZone + + +@pytest.fixture +def volumetric_df() -> pl.DataFrame: + data = { + "REAL": [1, 2, 3], + "ZONE": ["A", "B", "C"], + "REGION": ["X", "Y", "Z"], + "FACIES": ["F1", "F2", "F3"], + "STOIIP_OIL": [100, 200, 300], + "GIIP_GAS": [400, 500, 600], + "HCPV_OIL": [700, 800, 900], + "HCPV_GAS": [1000, 1100, 1200], + "HCPV_WATER": [1300, 1400, 1500], + } + return pl.DataFrame(data) + + +def test_create_volumetric_df_per_fluid_zone(volumetric_df: pl.DataFrame) -> None: + fluid_zones = [FluidZone.OIL, FluidZone.GAS] + result = create_volumetric_df_per_fluid_zone(fluid_zones, volumetric_df) + + assert FluidZone.OIL in result + assert FluidZone.GAS in result + + oil_df = result[FluidZone.OIL] + gas_df = result[FluidZone.GAS] + + assert oil_df.columns == ["REAL", "ZONE", "REGION", "FACIES", "STOIIP", "HCPV"] + assert gas_df.columns == ["REAL", "ZONE", "REGION", "FACIES", "GIIP", "HCPV"] + + assert oil_df.shape == (3, 6) + assert gas_df.shape == (3, 6) + + assert oil_df["STOIIP"].to_list() == [100, 200, 300] + assert oil_df["HCPV"].to_list() == [700, 800, 900] + + assert gas_df["GIIP"].to_list() == [400, 500, 600] + assert gas_df["HCPV"].to_list() == [1000, 1100, 1200] + + +def test_create_volumetric_df_per_fluid_zone_no_fluid_columns(volumetric_df: pl.DataFrame) -> None: + fluid_zones = [FluidZone.OIL, FluidZone.GAS] + volumetric_df = volumetric_df.select(["REAL", "ZONE", "REGION", "FACIES"]) # Removing fluid columns + result = create_volumetric_df_per_fluid_zone(fluid_zones, volumetric_df) + + assert not result diff --git a/backend_py/primary/tests/unit/inplace_volumetrics_assembler/utils/test_create_volumetric_summed_fluid_zones_df.py b/backend_py/primary/tests/unit/inplace_volumetrics_assembler/utils/test_create_volumetric_summed_fluid_zones_df.py new file mode 100644 index 000000000..1d15cf21d --- /dev/null +++ b/backend_py/primary/tests/unit/inplace_volumetrics_assembler/utils/test_create_volumetric_summed_fluid_zones_df.py @@ -0,0 +1,57 @@ +import pytest +import polars as pl + +from primary.services.inplace_volumetrics_assembler._utils import create_volumetric_summed_fluid_zones_df +from primary.services.sumo_access.inplace_volumetrics_types import FluidZone + + +@pytest.fixture +def volumetric_df() -> pl.DataFrame: + data = { + "REAL": [1, 2, 3], + "ZONE": ["A", "B", "C"], + "REGION": ["X", "Y", "Z"], + "FACIES": ["F1", "F2", "F3"], + "STOIIP_OIL": [100, 200, 300], + "GIIP_GAS": [400, 500, 600], + "HCPV_OIL": [700, 800, 900], + "HCPV_GAS": [1000, 1100, 1200], + "HCPV_WATER": [1300, 1400, 1500], + } + return pl.DataFrame(data) + + +def test_create_volumetric_summed_fluid_zones_df(volumetric_df: pl.DataFrame) -> None: + + fluid_zones = [FluidZone.OIL, FluidZone.GAS] + result = create_volumetric_summed_fluid_zones_df(volumetric_df, fluid_zones) + + assert sorted(result.columns) == sorted(["REAL", "ZONE", "REGION", "FACIES", "STOIIP", "GIIP", "HCPV"]) + assert result.shape == (3, 7) + + assert result["STOIIP"].to_list() == [100, 200, 300] + assert result["GIIP"].to_list() == [400, 500, 600] + assert result["HCPV"].to_list() == [1700, 1900, 2100] # Should exclude HCPV_WATER + + +def 
test_create_volumetric_summed_fluid_zones_df_no_fluid_columns(volumetric_df: pl.DataFrame) -> None: + fluid_zones = [FluidZone.OIL, FluidZone.GAS] + volumetric_df = volumetric_df.select(["REAL", "ZONE", "REGION", "FACIES"]) # Removing fluid columns + result = create_volumetric_summed_fluid_zones_df(volumetric_df, fluid_zones) + + assert sorted(result.columns) == sorted(["REAL", "ZONE", "REGION", "FACIES"]) + assert result.shape == (3, 4) + + +def test_create_volumetric_summed_fluid_zones_df_partial_fluid_columns(volumetric_df: pl.DataFrame) -> None: + fluid_zones = [FluidZone.OIL, FluidZone.GAS] + volumetric_df = volumetric_df.select( + ["REAL", "ZONE", "REGION", "FACIES", "STOIIP_OIL", "HCPV_OIL"] + ) # Partial fluid columns + result = create_volumetric_summed_fluid_zones_df(volumetric_df, fluid_zones) + + assert sorted(result.columns) == sorted(["REAL", "ZONE", "REGION", "FACIES", "STOIIP", "HCPV"]) + assert result.shape == (3, 6) + + assert result["STOIIP"].to_list() == [100, 200, 300] + assert result["HCPV"].to_list() == [700, 800, 900] diff --git a/backend_py/primary/tests/unit/inplace_volumetrics_assembler/utils/test_inplace_volumetrics_utils.py b/backend_py/primary/tests/unit/inplace_volumetrics_assembler/utils/test_inplace_volumetrics_utils.py new file mode 100644 index 000000000..941401c8a --- /dev/null +++ b/backend_py/primary/tests/unit/inplace_volumetrics_assembler/utils/test_inplace_volumetrics_utils.py @@ -0,0 +1,317 @@ +from typing import List +import pytest +import polars as pl +import numpy as np + +from primary.services.sumo_access.inplace_volumetrics_types import ( + RepeatedTableColumnData, + Statistic, + TableColumnStatisticalData, +) + +from primary.services.inplace_volumetrics_assembler._utils import ( + _create_named_expression_with_nan_for_inf, + _create_repeated_table_column_data_from_polars_column, + _create_statistical_expression, + _create_statistic_aggregation_expressions, + _get_statistical_function_expression, + get_valid_result_names_from_list, + _validate_length_of_statistics_data_lists, +) + + +def test_get_valid_result_names_from_list() -> None: + """ + Valid result names are found in InplaceVolumetricResultName enum. 
+ """ + + requested_result_names = [ + "STOIIP", + "GIIP", + "FIRST_INVALID_RESULT_NAME", + "NTG", + "BG", + "SW", + "SECOND_INVALID_RESULT_NAME", + ] + + # Valid result names from InplaceVolumetricResultName enum + exepected_valid_result_names = ["STOIIP", "GIIP", "NTG", "BG", "SW"] + valid_result_names = get_valid_result_names_from_list(requested_result_names) + + assert valid_result_names == exepected_valid_result_names + + +def test_get_statistical_function_expression() -> None: + test_col = pl.col("Test Column") + + # Get the statistical functions + mean_func = _get_statistical_function_expression(Statistic.MEAN) + min_func = _get_statistical_function_expression(Statistic.MIN) + max_func = _get_statistical_function_expression(Statistic.MAX) + std_dev_func = _get_statistical_function_expression(Statistic.STD_DEV) + p10_func = _get_statistical_function_expression(Statistic.P10) + p90_func = _get_statistical_function_expression(Statistic.P90) + + # Assert the functions are not None + assert mean_func is not None + assert min_func is not None + assert max_func is not None + assert std_dev_func is not None + assert p10_func is not None + assert p90_func is not None + + # Assert the expressions are correct + assert mean_func(test_col).meta.eq(test_col.mean()) + assert min_func(test_col).meta.eq(test_col.min()) + assert max_func(test_col).meta.eq(test_col.max()) + assert std_dev_func(test_col).meta.eq(test_col.std()) + assert p10_func(test_col).meta.eq(test_col.quantile(0.9, "linear")) + assert p90_func(test_col).meta.eq(test_col.quantile(0.1, "linear")) + + +def test_create_statistical_expression_drop_nans() -> None: + expr_mean = pl.col("Test Column").drop_nans().mean().alias(f"Test Column_{Statistic.MEAN.value}") + expr_min = pl.col("Test Column").drop_nans().min().alias(f"Test Column_{Statistic.MIN.value}") + expr_max = pl.col("Test Column").drop_nans().max().alias(f"Test Column_{Statistic.MAX.value}") + expr_std_dev = pl.col("Test Column").drop_nans().std().alias(f"Test Column_{Statistic.STD_DEV.value}") + expr_p10 = pl.col("Test Column").drop_nans().quantile(0.9, "linear").alias(f"Test Column_{Statistic.P10.value}") + expr_p90 = pl.col("Test Column").drop_nans().quantile(0.1, "linear").alias(f"Test Column_{Statistic.P90.value}") + + assert _create_statistical_expression(Statistic.MEAN, "Test Column").meta.eq(expr_mean) + assert _create_statistical_expression(Statistic.MIN, "Test Column").meta.eq(expr_min) + assert _create_statistical_expression(Statistic.MAX, "Test Column").meta.eq(expr_max) + assert _create_statistical_expression(Statistic.STD_DEV, "Test Column").meta.eq(expr_std_dev) + assert _create_statistical_expression(Statistic.P10, "Test Column").meta.eq(expr_p10) + assert _create_statistical_expression(Statistic.P90, "Test Column").meta.eq(expr_p90) + + +def test_create_statistical_expression_keep_nans() -> None: + expr_mean = pl.col("Test Column").mean().alias(f"Test Column_{Statistic.MEAN.value}") + expr_min = pl.col("Test Column").min().alias(f"Test Column_{Statistic.MIN.value}") + expr_max = pl.col("Test Column").max().alias(f"Test Column_{Statistic.MAX.value}") + expr_std_dev = pl.col("Test Column").std().alias(f"Test Column_{Statistic.STD_DEV.value}") + expr_p10 = pl.col("Test Column").quantile(0.9, "linear").alias(f"Test Column_{Statistic.P10.value}") + expr_p90 = pl.col("Test Column").quantile(0.1, "linear").alias(f"Test Column_{Statistic.P90.value}") + + assert _create_statistical_expression(Statistic.MEAN, "Test Column", False).meta.eq(expr_mean) + assert 
_create_statistical_expression(Statistic.MIN, "Test Column", False).meta.eq(expr_min) + assert _create_statistical_expression(Statistic.MAX, "Test Column", False).meta.eq(expr_max) + assert _create_statistical_expression(Statistic.STD_DEV, "Test Column", False).meta.eq(expr_std_dev) + assert _create_statistical_expression(Statistic.P10, "Test Column", False).meta.eq(expr_p10) + assert _create_statistical_expression(Statistic.P90, "Test Column", False).meta.eq(expr_p90) + + +def test_create_statistic_aggregation_expressions() -> None: + result_columns = ["column1", "column2"] + statistics = [Statistic.MEAN, Statistic.MIN, Statistic.MAX] + + expressions = _create_statistic_aggregation_expressions(result_columns, statistics) + + assert len(expressions) == len(result_columns) * len(statistics) + assert expressions[0].meta.eq(pl.col("column1").drop_nans().mean().alias(f"column1_{Statistic.MEAN.value}")) + assert expressions[1].meta.eq(pl.col("column1").drop_nans().min().alias(f"column1_{Statistic.MIN.value}")) + assert expressions[2].meta.eq(pl.col("column1").drop_nans().max().alias(f"column1_{Statistic.MAX.value}")) + assert expressions[3].meta.eq(pl.col("column2").drop_nans().mean().alias(f"column2_{Statistic.MEAN.value}")) + assert expressions[4].meta.eq(pl.col("column2").drop_nans().min().alias(f"column2_{Statistic.MIN.value}")) + assert expressions[5].meta.eq(pl.col("column2").drop_nans().max().alias(f"column2_{Statistic.MAX.value}")) + + +def test_create_statistic_aggregation_expressions_with_drop_nans() -> None: + result_columns = ["column1"] + statistics = [Statistic.STD_DEV, Statistic.P10, Statistic.P90] + + expressions = _create_statistic_aggregation_expressions(result_columns, statistics, drop_nans=True) + expected_expressions = [ + pl.col("column1").drop_nans().std().alias(f"column1_{Statistic.STD_DEV.value}"), + pl.col("column1").drop_nans().quantile(0.9, "linear").alias(f"column1_{Statistic.P10.value}"), + pl.col("column1").drop_nans().quantile(0.1, "linear").alias(f"column1_{Statistic.P90.value}"), + ] + + assert len(expressions) == len(expected_expressions) + assert expressions[0].meta.eq(pl.col("column1").drop_nans().std().alias(f"column1_{Statistic.STD_DEV.value}")) + assert expressions[1].meta.eq( + pl.col("column1").drop_nans().quantile(0.9, "linear").alias(f"column1_{Statistic.P10.value}") + ) + assert expressions[2].meta.eq( + pl.col("column1").drop_nans().quantile(0.1, "linear").alias(f"column1_{Statistic.P90.value}") + ) + + +def test_create_statistic_aggregation_expressions_without_drop_nans() -> None: + result_columns = ["column1"] + statistics = [Statistic.STD_DEV, Statistic.P10, Statistic.P90] + + expressions = _create_statistic_aggregation_expressions(result_columns, statistics, drop_nans=False) + + assert len(expressions) == len(result_columns) * len(statistics) + assert expressions[0].meta.eq(pl.col("column1").std().alias(f"column1_{Statistic.STD_DEV.value}")) + assert expressions[1].meta.eq(pl.col("column1").quantile(0.9, "linear").alias(f"column1_{Statistic.P10.value}")) + assert expressions[2].meta.eq(pl.col("column1").quantile(0.1, "linear").alias(f"column1_{Statistic.P90.value}")) + + +def test_create_statistic_aggregation_expressions_empty_columns() -> None: + result_columns: List[str] = [] + statistics = [Statistic.MEAN, Statistic.MIN, Statistic.MAX] + + expressions = _create_statistic_aggregation_expressions(result_columns, statistics) + + assert len(expressions) == 0 + + +def test_create_statistic_aggregation_expressions_empty_statistics() -> None: + 
result_columns = ["column1", "column2"] + statistics: List[Statistic] = [] + + expressions = _create_statistic_aggregation_expressions(result_columns, statistics) + + assert len(expressions) == 0 + + +def test_validate_length_of_statistics_data_lists_equal_lengths() -> None: + selector_column_data_list = [ + RepeatedTableColumnData(column_name="selector1", unique_values=[1, 2], indices=[0, 1, 0]) + ] + result_statistical_data_list = [ + TableColumnStatisticalData( + column_name="result1", + statistic_values={Statistic.MEAN: [1.0, 2.0, 1.5], Statistic.STD_DEV: [0.1, 0.2, 0.15]}, + ) + ] + # Should not raise any exception + _validate_length_of_statistics_data_lists(selector_column_data_list, result_statistical_data_list) + + +def test_validate_length_of_statistics_data_lists_empty_lists() -> None: + selector_column_data_list: List[RepeatedTableColumnData] = [] + result_statistical_data_list: List[TableColumnStatisticalData] = [] + # Should not raise any exception + _validate_length_of_statistics_data_lists(selector_column_data_list, result_statistical_data_list) + + +def test_validate_length_of_statistics_data_lists_mismatched_lengths_selector_vs_statistic() -> None: + selector_column_data_list = [RepeatedTableColumnData(column_name="selector1", unique_values=[1, 2], indices=[0, 1])] + result_statistical_data_list = [ + TableColumnStatisticalData( + column_name="result1", + statistic_values={Statistic.MEAN: [1.0, 2.0, 1.5], Statistic.STD_DEV: [0.1, 0.2, 0.15]}, + ) + ] + with pytest.raises( + ValueError, match="Number of result1 statistic mean values does not match expected number of rows: 2. Got: 3" + ): + _validate_length_of_statistics_data_lists(selector_column_data_list, result_statistical_data_list) + + +def test_validate_length_of_statistics_data_lists_mismatched_lengths_selector_vs_selector() -> None: + selector_column_data_list = [ + RepeatedTableColumnData(column_name="selector1", unique_values=[1, 2], indices=[0, 1]), + RepeatedTableColumnData(column_name="selector2", unique_values=[1, 2, 3], indices=[0, 1, 2]), + ] + result_statistical_data_list = [ + TableColumnStatisticalData( + column_name="result1", + statistic_values={Statistic.MEAN: [1.0, 2.0, 1.5]}, + ) + ] + with pytest.raises( + ValueError, match="Length of selector2 column data list does not match expected number of rows: 2. Got: 3" + ): + _validate_length_of_statistics_data_lists(selector_column_data_list, result_statistical_data_list) + + +def test_validate_length_of_statistics_data_lists_mismatched_lengths_statistic() -> None: + selector_column_data_list = [ + RepeatedTableColumnData(column_name="selector1", unique_values=[1, 2], indices=[0, 1, 0]) + ] + result_statistical_data_list = [ + TableColumnStatisticalData( + column_name="result1", statistic_values={Statistic.MEAN: [1.0, 2.0], Statistic.STD_DEV: [0.1, 0.2, 0.15]} + ) + ] + with pytest.raises( + ValueError, match="Number of result1 statistic mean values does not match expected number of rows: 3. 
Got: 2" + ): + _validate_length_of_statistics_data_lists(selector_column_data_list, result_statistical_data_list) + + +def test_create_repeated_table_column_data_from_polars_number_column() -> None: + # Test case 1: Basic functionality + column_name = "test_column" + column_values = [1, 3, 3, 2, 1] + expected_sorted_unique_values = [1, 2, 3] + + result = _create_repeated_table_column_data_from_polars_column(column_name, pl.Series(column_values)) + + # Build the result values + result_values = [result.unique_values[i] for i in result.indices] + + # Note: unique() method might not preserve the order of the unique values, thus we sort the unique values for comparison + # and build the result_values list to compare with the original values + assert result.column_name == column_name + assert sorted(result.unique_values) == expected_sorted_unique_values + assert result_values == column_values + + +def test_create_repeated_table_column_data_from_polars_string_column() -> None: + # Test case 2: String values + column_name = "string_column" + column_values = ["a", "b", "a", "c", "b"] + expected_sorted_unique_values = ["a", "b", "c"] + + result = _create_repeated_table_column_data_from_polars_column(column_name, pl.Series(column_values)) + + # Build the result values + result_values = [result.unique_values[i] for i in result.indices] + + # Note: unique() method might not preserve the order of the unique values, thus we sort the unique values for comparison + # and build the result_values list to compare with the original values + assert result.column_name == column_name + assert sorted(result.unique_values) == expected_sorted_unique_values + assert result_values == column_values + + +def test_create_repeated_table_column_data_from_polars_empty_column() -> None: + # Test case 3: Empty column + column_name = "empty_column" + column_values = pl.Series([]) + expected_unique_values: List[str | int] = [] + expected_indices: List[int] = [] + + result = _create_repeated_table_column_data_from_polars_column(column_name, column_values) + + assert result.column_name == column_name + assert result.unique_values == expected_unique_values + assert result.indices == expected_indices + + +def test_create_repeated_table_column_data_from_polars_single_value_column() -> None: + # Test case 4: Single value column + column_name = "single_value_column" + column_values = pl.Series([42, 42, 42]) + expected_unique_values = [42] + expected_indices = [0, 0, 0] + + result = _create_repeated_table_column_data_from_polars_column(column_name, column_values) + + assert result.column_name == column_name + assert result.unique_values == expected_unique_values + assert result.indices == expected_indices + + +def test_create_named_expression_with_nan_for_inf() -> None: + # Create a Polars DataFrame with some test data + df = pl.DataFrame({"values": [1.0, 2.0, np.inf, -np.inf, 5.0]}) + + # Apply the function to create the expression + expr = _create_named_expression_with_nan_for_inf(pl.col("values"), "values_with_nan") + + # Evaluate the expression + result_df: pl.DataFrame = df.with_columns(expr) + + # Expected result + expected_values = [1.0, 2.0, np.nan, np.nan, 5.0] + + # Assert the results (need np.testing.assert_array_equal for NaN comparison) + values_with_nan_column = result_df.get_column("values_with_nan").to_list() + np.testing.assert_array_equal(values_with_nan_column, expected_values) diff --git a/frontend/src/api/index.ts b/frontend/src/api/index.ts index 9881aadac..de5ed8f35 100644 --- a/frontend/src/api/index.ts +++ 
b/frontend/src/api/index.ts @@ -13,7 +13,8 @@ export type { OpenAPIConfig } from './core/OpenAPI'; export { ALQ as ALQ_api } from './models/ALQ'; export { B64FloatArray as B64FloatArray_api } from './models/B64FloatArray'; export { B64UintArray as B64UintArray_api } from './models/B64UintArray'; -export type { Body_get_realizations_response as Body_get_realizations_response_api } from './models/Body_get_realizations_response'; +export type { Body_post_get_aggregated_per_realization_table_data as Body_post_get_aggregated_per_realization_table_data_api } from './models/Body_post_get_aggregated_per_realization_table_data'; +export type { Body_post_get_aggregated_statistical_table_data as Body_post_get_aggregated_statistical_table_data_api } from './models/Body_post_get_aggregated_statistical_table_data'; export type { Body_post_get_polyline_intersection as Body_post_get_polyline_intersection_api } from './models/Body_post_get_polyline_intersection'; export type { Body_post_get_seismic_fence as Body_post_get_seismic_fence_api } from './models/Body_post_get_seismic_fence'; export type { Body_post_get_surface_intersection as Body_post_get_surface_intersection_api } from './models/Body_post_get_surface_intersection'; @@ -33,6 +34,7 @@ export type { EnsembleSensitivityCase as EnsembleSensitivityCase_api } from './m export type { FenceMeshSection as FenceMeshSection_api } from './models/FenceMeshSection'; export type { FieldInfo as FieldInfo_api } from './models/FieldInfo'; export { FlowRateTypeProd as FlowRateTypeProd_api } from './models/FlowRateTypeProd'; +export { FluidZone as FluidZone_api } from './models/FluidZone'; export { Frequency as Frequency_api } from './models/Frequency'; export { GFR as GFR_api } from './models/GFR'; export type { GraphUserPhoto as GraphUserPhoto_api } from './models/GraphUserPhoto'; @@ -46,8 +48,15 @@ export type { GridDimensions as GridDimensions_api } from './models/GridDimensio export type { GroupTreeData as GroupTreeData_api } from './models/GroupTreeData'; export type { GroupTreeMetadata as GroupTreeMetadata_api } from './models/GroupTreeMetadata'; export type { HTTPValidationError as HTTPValidationError_api } from './models/HTTPValidationError'; -export type { InplaceVolumetricsCategoricalMetaData as InplaceVolumetricsCategoricalMetaData_api } from './models/InplaceVolumetricsCategoricalMetaData'; -export type { InplaceVolumetricsTableMetaData as InplaceVolumetricsTableMetaData_api } from './models/InplaceVolumetricsTableMetaData'; +export type { InplaceStatisticalVolumetricTableData as InplaceStatisticalVolumetricTableData_api } from './models/InplaceStatisticalVolumetricTableData'; +export type { InplaceStatisticalVolumetricTableDataPerFluidSelection as InplaceStatisticalVolumetricTableDataPerFluidSelection_api } from './models/InplaceStatisticalVolumetricTableDataPerFluidSelection'; +export { InplaceVolumetricResultName as InplaceVolumetricResultName_api } from './models/InplaceVolumetricResultName'; +export { InplaceVolumetricsIdentifier as InplaceVolumetricsIdentifier_api } from './models/InplaceVolumetricsIdentifier'; +export type { InplaceVolumetricsIdentifierWithValues as InplaceVolumetricsIdentifierWithValues_api } from './models/InplaceVolumetricsIdentifierWithValues'; +export type { InplaceVolumetricsTableDefinition as InplaceVolumetricsTableDefinition_api } from './models/InplaceVolumetricsTableDefinition'; +export { InplaceVolumetricStatistic as InplaceVolumetricStatistic_api } from './models/InplaceVolumetricStatistic'; +export type { 
InplaceVolumetricTableData as InplaceVolumetricTableData_api } from './models/InplaceVolumetricTableData'; +export type { InplaceVolumetricTableDataPerFluidSelection as InplaceVolumetricTableDataPerFluidSelection_api } from './models/InplaceVolumetricTableDataPerFluidSelection'; export { NodeType as NodeType_api } from './models/NodeType'; export type { Observations as Observations_api } from './models/Observations'; export type { PointSetXY as PointSetXY_api } from './models/PointSetXY'; @@ -56,6 +65,7 @@ export { PolygonsAttributeType as PolygonsAttributeType_api } from './models/Pol export type { PolygonsMeta as PolygonsMeta_api } from './models/PolygonsMeta'; export type { PolylineIntersection as PolylineIntersection_api } from './models/PolylineIntersection'; export type { PvtData as PvtData_api } from './models/PvtData'; +export type { RepeatedTableColumnData as RepeatedTableColumnData_api } from './models/RepeatedTableColumnData'; export type { RftInfo as RftInfo_api } from './models/RftInfo'; export type { RftObservation as RftObservation_api } from './models/RftObservation'; export type { RftObservations as RftObservations_api } from './models/RftObservations'; @@ -80,6 +90,8 @@ export type { SurfaceMetaSet as SurfaceMetaSet_api } from './models/SurfaceMetaS export type { SurfaceRealizationSampleValues as SurfaceRealizationSampleValues_api } from './models/SurfaceRealizationSampleValues'; export { SurfaceStatisticFunction as SurfaceStatisticFunction_api } from './models/SurfaceStatisticFunction'; export { SurfaceTimeType as SurfaceTimeType_api } from './models/SurfaceTimeType'; +export type { TableColumnData as TableColumnData_api } from './models/TableColumnData'; +export type { TableColumnStatisticalData as TableColumnStatisticalData_api } from './models/TableColumnStatisticalData'; export { TabType as TabType_api } from './models/TabType'; export type { THP as THP_api } from './models/THP'; export { TreeNode as TreeNode_api } from './models/TreeNode'; diff --git a/frontend/src/api/models/Body_get_realizations_response.ts b/frontend/src/api/models/Body_get_realizations_response.ts deleted file mode 100644 index 44ae8a7d2..000000000 --- a/frontend/src/api/models/Body_get_realizations_response.ts +++ /dev/null @@ -1,10 +0,0 @@ -/* generated using openapi-typescript-codegen -- do no edit */ -/* istanbul ignore file */ -/* tslint:disable */ -/* eslint-disable */ -import type { InplaceVolumetricsCategoricalMetaData } from './InplaceVolumetricsCategoricalMetaData'; -export type Body_get_realizations_response = { - categorical_filter?: (Array<InplaceVolumetricsCategoricalMetaData> | null); - realizations?: (Array<number> | null); -}; - diff --git a/frontend/src/api/models/Body_post_get_aggregated_per_realization_table_data.ts b/frontend/src/api/models/Body_post_get_aggregated_per_realization_table_data.ts new file mode 100644 index 000000000..51062bab4 --- /dev/null +++ b/frontend/src/api/models/Body_post_get_aggregated_per_realization_table_data.ts @@ -0,0 +1,12 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { InplaceVolumetricsIdentifierWithValues } from './InplaceVolumetricsIdentifierWithValues'; +export type Body_post_get_aggregated_per_realization_table_data = { + /** + * Selected identifiers and wanted values + */ + identifiers_with_values: Array<InplaceVolumetricsIdentifierWithValues>; +}; +
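Both aggregation endpoints added in this patch POST the same identifiers_with_values payload (the per-realization body above and the statistical body below). As a rough illustration only, with placeholder ZONE/REGION values that are not taken from the patch and import paths assumed to resolve within the generated models directory, a request body could be built like this:

    import type { Body_post_get_aggregated_per_realization_table_data } from "./Body_post_get_aggregated_per_realization_table_data";
    import { InplaceVolumetricsIdentifier } from "./InplaceVolumetricsIdentifier";

    // Placeholder identifier values for illustration; real values come from the table definitions.
    const exampleBody: Body_post_get_aggregated_per_realization_table_data = {
        identifiers_with_values: [
            { identifier: InplaceVolumetricsIdentifier.ZONE, values: ["A", "B"] },
            { identifier: InplaceVolumetricsIdentifier.REGION, values: [0, 1] },
        ],
    };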
diff --git a/frontend/src/api/models/Body_post_get_aggregated_statistical_table_data.ts b/frontend/src/api/models/Body_post_get_aggregated_statistical_table_data.ts new file mode 100644 index 000000000..abda19e85 --- /dev/null +++ b/frontend/src/api/models/Body_post_get_aggregated_statistical_table_data.ts @@ -0,0 +1,12 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { InplaceVolumetricsIdentifierWithValues } from './InplaceVolumetricsIdentifierWithValues'; +export type Body_post_get_aggregated_statistical_table_data = { + /** + * Selected identifiers and wanted values + */ + identifiers_with_values: Array<InplaceVolumetricsIdentifierWithValues>; +}; + diff --git a/frontend/src/api/models/InplaceVolumetricsCategoricalMetaData.ts b/frontend/src/api/models/FluidZone.ts similarity index 52% rename from frontend/src/api/models/InplaceVolumetricsCategoricalMetaData.ts rename to frontend/src/api/models/FluidZone.ts index c28dbeb2f..85fcb7717 100644 --- a/frontend/src/api/models/InplaceVolumetricsCategoricalMetaData.ts +++ b/frontend/src/api/models/FluidZone.ts @@ -2,8 +2,8 @@ /* istanbul ignore file */ /* tslint:disable */ /* eslint-disable */ -export type InplaceVolumetricsCategoricalMetaData = { - name: string; - unique_values: Array<(string | number)>; -}; - +export enum FluidZone { + OIL = 'Oil', + GAS = 'Gas', + WATER = 'Water', +} diff --git a/frontend/src/api/models/InplaceStatisticalVolumetricTableData.ts b/frontend/src/api/models/InplaceStatisticalVolumetricTableData.ts new file mode 100644 index 000000000..7d2fa5b22 --- /dev/null +++ b/frontend/src/api/models/InplaceStatisticalVolumetricTableData.ts @@ -0,0 +1,17 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { RepeatedTableColumnData } from './RepeatedTableColumnData'; +import type { TableColumnStatisticalData } from './TableColumnStatisticalData'; +/** + * Statistical volumetric data for single volume table + * + * Contains data for a single fluid zone, e.g. Oil, Gas, Water, or sum of fluid zones + */ +export type InplaceStatisticalVolumetricTableData = { + fluidSelectionName: string; + selectorColumns: Array<RepeatedTableColumnData>; + resultColumnStatistics: Array<TableColumnStatisticalData>; +}; +
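InplaceStatisticalVolumetricTableData above is column-oriented: each result column carries one list of values per statistic, row-aligned with the selector columns. A minimal consumer-side sketch, assuming the statistic keys follow the InplaceVolumetricStatistic enum added later in this patch (the helper itself is hypothetical, not part of the generated client):

    import type { InplaceStatisticalVolumetricTableData } from "./InplaceStatisticalVolumetricTableData";
    import { InplaceVolumetricStatistic } from "./InplaceVolumetricStatistic";

    // Hypothetical helper: look up the mean values for a result column such as "STOIIP".
    function getMeanValues(table: InplaceStatisticalVolumetricTableData, resultName: string): number[] | undefined {
        const column = table.resultColumnStatistics.find((col) => col.columnName === resultName);
        return column?.statisticValues[InplaceVolumetricStatistic.MEAN];
    }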
diff --git a/frontend/src/api/models/InplaceStatisticalVolumetricTableDataPerFluidSelection.ts b/frontend/src/api/models/InplaceStatisticalVolumetricTableDataPerFluidSelection.ts new file mode 100644 index 000000000..804768290 --- /dev/null +++ b/frontend/src/api/models/InplaceStatisticalVolumetricTableDataPerFluidSelection.ts @@ -0,0 +1,14 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { InplaceStatisticalVolumetricTableData } from './InplaceStatisticalVolumetricTableData'; +/** + * Statistical volumetric data for a single table per fluid selection + * + * Fluid selection can be single fluid zones, e.g. Oil, Gas, Water, or sum of fluid zones - Oil + Gas + Water + */ +export type InplaceStatisticalVolumetricTableDataPerFluidSelection = { + tableDataPerFluidSelection: Array<InplaceStatisticalVolumetricTableData>; +}; + diff --git a/frontend/src/api/models/InplaceVolumetricResultName.ts b/frontend/src/api/models/InplaceVolumetricResultName.ts new file mode 100644 index 000000000..d9e2db9c2 --- /dev/null +++ b/frontend/src/api/models/InplaceVolumetricResultName.ts @@ -0,0 +1,25 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +/** + * Allowed volumetric response names + */ +export enum InplaceVolumetricResultName { + BULK = 'BULK', + NET = 'NET', + PORO = 'PORO', + PORO_NET = 'PORO_NET', + PORV = 'PORV', + HCPV = 'HCPV', + STOIIP = 'STOIIP', + GIIP = 'GIIP', + NTG = 'NTG', + ASSOCIATEDGAS = 'ASSOCIATEDGAS', + ASSOCIATEDOIL = 'ASSOCIATEDOIL', + BO = 'BO', + BG = 'BG', + SW = 'SW', + STOIIP_TOTAL = 'STOIIP_TOTAL', + GIIP_TOTAL = 'GIIP_TOTAL', +} diff --git a/frontend/src/api/models/InplaceVolumetricStatistic.ts b/frontend/src/api/models/InplaceVolumetricStatistic.ts new file mode 100644 index 000000000..70258c068 --- /dev/null +++ b/frontend/src/api/models/InplaceVolumetricStatistic.ts @@ -0,0 +1,15 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +/** + * Definition of possible statistics for a result column in an inplace volumetrics table + */ +export enum InplaceVolumetricStatistic { + MEAN = 'mean', + STDDEV = 'stddev', + MAX = 'max', + MIN = 'min', + P10 = 'p10', + P90 = 'p90', +} diff --git a/frontend/src/api/models/InplaceVolumetricTableData.ts b/frontend/src/api/models/InplaceVolumetricTableData.ts new file mode 100644 index 000000000..df54d1a37 --- /dev/null +++ b/frontend/src/api/models/InplaceVolumetricTableData.ts @@ -0,0 +1,17 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { RepeatedTableColumnData } from './RepeatedTableColumnData'; +import type { TableColumnData } from './TableColumnData'; +/** + * Volumetric data for a single table + * + * Contains data for a single fluid zone, e.g. Oil, Gas, Water, or sum of fluid zones + */ +export type InplaceVolumetricTableData = { + fluidSelectionName: string; + selectorColumns: Array<RepeatedTableColumnData>; + resultColumns: Array<TableColumnData>; +}; +
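InplaceVolumetricTableData above is likewise column-oriented: selector columns use the repeated encoding (unique values plus per-row indices, see RepeatedTableColumnData below) while result columns hold one value per row. A sketch of how a consumer might zip the columns back into plain row objects (hypothetical helper, not part of the generated client):

    import type { InplaceVolumetricTableData } from "./InplaceVolumetricTableData";

    // Hypothetical helper: expand the column-oriented payload into one object per table row.
    function buildRows(table: InplaceVolumetricTableData): Record<string, string | number>[] {
        const numRows = table.resultColumns[0]?.columnValues.length ?? table.selectorColumns[0]?.indices.length ?? 0;
        const rows: Record<string, string | number>[] = [];
        for (let i = 0; i < numRows; i++) {
            const row: Record<string, string | number> = {};
            for (const selector of table.selectorColumns) {
                row[selector.columnName] = selector.uniqueValues[selector.indices[i]];
            }
            for (const result of table.resultColumns) {
                row[result.columnName] = result.columnValues[i];
            }
            rows.push(row);
        }
        return rows;
    }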
Oil, Gas, Water, or sum of fluid zones - Oil + Gas + Water + */ +export type InplaceVolumetricTableDataPerFluidSelection = { + tableDataPerFluidSelection: Array; +}; + diff --git a/frontend/src/api/models/InplaceVolumetricsIdentifier.ts b/frontend/src/api/models/InplaceVolumetricsIdentifier.ts new file mode 100644 index 000000000..04c4646fe --- /dev/null +++ b/frontend/src/api/models/InplaceVolumetricsIdentifier.ts @@ -0,0 +1,10 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export enum InplaceVolumetricsIdentifier { + ZONE = 'ZONE', + REGION = 'REGION', + FACIES = 'FACIES', + LICENSE = 'LICENSE', +} diff --git a/frontend/src/api/models/InplaceVolumetricsIdentifierWithValues.ts b/frontend/src/api/models/InplaceVolumetricsIdentifierWithValues.ts new file mode 100644 index 000000000..c870b3329 --- /dev/null +++ b/frontend/src/api/models/InplaceVolumetricsIdentifierWithValues.ts @@ -0,0 +1,14 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { InplaceVolumetricsIdentifier } from './InplaceVolumetricsIdentifier'; +/** + * Unique values for an index column in a volumetric table + * All values should ideally be strings, but it is common to see integers, especially for REGION + */ +export type InplaceVolumetricsIdentifierWithValues = { + identifier: InplaceVolumetricsIdentifier; + values: Array<(string | number)>; +}; + diff --git a/frontend/src/api/models/InplaceVolumetricsTableDefinition.ts b/frontend/src/api/models/InplaceVolumetricsTableDefinition.ts new file mode 100644 index 000000000..f11dc975a --- /dev/null +++ b/frontend/src/api/models/InplaceVolumetricsTableDefinition.ts @@ -0,0 +1,17 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { FluidZone } from './FluidZone'; +import type { InplaceVolumetricResultName } from './InplaceVolumetricResultName'; +import type { InplaceVolumetricsIdentifierWithValues } from './InplaceVolumetricsIdentifierWithValues'; +/** + * Definition of a volumetric table + */ +export type InplaceVolumetricsTableDefinition = { + tableName: string; + fluidZones: Array; + resultNames: Array; + identifiersWithValues: Array; +}; + diff --git a/frontend/src/api/models/InplaceVolumetricsTableMetaData.ts b/frontend/src/api/models/InplaceVolumetricsTableMetaData.ts deleted file mode 100644 index 83922fc65..000000000 --- a/frontend/src/api/models/InplaceVolumetricsTableMetaData.ts +++ /dev/null @@ -1,11 +0,0 @@ -/* generated using openapi-typescript-codegen -- do no edit */ -/* istanbul ignore file */ -/* tslint:disable */ -/* eslint-disable */ -import type { InplaceVolumetricsCategoricalMetaData } from './InplaceVolumetricsCategoricalMetaData'; -export type InplaceVolumetricsTableMetaData = { - name: string; - categorical_column_metadata: Array; - numerical_column_names: Array; -}; - diff --git a/frontend/src/api/models/RepeatedTableColumnData.ts b/frontend/src/api/models/RepeatedTableColumnData.ts new file mode 100644 index 000000000..2318eab19 --- /dev/null +++ b/frontend/src/api/models/RepeatedTableColumnData.ts @@ -0,0 +1,18 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +/** + * Data for a single column in a volumetric table + * + * Length of index list should be equal to the number of 
rows in the table + * + * - unique_values: List of unique values in the column + * - indices: List of indices, in unique_values list, for each row in the table + */ +export type RepeatedTableColumnData = { + columnName: string; + uniqueValues: Array<(string | number)>; + indices: Array; +}; + diff --git a/frontend/src/api/models/TableColumnData.ts b/frontend/src/api/models/TableColumnData.ts new file mode 100644 index 000000000..ebe7d586c --- /dev/null +++ b/frontend/src/api/models/TableColumnData.ts @@ -0,0 +1,14 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +/** + * Data for a single column in a volumetric table + * + * Length of column values should be equal to the number of rows in the table + */ +export type TableColumnData = { + columnName: string; + columnValues: Array; +}; + diff --git a/frontend/src/api/models/TableColumnStatisticalData.ts b/frontend/src/api/models/TableColumnStatisticalData.ts new file mode 100644 index 000000000..0dd5fb56d --- /dev/null +++ b/frontend/src/api/models/TableColumnStatisticalData.ts @@ -0,0 +1,14 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +/** + * Statistical data for a single result column in a volumetric table + * + * Length of column values should be equal to the number of rows in the table + */ +export type TableColumnStatisticalData = { + columnName: string; + statisticValues: Record>; +}; + diff --git a/frontend/src/api/services/InplaceVolumetricsService.ts b/frontend/src/api/services/InplaceVolumetricsService.ts index 661b990e9..bf32c0755 100644 --- a/frontend/src/api/services/InplaceVolumetricsService.ts +++ b/frontend/src/api/services/InplaceVolumetricsService.ts @@ -2,28 +2,32 @@ /* istanbul ignore file */ /* tslint:disable */ /* eslint-disable */ -import type { Body_get_realizations_response } from '../models/Body_get_realizations_response'; -import type { EnsembleScalarResponse } from '../models/EnsembleScalarResponse'; -import type { InplaceVolumetricsTableMetaData } from '../models/InplaceVolumetricsTableMetaData'; +import type { Body_post_get_aggregated_per_realization_table_data } from '../models/Body_post_get_aggregated_per_realization_table_data'; +import type { Body_post_get_aggregated_statistical_table_data } from '../models/Body_post_get_aggregated_statistical_table_data'; +import type { FluidZone } from '../models/FluidZone'; +import type { InplaceStatisticalVolumetricTableDataPerFluidSelection } from '../models/InplaceStatisticalVolumetricTableDataPerFluidSelection'; +import type { InplaceVolumetricsIdentifier } from '../models/InplaceVolumetricsIdentifier'; +import type { InplaceVolumetricsTableDefinition } from '../models/InplaceVolumetricsTableDefinition'; +import type { InplaceVolumetricTableDataPerFluidSelection } from '../models/InplaceVolumetricTableDataPerFluidSelection'; import type { CancelablePromise } from '../core/CancelablePromise'; import type { BaseHttpRequest } from '../core/BaseHttpRequest'; export class InplaceVolumetricsService { constructor(public readonly httpRequest: BaseHttpRequest) {} /** - * Get Table Names And Descriptions - * Get all volumetric tables for a given ensemble. + * Get Table Definitions + * Get the volumetric tables definitions for a given ensemble. 
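The unique_values/indices encoding described above can be expanded back to one plain value per table row on the client side. A minimal sketch, assuming the `_api`-suffixed re-export from "@api" that the rest of the frontend uses for generated models (the name RepeatedTableColumnData_api is an assumption following that convention):

import { RepeatedTableColumnData_api } from "@api"; // assumed re-export name

// Expand a repeated (run-length style) column back to one value per table row.
// Assumes every entry in `indices` is a valid position in `uniqueValues`, as described above.
function decodeRepeatedColumn(column: RepeatedTableColumnData_api): (string | number)[] {
    return column.indices.map((index) => column.uniqueValues[index]);
}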
* @param caseUuid Sumo case uuid * @param ensembleName Ensemble name - * @returns InplaceVolumetricsTableMetaData Successful Response + * @returns InplaceVolumetricsTableDefinition Successful Response * @throws ApiError */ - public getTableNamesAndDescriptions( + public getTableDefinitions( caseUuid: string, ensembleName: string, - ): CancelablePromise> { + ): CancelablePromise> { return this.httpRequest.request({ method: 'GET', - url: '/inplace_volumetrics/table_names_and_descriptions/', + url: '/inplace_volumetrics/table_definitions/', query: { 'case_uuid': caseUuid, 'ensemble_name': ensembleName, @@ -34,31 +38,95 @@ export class InplaceVolumetricsService { }); } /** - * Get Realizations Response - * Get response for a given table and index filter. + * Post Get Aggregated Per Realization Table Data + * Get aggregated volumetric data for a given table with data per realization based on requested results and categories/index filter. + * + * Note: This endpoint is a post endpoint because the list of identifiers with values can be quite large and may exceed the query string limit. + * As the endpoint is post, the identifiers with values object is kept for convenience. * @param caseUuid Sumo case uuid * @param ensembleName Ensemble name * @param tableName Table name - * @param responseName Response name + * @param resultNames The name of the volumetric results + * @param fluidZones The fluid zones to aggregate by + * @param accumulateFluidZones Whether to accumulate fluid zones * @param requestBody - * @returns EnsembleScalarResponse Successful Response + * @param groupByIdentifiers The identifiers to group table data by + * @param realizations Optional list of realizations to include. If not specified, all realizations will be returned. + * @returns InplaceVolumetricTableDataPerFluidSelection Successful Response * @throws ApiError */ - public getRealizationsResponse( + public postGetAggregatedPerRealizationTableData( caseUuid: string, ensembleName: string, tableName: string, - responseName: string, - requestBody?: Body_get_realizations_response, - ): CancelablePromise { + resultNames: Array, + fluidZones: Array, + accumulateFluidZones: boolean, + requestBody: Body_post_get_aggregated_per_realization_table_data, + groupByIdentifiers?: (Array | null), + realizations?: (Array | null), + ): CancelablePromise { return this.httpRequest.request({ method: 'POST', - url: '/inplace_volumetrics/realizations_response/', + url: '/inplace_volumetrics/get_aggregated_per_realization_table_data/', query: { 'case_uuid': caseUuid, 'ensemble_name': ensembleName, 'table_name': tableName, - 'response_name': responseName, + 'result_names': resultNames, + 'fluid_zones': fluidZones, + 'accumulate_fluid_zones': accumulateFluidZones, + 'group_by_identifiers': groupByIdentifiers, + 'realizations': realizations, + }, + body: requestBody, + mediaType: 'application/json', + errors: { + 422: `Validation Error`, + }, + }); + } + /** + * Post Get Aggregated Statistical Table Data + * Get statistical volumetric data across selected realizations for a given table based on requested results and categories/index filter. + * + * Note: This endpoint is a post endpoint because the list of identifiers with values can be quite large and may exceed the query string limit. + * As the endpoint is post, the identifiers with values object is kept for convenience. 
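For reference, a hedged sketch of how the per-realization endpoint above is expected to be called through this generated client. The shared apiService instance, the `_api`-suffixed re-exports, and the concrete table, zone, and result names ("Geogrid", "Valysar", "STOIIP") are assumptions for illustration only:

import { FluidZone_api, InplaceVolumetricsIdentifier_api } from "@api"; // assumed re-export names
import { apiService } from "@framework/ApiService"; // assumed location of the shared client instance

async function fetchOilStoiipPerRealization(caseUuid: string, ensembleName: string) {
    // Per-realization STOIIP for the Oil fluid zone, grouped by ZONE, all realizations included.
    return apiService.inplaceVolumetrics.postGetAggregatedPerRealizationTableData(
        caseUuid,
        ensembleName,
        "Geogrid",                 // table_name ("Geogrid" is an assumed example)
        ["STOIIP"],                // result_names
        [FluidZone_api.OIL],       // fluid_zones
        false,                     // accumulate_fluid_zones
        {
            identifiers_with_values: [
                { identifier: InplaceVolumetricsIdentifier_api.ZONE, values: ["Valysar"] },
            ],
        },
        [InplaceVolumetricsIdentifier_api.ZONE], // group_by_identifiers
        undefined                                // realizations not specified, so all are returned
    );
}

The statistical endpoint below takes the same arguments and returns InplaceStatisticalVolumetricTableDataPerFluidSelection instead of per-realization data.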
+ * @param caseUuid Sumo case uuid + * @param ensembleName Ensemble name + * @param tableName Table name + * @param resultNames The name of the volumetric results + * @param fluidZones The fluid zones to aggregate by + * @param accumulateFluidZones Whether to accumulate fluid zones + * @param requestBody + * @param groupByIdentifiers The identifiers to group table data by + * @param realizations Optional list of realizations to include. If not specified, all realizations will be returned. + * @returns InplaceStatisticalVolumetricTableDataPerFluidSelection Successful Response + * @throws ApiError + */ + public postGetAggregatedStatisticalTableData( + caseUuid: string, + ensembleName: string, + tableName: string, + resultNames: Array, + fluidZones: Array, + accumulateFluidZones: boolean, + requestBody: Body_post_get_aggregated_statistical_table_data, + groupByIdentifiers?: (Array | null), + realizations?: (Array | null), + ): CancelablePromise { + return this.httpRequest.request({ + method: 'POST', + url: '/inplace_volumetrics/get_aggregated_statistical_table_data/', + query: { + 'case_uuid': caseUuid, + 'ensemble_name': ensembleName, + 'table_name': tableName, + 'result_names': resultNames, + 'fluid_zones': fluidZones, + 'accumulate_fluid_zones': accumulateFluidZones, + 'group_by_identifiers': groupByIdentifiers, + 'realizations': realizations, }, body: requestBody, mediaType: 'application/json', diff --git a/frontend/src/assets/volumeDefinitions.ts b/frontend/src/assets/volumeDefinitions.ts new file mode 100644 index 000000000..53e46d6fa --- /dev/null +++ b/frontend/src/assets/volumeDefinitions.ts @@ -0,0 +1,29 @@ +export type VolumeDefinition = { + description: string; + unit?: string; + // eclsum?: [string, string]; // string[] +}; + +export type VolumeDefinitionsType = Record; + +// This is a simple example of a volume definitions object - in order of priority. 
+// See the original file for the full list of volume definitions: +// - https://github.com/equinor/webviz-subsurface/blob/master/webviz_subsurface/_abbreviations/abbreviation_data/volume_terminology.json +// - `eclsum` not included +export const ORDERED_VOLUME_DEFINITIONS: VolumeDefinitionsType = { + STOIIP: { description: "Stock tank oil initially in place", unit: "Sm³" }, + GIIP: { description: "Gas initially in place", unit: "Sm³" }, + STOIIP_TOTAL: { description: "Stock tank oil initially in place (total)", unit: "Sm³" }, + GIIP_TOTAL: { description: "Gas initially in place (total)", unit: "Sm³" }, + ASSOCIATEDGAS: { description: "Associated gas", unit: "Sm³" }, + ASSOCIATEDOIL: { description: "Associated oil", unit: "Sm³" }, + BULK: { description: "Bulk volume", unit: "m³" }, + NET: { description: "Net volume", unit: "m³" }, + PORV: { description: "Pore volume", unit: "m³" }, + HCPV: { description: "Hydro carbon pore volume", unit: "m³" }, + PORO: { description: "Porosity" }, + SW: { description: "Water saturation" }, + NTG: { description: "Net to gross" }, + BO: { description: "Oil formation volume factor" }, + BG: { description: "Gas formation volume factor" }, +}; diff --git a/frontend/src/framework/GlobalAtoms.ts b/frontend/src/framework/GlobalAtoms.ts index 501932bf7..421bd4320 100644 --- a/frontend/src/framework/GlobalAtoms.ts +++ b/frontend/src/framework/GlobalAtoms.ts @@ -10,6 +10,11 @@ import { atomWithCompare } from "./utils/atomUtils"; export const EnsembleSetAtom = atomWithCompare(new EnsembleSet([]), isEqual); +/** + * Get the valid ensemble realizations function that filters out invalid realizations based on the current realization filter set. + * + * If realization filter set is not defined, the atom will return null + */ export const EnsembleRealizationFilterFunctionAtom = atom((get) => { const realizationFilterSet = get(RealizationFilterSetAtom)?.filterSet; @@ -21,6 +26,24 @@ export const EnsembleRealizationFilterFunctionAtom = atom { + const ensembleSet = get(EnsembleSetAtom); + let validEnsembleRealizationsFunction = get(EnsembleRealizationFilterFunctionAtom); + + if (validEnsembleRealizationsFunction === null) { + validEnsembleRealizationsFunction = (ensembleIdent: EnsembleIdent) => { + return ensembleSet.findEnsemble(ensembleIdent)?.getRealizations() ?? []; + }; + } + + return validEnsembleRealizationsFunction; +}); + // RealizationFilterSetAtom needs to be packed into an object such that we can shallow-compare it with its previous value // as the class instance of RealizationFilterSet will never change in the lifetime of the application. 
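The fallback behaviour of ValidEnsembleRealizationsFunctionAtom added above (filtered realizations when a realization filter set exists, otherwise all realizations of the ensemble) is what module settings atoms are expected to lean on. A minimal sketch of a consuming derived atom, where selectedEnsembleIdentAtom is a hypothetical module-level selection atom and only the framework atom names come from this patch:

import { atom } from "jotai";
import { EnsembleIdent } from "@framework/EnsembleIdent";
import { ValidEnsembleRealizationsFunctionAtom } from "@framework/GlobalAtoms";

const selectedEnsembleIdentAtom = atom<EnsembleIdent | null>(null); // hypothetical module selection atom

// Resolves the realizations that are valid for the currently selected ensemble.
export const validRealizationsAtom = atom((get) => {
    const selectedEnsembleIdent = get(selectedEnsembleIdentAtom);
    if (selectedEnsembleIdent === null) {
        return [];
    }
    const validRealizationsFn = get(ValidEnsembleRealizationsFunctionAtom);
    // Filtered realizations if a filter is set, otherwise all realizations of the ensemble.
    return [...validRealizationsFn(selectedEnsembleIdent)];
});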
export const RealizationFilterSetAtom = atom<{ diff --git a/frontend/src/framework/SyncSettings.ts b/frontend/src/framework/SyncSettings.ts index bc2e192eb..13e32605a 100644 --- a/frontend/src/framework/SyncSettings.ts +++ b/frontend/src/framework/SyncSettings.ts @@ -26,6 +26,8 @@ export enum SyncSettingKey { WELLBORE = "WELLBORE", INTERSECTION = "INTERSECTION", VERTICAL_SCALE = "VERTICAL_SCALE", + INPLACE_VOLUMETRICS_FILTER = "INPLACE_VOLUMETRICS_FILTER", + INPLACE_VOLUMETRICS_RESULT_NAME = "INPLACE_VOLUMETRICS_RESULT_NAME", } export const SyncSettingsMeta = { @@ -41,6 +43,8 @@ export const SyncSettingsMeta = { abbreviation: "CAM", }, [SyncSettingKey.VERTICAL_SCALE]: { name: "Vertical Scale", abbreviation: "VSCAL" }, + [SyncSettingKey.INPLACE_VOLUMETRICS_FILTER]: { name: "Inplace Volumetrics Filter", abbreviation: "IVF" }, + [SyncSettingKey.INPLACE_VOLUMETRICS_RESULT_NAME]: { name: "Inplace Volumetrics Result Name", abbreviation: "IVRN" }, }; export class SyncSettingsHelper { diff --git a/frontend/src/framework/WorkbenchServices.ts b/frontend/src/framework/WorkbenchServices.ts index c7c64642d..86ff73797 100644 --- a/frontend/src/framework/WorkbenchServices.ts +++ b/frontend/src/framework/WorkbenchServices.ts @@ -6,6 +6,7 @@ import { isEqual } from "lodash"; import { EnsembleIdent } from "./EnsembleIdent"; import { Workbench } from "./Workbench"; +import { InplaceVolumetricsFilter } from "./types/inplaceVolumetricsFilter"; import { Intersection } from "./types/intersection"; import { Wellbore } from "./types/wellbore"; @@ -18,6 +19,9 @@ export type GlobalTopicDefinitions = { "global.hoverRealization": { realization: number } | null; "global.hoverTimestamp": { timestampUtcMs: number } | null; "global.hoverMd": { wellboreUuid: string; md: number } | null; + "global.hoverZone": { zoneName: string } | null; + "global.hoverRegion": { regionName: string } | null; + "global.hoverFacies": { faciesName: string } | null; "global.syncValue.ensembles": EnsembleIdent[]; "global.syncValue.date": { timeOrInterval: string }; @@ -33,6 +37,8 @@ export type GlobalTopicDefinitions = { "global.syncValue.intersection": Intersection; "global.syncValue.cameraPositionIntersection": Viewport; "global.syncValue.verticalScale": number; + "global.syncValue.inplaceVolumetricsFilter": InplaceVolumetricsFilter; + "global.syncValue.inplaceVolumetricsResultName": string; }; export type AllTopicDefinitions = NavigatorTopicDefinitions & GlobalTopicDefinitions; diff --git a/frontend/src/framework/components/EnsembleDropdown/ensembleDropdown.tsx b/frontend/src/framework/components/EnsembleDropdown/ensembleDropdown.tsx index 42128cb06..b292f249a 100644 --- a/frontend/src/framework/components/EnsembleDropdown/ensembleDropdown.tsx +++ b/frontend/src/framework/components/EnsembleDropdown/ensembleDropdown.tsx @@ -7,7 +7,7 @@ type EnsembleDropdownProps = { ensembleSet: EnsembleSet; value: EnsembleIdent | null; onChange: (ensembleIdent: EnsembleIdent | null) => void; -} & Omit; +} & Omit, "options" | "value" | "onChange">; export function EnsembleDropdown(props: EnsembleDropdownProps): JSX.Element { const { ensembleSet, value, onChange, ...rest } = props; diff --git a/frontend/src/framework/components/EnsembleSelect/ensembleSelect.tsx b/frontend/src/framework/components/EnsembleSelect/ensembleSelect.tsx index 3899145dc..51a9601f6 100644 --- a/frontend/src/framework/components/EnsembleSelect/ensembleSelect.tsx +++ b/frontend/src/framework/components/EnsembleSelect/ensembleSelect.tsx @@ -7,7 +7,7 @@ type EnsembleSelectProps = { 
ensembleSet: EnsembleSet; value: EnsembleIdent[]; onChange: (ensembleIdentArr: EnsembleIdent[]) => void; -} & Omit; +} & Omit, "options" | "value" | "onChange">; export function EnsembleSelect(props: EnsembleSelectProps): JSX.Element { const { ensembleSet, value, onChange, multiple, ...rest } = props; diff --git a/frontend/src/framework/internal/components/RightSettingsPanel/private-components/ModuleInstanceLog/moduleInstanceLog.tsx b/frontend/src/framework/internal/components/RightSettingsPanel/private-components/ModuleInstanceLog/moduleInstanceLog.tsx index cc4ce2077..120019365 100644 --- a/frontend/src/framework/internal/components/RightSettingsPanel/private-components/ModuleInstanceLog/moduleInstanceLog.tsx +++ b/frontend/src/framework/internal/components/RightSettingsPanel/private-components/ModuleInstanceLog/moduleInstanceLog.tsx @@ -188,7 +188,7 @@ function LogList(props: LogListProps): React.ReactNode { <> {showDatetime && (
{convertDatetimeMsToHumanReadableString(entry.datetimeMs)} diff --git a/frontend/src/framework/types/inplaceVolumetricsFilter.ts b/frontend/src/framework/types/inplaceVolumetricsFilter.ts new file mode 100644 index 000000000..bcb18b1f7 --- /dev/null +++ b/frontend/src/framework/types/inplaceVolumetricsFilter.ts @@ -0,0 +1,9 @@ +import { FluidZone_api, InplaceVolumetricsIdentifierWithValues_api } from "@api"; +import { EnsembleIdent } from "@framework/EnsembleIdent"; + +export type InplaceVolumetricsFilter = { + ensembleIdents: EnsembleIdent[]; + tableNames: string[]; + fluidZones: FluidZone_api[]; + identifiersValues: InplaceVolumetricsIdentifierWithValues_api[]; +}; diff --git a/frontend/src/lib/components/Dropdown/dropdown.tsx b/frontend/src/lib/components/Dropdown/dropdown.tsx index 03096890b..441b6342c 100644 --- a/frontend/src/lib/components/Dropdown/dropdown.tsx +++ b/frontend/src/lib/components/Dropdown/dropdown.tsx @@ -13,32 +13,27 @@ import { BaseComponent, BaseComponentProps } from "../BaseComponent"; import { IconButton } from "../IconButton"; import { Input } from "../Input"; import { Virtualization } from "../Virtualization"; -import { withDefaults } from "../_component-utils/components"; -export type DropdownOption = { - value: string; +export type DropdownOption = { + value: TValue; label: string; adornment?: React.ReactNode; + hoverText?: string; disabled?: boolean; }; -export type DropdownProps = { +export type DropdownProps = { id?: string; wrapperId?: string; - options: DropdownOption[]; - value?: string; - onChange?: (value: string) => void; + options: DropdownOption[]; + value?: TValue; + onChange?: (value: TValue) => void; filter?: boolean; width?: string | number; showArrows?: boolean; debounceTimeMs?: number; } & BaseComponentProps; -const defaultProps = { - value: "", - filter: false, -}; - const minHeight = 200; const optionHeight = 32; @@ -54,9 +49,11 @@ type DropdownRect = { const noMatchingOptionsText = "No matching options"; const noOptionsText = "No options"; -export const Dropdown = withDefaults()(defaultProps, (props) => { +export function Dropdown(props: DropdownProps) { const { onChange } = props; + const valueWithDefault = props.value ?? null; + const [dropdownVisible, setDropdownVisible] = React.useState(false); const [dropdownRect, setDropdownRect] = React.useState({ width: 0, @@ -64,11 +61,11 @@ export const Dropdown = withDefaults()(defaultProps, (props) => { height: 0, }); const [filter, setFilter] = React.useState(null); - const [selection, setSelection] = React.useState(props.value); - const [prevValue, setPrevValue] = React.useState(props.value); - const [prevFilteredOptions, setPrevFilteredOptions] = React.useState(props.options); + const [selection, setSelection] = React.useState(props.value ?? null); + const [prevValue, setPrevValue] = React.useState(props.value ?? 
null); + const [prevFilteredOptions, setPrevFilteredOptions] = React.useState[]>(props.options); const [selectionIndex, setSelectionIndex] = React.useState(-1); - const [filteredOptions, setFilteredOptions] = React.useState(props.options); + const [filteredOptions, setFilteredOptions] = React.useState[]>(props.options); const [optionIndexWithFocus, setOptionIndexWithFocus] = React.useState(-1); const [startIndex, setStartIndex] = React.useState(0); const [keyboardFocus, setKeyboardFocus] = React.useState(false); @@ -81,17 +78,17 @@ export const Dropdown = withDefaults()(defaultProps, (props) => { const setOptionIndexWithFocusToCurrentSelection = React.useCallback( function handleFilteredOptionsChange() { - const index = filteredOptions.findIndex((option) => option.value === selection); + const index = filteredOptions.findIndex((option) => isEqual(option.value, selection)); setSelectionIndex(index); setOptionIndexWithFocus(index); }, [filteredOptions, selection] ); - if (prevValue !== props.value) { - setSelection(props.value); - setSelectionIndex(props.options.findIndex((option) => option.value === props.value)); - setPrevValue(props.value); + if (!isEqual(prevValue, valueWithDefault)) { + setSelection(valueWithDefault); + setSelectionIndex(props.options.findIndex((option) => isEqual(option.value, valueWithDefault))); + setPrevValue(valueWithDefault); } if (!isEqual(prevFilteredOptions, filteredOptions)) { @@ -175,7 +172,7 @@ export const Dropdown = withDefaults()(defaultProps, (props) => { }; if (inputClientBoundingRect.y + inputBoundingRect.height + height > window.innerHeight) { - newDropdownRect.top = inputClientBoundingRect.y - minHeight; + newDropdownRect.top = inputClientBoundingRect.y - height; newDropdownRect.height = Math.min(height, inputClientBoundingRect.y); } else { newDropdownRect.top = inputClientBoundingRect.y + inputBoundingRect.height; @@ -218,7 +215,7 @@ export const Dropdown = withDefaults()(defaultProps, (props) => { ); const handleOnChange = React.useCallback( - function handleOnChange(value: string) { + function handleOnChange(value: TValue) { if (!onChange) { return; } @@ -240,9 +237,9 @@ export const Dropdown = withDefaults()(defaultProps, (props) => { ); const handleOptionClick = React.useCallback( - function handleOptionClick(value: string) { + function handleOptionClick(value: TValue) { setSelection(value); - setSelectionIndex(props.options.findIndex((option) => option.value === value)); + setSelectionIndex(props.options.findIndex((option) => isEqual(option.value, value))); setDropdownVisible(false); setFilter(null); setFilteredOptions(props.options); @@ -265,6 +262,12 @@ export const Dropdown = withDefaults()(defaultProps, (props) => { React.useEffect( function addKeyDownEventHandler() { function handleKeyDown(e: KeyboardEvent) { + if (e.key === "Escape") { + setDropdownVisible(false); + setOptionIndexWithFocus(-1); + setKeyboardFocus(false); + inputRef.current?.blur(); + } if (dropdownRef.current) { const currentStartIndex = Math.round(dropdownRef.current?.scrollTop / optionHeight); if (dropdownVisible) { @@ -328,7 +331,7 @@ export const Dropdown = withDefaults()(defaultProps, (props) => { setFilter(event.target.value); const newFilteredOptions = props.options.filter((option) => option.label.includes(event.target.value)); setFilteredOptions(newFilteredOptions); - setSelectionIndex(newFilteredOptions.findIndex((option) => option.value === selection)); + setSelectionIndex(newFilteredOptions.findIndex((option) => isEqual(option.value, selection))); }, 
[props.options, selection] ); @@ -342,7 +345,7 @@ export const Dropdown = withDefaults()(defaultProps, (props) => { if (dropdownVisible && filter !== null) { return filter; } - return props.options.find((el) => el.value === selection)?.label || ""; + return props.options.find((el) => isEqual(el.value, selection))?.label || ""; } function makeInputAdornment() { @@ -373,14 +376,14 @@ export const Dropdown = withDefaults()(defaultProps, (props) => { return ( -
+
option.value === selection) === undefined && + props.options.find((option) => isEqual(option.value, selection)) === undefined && props.options.length > 0 } onClick={() => handleInputClick()} @@ -458,12 +461,14 @@ export const Dropdown = withDefaults()(defaultProps, (props) => { "pl-1", "pr-1", { - "bg-blue-600 text-white box-border hover:bg-blue-700": - selection === option.value, + "bg-blue-600 text-white box-border hover:bg-blue-700": isEqual( + selection, + option.value + ), "bg-blue-100": - selection !== option.value && optionIndexWithFocus === index, - "bg-blue-700": - selection === option.value && optionIndexWithFocus === index, + !isEqual(selection, option.value) && optionIndexWithFocus === index, + "bg-blue-700 text-white": + !isEqual(selection, option.value) && optionIndexWithFocus === index, "pointer-events-none": option.disabled, "text-gray-400": option.disabled, } @@ -476,7 +481,7 @@ export const Dropdown = withDefaults()(defaultProps, (props) => { }} style={{ height: optionHeight }} onPointerMove={() => handlePointerOver(index)} - title={option.label} + title={option.hoverText ?? option.label} > {option.adornment && ( @@ -494,6 +499,6 @@ export const Dropdown = withDefaults()(defaultProps, (props) => {
); -}); +} Dropdown.displayName = "Dropdown"; diff --git a/frontend/src/lib/components/PendingWrapper/pendingWrapper.tsx b/frontend/src/lib/components/PendingWrapper/pendingWrapper.tsx index e510270ea..6faa47886 100644 --- a/frontend/src/lib/components/PendingWrapper/pendingWrapper.tsx +++ b/frontend/src/lib/components/PendingWrapper/pendingWrapper.tsx @@ -19,12 +19,12 @@ export const PendingWrapper: React.FC = (props) => { })} > {props.isPending && ( -
+
)} {!props.isPending && props.errorMessage && ( -
+
{props.errorMessage}
)} diff --git a/frontend/src/lib/components/Select/select.tsx b/frontend/src/lib/components/Select/select.tsx index b74f222de..093fe4a56 100644 --- a/frontend/src/lib/components/Select/select.tsx +++ b/frontend/src/lib/components/Select/select.tsx @@ -1,47 +1,43 @@ import React from "react"; import { resolveClassNames } from "@lib/utils/resolveClassNames"; +import { Deselect, SelectAll } from "@mui/icons-material"; import { isEqual } from "lodash"; import { BaseComponent, BaseComponentProps } from "../BaseComponent"; +import { Button } from "../Button"; import { Input } from "../Input"; import { Virtualization } from "../Virtualization"; -import { withDefaults } from "../_component-utils/components"; enum KeyModifier { SHIFT = "shift", CONTROL = "control", } -export type SelectOption = { - value: string; +export type SelectOption = { + value: TValue; adornment?: React.ReactNode; label: string; + hoverText?: string; disabled?: boolean; }; -export type SelectProps = { +export type SelectProps = { id?: string; wrapperId?: string; - options: SelectOption[]; - value?: string[]; - onChange?: (values: string[]) => void; + options: SelectOption[]; + value?: TValue[]; + onChange?: (values: TValue[]) => void; placeholder?: string; filter?: boolean; size?: number; multiple?: boolean; width?: string | number; debounceTimeMs?: number; + showQuickSelectButtons?: boolean; } & BaseComponentProps; -const defaultProps = { - value: [""], - filter: false, - size: 1, - multiple: false, -}; - const noMatchingOptionsText = "No matching options"; function ensureKeyboardSelectionInView( @@ -59,16 +55,20 @@ function ensureKeyboardSelectionInView( return prevViewStartIndex; } -export const Select = withDefaults()(defaultProps, (props) => { +export function Select(props: SelectProps) { const { onChange } = props; + const sizeWithDefault = props.size ?? 1; + const multipleWithDefault = props.multiple ?? false; + const filterWithDefault = props.filter ?? false; + const [filterString, setFilterString] = React.useState(""); const [hasFocus, setHasFocus] = React.useState(false); - const [options, setOptions] = React.useState(props.options); - const [filteredOptions, setFilteredOptions] = React.useState(props.options); + const [options, setOptions] = React.useState[]>(props.options); + const [filteredOptions, setFilteredOptions] = React.useState[]>(props.options); const [selectionAnchor, setSelectionAnchor] = React.useState(null); - const [selectedOptionValues, setSelectedOptionValues] = React.useState([]); - const [prevPropsValue, setPrevPropsValue] = React.useState(undefined); + const [selectedOptionValues, setSelectedOptionValues] = React.useState([]); + const [prevPropsValue, setPrevPropsValue] = React.useState(undefined); const [currentFocusIndex, setCurrentFocusIndex] = React.useState(0); const [virtualizationStartIndex, setVirtualizationStartIndex] = React.useState(0); const [reportedVirtualizationStartIndex, setReportedVirtualizationStartIndex] = React.useState(0); @@ -85,13 +85,14 @@ export const Select = withDefaults()(defaultProps, (props) => { } if (!isEqual(props.value, prevPropsValue)) { + const firstValueIndex = filteredOptions.findIndex((option) => option.value === props.value?.[0]); + setSelectionAnchor(firstValueIndex !== -1 ? firstValueIndex : null); setPrevPropsValue(props.value ? [...props.value] : undefined); setSelectedOptionValues(props.value ? [...props.value] : []); - setSelectionAnchor(props.value ? 
filteredOptions.findIndex((option) => option.value === props.value[0]) : null); } const handleOnChange = React.useCallback( - function handleOnChange(values: string[]) { + function handleOnChange(values: TValue[]) { if (!onChange) { return; } @@ -129,14 +130,14 @@ export const Select = withDefaults()(defaultProps, (props) => { return; } - if (!props.multiple) { + if (!multipleWithDefault) { const newSelectedOptions = [filteredOptions[index].value]; setSelectedOptionValues(newSelectedOptions); setSelectionAnchor(null); handleOnChange(newSelectedOptions); } - let newSelectedOptions: string[] = [filteredOptions[index].value]; + let newSelectedOptions: TValue[] = [filteredOptions[index].value]; if (modifiers.includes(KeyModifier.CONTROL) && !modifiers.includes(KeyModifier.SHIFT)) { return; @@ -162,7 +163,7 @@ export const Select = withDefaults()(defaultProps, (props) => { return; } - if (!props.multiple) { + if (!multipleWithDefault) { const newSelectedOptions = [filteredOptions[index].value]; setSelectedOptionValues(newSelectedOptions); setSelectionAnchor(null); @@ -171,7 +172,7 @@ export const Select = withDefaults()(defaultProps, (props) => { setSelectionAnchor(index); - let newSelectedOptions: string[] = []; + let newSelectedOptions: TValue[] = []; if (selectedOptionValues.includes(filteredOptions[index].value)) { newSelectedOptions = selectedOptionValues.filter((value) => value !== filteredOptions[index].value); } else { @@ -202,7 +203,7 @@ export const Select = withDefaults()(defaultProps, (props) => { const newIndex = Math.max(0, currentFocusIndex - 1); setCurrentFocusIndex(newIndex); setVirtualizationStartIndex((prev) => - ensureKeyboardSelectionInView(prev, reportedVirtualizationStartIndex, newIndex, props.size) + ensureKeyboardSelectionInView(prev, reportedVirtualizationStartIndex, newIndex, sizeWithDefault) ); makeKeyboardSelection(newIndex, modifiers); } @@ -212,7 +213,7 @@ export const Select = withDefaults()(defaultProps, (props) => { const newIndex = Math.min(filteredOptions.length - 1, currentFocusIndex + 1); setCurrentFocusIndex(newIndex); setVirtualizationStartIndex((prev) => - ensureKeyboardSelectionInView(prev, reportedVirtualizationStartIndex, newIndex, props.size) + ensureKeyboardSelectionInView(prev, reportedVirtualizationStartIndex, newIndex, sizeWithDefault) ); makeKeyboardSelection(newIndex, modifiers); } @@ -224,20 +225,20 @@ export const Select = withDefaults()(defaultProps, (props) => { if (e.key === "PageDown") { e.preventDefault(); - const newIndex = Math.min(filteredOptions.length - 1, currentFocusIndex + props.size); + const newIndex = Math.min(filteredOptions.length - 1, currentFocusIndex + sizeWithDefault); setCurrentFocusIndex(newIndex); setVirtualizationStartIndex((prev) => - ensureKeyboardSelectionInView(prev, reportedVirtualizationStartIndex, newIndex, props.size) + ensureKeyboardSelectionInView(prev, reportedVirtualizationStartIndex, newIndex, sizeWithDefault) ); makeKeyboardSelection(newIndex, modifiers); } if (e.key === "PageUp") { e.preventDefault(); - const newIndex = Math.max(0, currentFocusIndex - props.size); + const newIndex = Math.max(0, currentFocusIndex - sizeWithDefault); setCurrentFocusIndex(newIndex); setVirtualizationStartIndex((prev) => - ensureKeyboardSelectionInView(prev, reportedVirtualizationStartIndex, newIndex, props.size) + ensureKeyboardSelectionInView(prev, reportedVirtualizationStartIndex, newIndex, sizeWithDefault) ); makeKeyboardSelection(newIndex, modifiers); } @@ -253,7 +254,7 @@ export const Select = 
withDefaults()(defaultProps, (props) => { e.preventDefault(); const newIndex = filteredOptions.length - 1; setCurrentFocusIndex(newIndex); - setVirtualizationStartIndex(Math.max(0, newIndex - props.size + 1)); + setVirtualizationStartIndex(Math.max(0, newIndex - sizeWithDefault + 1)); makeKeyboardSelection(newIndex, modifiers); } } @@ -275,8 +276,8 @@ export const Select = withDefaults()(defaultProps, (props) => { [ currentFocusIndex, filteredOptions, - props.size, - props.multiple, + sizeWithDefault, + multipleWithDefault, handleOnChange, selectionAnchor, selectedOptionValues, @@ -284,20 +285,20 @@ export const Select = withDefaults()(defaultProps, (props) => { ] ); - function handleOptionClick(e: React.MouseEvent, option: SelectOption, index: number) { + function handleOptionClick(e: React.MouseEvent, option: SelectOption, index: number) { if (option.disabled) { return; } setCurrentFocusIndex(index); - if (!props.multiple) { + if (!multipleWithDefault) { setSelectedOptionValues([option.value]); handleOnChange([option.value]); return; } - let newSelectedOptions: string[] = []; + let newSelectedOptions: TValue[] = []; if (e.shiftKey && selectionAnchor !== null) { const start = Math.min(index, selectionAnchor); const end = Math.max(index, selectionAnchor); @@ -319,7 +320,7 @@ export const Select = withDefaults()(defaultProps, (props) => { setSelectedOptionValues(newSelectedOptions); } - function filterOptions(options: SelectOption[], filterString: string) { + function filterOptions(options: SelectOption[], filterString: string) { let newCurrentKeyboardFocusIndex = 0; let newVirtualizationStartIndex = 0; @@ -345,7 +346,6 @@ export const Select = withDefaults()(defaultProps, (props) => { setCurrentFocusIndex(newCurrentKeyboardFocusIndex); setVirtualizationStartIndex(newVirtualizationStartIndex); - setSelectionAnchor(newFilteredOptions.findIndex((option) => option.value === selectedOptionValues[0])); } function handleFilterChange(event: React.ChangeEvent) { @@ -357,83 +357,123 @@ export const Select = withDefaults()(defaultProps, (props) => { setReportedVirtualizationStartIndex(index); } + function handleSelectAll() { + if (!onChange) { + return; + } + onChange(props.options.map((option) => option.value)); + } + + function handleUnselectAll() { + if (!onChange) { + return; + } + onChange([]); + } + return ( - -
- {props.filter && ( - - )} +
+ {props.showQuickSelectButtons && ( +
+ + +
+ )} +
- {filteredOptions.length === 0 && ( -
- {options.length === 0 || filterString === "" ? noOptionsText : noMatchingOptionsText} -
+ {filterWithDefault && ( + )} - { - return ( -
handleOptionClick(e, option, index)} - style={{ height: 24 }} - > - {option.adornment} - + {filteredOptions.length === 0 && ( +
+ {options.length === 0 || filterString === "" ? noOptionsText : noMatchingOptionsText} +
+ )} + { + return ( +
handleOptionClick(e, option, index)} + style={{ height: 24 }} > - {option.label} - -
- ); - }} - direction="vertical" - startIndex={virtualizationStartIndex} - /> + {option.adornment} + + {option.label} + +
+ ); + }} + direction="vertical" + startIndex={virtualizationStartIndex} + /> +
-
-
+ +
); -}); +} Select.displayName = "Select"; diff --git a/frontend/src/lib/components/Table/table.tsx b/frontend/src/lib/components/Table/table.tsx index 6079e9421..9d8d0211e 100644 --- a/frontend/src/lib/components/Table/table.tsx +++ b/frontend/src/lib/components/Table/table.tsx @@ -1,30 +1,35 @@ import React from "react"; +import { resolveClassNames } from "@lib/utils/resolveClassNames"; +import { getTextWidthWithFont } from "@lib/utils/textSize"; import { Close, ExpandLess, ExpandMore } from "@mui/icons-material"; +import { isEqual } from "lodash"; import { v4 } from "uuid"; import { BaseComponent, BaseComponentProps } from "../BaseComponent"; -import { IconButton } from "../IconButton"; import { Input } from "../Input"; import { Virtualization } from "../Virtualization"; export type TableHeading = { [key: string]: { label: string; + hoverText?: string; sortable?: boolean; sizeInPercent: number; - format?: (value: string | number) => string | number; + formatValue?: (value: string | number | null) => string; + formatStyle?: (value: string | number | null) => React.CSSProperties; + subHeading?: TableHeading; }; }; -type TableRow = { - [key in keyof T]: string | number; +export type TableRow = { + [key in keyof T]: string | number | null; }; type IdentifiedTableRow = { id: string; - values: { [key in keyof T]: string | number }; + values: { [key in keyof T]: string | number | null }; }; export type TableProps = { @@ -32,9 +37,10 @@ export type TableProps = { data: TableRow[]; width?: number | string; height?: number | string; - onHover?: (row: TableRow) => void; + onHover?: (row: TableRow | null) => void; onClick?: (row: TableRow) => void; highlightFilter?: (row: TableRow) => boolean; + alternatingColumnColors?: boolean; } & BaseComponentProps; type LayoutError = { @@ -43,8 +49,8 @@ type LayoutError = { }; enum SortDirection { - Asc = "asc", - Desc = "desc", + ASC = "asc", + DESC = "desc", } function filterData( @@ -54,10 +60,12 @@ function filterData( ): IdentifiedTableRow[] { return data.filter((series) => { for (const col in filterValues) { - const format = headings[col].format || ((value: string | number) => value); + const format = headings[col].formatValue || ((value: string | number) => value); + const seriesValue = series.values[col]; if ( filterValues[col] !== "" && - format(series.values[col]).toString().toLowerCase().indexOf(filterValues[col].toLowerCase()) === -1 + (seriesValue === null || + format(seriesValue).toString().toLowerCase().indexOf(filterValues[col].toLowerCase()) === -1) ) { return false; } @@ -66,22 +74,43 @@ function filterData( }); } -function sortData( +type SortColumnAndDirectionElement = { + col: string; + dir: SortDirection; +}; + +function sortDataByColumns( data: IdentifiedTableRow[], - col: string, - dir: SortDirection + sortColumnAndDirectionArray: SortColumnAndDirectionElement[] ): IdentifiedTableRow[] { - return [ - ...data.sort((a, b) => { - if (a.values[col] < b.values[col]) { - return dir === SortDirection.Asc ? -1 : 1; - } - if (a.values[col] > b.values[col]) { - return dir === SortDirection.Asc ? 
1 : -1; - } - return 0; - }), - ]; + return [...data.sort((a, b) => compareDataByColumns(a, b, sortColumnAndDirectionArray))]; +} + +function compareDataByColumns( + a: IdentifiedTableRow, + b: IdentifiedTableRow, + sortColumnAndDirectionArray: SortColumnAndDirectionElement[] +): number { + for (const { col, dir } of sortColumnAndDirectionArray) { + const aValue = a.values[col]; + const bValue = b.values[col]; + if (aValue === null && bValue === null) { + continue; + } + if (aValue === null) { + return dir === SortDirection.ASC ? 1 : -1; + } + if (bValue === null) { + return dir === SortDirection.ASC ? -1 : 1; + } + if (aValue < bValue) { + return dir === SortDirection.ASC ? -1 : 1; + } + if (aValue > bValue) { + return dir === SortDirection.ASC ? 1 : -1; + } + } + return 0; } function preprocessData(data: TableRow[]): IdentifiedTableRow[] { @@ -93,43 +122,231 @@ function preprocessData(data: TableRow[]): IdentifiedTableRow> = (props) => { +type TableHeadingCellInformation = { + id: string; + colSpan: number; + rowSpan: number; + hasSubHeaders: boolean; +}; + +type TableHeadingInformation = { + numColumns: number; + dataColumnIds: string[]; + headerRows: TableHeadingCellInformation[][]; +}; + +function recursivelyCalcDepth(headings: TableHeading, depth: number = 1): number { + let maxDepth = depth; + for (const col in headings) { + const subHeading = headings[col].subHeading; + if (subHeading) { + const localDepth = recursivelyCalcDepth(subHeading, depth + 1); + maxDepth = Math.max(maxDepth, localDepth); + } + } + return maxDepth; +} + +function extractInformationFromTableHeading( + headings: TableHeading, + depth: number = 0, + headerRows: TableHeadingCellInformation[][] = [] +): TableHeadingInformation { + const maxDepth = recursivelyCalcDepth(headings); + + let numColumns = 0; + if (!headerRows[depth]) { + headerRows[depth] = []; + } + + const dataColumnIds: string[] = []; + + for (const col in headings) { + const subHeading = headings[col].subHeading; + if (subHeading) { + const subHeadingInfo = extractInformationFromTableHeading(subHeading, depth + 1, headerRows); + headerRows[depth].push({ + id: col, + hasSubHeaders: true, + colSpan: subHeadingInfo.numColumns, + rowSpan: 1, + }); + numColumns += subHeadingInfo.numColumns; + dataColumnIds.push(...subHeadingInfo.dataColumnIds); + } else { + numColumns++; + headerRows[depth].push({ + id: col, + hasSubHeaders: false, + colSpan: 1, + rowSpan: Math.max(1, maxDepth - depth), + }); + dataColumnIds.push(col); + } + } + + return { + numColumns, + dataColumnIds, + headerRows, + }; +} + +type FlattenedHeading = Record< + string, + Omit & { headingGroupId?: string } +>; + +function flattenHeadings( + headings: TableHeading, + headingGroupId?: string, + parentSizeInPercent: number = 100.0 +): FlattenedHeading { + const newHeadings: FlattenedHeading = {}; + for (const col in headings) { + const subHeadings = headings[col].subHeading; + if (subHeadings) { + const flattenedSubHeadings = flattenHeadings( + subHeadings, + headingGroupId ?? 
col, + headings[col].sizeInPercent + ); + for (const subCol in flattenedSubHeadings) { + newHeadings[`${subCol}`] = { + ...flattenedSubHeadings[subCol], + sizeInPercent: (parentSizeInPercent * flattenedSubHeadings[subCol].sizeInPercent) / 100, + }; + } + } + newHeadings[col] = { + label: headings[col].label, + hoverText: headings[col].hoverText, + sizeInPercent: (parentSizeInPercent * headings[col].sizeInPercent) / 100, + formatValue: headings[col].formatValue, + formatStyle: headings[col].formatStyle, + headingGroupId, + }; + } + return newHeadings; +} + +function calcMaxColumnWidths( + headings: THeading, + data: TableRow[] +): { [key: string]: number } { + const columnWidths: { [key: string]: number } = {}; + for (const col in headings) { + columnWidths[col] = getTextWidthWithFont(headings[col].label, "Equinor", 1.5); + } + for (const row of data) { + for (const col in row) { + const cellContent = row[col]; + const formatValue = headings[col]?.formatValue; + const value = cellContent === null ? "" : formatValue ? formatValue(cellContent) : cellContent.toString(); + columnWidths[col] = Math.max(columnWidths[col], getTextWidthWithFont(value, "Equinor", 1.1)); + } + } + return columnWidths; +} + +const HEADER_HEIGHT_PX = 30; +const ROW_HEIGHT_PX = 30; +const ALTERNATING_COLUMN_HEADING_COLORS = ["bg-slate-100", "bg-slate-200"]; +const ALTERNATING_COLUMN_CELL_COLORS = ["bg-white", "bg-slate-50"]; + +class AlternatingColumnStyleHelper { + private _alternatingGroup = 0; + private _lastGroupId: string | null = null; + private readonly _headings: FlattenedHeading; + private readonly _colors: string[]; + + constructor(headings: FlattenedHeading, colors: string[]) { + this._headings = headings; + this._colors = colors; + } + + getClassNames(columnId: string): string { + const groupId = this._headings[columnId].headingGroupId; + + let isSameGroup = true; + if (groupId !== this._lastGroupId) { + this._alternatingGroup = (this._alternatingGroup + 1) % this._colors.length; + isSameGroup = false; + } + this._lastGroupId = groupId ?? null; + + const color = this._colors[this._alternatingGroup]; + const border = isSameGroup ? 
"" : "border-l border-l-slate-500"; + + return resolveClassNames(color, border); + } +} + +export function Table(props: TableProps): React.ReactNode { const [layoutError, setLayoutError] = React.useState({ error: false, message: "" }); const [preprocessedData, setPreprocessedData] = React.useState[]>([]); const [filteredData, setFilteredData] = React.useState[]>([]); const [filterValues, setFilterValues] = React.useState<{ [key: string]: string }>({}); - const [sortColumnAndDirection, setSortColumnAndDirection] = React.useState<{ col: string; dir: SortDirection }>({ - col: "", - dir: SortDirection.Asc, - }); - const containerRef = React.useRef(null); + const [sortColumnAndDirectionArray, setSortColumnAndDirectionArray] = React.useState< + SortColumnAndDirectionElement[] + >([]); + const [headerRows, setHeaderRows] = React.useState([]); + const [prevFlattenedHeadings, setPrevFlattenedHeadings] = React.useState({}); + const [flattenedHeadings, setFlattenedHeadings] = React.useState({}); + const [dataColumnIds, setDataColumnIds] = React.useState([]); + const [columnWidths, setColumnWidths] = React.useState<{ [key: string]: number }>({}); - React.useEffect(() => { - setPreprocessedData(preprocessData(props.data)); - }, [props.data]); + const [prevData, setPrevData] = React.useState[]>([]); + const [prevHeadings, setPrevHeadings] = React.useState({}); - React.useEffect(() => { - setFilteredData(filterData(preprocessedData, filterValues, props.headings)); - }, [preprocessedData, filterValues, props.headings]); + const containerRef = React.useRef(null); - React.useEffect(() => { - setFilteredData((prev) => sortData(prev, sortColumnAndDirection.col, sortColumnAndDirection.dir)); - }, [sortColumnAndDirection]); + if (!isEqual(prevData, props.data)) { + setPrevData(props.data); + const newPreprocessedData = preprocessData(props.data); + setPreprocessedData(newPreprocessedData); + setFilteredData( + sortDataByColumns( + filterData(newPreprocessedData, filterValues, flattenedHeadings), + sortColumnAndDirectionArray + ) + ); + } - React.useEffect(() => { - const maxNumberOfSubheadings = Object.keys(props.headings).length; + if (!isEqual(prevHeadings, props.headings) || !isEqual(prevData, props.data)) { + setPrevHeadings(props.headings); + const info = extractInformationFromTableHeading(props.headings); + setHeaderRows(info.headerRows); + setDataColumnIds(info.dataColumnIds); for (const row of props.data) { - if (Object.keys(row).length !== maxNumberOfSubheadings) { + if (Object.keys(row).length !== info.numColumns) { setLayoutError({ error: true, message: "The number of headings does not match the number of data series.", }); break; } + if (Object.keys(row).some((col) => !info.dataColumnIds.includes(col))) { + setLayoutError({ + error: true, + message: "The data series column ids do not match the heading ids.", + }); + break; + } } - }, [props.headings, props.data]); + const newFlattenedHeadings = flattenHeadings(props.headings); + setFlattenedHeadings(newFlattenedHeadings); + } - function handlePointerOver(row: TableRow) { + if (!isEqual(prevData, props.data) || !isEqual(prevFlattenedHeadings, flattenedHeadings)) { + setColumnWidths(calcMaxColumnWidths(flattenedHeadings, props.data)); + } + + if (!isEqual(prevFlattenedHeadings, flattenedHeadings)) { + setPrevFlattenedHeadings(flattenedHeadings); + } + + function handlePointerOver(row: TableRow | null) { if (props.onHover) { props.onHover(row); } @@ -142,116 +359,284 @@ export const Table: React.FC> = (props) => { } function handleFilterChange(col: 
string, value: string) { - setFilterValues({ ...filterValues, [col]: value }); + const newFilterValues = { ...filterValues, [col]: value }; + setFilterValues(newFilterValues); + setFilteredData( + sortDataByColumns( + filterData(preprocessedData, newFilterValues, flattenedHeadings), + sortColumnAndDirectionArray + ) + ); } - function handleSortDirectionChange(col: string, dir: SortDirection) { - setSortColumnAndDirection({ col, dir }); + function handleSortDirectionChange(event: React.MouseEvent, col: string, dir: SortDirection) { + const sortColumnAndDirectionElement: SortColumnAndDirectionElement = { + col, + dir, + }; + + let newSortColumnAndDirectionArray: SortColumnAndDirectionElement[] = []; + + if (event.shiftKey) { + const element = sortColumnAndDirectionArray.find((el) => el.col === col); + if (element && element.dir === dir) { + newSortColumnAndDirectionArray = sortColumnAndDirectionArray.filter((el) => el.col !== col); + } else if (element) { + newSortColumnAndDirectionArray = sortColumnAndDirectionArray.filter((el) => el.col !== col); + newSortColumnAndDirectionArray = [...newSortColumnAndDirectionArray, sortColumnAndDirectionElement]; + } else { + newSortColumnAndDirectionArray = [...sortColumnAndDirectionArray, sortColumnAndDirectionElement]; + } + } else { + newSortColumnAndDirectionArray = [sortColumnAndDirectionElement]; + } + + setSortColumnAndDirectionArray(newSortColumnAndDirectionArray); + sortDataByColumns(filteredData, newSortColumnAndDirectionArray); } if (layoutError.error) { return
{layoutError.message}
; } + function makeSortButtons(col: string): React.ReactNode { + let sortDirection: SortDirection | null = null; + let numSortColumn = 0; + if (sortColumnAndDirectionArray.length > 0) { + const index = sortColumnAndDirectionArray.findIndex((el) => el.col === col); + if (index !== -1) { + numSortColumn = index + 1; + sortDirection = sortColumnAndDirectionArray[index].dir; + } + } + + const component = ( +
+
handleSortDirectionChange(e, col, SortDirection.ASC)} + title="Sort ascending" + > +
+ +
+
+
handleSortDirectionChange(e, col, SortDirection.DESC)} + title="Sort descending" + > +
+ +
+
+
+ ); + + if (sortColumnAndDirectionArray.length <= 1 || numSortColumn === 0) { + return component; + } + + return ( +
+
+ {numSortColumn} +
+ {component} +
+ ); + } + + function makeHeadingFilterRow(): React.ReactNode { + const headingCells: React.ReactNode[] = []; + + const alternatingColumnStyleHelper = new AlternatingColumnStyleHelper( + flattenedHeadings, + ALTERNATING_COLUMN_HEADING_COLORS + ); + + for (const key of dataColumnIds) { + let additionalClassNames: string = ""; + if (props.alternatingColumnColors) { + additionalClassNames = alternatingColumnStyleHelper.getClassNames(key); + } else { + additionalClassNames = "bg-slate-100"; + } + + headingCells.push( + + handleFilterChange(key, e.target.value)} + endAdornment={ +
handleFilterChange(key, "")} + > + +
+ } + wrapperStyle={{ + fontWeight: "normal", + fontSize: "0.5rem", + }} + /> + + ); + } + + return {headingCells}; + } + + function makeHeadingRow(row: TableHeadingCellInformation[], depth: number): React.ReactNode { + const headingCells: React.ReactNode[] = []; + + const alternatingColumnStyleHelper = new AlternatingColumnStyleHelper( + flattenedHeadings, + ALTERNATING_COLUMN_HEADING_COLORS + ); + + for (const cell of row) { + let additionalClassNames: string = ""; + if (props.alternatingColumnColors) { + additionalClassNames = alternatingColumnStyleHelper.getClassNames(cell.id); + } else { + additionalClassNames = "bg-slate-100"; + } + + headingCells.push( + +
+
+ + {flattenedHeadings[cell.id].label} + + {!cell.hasSubHeaders ? makeSortButtons(cell.id) : null} +
+
+ + ); + } + + return {headingCells}; + } + + function makeHeadings(): React.ReactNode { + const headingComponents: React.ReactNode[] = []; + for (let depth = 0; depth < headerRows.length; depth++) { + headingComponents.push(makeHeadingRow(headerRows[depth], depth)); + } + headingComponents.push(makeHeadingFilterRow()); + return <>{headingComponents}; + } + + function makeDataRow(row: IdentifiedTableRow): React.ReactNode { + const cells: React.ReactNode[] = []; + + const alternatingColumnStyleHelper = new AlternatingColumnStyleHelper( + flattenedHeadings, + ALTERNATING_COLUMN_CELL_COLORS + ); + + for (const colId of dataColumnIds) { + let additionalClassNames: string = ""; + if (props.alternatingColumnColors) { + additionalClassNames = alternatingColumnStyleHelper.getClassNames(colId); + } + + const format = flattenedHeadings[colId].formatValue; + const formatStyle = flattenedHeadings[colId].formatStyle; + cells.push( + + {format ? format(row.values[colId]) : row.values[colId]} + + ); + } + + return ( + handlePointerOver(row.values)} + onPointerLeave={() => handlePointerOver(null)} + onPointerDown={() => handlePointerDown(row.values)} + style={{ height: 30, maxHeight: ROW_HEIGHT_PX }} + > + {cells} + + ); + } + return (
- - - - {Object.keys(props.headings).map((col) => ( - - ))} - - +
-
- {props.headings[col].label} -
- handleSortDirectionChange(col, SortDirection.Asc)} - color={ - sortColumnAndDirection.col === col && - sortColumnAndDirection.dir === SortDirection.Asc - ? "danger" - : undefined - } - > - - - handleSortDirectionChange(col, SortDirection.Desc)} - color={ - sortColumnAndDirection.col === col && - sortColumnAndDirection.dir === SortDirection.Desc - ? "danger" - : undefined - } - > - - -
-
-
- handleFilterChange(col, e.target.value)} - endAdornment={ - handleFilterChange(col, "")}> - - - } - /> -
-
+ {makeHeadings()}) => { - return ( - handlePointerOver(item.values)} - onPointerDown={() => handlePointerDown(item.values)} - style={{ height: 30 }} - > - {Object.keys(item.values).map((col) => { - const format = props.headings[col].format; - return ( - - ); - })} - - ); - }} + itemSize={ROW_HEIGHT_PX} + renderItem={makeDataRow} />
- {format ? format(item.values[col]) : item.values[col]} -
); -}; +} Table.displayName = "Table"; diff --git a/frontend/src/lib/components/TagPicker/index.ts b/frontend/src/lib/components/TagPicker/index.ts new file mode 100644 index 000000000..ac8587c05 --- /dev/null +++ b/frontend/src/lib/components/TagPicker/index.ts @@ -0,0 +1,2 @@ +export { TagPicker } from "./tagPicker"; +export type { TagPickerProps, TagOption } from "./tagPicker"; diff --git a/frontend/src/lib/components/TagPicker/tagPicker.tsx b/frontend/src/lib/components/TagPicker/tagPicker.tsx new file mode 100644 index 000000000..0793ddcb9 --- /dev/null +++ b/frontend/src/lib/components/TagPicker/tagPicker.tsx @@ -0,0 +1,340 @@ +import React from "react"; + +import { useElementBoundingRect } from "@lib/hooks/useElementBoundingRect"; +import { createPortal } from "@lib/utils/createPortal"; +import { resolveClassNames } from "@lib/utils/resolveClassNames"; +import { getTextWidthWithFont } from "@lib/utils/textSize"; +import { Close, ExpandMore } from "@mui/icons-material"; + +import { isEqual } from "lodash"; + +import { BaseComponent, BaseComponentProps } from "../BaseComponent"; +import { Checkbox } from "../Checkbox"; +import { IconButton } from "../IconButton"; +import { Virtualization } from "../Virtualization"; + +export type TagOption = { + value: T; + label: string; +}; + +export type TagPickerProps = { + id?: string; + wrapperId?: string; + tags: TagOption[]; + value: T[]; + onChange?: (value: T[]) => void; + width?: string | number; + debounceTimeMs?: number; +} & BaseComponentProps; + +const MIN_HEIGHT = 200; +const TAG_HEIGHT = 32; + +type DropdownRect = { + left?: number; + top?: number; + right?: number; + width: number; + height: number; + minWidth: number; +}; + +const NO_MATCHING_TAGS_TEXT = "No matching tags"; +const NO_TAGS_TEXT = "No tags"; + +export function TagPicker(props: TagPickerProps): React.ReactElement { + const [selectedTags, setSelectedTags] = React.useState(props.value); + const [prevSelectedTags, setPrevSelectedTags] = React.useState(props.value); + const [dropdownVisible, setDropdownVisible] = React.useState(false); + const [dropdownRect, setDropdownRect] = React.useState({ + width: 0, + minWidth: 0, + height: 0, + }); + const [filter, setFilter] = React.useState(null); + const [filteredTags, setFilteredTags] = React.useState[]>(props.tags); + const [startIndex, setStartIndex] = React.useState(0); + const [focused, setFocused] = React.useState(false); + + const divRef = React.useRef(null); + const inputRef = React.useRef(null); + const dropdownRef = React.useRef(null); + const debounceTimerRef = React.useRef | null>(null); + + const divBoundingRect = useElementBoundingRect(divRef); + + if (!isEqual(props.value, prevSelectedTags)) { + setSelectedTags(props.value); + if (filter) { + setFilteredTags(props.tags.filter((option) => option.label.toLowerCase().includes(filter))); + } else { + setFilteredTags(props.tags); + } + setPrevSelectedTags(props.value); + } + + React.useEffect(function handleMount() { + const debounceTimerRefCurrent = debounceTimerRef.current; + return function handleUnmount() { + if (debounceTimerRefCurrent) { + clearTimeout(debounceTimerRefCurrent); + } + }; + }, []); + + React.useEffect(function handleTagsChange() { + function handleMouseDown(event: MouseEvent) { + if ( + dropdownRef.current && + !dropdownRef.current.contains(event.target as Node) && + divRef.current && + !divRef.current.contains(event.target as Node) + ) { + setDropdownVisible(false); + } + } + + function handleKeyDown(event: KeyboardEvent) { + if (event.key === 
"Escape") { + setDropdownVisible(false); + inputRef.current?.blur(); + setFocused(false); + return; + } + } + + document.addEventListener("mousedown", handleMouseDown); + document.addEventListener("keydown", handleKeyDown); + + return () => { + document.removeEventListener("mousedown", handleMouseDown); + }; + }, []); + + React.useEffect( + function updateDropdownRectWidth() { + let longestTagWidth = props.tags.reduce((prev, current) => { + const labelWidth = getTextWidthWithFont(current.label, "Equinor", 1); + const totalWidth = labelWidth; + if (totalWidth > prev) { + return totalWidth; + } + return prev; + }, 0); + + if (longestTagWidth === 0) { + if (props.tags.length === 0 || filter === "") { + longestTagWidth = getTextWidthWithFont(NO_TAGS_TEXT, "Equinor", 1); + } else { + longestTagWidth = getTextWidthWithFont(NO_MATCHING_TAGS_TEXT, "Equinor", 1); + } + } + setDropdownRect((prev) => ({ ...prev, width: longestTagWidth + 32 })); + + const newFilteredOptions = props.tags.filter((tag) => tag.label.toLowerCase().includes(filter || "")); + setFilteredTags(newFilteredOptions); + }, + [props.tags, filter] + ); + + React.useEffect( + function computeDropdownRect() { + if (dropdownVisible) { + const divClientBoundingRect = divRef.current?.getBoundingClientRect(); + const bodyClientBoundingRect = document.body.getBoundingClientRect(); + + const height = Math.min(MIN_HEIGHT, Math.max(filteredTags.length * TAG_HEIGHT, TAG_HEIGHT)) + 2; + + if (divClientBoundingRect && bodyClientBoundingRect) { + const newDropdownRect: DropdownRect = { + minWidth: divBoundingRect.width, + width: dropdownRect.width, + height: height, + }; + + if (divClientBoundingRect.y + divBoundingRect.height + height > window.innerHeight) { + newDropdownRect.top = divClientBoundingRect.y - height; + newDropdownRect.height = Math.min(height, divClientBoundingRect.y); + } else { + newDropdownRect.top = divClientBoundingRect.y + divBoundingRect.height; + newDropdownRect.height = Math.min( + height, + window.innerHeight - divClientBoundingRect.y - divBoundingRect.height + ); + } + if (divClientBoundingRect.x + divBoundingRect.width > window.innerWidth / 2) { + newDropdownRect.right = window.innerWidth - (divClientBoundingRect.x + divBoundingRect.width); + } else { + newDropdownRect.left = divClientBoundingRect.x; + } + + setDropdownRect((prev) => ({ ...newDropdownRect, width: prev.width })); + + setStartIndex( + Math.max( + 0, + Math.round( + (filteredTags.findIndex((tag) => tag.value === selectedTags[selectedTags.length - 1]) || + 0) - + height / TAG_HEIGHT / 2 + ) + ) + ); + } + } + }, + [divBoundingRect, dropdownVisible, filteredTags, selectedTags, dropdownRect.width, props.tags] + ); + + function handleInputClick() { + setDropdownVisible(true); + } + + function handleTagToggle(value: T) { + let newSelectedTags = [...selectedTags]; + if (selectedTags.includes(value)) { + newSelectedTags = newSelectedTags.filter((v) => v !== value); + } else { + newSelectedTags.push(value); + } + + setFilter(null); + inputRef.current?.focus(); + setSelectedTags(newSelectedTags); + + if (props.debounceTimeMs) { + if (debounceTimerRef.current) { + clearTimeout(debounceTimerRef.current); + } + debounceTimerRef.current = setTimeout(() => { + props.onChange?.(newSelectedTags); + }, props.debounceTimeMs); + } else { + props.onChange?.(newSelectedTags); + } + } + + function handleInputChange(event: React.ChangeEvent) { + const newFilter = event.target.value.toLowerCase(); + setFilter(newFilter); + const newFilteredOptions = props.tags.filter((option) => 
+ option.label.toLowerCase().includes(newFilter.toLowerCase()) + ); + setFilteredTags(newFilteredOptions); + } + + function handleClick() { + setDropdownVisible(true); + if (inputRef.current) { + inputRef.current.focus(); + } + } + + function removeTag(value: T) { + setSelectedTags(selectedTags.filter((tag) => tag !== value)); + props.onChange?.(selectedTags.filter((tag) => tag !== value)); + } + + function handleClearAll() { + setSelectedTags([]); + props.onChange?.([]); + } + + function handleFocus() { + setFocused(true); + } + + function handleBlur() { + setFocused(false); + } + + return ( + +
+
+ {selectedTags.map((tag) => { + const tagOption = props.tags.find((el) => el.value === tag); + if (!tagOption) { + return null; + } + return removeTag(tag)} />; + })} + +
+
+ {selectedTags.length === 0 ? ( + + ) : ( + + + + )} +
+
+ {dropdownVisible && + createPortal( +
+ {filteredTags.length === 0 && ( +
+ {props.tags.length === 0 || filter === "" ? NO_TAGS_TEXT : NO_MATCHING_TAGS_TEXT} +
+ )} + ( + handleTagToggle(option.value)} + label={option.label} + /> + )} + /> +
+ )} +
+ ); +} + +type TagProps = { + tag: TagOption; + onRemove: () => void; +}; + +function Tag(props: TagProps): React.ReactNode { + return ( +
+ {props.tag.label} + { + + + + } +
+ ); +} diff --git a/frontend/src/lib/utils/fixupUserSelection.ts b/frontend/src/lib/utils/fixupUserSelection.ts new file mode 100644 index 000000000..f614e821c --- /dev/null +++ b/frontend/src/lib/utils/fixupUserSelection.ts @@ -0,0 +1,15 @@ +export function fixupUserSelection( + userSelection: TSelection[], + validOptions: TSelection[], + selectAll: boolean = false +): TSelection[] { + const newSelections = userSelection.filter((selection) => validOptions.includes(selection)); + if (newSelections.length === 0 && validOptions.length > 0) { + if (selectAll) { + return validOptions; + } + newSelections.push(validOptions[0]); + } + + return newSelections; +} diff --git a/frontend/src/main.css b/frontend/src/main.css index db87d72ad..829a7338f 100644 --- a/frontend/src/main.css +++ b/frontend/src/main.css @@ -3,6 +3,15 @@ @tailwind components; @tailwind utilities; +/* Additional tailwind component classes */ +@layer components { + .input-comp { + @apply hover:outline hover:outline-1 hover:outline-blue-300 focus:outline focus:outline-1 focus:outline-blue-600; + } +} + +/* Custom CSS styles*/ + body { overflow: hidden; font-family: Equinor; diff --git a/frontend/src/modules/DistributionPlot/view.tsx b/frontend/src/modules/DistributionPlot/view.tsx index ae359eea5..5e949c1fd 100644 --- a/frontend/src/modules/DistributionPlot/view.tsx +++ b/frontend/src/modules/DistributionPlot/view.tsx @@ -10,13 +10,13 @@ import { Size2D } from "@lib/utils/geometry"; import { makeSubplots } from "@modules/_shared/Figure"; import { ContentInfo } from "@modules/_shared/components/ContentMessage"; import { ContentWarning } from "@modules/_shared/components/ContentMessage/contentMessage"; +import { makeHistogramTrace } from "@modules/_shared/histogram"; import { Warning } from "@mui/icons-material"; import { Layout, PlotData } from "plotly.js"; import { Interfaces } from "./interfaces"; import { PlotType } from "./typesAndEnums"; -import { makeHistogramTrace } from "./utils/histogram"; import { makeHoverText, makeHoverTextWithColor, makeTitleFromChannelContent } from "./utils/stringUtils"; import { calcTextSize } from "./utils/textSize"; diff --git a/frontend/src/modules/InplaceVolumetrics/interfaces.ts b/frontend/src/modules/InplaceVolumetrics/interfaces.ts deleted file mode 100644 index b886daa1b..000000000 --- a/frontend/src/modules/InplaceVolumetrics/interfaces.ts +++ /dev/null @@ -1,34 +0,0 @@ -import { InplaceVolumetricsCategoricalMetaData_api } from "@api"; -import { EnsembleIdent } from "@framework/EnsembleIdent"; -import { InterfaceInitialization } from "@framework/UniDirectionalModuleComponentsInterface"; - -import { - categoricalFilterAtom, - categoricalOptionsAtom, - ensembleIdentAtom, - realizationsToIncludeAtom, - responseNameAtom, - tableNameAtom, -} from "./settings/atoms/baseAtoms"; - -type SettingsToViewInterface = { - ensembleIdent: EnsembleIdent | null; - tableName: string | null; - responseName: string | null; - categoricalOptions: InplaceVolumetricsCategoricalMetaData_api[] | null; - categoricalFilter: InplaceVolumetricsCategoricalMetaData_api[] | null; - realizationsToInclude: number[] | null; -}; - -export type Interfaces = { - settingsToView: SettingsToViewInterface; -}; - -export const settingsToViewInterfaceInitialization: InterfaceInitialization = { - ensembleIdent: (get) => get(ensembleIdentAtom), - tableName: (get) => get(tableNameAtom), - responseName: (get) => get(responseNameAtom), - categoricalOptions: (get) => get(categoricalOptionsAtom), - categoricalFilter: (get) => 
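// Aside (illustrative, not part of this patch): expected behavior of fixupUserSelection above,
// shown with made-up option values and assuming the usual "@lib" path alias used elsewhere in the repo.
// Invalid entries are dropped; an empty result falls back to the first valid option,
// or to all valid options when selectAll is true.
import { fixupUserSelection } from "@lib/utils/fixupUserSelection";

const validZones = ["Valysar", "Therys", "Volon"]; // illustrative option values
fixupUserSelection(["Therys", "Unknown"], validZones); // -> ["Therys"]
fixupUserSelection(["Unknown"], validZones); // -> ["Valysar"] (falls back to first valid option)
fixupUserSelection(["Unknown"], validZones, true); // -> ["Valysar", "Therys", "Volon"] (selectAll)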
get(categoricalFilterAtom), - realizationsToInclude: (get) => get(realizationsToIncludeAtom), -}; diff --git a/frontend/src/modules/InplaceVolumetrics/loadModule.tsx b/frontend/src/modules/InplaceVolumetrics/loadModule.tsx deleted file mode 100644 index 8cdb4582d..000000000 --- a/frontend/src/modules/InplaceVolumetrics/loadModule.tsx +++ /dev/null @@ -1,10 +0,0 @@ -import { ModuleRegistry } from "@framework/ModuleRegistry"; - -import { Interfaces, settingsToViewInterfaceInitialization } from "./interfaces"; -import { Settings } from "./settings/settings"; -import { View } from "./view"; - -const module = ModuleRegistry.initModule("InplaceVolumetrics", { settingsToViewInterfaceInitialization }); - -module.viewFC = View; -module.settingsFC = Settings; diff --git a/frontend/src/modules/InplaceVolumetrics/queryHooks.tsx b/frontend/src/modules/InplaceVolumetrics/queryHooks.tsx deleted file mode 100644 index 7b86eace7..000000000 --- a/frontend/src/modules/InplaceVolumetrics/queryHooks.tsx +++ /dev/null @@ -1,63 +0,0 @@ -import { - Body_get_realizations_response_api, - EnsembleInfo_api, - EnsembleScalarResponse_api, - InplaceVolumetricsTableMetaData_api, -} from "@api"; -import { apiService } from "@framework/ApiService"; -import { EnsembleIdent } from "@framework/EnsembleIdent"; -import { UseQueryResult, useQuery } from "@tanstack/react-query"; - -const STALE_TIME = 60 * 1000; -const CACHE_TIME = 60 * 1000; - -export function useEnsemblesQuery(caseUuid: string | null): UseQueryResult> { - return useQuery({ - queryKey: ["getEnsembles", caseUuid], - queryFn: () => apiService.explore.getEnsembles(caseUuid ?? ""), - staleTime: STALE_TIME, - gcTime: CACHE_TIME, - enabled: caseUuid ? true : false, - }); -} - -export function useTableDescriptionsQuery( - ensemble: EnsembleIdent | null, - allowEnable: boolean -): UseQueryResult> { - return useQuery({ - queryKey: ["getTableNamesAndDescriptions", ensemble], - queryFn: () => - apiService.inplaceVolumetrics.getTableNamesAndDescriptions( - ensemble?.getCaseUuid() ?? "", - ensemble?.getEnsembleName() ?? "" - ), - staleTime: STALE_TIME, - gcTime: CACHE_TIME, - enabled: allowEnable && ensemble ? true : false, - }); -} - -export function useRealizationsResponseQuery( - caseUuid: string | null, - ensembleName: string | null, - tableName: string | null, - responseName: string | null, - requestBody: Body_get_realizations_response_api | null, - allowEnable: boolean -): UseQueryResult { - return useQuery({ - queryKey: ["getRealizationResponse", caseUuid, ensembleName, tableName, responseName, requestBody], - queryFn: () => - apiService.inplaceVolumetrics.getRealizationsResponse( - caseUuid ?? "", - ensembleName ?? "", - tableName ?? "", - responseName ?? "", - requestBody ?? {} - ), - staleTime: STALE_TIME, - gcTime: CACHE_TIME, - enabled: allowEnable && caseUuid && ensembleName && tableName && responseName ? 
true : false, - }); -} diff --git a/frontend/src/modules/InplaceVolumetrics/settings/atoms/baseAtoms.ts b/frontend/src/modules/InplaceVolumetrics/settings/atoms/baseAtoms.ts deleted file mode 100644 index deefc0d39..000000000 --- a/frontend/src/modules/InplaceVolumetrics/settings/atoms/baseAtoms.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { InplaceVolumetricsCategoricalMetaData_api } from "@api"; -import { EnsembleIdent } from "@framework/EnsembleIdent"; - -import { atom } from "jotai"; - -export const ensembleIdentAtom = atom(null); -export const tableNameAtom = atom(null); -export const responseNameAtom = atom(null); -export const categoricalOptionsAtom = atom(null); -export const categoricalFilterAtom = atom(null); -export const realizationsToIncludeAtom = atom(null); diff --git a/frontend/src/modules/InplaceVolumetrics/settings/settings.tsx b/frontend/src/modules/InplaceVolumetrics/settings/settings.tsx deleted file mode 100644 index 53fdfe45c..000000000 --- a/frontend/src/modules/InplaceVolumetrics/settings/settings.tsx +++ /dev/null @@ -1,222 +0,0 @@ -import React from "react"; - -import { InplaceVolumetricsCategoricalMetaData_api, InplaceVolumetricsTableMetaData_api } from "@api"; -import { EnsembleIdent } from "@framework/EnsembleIdent"; -import { ModuleSettingsProps } from "@framework/Module"; -import { useEnsembleSet } from "@framework/WorkbenchSession"; -import { EnsembleDropdown } from "@framework/components/EnsembleDropdown"; -import { fixupEnsembleIdent } from "@framework/utils/ensembleUiHelpers"; -import { CircularProgress } from "@lib/components/CircularProgress"; -import { Dropdown } from "@lib/components/Dropdown"; -import { Label } from "@lib/components/Label"; -import { QueryStateWrapper } from "@lib/components/QueryStateWrapper"; -import { Select } from "@lib/components/Select"; -import { UseQueryResult } from "@tanstack/react-query"; - -import { useAtom } from "jotai"; - -import { categoricalFilterAtom, ensembleIdentAtom, responseNameAtom, tableNameAtom } from "./atoms/baseAtoms"; - -import { Interfaces } from "../interfaces"; -import { useTableDescriptionsQuery } from "../queryHooks"; - -//----------------------------------------------------------------------------------------------------------- - -export enum VolumetricResponseAbbreviations { - //a bit future proff - STOIIP_OIL = "Stock tank oil initially in place (oil zone)", - GIIP_GAS = "Gas initially in place (gas zone)", - BULK_OIL = "Bulk volume (oil zone)", - BULK_GAS = "Bulk volume (gas zone)", - BULK_TOTAL = "Bulk volume (total)", - NET_OIL = "Net volume (oil zone)", - NET_GAS = "Net volume (gas zone)", - NET_TOTAL = "Net volume (total)", - // PORV_OIL = "Pore volume (oil zone)", - // PORV_GAS = "Pore volume (gas zone)", - // PORV_TOTAL = "Pore volume (total)", - PORE_OIL = "Pore volume (oil zone)", - PORE_GAS = "Pore volume (gas zone)", - PORE_TOTAL = "Pore volume (total)", - HCPV_OIL = "Hydro carbon pore volume (oil zone)", - HCPV_GAS = "Hydro carbon pore volume (gas zone)", - HCPV_TOTAL = "Hydro carbon pore volume (total zone)", - STOIIP_GAS = "Stock tank oil initially in place (gas zone)", - STOIIP_TOTAL = "Stock tank oil initially in place (total)", - GIIP_OIL = "Gas initially in place (oil zone)", - GIIP_TOTAL = "Gas initially in place (total)", - RECOVERABLE_OIL = "Recoverable volume (oil zone)", - RECOVERABLE_GAS = "Recoverable volume (gas zone)", - RECOVERABLE_TOTAL = "Recoverable volume (total)", - BULK = "Bulk volume", - NET = "Net volume", - PORV = "Pore volume", - HCPV = "Hydro carbon pore 
volume", - STOIIP = "Stock tank oil initially in place", - GIIP = "Gas initially in place", - RECOVERABLE = "Recoverable volume", - ASSOCIATEDGAS = "Associated gas", - ASSOCIATEDOIL = "Associated oil", - PORO = "Porosity", - SW = "Water saturation", - NTG = "Net to gross", - BO = "Oil formation volume factor", - BG = "Gas formation volume factor", -} -function sortedResponses(responses: string[]): string[] { - return Object.keys(VolumetricResponseAbbreviations).filter((response) => responses.includes(response)); -} -function responsesToSelectOptions(responses: string[]): { value: string; label: string }[] { - return ( - responses.map((response: string) => ({ - value: response, - label: VolumetricResponseAbbreviations[response as keyof typeof VolumetricResponseAbbreviations], - })) ?? [] - ); -} -function getTableNameOptions( - tableDescriptionsQuery: UseQueryResult -): { value: string; label: string }[] { - return ( - tableDescriptionsQuery.data?.map((table: InplaceVolumetricsTableMetaData_api) => ({ - value: table.name, - label: table.name, - })) ?? [] - ); -} -function getTableCategoricalOptions( - tableDescriptionsQuery: UseQueryResult, - tableName: string | null -): InplaceVolumetricsCategoricalMetaData_api[] { - const tableDescription = tableDescriptionsQuery.data?.find((table) => table.name === tableName); - return tableDescription?.categorical_column_metadata ?? []; -} -function getTableResponseOptions( - tableDescriptionsQuery: UseQueryResult, - tableName: string | null -): { value: string; label: string }[] { - const tableDescription = tableDescriptionsQuery.data?.find((table) => table.name === tableName); - const responses = sortedResponses(tableDescription?.numerical_column_names ?? []); - return responsesToSelectOptions(responses); -} - -export function Settings({ workbenchSession }: ModuleSettingsProps) { - const ensembleSet = useEnsembleSet(workbenchSession); - const [ensembleIdent, setEnsembleIdent] = useAtom(ensembleIdentAtom); - const [tableName, setTableName] = useAtom(tableNameAtom); - const [categoricalFilter, setCategoricalFilter] = useAtom(categoricalFilterAtom); - const [responseName, setResponseName] = useAtom(responseNameAtom); - - const tableDescriptionsQuery = useTableDescriptionsQuery(ensembleIdent, true); - - React.useEffect( - function selectDefaultEnsemble() { - const fixedEnsembleIdent = fixupEnsembleIdent(ensembleIdent, ensembleSet); - if (fixedEnsembleIdent !== ensembleIdent) { - setEnsembleIdent(fixedEnsembleIdent); - } - }, - [ensembleSet, ensembleIdent, setEnsembleIdent] - ); - - React.useEffect( - function selectDefaultTable() { - if (tableDescriptionsQuery.data) { - setTableName(tableDescriptionsQuery.data[0].name); - const responses = tableDescriptionsQuery.data[0].numerical_column_names; - setResponseName(sortedResponses(responses)[0]); - } else { - setTableName(null); - setResponseName(null); - } - }, - [tableDescriptionsQuery.data, setTableName, setResponseName] - ); - - function handleEnsembleSelectionChange(newEnsembleIdent: EnsembleIdent | null) { - setEnsembleIdent(newEnsembleIdent); - } - function handleTableChange(tableName: string) { - setTableName(tableName); - } - function handleResponseChange(responseName: string) { - setResponseName(responseName); - } - - const handleSelectionChange = React.useCallback( - function handleSelectionChange(categoryName: string, categoryValues: string[]) { - let currentCategoryFilter = categoricalFilter; - if (currentCategoryFilter) { - const categoryIndex = currentCategoryFilter.findIndex((category) => 
category.name === categoryName); - if (categoryIndex > -1) { - currentCategoryFilter[categoryIndex].unique_values = categoryValues; - } else { - currentCategoryFilter.push({ name: categoryName, unique_values: categoryValues }); - } - } else { - currentCategoryFilter = []; - currentCategoryFilter.push({ name: categoryName, unique_values: categoryValues }); - } - - setCategoricalFilter(currentCategoryFilter); - }, - [categoricalFilter, setCategoricalFilter] - ); - - const tableNameOptions = getTableNameOptions(tableDescriptionsQuery); - const tableCategoricalOptions = getTableCategoricalOptions(tableDescriptionsQuery, tableName); - const responseOptions = getTableResponseOptions(tableDescriptionsQuery, tableName); - - return ( - <> - - } - errorComponent={"Could not load table descriptions"} - className="flex flex-col gap-4" - > - - -
Filters
- {tableCategoricalOptions?.map((category) => { - return ( - + +
+ + ); + + return ( + + ); +} diff --git a/frontend/src/modules/InplaceVolumetricsTable/view/atoms/baseAtoms.ts b/frontend/src/modules/InplaceVolumetricsTable/view/atoms/baseAtoms.ts new file mode 100644 index 000000000..b9ca507ec --- /dev/null +++ b/frontend/src/modules/InplaceVolumetricsTable/view/atoms/baseAtoms.ts @@ -0,0 +1,20 @@ +import { InplaceVolumetricResultName_api, InplaceVolumetricStatistic_api } from "@api"; +import { InplaceVolumetricsFilter } from "@framework/types/inplaceVolumetricsFilter"; +import { SourceAndTableIdentifierUnion, SourceIdentifier, TableType } from "@modules/_shared/InplaceVolumetrics/types"; + +import { atom } from "jotai"; + +export const filterAtom = atom({ + ensembleIdents: [], + tableNames: [], + fluidZones: [], + identifiersValues: [], +}); +export const areSelectedTablesComparableAtom = atom(false); +export const resultNamesAtom = atom([]); +export const accumulationOptionsAtom = atom< + Omit[] +>([]); +export const tableTypeAtom = atom(TableType.STATISTICAL); +export const statisticOptionsAtom = atom([]); +export const areTableDefinitionSelectionsValidAtom = atom(false); diff --git a/frontend/src/modules/InplaceVolumetricsTable/view/atoms/derivedAtoms.ts b/frontend/src/modules/InplaceVolumetricsTable/view/atoms/derivedAtoms.ts new file mode 100644 index 000000000..094838ab0 --- /dev/null +++ b/frontend/src/modules/InplaceVolumetricsTable/view/atoms/derivedAtoms.ts @@ -0,0 +1,83 @@ +import { InplaceVolumetricsIdentifier_api } from "@api"; +import { ValidEnsembleRealizationsFunctionAtom } from "@framework/GlobalAtoms"; +import { EnsembleIdentWithRealizations } from "@modules/_shared/InplaceVolumetrics/queryHooks"; +import { SourceIdentifier, TableType } from "@modules/_shared/InplaceVolumetrics/types"; + +import { atom } from "jotai"; + +import { accumulationOptionsAtom, filterAtom, tableTypeAtom } from "./baseAtoms"; +import { perRealizationTableDataResultsAtom, statisticalTableDataResultsAtom } from "./queryAtoms"; + +export const tableNamesAtom = atom((get) => { + const filter = get(filterAtom); + return filter?.tableNames ?? []; +}); + +export const fluidZonesAtom = atom((get) => { + const filter = get(filterAtom); + return filter?.fluidZones ?? []; +}); + +export const identifiersValuesAtom = atom((get) => { + const filter = get(filterAtom); + return filter?.identifiersValues ?? []; +}); + +export const ensembleIdentsWithRealizationsAtom = atom((get) => { + const filter = get(filterAtom); + const ensemblIdents = filter?.ensembleIdents ?? 
[]; + const validEnsembleRealizationsFunction = get(ValidEnsembleRealizationsFunctionAtom); + + const ensembleIdentsWithRealizations: EnsembleIdentWithRealizations[] = []; + for (const ensembleIdent of ensemblIdents) { + ensembleIdentsWithRealizations.push({ + ensembleIdent, + realizations: [...validEnsembleRealizationsFunction(ensembleIdent)], + }); + } + + return ensembleIdentsWithRealizations; +}); + +export const accumulateFluidZonesAtom = atom((get) => { + const accumulationOptions = get(accumulationOptionsAtom); + + return !accumulationOptions.includes(SourceIdentifier.FLUID_ZONE); +}); + +export const groupByIdentifiersAtom = atom((get) => { + const accumulationOptions = get(accumulationOptionsAtom); + + return accumulationOptions.filter((el) => el !== SourceIdentifier.FLUID_ZONE) as InplaceVolumetricsIdentifier_api[]; +}); + +export const activeQueriesResultAtom = atom((get) => { + // Active queries result atom based on selected table type + const tableType = get(tableTypeAtom); + if (tableType === TableType.PER_REALIZATION) { + return get(perRealizationTableDataResultsAtom); + } + if (tableType === TableType.STATISTICAL) { + return get(statisticalTableDataResultsAtom); + } + throw new Error(`Unsupported table type: ${tableType}`); +}); + +export const isQueryFetchingAtom = atom((get) => { + const activeQueriesResult = get(activeQueriesResultAtom); + return activeQueriesResult.isFetching; +}); + +export const hasAllQueriesFailedAtom = atom((get) => { + const tableType = get(tableTypeAtom); + const perRealizationTableDataResults = get(perRealizationTableDataResultsAtom); + const statisticalTableDataResults = get(statisticalTableDataResultsAtom); + + if (tableType === TableType.PER_REALIZATION) { + return perRealizationTableDataResults.allQueriesFailed; + } + if (tableType === TableType.STATISTICAL) { + return statisticalTableDataResults.allQueriesFailed; + } + return false; +}); diff --git a/frontend/src/modules/InplaceVolumetricsTable/view/atoms/interfaceEffects.ts b/frontend/src/modules/InplaceVolumetricsTable/view/atoms/interfaceEffects.ts new file mode 100644 index 000000000..3be1ab557 --- /dev/null +++ b/frontend/src/modules/InplaceVolumetricsTable/view/atoms/interfaceEffects.ts @@ -0,0 +1,43 @@ +import { InterfaceEffects } from "@framework/Module"; +import { SettingsToViewInterface } from "@modules/InplaceVolumetricsTable/interfaces"; + +import { + accumulationOptionsAtom, + areSelectedTablesComparableAtom, + areTableDefinitionSelectionsValidAtom, + filterAtom, + resultNamesAtom, + statisticOptionsAtom, + tableTypeAtom, +} from "./baseAtoms"; + +export const settingsToViewInterfaceEffects: InterfaceEffects = [ + (getInterfaceValue, setAtomValue) => { + const filter = getInterfaceValue("filter"); + setAtomValue(filterAtom, filter); + }, + (getInterfaceValue, setAtomValue) => { + const areSelectedTablesComparable = getInterfaceValue("areSelectedTablesComparable"); + setAtomValue(areSelectedTablesComparableAtom, areSelectedTablesComparable); + }, + (getInterfaceValue, setAtomValue) => { + const resultNames = getInterfaceValue("resultNames"); + setAtomValue(resultNamesAtom, resultNames); + }, + (getInterfaceValue, setAtomValue) => { + const accumulationOptions = getInterfaceValue("accumulationOptions"); + setAtomValue(accumulationOptionsAtom, accumulationOptions); + }, + (getInterfaceValue, setAtomValue) => { + const tableType = getInterfaceValue("tableType"); + setAtomValue(tableTypeAtom, tableType); + }, + (getInterfaceValue, setAtomValue) => { + const statisticOptions = 
getInterfaceValue("statisticOptions"); + setAtomValue(statisticOptionsAtom, statisticOptions); + }, + (getInterfaceValue, setAtomValue) => { + const areTableDefinitionSelectionsValid = getInterfaceValue("areTableDefinitionSelectionsValid"); + setAtomValue(areTableDefinitionSelectionsValidAtom, areTableDefinitionSelectionsValid); + }, +]; diff --git a/frontend/src/modules/InplaceVolumetricsTable/view/atoms/queryAtoms.ts b/frontend/src/modules/InplaceVolumetricsTable/view/atoms/queryAtoms.ts new file mode 100644 index 000000000..f60222b02 --- /dev/null +++ b/frontend/src/modules/InplaceVolumetricsTable/view/atoms/queryAtoms.ts @@ -0,0 +1,77 @@ +import { atomWithQueries } from "@framework/utils/atomUtils"; +import { + useGetAggregatedPerRealizationTableDataQueries, + useGetAggregatedStatisticalTableDataQueries, +} from "@modules/_shared/InplaceVolumetrics/queryHooks"; +import { TableType } from "@modules/_shared/InplaceVolumetrics/types"; + +import { + areSelectedTablesComparableAtom, + areTableDefinitionSelectionsValidAtom, + resultNamesAtom, + tableTypeAtom, +} from "./baseAtoms"; +import { + accumulateFluidZonesAtom, + ensembleIdentsWithRealizationsAtom, + fluidZonesAtom, + groupByIdentifiersAtom, + identifiersValuesAtom, + tableNamesAtom, +} from "./derivedAtoms"; + +export const perRealizationTableDataResultsAtom = atomWithQueries((get) => { + const resultNames = get(resultNamesAtom); + const tableType = get(tableTypeAtom); + + const accumulateFluidZones = get(accumulateFluidZonesAtom); + const groupByIdentifiers = get(groupByIdentifiersAtom); + const tableNames = get(tableNamesAtom); + const fluidZones = get(fluidZonesAtom); + const identifiersValues = get(identifiersValuesAtom); + const ensembleIdentsWithRealizations = get(ensembleIdentsWithRealizationsAtom); + const areSelectedTablesComparable = get(areSelectedTablesComparableAtom); + const areTableDefinitionSelectionsValid = get(areTableDefinitionSelectionsValidAtom); + + const enableQueries = + tableType === TableType.PER_REALIZATION && areSelectedTablesComparable && areTableDefinitionSelectionsValid; + + return useGetAggregatedPerRealizationTableDataQueries( + ensembleIdentsWithRealizations, + tableNames, + resultNames, + fluidZones, + groupByIdentifiers, + accumulateFluidZones, + identifiersValues, + enableQueries + ); +}); + +export const statisticalTableDataResultsAtom = atomWithQueries((get) => { + const resultNames = get(resultNamesAtom); + const tableType = get(tableTypeAtom); + + const accumulateFluidZones = get(accumulateFluidZonesAtom); + const groupByIdentifiers = get(groupByIdentifiersAtom); + const tableNames = get(tableNamesAtom); + const fluidZones = get(fluidZonesAtom); + const identifiersValues = get(identifiersValuesAtom); + const ensembleIdentsWithRealizations = get(ensembleIdentsWithRealizationsAtom); + const areSelectedTablesComparable = get(areSelectedTablesComparableAtom); + const areTableDefinitionSelectionsValid = get(areTableDefinitionSelectionsValidAtom); + + const enableQueries = + tableType === TableType.STATISTICAL && areSelectedTablesComparable && areTableDefinitionSelectionsValid; + + return useGetAggregatedStatisticalTableDataQueries( + ensembleIdentsWithRealizations, + tableNames, + resultNames, + fluidZones, + groupByIdentifiers, + accumulateFluidZones, + identifiersValues, + enableQueries + ); +}); diff --git a/frontend/src/modules/InplaceVolumetricsTable/view/hooks/useMakeViewStatusWriterMessages.ts b/frontend/src/modules/InplaceVolumetricsTable/view/hooks/useMakeViewStatusWriterMessages.ts new 
file mode 100644 index 000000000..fd9438c5c --- /dev/null +++ b/frontend/src/modules/InplaceVolumetricsTable/view/hooks/useMakeViewStatusWriterMessages.ts @@ -0,0 +1,74 @@ +import { InplaceStatisticalVolumetricTableData_api, InplaceVolumetricTableData_api } from "@api"; +import { ViewStatusWriter } from "@framework/StatusWriter"; +import { ApiErrorHelper } from "@framework/utils/ApiErrorHelper"; + +import { useAtomValue } from "jotai"; + +import { resultNamesAtom } from "../atoms/baseAtoms"; +import { activeQueriesResultAtom, identifiersValuesAtom } from "../atoms/derivedAtoms"; + +// Type guard for InplaceVolumetricTableData +function isInplaceVolumetricTableData( + obj: InplaceVolumetricTableData_api | InplaceStatisticalVolumetricTableData_api +): obj is InplaceVolumetricTableData_api { + return obj && typeof obj === "object" && "resultColumns" in obj; +} + +// Type guard for InplaceStatisticalVolumetricTableData +function isInplaceStatisticalVolumetricTableData( + obj: InplaceVolumetricTableData_api | InplaceStatisticalVolumetricTableData_api +): obj is InplaceStatisticalVolumetricTableData_api { + return obj && typeof obj === "object" && "resultColumnStatistics" in obj; +} + +export function useMakeViewStatusWriterMessages(statusWriter: ViewStatusWriter) { + const activeQueriesResult = useAtomValue(activeQueriesResultAtom); + const identifiersValues = useAtomValue(identifiersValuesAtom); + const resultNames = useAtomValue(resultNamesAtom); + + const errors = activeQueriesResult.errors; + + for (const error of errors) { + const helper = ApiErrorHelper.fromError(error); + if (helper) { + statusWriter.addError(helper.makeStatusMessage()); + } + } + + for (const elm of identifiersValues) { + if (elm.values.length === 0) { + statusWriter.addWarning(`Select at least one filter value for ${elm.identifier.valueOf()}`); + } + } + + // Due to no throw in back-end for missing/non-existing result for specific tables, we should compare + // the retrieved result columns with the requested columns + for (const tableData of activeQueriesResult.tablesData) { + // Per unique volumetric table (EnsembleIdent, tableName) we have a query result + const queryData = tableData.data; + + // Result columns across all fluid selections + const tableResultColumnsUnion = new Set(); + for (const fluidSelectionTable of queryData.tableDataPerFluidSelection) { + if (isInplaceVolumetricTableData(fluidSelectionTable)) { + fluidSelectionTable.resultColumns.forEach((col) => tableResultColumnsUnion.add(col.columnName)); + } + if (isInplaceStatisticalVolumetricTableData(fluidSelectionTable)) { + fluidSelectionTable.resultColumnStatistics.forEach((col) => + tableResultColumnsUnion.add(col.columnName) + ); + } + } + + // Find result name missing in the result columns + const missingResultNames = resultNames.filter((result) => !tableResultColumnsUnion.has(result)); + + // List missing result names for specific table. 
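// Aside (illustrative, not part of this patch): the missing-result check above in miniature.
// Requested result names that appear in none of the fluid-selection tables become a warning;
// the names below are made up for illustration.
const requestedResults = ["STOIIP", "GIIP", "BULK"];
const retrievedColumns = new Set<string>(["STOIIP", "BULK"]);
const missingResults = requestedResults.filter((name) => !retrievedColumns.has(name)); // -> ["GIIP"]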
Note fluid selection is not considered here, as + // the result columns will be visible in the table component if they are present in any of the fluid selections + if (missingResultNames.length > 0) { + statusWriter.addWarning( + `Missing result names for Table "${tableData.tableName}": ${missingResultNames.join(", ")}` + ); + } + } +} diff --git a/frontend/src/modules/InplaceVolumetricsTable/view/hooks/useTableBuilder.ts b/frontend/src/modules/InplaceVolumetricsTable/view/hooks/useTableBuilder.ts new file mode 100644 index 000000000..de19f2b68 --- /dev/null +++ b/frontend/src/modules/InplaceVolumetricsTable/view/hooks/useTableBuilder.ts @@ -0,0 +1,46 @@ +import { EnsembleSet } from "@framework/EnsembleSet"; +import { TableHeading, TableRow } from "@lib/components/Table/table"; +import { TableType } from "@modules/_shared/InplaceVolumetrics/types"; + +import { useAtomValue } from "jotai"; + +import { statisticOptionsAtom, tableTypeAtom } from "../atoms/baseAtoms"; +import { perRealizationTableDataResultsAtom, statisticalTableDataResultsAtom } from "../atoms/queryAtoms"; +import { + createStatisticalTableHeadingsAndRowsFromTablesData, + createTableHeadingsAndRowsFromTablesData, +} from "../utils/tableComponentUtils"; + +export function useTableBuilder(ensembleSet: EnsembleSet): { headings: TableHeading; tableRows: TableRow[] } { + let headings: TableHeading = {}; + let tableRows: TableRow[] = []; + + const tableType = useAtomValue(tableTypeAtom); + const statisticOptions = useAtomValue(statisticOptionsAtom); + const perRealizationTableDataResults = useAtomValue(perRealizationTableDataResultsAtom); + const statisticalTableDataResults = useAtomValue(statisticalTableDataResultsAtom); + + if (tableType === TableType.PER_REALIZATION) { + const tableHeadingsAndRows = createTableHeadingsAndRowsFromTablesData( + perRealizationTableDataResults.tablesData, + ensembleSet + ); + headings = tableHeadingsAndRows.headings; + tableRows = tableHeadingsAndRows.rows; + + return { headings, tableRows }; + } else if (tableType === TableType.STATISTICAL) { + const tableHeadingsAndRows = createStatisticalTableHeadingsAndRowsFromTablesData( + statisticalTableDataResults.tablesData, + statisticOptions, + ensembleSet + ); + + headings = tableHeadingsAndRows.headings; + tableRows = tableHeadingsAndRows.rows; + + return { headings, tableRows }; + } + + throw new Error("Not able to build table - Table type not supported"); +} diff --git a/frontend/src/modules/InplaceVolumetricsTable/view/utils/tableComponentUtils.ts b/frontend/src/modules/InplaceVolumetricsTable/view/utils/tableComponentUtils.ts new file mode 100644 index 000000000..583afeb51 --- /dev/null +++ b/frontend/src/modules/InplaceVolumetricsTable/view/utils/tableComponentUtils.ts @@ -0,0 +1,224 @@ +import { FluidZone_api, InplaceVolumetricStatistic_api } from "@api"; +import { EnsembleIdent } from "@framework/EnsembleIdent"; +import { EnsembleSet } from "@framework/EnsembleSet"; +import { TableHeading, TableRow } from "@lib/components/Table/table"; +import { Column, ColumnType, Row, Table } from "@modules/_shared/InplaceVolumetrics/Table"; +import { sortResultNameStrings } from "@modules/_shared/InplaceVolumetrics/sortResultNames"; +import { + makeStatisticalTableColumnDataFromApiData, + makeTableFromApiData, +} from "@modules/_shared/InplaceVolumetrics/tableUtils"; +import { + InplaceVolumetricsStatisticalTableData, + InplaceVolumetricsTableData, +} from "@modules/_shared/InplaceVolumetrics/types"; +import { createHoverTextForVolume } from 
"@modules/_shared/InplaceVolumetrics/volumetricStringUtils"; +import { makeDistinguishableEnsembleDisplayName } from "@modules/_shared/ensembleNameUtils"; +import { createScaledNumberWithSuffix } from "@modules/_shared/utils/numberSuffixFormatting"; + +export function createTableHeadingsAndRowsFromTablesData( + tablesData: InplaceVolumetricsTableData[], + ensembleSet: EnsembleSet +): { + headings: TableHeading; + rows: TableRow[]; +} { + const tableHeadings: TableHeading = {}; + const tableRows: TableRow[] = []; + + const dataTable = makeTableFromApiData(tablesData); + for (const column of dataTable.getColumns()) { + tableHeadings[column.getName()] = { + label: column.getName(), + hoverText: createHoverTextForVolume(column.getName()), + sizeInPercent: 100 / dataTable.getNumColumns(), + formatValue: makeValueFormattingFunc(column, ensembleSet), + formatStyle: makeStyleFormattingFunc(column), + }; + } + + for (const row of dataTable.getRows()) { + tableRows.push(row); + } + + return { headings: tableHeadings, rows: tableRows }; +} + +export function createStatisticalTableHeadingsAndRowsFromTablesData( + tablesData: InplaceVolumetricsStatisticalTableData[], + statisticOptions: InplaceVolumetricStatistic_api[], + ensembleSet: EnsembleSet +): { + headings: TableHeading; + rows: TableRow[]; +} { + const tableHeadings: TableHeading = {}; + const tableRows: TableRow[] = []; + + const columnData = makeStatisticalTableColumnDataFromApiData(tablesData, statisticOptions); + + const nonStatisticalColumns = columnData.nonStatisticalColumns; + const resultStatisticalColumns = columnData.resultStatisticalColumns; + + const numNonStatisticalColumns = nonStatisticalColumns.length; + const numStatisticalResultColumns = resultStatisticalColumns.size; + const numStatisticOptions = statisticOptions.length; + + // Give non-statistical columns a total width of 40% + const nonStatisticalColumnSizePercentage = 40; + const statisticalColumnSizePercentage = 100 - nonStatisticalColumnSizePercentage; + + // Headings for non-statistical columns + for (const column of nonStatisticalColumns) { + tableHeadings[column.getName()] = { + label: column.getName(), + sizeInPercent: nonStatisticalColumnSizePercentage / numNonStatisticalColumns, + formatValue: makeValueFormattingFunc(column, ensembleSet), + formatStyle: makeStyleFormattingFunc(column), + }; + } + + // Initialize rows using non-statistical columns + const rows: Row[] = []; + const nonStatisticalColumnsTable = new Table(nonStatisticalColumns); + for (const row of nonStatisticalColumnsTable.getRows()) { + rows.push(row); + } + + const numberOfRows = rows.length; + + // Headings and row data for result statistical columns + const sortedResultNames = sortResultNameStrings(Array.from(resultStatisticalColumns.keys())); + for (const resultName of sortedResultNames) { + const statisticalColumns = resultStatisticalColumns.get(resultName); + if (!statisticalColumns) { + throw new Error(`Statistical columns for result ${resultName} not found.`); + } + + // Create table object for easier access to columns and rows + const resultStatisticalTable = new Table(Object.values(statisticalColumns)); + + const resultHoverText = createHoverTextForVolume(resultName); + + const subHeading: TableHeading = {}; + resultStatisticalTable.getColumns().forEach((column) => { + const columnSize = 100 / numStatisticOptions; // Size relative to parent heading (i.e. 
resultName) + const columnId = `${resultName}-${column.getName()}`; + subHeading[columnId] = { + label: column.getName(), + hoverText: `${column.getName()} - ${resultHoverText}`, + sizeInPercent: columnSize, + formatValue: makeValueFormattingFunc(column, ensembleSet), + formatStyle: makeStyleFormattingFunc(column), + }; + }); + + tableHeadings[resultName] = { + label: resultName, + hoverText: resultHoverText, + sizeInPercent: statisticalColumnSizePercentage / numStatisticalResultColumns, + subHeading: subHeading, + }; + + if (numberOfRows !== resultStatisticalTable.getNumRows()) { + throw new Error( + "Number of rows in statistical table does not match the number of rows in the non-statistical table." + ); + } + + for (let i = 0; i < numberOfRows; i++) { + const statisticalRow = resultStatisticalTable.getRow(i); + + // Add resultName as prefix to column names + for (const column of resultStatisticalTable.getColumns()) { + const columnId = `${resultName}-${column.getName()}`; + rows[i][columnId] = statisticalRow[column.getName()]; + } + } + } + + // Add rows to tableRows + for (const row of rows) { + tableRows.push(row); + } + + return { headings: tableHeadings, rows: tableRows }; +} + +function makeStyleFormattingFunc(column: Column): ((value: number | string | null) => React.CSSProperties) | undefined { + if (column.getType() === ColumnType.FLUID_ZONE) { + return (value: number | string | null) => { + const style: React.CSSProperties = { textAlign: "right", fontWeight: "bold" }; + + if (value === FluidZone_api.OIL) { + style.color = "#0b8511"; + } + if (value === FluidZone_api.WATER) { + style.color = "#0c24ab"; + } + if (value === FluidZone_api.GAS) { + style.color = "#ab110c"; + } + + return style; + }; + } + + if (column.getType() === ColumnType.ENSEMBLE) { + return undefined; + } + + return () => ({ textAlign: "right" }); +} + +function makeValueFormattingFunc( + column: Column, + ensembleSet: EnsembleSet +): ((value: number | string | null) => string) | undefined { + if (column.getType() === ColumnType.ENSEMBLE) { + return (value: number | string | null) => formatEnsembleIdent(value, ensembleSet); + } + if (column.getType() === ColumnType.RESULT) { + return formatResultValue; + } + + return undefined; +} + +function formatEnsembleIdent(value: string | number | null, ensembleSet: EnsembleSet): string { + if (value === null) { + return "-"; + } + const ensemble = ensembleSet.findEnsembleByIdentString(value.toString()); + if (ensemble) { + return makeDistinguishableEnsembleDisplayName( + EnsembleIdent.fromString(value.toString()), + ensembleSet.getEnsembleArr() + ); + } + return value.toString(); +} + +function formatResultValue(value: string | number | null): string { + // If properties cannot be calculated, + // e.g. 
due to a 0 denominator, the value returned from backend will be null + if (value === null) { + return "-"; + } + + if (typeof value === "string") { + return value; + } + + const { scaledValue, suffix } = createScaledNumberWithSuffix(value); + + // Determine the number of decimal places based on the value's magnitude + let decimalPlaces = 2; + if (Math.abs(scaledValue) < 0.01) { + decimalPlaces = 4; + } else if (Math.abs(scaledValue) < 0.1) { + decimalPlaces = 3; + } + + return `${scaledValue.toFixed(decimalPlaces)} ${suffix}`; +} diff --git a/frontend/src/modules/InplaceVolumetricsTable/view/view.tsx b/frontend/src/modules/InplaceVolumetricsTable/view/view.tsx new file mode 100644 index 000000000..de71c7096 --- /dev/null +++ b/frontend/src/modules/InplaceVolumetricsTable/view/view.tsx @@ -0,0 +1,99 @@ +import React from "react"; + +import { InplaceVolumetricsIdentifier_api } from "@api"; +import { ModuleViewProps } from "@framework/Module"; +import { useViewStatusWriter } from "@framework/StatusWriter"; +import { useEnsembleSet } from "@framework/WorkbenchSession"; +import { PendingWrapper } from "@lib/components/PendingWrapper"; +import { Table as TableComponent } from "@lib/components/Table"; +import { TableHeading, TableRow } from "@lib/components/Table/table"; +import { useElementBoundingRect } from "@lib/hooks/useElementBoundingRect"; + +import { useAtomValue } from "jotai"; + +import { areSelectedTablesComparableAtom } from "./atoms/baseAtoms"; +import { hasAllQueriesFailedAtom as haveAllQueriesFailedAtom, isQueryFetchingAtom } from "./atoms/derivedAtoms"; +import { useMakeViewStatusWriterMessages } from "./hooks/useMakeViewStatusWriterMessages"; +import { useTableBuilder } from "./hooks/useTableBuilder"; + +import { Interfaces } from "../interfaces"; + +export function View(props: ModuleViewProps): React.ReactNode { + const ensembleSet = useEnsembleSet(props.workbenchSession); + const statusWriter = useViewStatusWriter(props.viewContext); + + const divRef = React.useRef(null); + const divBoundingRect = useElementBoundingRect(divRef); + + const haveAllQueriesFailed = useAtomValue(haveAllQueriesFailedAtom); + const isQueryFetching = useAtomValue(isQueryFetchingAtom); + const areSelectedTablesComparable = useAtomValue(areSelectedTablesComparableAtom); + + useMakeViewStatusWriterMessages(statusWriter); + statusWriter.setLoading(isQueryFetching); + + // Build table headings and rows + const { headings, tableRows } = useTableBuilder(ensembleSet); + + const handleTableHover = React.useCallback( + function handleTableHover(row: TableRow | null) { + if (!row) { + props.workbenchServices.publishGlobalData("global.hoverRegion", null); + props.workbenchServices.publishGlobalData("global.hoverZone", null); + props.workbenchServices.publishGlobalData("global.hoverFacies", null); + return; + } + if (Object.keys(row).includes(InplaceVolumetricsIdentifier_api.REGION)) { + const regionName = row[InplaceVolumetricsIdentifier_api.REGION]?.toString(); + if (regionName) { + props.workbenchServices.publishGlobalData("global.hoverRegion", { regionName }); + } + } + + if (Object.keys(row).includes(InplaceVolumetricsIdentifier_api.ZONE)) { + const zoneName = row[InplaceVolumetricsIdentifier_api.ZONE]?.toString(); + if (zoneName) { + props.workbenchServices.publishGlobalData("global.hoverZone", { zoneName }); + } + } + + if (Object.keys(row).includes(InplaceVolumetricsIdentifier_api.FACIES)) { + const faciesName = row[InplaceVolumetricsIdentifier_api.FACIES]?.toString(); + if (faciesName) { + 
props.workbenchServices.publishGlobalData("global.hoverFacies", { faciesName }); + } + } + }, + [props.workbenchServices] + ); + + function createErrorMessage(): string | null { + if (haveAllQueriesFailed) { + return "Failed to load volumetric table data"; + } + if (!areSelectedTablesComparable) { + return "Selected volumetric tables are not comparable"; + } + + return null; + } + + // If a user selects a single table first and initiates a fetch but then selects a set of tables that are not comparable, + // we don't want to show that the module is pending, but rather immediately show the error message that the tables are not comparable. + // The query is still fetching, but we don't want to show the pending state. + const isPending = isQueryFetching && areSelectedTablesComparable; + + return ( +
+ + + +
+ ); +} diff --git a/frontend/src/modules/MyModule2/view.tsx b/frontend/src/modules/MyModule2/view.tsx index 24145ea06..bed62e741 100644 --- a/frontend/src/modules/MyModule2/view.tsx +++ b/frontend/src/modules/MyModule2/view.tsx @@ -1,20 +1,43 @@ -import { ModuleViewProps } from "@framework/Module"; -import { Label } from "@lib/components/Label"; +import { Table, TableHeading, TableRow } from "@lib/components/Table/table"; -import { Interfaces } from "./interfaces"; +export const View = () => { + const heading: TableHeading = { + col1: { + label: "Column 1", + sizeInPercent: 60, + subHeading: { + "col1.1": { + label: "Column 1.1", + sizeInPercent: 20, + }, + "col1.2": { + label: "Column 1.2", + sizeInPercent: 80, + }, + }, + }, + col2: { + label: "Column 2", + sizeInPercent: 40, + }, + }; -export const View = (props: ModuleViewProps) => { - const text = props.viewContext.useSettingsToViewInterfaceValue("text"); - const derivedText = props.viewContext.useSettingsToViewInterfaceValue("derivedText"); + const data: TableRow[] = [ + { + "col1.1": "Row 1, Column 1.1", + "col1.2": "Row 1, Column 1.2", + col2: "Row 1, Column 2", + }, + { + "col1.1": "Row 2, Column 1.1", + "col1.2": "Row 2, Column 1.2", + col2: "Row 2, Column 2", + }, + ]; return ( -
- - +
+ ); }; diff --git a/frontend/src/modules/SimulationTimeSeries/view/atoms/derivedAtoms.ts b/frontend/src/modules/SimulationTimeSeries/view/atoms/derivedAtoms.ts index 6290fd739..a74605348 100644 --- a/frontend/src/modules/SimulationTimeSeries/view/atoms/derivedAtoms.ts +++ b/frontend/src/modules/SimulationTimeSeries/view/atoms/derivedAtoms.ts @@ -1,5 +1,3 @@ -import { EnsembleIdent } from "@framework/EnsembleIdent"; -import { EnsembleRealizationFilterFunctionAtom, EnsembleSetAtom } from "@framework/GlobalAtoms"; import { VisualizationMode } from "@modules/SimulationTimeSeries/typesAndEnums"; import { atom } from "jotai"; @@ -21,19 +19,6 @@ import { import { createLoadedVectorSpecificationAndDataArray } from "../utils/vectorSpecificationsAndQueriesUtils"; -export const validEnsembleRealizationsFunctionAtom = atom((get) => { - const ensembleSet = get(EnsembleSetAtom); - let validEnsembleRealizationsFunction = get(EnsembleRealizationFilterFunctionAtom); - - if (validEnsembleRealizationsFunction === null) { - validEnsembleRealizationsFunction = (ensembleIdent: EnsembleIdent) => { - return ensembleSet.findEnsemble(ensembleIdent)?.getRealizations() ?? []; - }; - } - - return validEnsembleRealizationsFunction; -}); - export const queryIsFetchingAtom = atom((get) => { const vectorDataQueries = get(vectorDataQueriesAtom); const vectorStatisticsQueries = get(vectorStatisticsQueriesAtom); diff --git a/frontend/src/modules/SimulationTimeSeries/view/atoms/queryAtoms.ts b/frontend/src/modules/SimulationTimeSeries/view/atoms/queryAtoms.ts index 13b4ebe29..bb57272dd 100644 --- a/frontend/src/modules/SimulationTimeSeries/view/atoms/queryAtoms.ts +++ b/frontend/src/modules/SimulationTimeSeries/view/atoms/queryAtoms.ts @@ -1,5 +1,6 @@ import { Frequency_api, Observations_api } from "@api"; import { apiService } from "@framework/ApiService"; +import { ValidEnsembleRealizationsFunctionAtom } from "@framework/GlobalAtoms"; import { atomWithQueries } from "@framework/utils/atomUtils"; import { EnsembleVectorObservationDataMap, VisualizationMode } from "@modules/SimulationTimeSeries/typesAndEnums"; import { QueryObserverResult } from "@tanstack/react-query"; @@ -10,7 +11,6 @@ import { vectorSpecificationsAtom, visualizationModeAtom, } from "./baseAtoms"; -import { validEnsembleRealizationsFunctionAtom } from "./derivedAtoms"; const STALE_TIME = 60 * 1000; const CACHE_TIME = 60 * 1000; @@ -19,7 +19,7 @@ export const vectorDataQueriesAtom = atomWithQueries((get) => { const vectorSpecifications = get(vectorSpecificationsAtom); const resampleFrequency = get(resampleFrequencyAtom); const visualizationMode = get(visualizationModeAtom); - const validEnsembleRealizationsFunction = get(validEnsembleRealizationsFunctionAtom); + const validEnsembleRealizationsFunction = get(ValidEnsembleRealizationsFunctionAtom); const enabled = visualizationMode === VisualizationMode.INDIVIDUAL_REALIZATIONS || @@ -64,7 +64,7 @@ export const vectorStatisticsQueriesAtom = atomWithQueries((get) => { const vectorSpecifications = get(vectorSpecificationsAtom); const resampleFrequency = get(resampleFrequencyAtom); const visualizationMode = get(visualizationModeAtom); - const validEnsembleRealizationsFunction = get(validEnsembleRealizationsFunctionAtom); + const validEnsembleRealizationsFunction = get(ValidEnsembleRealizationsFunctionAtom); const enabled = visualizationMode === VisualizationMode.STATISTICAL_FANCHART || diff --git a/frontend/src/modules/SubsurfaceMap/view.tsx b/frontend/src/modules/SubsurfaceMap/view.tsx index 91ac5ec54..8e88671cc 
100644 --- a/frontend/src/modules/SubsurfaceMap/view.tsx +++ b/frontend/src/modules/SubsurfaceMap/view.tsx @@ -239,6 +239,7 @@ export function View({ bounds={viewportBounds} layers={newLayers} colorTables={colorTables} + getTooltip={undefined} views={{ layout: [1, 1], showLabel: false, diff --git a/frontend/src/modules/_shared/Figure.tsx b/frontend/src/modules/_shared/Figure.tsx index e7bdbf6ec..edec46cd9 100644 --- a/frontend/src/modules/_shared/Figure.tsx +++ b/frontend/src/modules/_shared/Figure.tsx @@ -1,7 +1,7 @@ import Plot from "react-plotly.js"; import { merge } from "lodash"; -import { Annotations, Layout, PlotData } from "plotly.js"; +import { Annotations, Layout, PlotData, Shape, XAxisName, YAxisName } from "plotly.js"; export class Figure { private _plotData: Partial[]; @@ -22,7 +22,7 @@ export class Figure { this._gridAxesMapping = gridAxesMapping ?? [[1, 1]]; } - private getAxisIndex(row: number, column: number): number { + getAxisIndex(row: number, column: number): number { if (row > this._gridAxesMapping.length || column > this._gridAxesMapping[row - 1].length) { throw new Error(`Invalid row/column index: ${row}/${column}`); } @@ -53,6 +53,64 @@ export class Figure { this._plotData.push(adjustedTrace); } + addAnnotation(annotation: Partial, row?: number, column?: number): void { + if (row === undefined) { + row = 1; + } + if (column === undefined) { + column = 1; + } + + const axisIndex = this.getAxisIndex(row, column); + + const adjustedAnnotation: Partial = { + ...annotation, + xref: `x${axisIndex}` as XAxisName, + yref: `y${axisIndex}` as YAxisName, + }; + + if (!this._plotLayout.annotations) { + this._plotLayout.annotations = []; + } + + this._plotLayout.annotations.push(adjustedAnnotation); + } + + addShape(shape: Partial, row?: number, column?: number): void { + if (row === undefined) { + row = 1; + } + if (column === undefined) { + column = 1; + } + + const axisIndex = this.getAxisIndex(row, column); + + const adjustedShape: Partial = { + ...shape, + xref: `x${axisIndex} domain` as XAxisName, + yref: `y${axisIndex} domain` as YAxisName, + }; + + if (!this._plotLayout.shapes) { + this._plotLayout.shapes = []; + } + + this._plotLayout.shapes.push(adjustedShape); + } + + getLayout(): Partial { + return this._plotLayout; + } + + getNumRows(): number { + return this._gridAxesMapping.length; + } + + getNumColumns(): number { + return this._gridAxesMapping[0].length; + } + updateLayout(patch: Partial): void { merge(this._plotLayout, patch); } @@ -228,7 +286,7 @@ export function makeSubplots(options: MakeSubplotOptions): Figure { xref: "paper", yref: "paper", x: xDomainStart + (xDomainEnd - xDomainStart) / 2, - y: yDomainEnd + 0.02, + y: yDomainEnd + (options.height ? 
20 / options.height : 0.02), text: title, showarrow: false, font: { diff --git a/frontend/src/modules/_shared/InplaceVolumetrics/PlotBuilder.tsx b/frontend/src/modules/_shared/InplaceVolumetrics/PlotBuilder.tsx new file mode 100644 index 000000000..08c94ddf0 --- /dev/null +++ b/frontend/src/modules/_shared/InplaceVolumetrics/PlotBuilder.tsx @@ -0,0 +1,212 @@ +import React from "react"; + +import { PlotData } from "plotly.js"; +import { Axis } from "plotly.js"; + +import { Table } from "./Table"; + +import { Figure, MakeSubplotOptions, makeSubplots } from "../Figure"; + +export class PlotBuilder { + private _table: Table; + private _plotFunction: (table: Table) => Partial[]; + private _formatLabelFunction: (columnName: string, label: string | number) => string = (_, value) => + value.toString(); + private _groupByColumn: string | null = null; + private _subplotByColumn: string | null = null; + private _axesOptions: { x: Partial | null; y: Partial | null } = { x: null, y: null }; + private _highlightedSubPlotNames: string[] = []; + + constructor(table: Table, plotFunction: (table: Table) => Partial[]) { + this._table = table; + this._plotFunction = plotFunction; + } + + setGroupByColumn(columnName: string): void { + if (!this._table.getColumn(columnName)) { + throw new Error(`Column not found: ${columnName}`); + } + this._groupByColumn = columnName; + } + + setSubplotByColumn(columnName: string): void { + if (!this._table.getColumn(columnName)) { + throw new Error(`Column not found: ${columnName}`); + } + this._subplotByColumn = columnName; + } + + setXAxisOptions(options: Partial): void { + this._axesOptions.x = options; + } + + setYAxisOptions(options: Partial): void { + this._axesOptions.y = options; + } + + setFormatLabelFunction(func: (columnName: string, label: string | number) => string): void { + this._formatLabelFunction = func; + } + + setHighlightedSubPlots(subPlotNames: string[]): void { + this._highlightedSubPlotNames = subPlotNames; + } + + private calcNumRowsAndCols(numTables: number): { numRows: number; numCols: number } { + if (numTables < 1) { + return { numRows: 1, numCols: 1 }; + } + + const numRows = Math.ceil(Math.sqrt(numTables)); + const numCols = Math.ceil(numTables / numRows); + return { numRows, numCols }; + } + + private updateLayout(figure: Figure) { + const numRows = figure.getNumRows(); + const numCols = figure.getNumColumns(); + + for (let row = 1; row <= numRows; row++) { + for (let col = 1; col <= numCols; col++) { + const axisIndex = figure.getAxisIndex(row, col); + const yAxisKey = `yaxis${axisIndex}`; + const xAxisKey = `xaxis${axisIndex}`; + + const oldLayout = figure.getLayout(); + + figure.updateLayout({ + // @ts-expect-error - Ignore string type of xAxisKey for oldLayout[xAxisKey] + [xAxisKey]: { ...oldLayout[xAxisKey], ...this._axesOptions.x }, + // @ts-expect-error - Ignore string type of yAxisKey for oldLayout[yAxisKey] + [yAxisKey]: { ...oldLayout[yAxisKey], ...this._axesOptions.y }, + }); + } + } + } + + build( + height: number, + width: number, + options?: Pick< + MakeSubplotOptions, + "horizontalSpacing" | "verticalSpacing" | "showGrid" | "margin" | "sharedXAxes" | "sharedYAxes" + > + ): React.ReactNode { + if (!this._groupByColumn) { + const figure = this.buildSubplots(this._table, height, width, options ?? 
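// Aside (illustrative, not part of this patch): the near-square grid rule implemented by
// calcNumRowsAndCols above, worked through for a few subplot counts.
function gridFor(numTables: number): { numRows: number; numCols: number } {
    if (numTables < 1) {
        return { numRows: 1, numCols: 1 };
    }
    const numRows = Math.ceil(Math.sqrt(numTables));
    const numCols = Math.ceil(numTables / numRows);
    return { numRows, numCols };
}
gridFor(4); // -> { numRows: 2, numCols: 2 }
gridFor(5); // -> { numRows: 3, numCols: 2 }
gridFor(7); // -> { numRows: 3, numCols: 3 }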
{}); + this.updateLayout(figure); + return figure.makePlot(); + } + + const components: React.ReactNode[] = []; + const tableCollection = this._table.splitByColumn(this._groupByColumn); + const numTables = tableCollection.getNumTables(); + const collectionMap = tableCollection.getCollectionMap(); + + for (const [key, table] of collectionMap) { + const figure = this.buildSubplots(table, height / numTables, width, options ?? {}); + this.updateLayout(figure); + const label = this._formatLabelFunction(tableCollection.getCollectedBy(), key); + components.push(
                    <div key={key.toString()}>
                        {label}
                    </div>
); + components.push(figure.makePlot()); + } + + return <>{components}; + } + + private buildSubplots( + table: Table, + height: number, + width: number, + options: Pick + ): Figure { + if (!this._subplotByColumn) { + const figure = makeSubplots({ + numRows: 1, + numCols: 1, + height, + width, + ...options, + }); + + const traces = this._plotFunction(table); + for (const trace of traces) { + figure.addTrace(trace); + } + return figure; + } + + const keepColumn = true; + const tableCollection = table.splitByColumn(this._subplotByColumn, keepColumn); + const numTables = tableCollection.getNumTables(); + const { numRows, numCols } = this.calcNumRowsAndCols(numTables); + + const tables = tableCollection.getTables(); + const keys = tableCollection.getKeys(); + + const traces: { row: number; col: number; trace: Partial }[] = []; + const subplotTitles: string[] = Array(numRows * numCols).fill(""); + + const highlightedSubplots: { row: number; col: number }[] = []; + + let legendAdded = false; + for (let row = 1; row <= numRows; row++) { + for (let col = 1; col <= numCols; col++) { + const index = (numRows - 1 - (row - 1)) * numCols + (col - 1); + if (!keys[index]) { + continue; + } + const label = this._formatLabelFunction(tableCollection.getCollectedBy(), keys[index]); + subplotTitles[(row - 1) * numCols + col - 1] = label; + + if (this._highlightedSubPlotNames.includes(keys[index].toString())) { + highlightedSubplots.push({ row, col }); + } + + const table = tables[index]; + + const plotDataArr = this._plotFunction(table); + for (const plotData of plotDataArr) { + if (legendAdded) { + plotData.showlegend = false; + } + traces.push({ row, col, trace: plotData }); + } + legendAdded = true; + } + } + + const figure = makeSubplots({ + numRows, + numCols, + height, + width, + subplotTitles, + ...options, + }); + + for (const { row, col, trace } of traces) { + figure.addTrace(trace, row, col); + } + + for (const { row, col } of highlightedSubplots) { + figure.addShape( + { + type: "rect", + line: { + color: "blue", + width: 1, + }, + x0: 0, + x1: 1, + y0: 0, + y1: 1, + }, + row, + col + ); + } + + return figure; + } +} diff --git a/frontend/src/modules/_shared/InplaceVolumetrics/Table.ts b/frontend/src/modules/_shared/InplaceVolumetrics/Table.ts new file mode 100644 index 000000000..59fb53489 --- /dev/null +++ b/frontend/src/modules/_shared/InplaceVolumetrics/Table.ts @@ -0,0 +1,195 @@ +import { EnsembleIdent } from "@framework/EnsembleIdent"; + +import { TableCollection } from "./TableCollection"; + +export enum ColumnType { + ENSEMBLE = "ensemble", + TABLE = "table", + FLUID_ZONE = "fluidZone", + REAL = "real", + IDENTIFIER = "identifier", + RESULT = "result", +} + +export class Column { + private _name: string; + private _type: ColumnType; + private _uniqueValues: TValue[] = []; + private _indices: number[] = []; + + constructor(name: string, type: ColumnType); + constructor(name: string, type: ColumnType, uniqueValues: TValue[], indices: number[]); + constructor(name: string, type: ColumnType, uniqueValues: TValue[] = [], indices: number[] = []) { + this._name = name; + this._type = type; + this._uniqueValues = uniqueValues; + this._indices = indices; + } + + getName(): string { + return this._name; + } + + getType(): ColumnType { + return this._type; + } + + getUniqueValues(): TValue[] { + return this._uniqueValues; + } + + getRowsWhere(predicate: (value: TValue) => boolean): { index: number; value: TValue }[] { + const rows: { index: number; value: TValue }[] = []; + for (let i = 0; i < 
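// Sketch of the Column storage model: values are stored once in uniqueValues, and each row is an
// index into that array (the column name and values below are illustrative).
const zoneColumn = new Column("ZONE", ColumnType.IDENTIFIER, ["A", "B"], [0, 1, 1, 0]);
zoneColumn.getNumRows(); // -> 4
zoneColumn.getRowValue(2); // -> "B", i.e. uniqueValues[indices[2]]
zoneColumn.getUniqueValues(); // -> ["A", "B"]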
this._indices.length; i++) { + const value = this._uniqueValues[this._indices[i]]; + if (predicate(value)) { + rows.push({ index: i, value }); + } + } + return rows; + } + + getNumRows(): number { + return this._indices.length; + } + + addRowValue(value: TValue): void { + const index = this._uniqueValues.indexOf(value); + if (index === -1) { + this._uniqueValues.push(value); + this._indices.push(this._uniqueValues.length - 1); + return; + } + this._indices.push(index); + } + + addRowValues(values: TValue[]): void { + for (const value of values) { + this.addRowValue(value); + } + } + + getRowValue(rowIndex: number): TValue { + if (rowIndex < 0 || rowIndex >= this._indices.length) { + throw new Error(`Invalid index: ${rowIndex}`); + } + + return this._uniqueValues[this._indices[rowIndex]]; + } + + getAllRowValues(): TValue[] { + return this._indices.map((i) => this._uniqueValues[i]); + } + + cloneEmpty(): Column { + return new Column(this._name, this._type); + } + + reduce(reduceFunc: (acc: TAcc, value: TValue) => TAcc, initialValue: TAcc): TAcc { + return this.getAllRowValues().reduce(reduceFunc, initialValue); + } +} + +export interface Row { + [columnName: string]: string | number; +} + +export class Table { + private _columns: Column[]; + + constructor(columns: Column[]) { + this._columns = columns; + this.assertColumnLengthsMatch(); + } + + private assertColumnLengthsMatch(): void { + const numRows = this._columns[0].getNumRows(); + for (const column of this._columns) { + if (column.getNumRows() !== numRows) { + throw new Error("Column lengths do not match"); + } + } + } + + getNumColumns(): number { + return this._columns.length; + } + + getNumRows(): number { + return this._columns[0].getNumRows(); + } + + getColumns(): Column[] { + return this._columns; + } + + getColumn(columnName: string): Column | undefined { + return this._columns.find((c) => c.getName() === columnName); + } + + getRows(): Row[] { + const rows: Row[] = []; + for (let i = 0; i < this.getNumRows(); i++) { + rows.push(this.getRow(i)); + } + return rows; + } + + getRow(rowIndex: number): Row { + if (rowIndex < 0 || rowIndex >= this.getNumRows()) { + throw new Error(`Invalid row index: ${rowIndex}`); + } + + const row: Row = {}; + for (const column of this._columns) { + row[column.getName()] = column.getRowValue(rowIndex); + } + + return row; + } + + filterRowsByColumn(columnName: string, predicate: (value: string | number | EnsembleIdent) => boolean): Row[] { + const columnIndex = this._columns.findIndex((column) => column.getName() === columnName); + + if (columnIndex === -1) { + throw new Error(`Column not found: ${columnName}`); + } + + const column = this._columns[columnIndex]; + const rows = column.getRowsWhere(predicate); + + return rows.map((row) => this.getRow(row.index)); + } + + splitByColumn(columnName: string, keepColumn: boolean = false): TableCollection { + const columnIndex = this._columns.findIndex((column) => column.getName() === columnName); + + if (columnIndex === -1) { + throw new Error(`Column not found: ${columnName}`); + } + + const column = this._columns[columnIndex]; + const uniqueValues = column.getUniqueValues(); + const numCols = this.getNumColumns(); + + const tables: Table[] = []; + for (const value of uniqueValues) { + const rows = this.filterRowsByColumn(columnName, (v) => v === value); + const columns: Column[] = []; + for (let i = 0; i < numCols; i++) { + if (i === columnIndex && !keepColumn) { + continue; + } + + const newColumn = this._columns[i].cloneEmpty(); + for (const row of 
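// Sketch of splitByColumn on a small hand-built table (column names and values are illustrative):
const zoneCol = new Column("ZONE", ColumnType.IDENTIFIER);
zoneCol.addRowValues(["A", "A", "B"]);
const stoiipCol = new Column("STOIIP", ColumnType.RESULT);
stoiipCol.addRowValues([10, 12, 7]);

const demoTable = new Table([zoneCol, stoiipCol]);
const perZone = demoTable.splitByColumn("ZONE");
perZone.getKeys(); // -> ["A", "B"]
perZone.getTable("A").getNumRows(); // -> 2; the ZONE column itself is dropped unless keepColumn is true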
rows) { + newColumn.addRowValue(row[newColumn.getName()]); + } + columns.push(newColumn); + } + tables.push(new Table(columns)); + } + + return new TableCollection(columnName, uniqueValues, tables); + } +} diff --git a/frontend/src/modules/_shared/InplaceVolumetrics/TableCollection.ts b/frontend/src/modules/_shared/InplaceVolumetrics/TableCollection.ts new file mode 100644 index 000000000..40ace2155 --- /dev/null +++ b/frontend/src/modules/_shared/InplaceVolumetrics/TableCollection.ts @@ -0,0 +1,49 @@ +import { Table } from "./Table"; + +export class TableCollection { + private _collectedBy: string; + private _collection: Map; + + constructor(collectedBy: string, values: (string | number)[], tables: Table[]) { + this._collectedBy = collectedBy; + this._collection = new Map(); + + if (values.length !== tables.length) { + throw new Error("Values and tables length do not match"); + } + + for (let i = 0; i < values.length; i++) { + this._collection.set(values[i], tables[i]); + } + } + + getCollectedBy(): string { + return this._collectedBy; + } + + getCollectionMap(): Map { + return this._collection; + } + + getNumTables(): number { + return this._collection.size; + } + + getKeys(): (string | number)[] { + return Array.from(this._collection.keys()); + } + + getTables(): Table[] { + return Array.from(this._collection.values()); + } + + getTable(key: string | number): Table { + const item = this._collection.get(key); + + if (!item) { + throw new Error(`Item not found for key: ${key}`); + } + + return item; + } +} diff --git a/frontend/src/modules/_shared/InplaceVolumetrics/TableDefinitionsAccessor.ts b/frontend/src/modules/_shared/InplaceVolumetrics/TableDefinitionsAccessor.ts new file mode 100644 index 000000000..b5c9ee994 --- /dev/null +++ b/frontend/src/modules/_shared/InplaceVolumetrics/TableDefinitionsAccessor.ts @@ -0,0 +1,225 @@ +import { + FluidZone_api, + InplaceVolumetricResultName_api, + InplaceVolumetricsIdentifierWithValues_api, + InplaceVolumetricsTableDefinition_api, +} from "@api"; +import { EnsembleIdent } from "@framework/EnsembleIdent"; + +import { sortResultNames } from "./sortResultNames"; + +type TableDefinitionsForEnsembleIdent = { + ensembleIdent: EnsembleIdent; + tableDefinitions: InplaceVolumetricsTableDefinition_api[]; +}; + +export function makeUniqueTableNamesIntersection( + tableDefinitionsPerEnsembleIdent: TableDefinitionsForEnsembleIdent[] +): string[] { + if (tableDefinitionsPerEnsembleIdent.length === 0) { + return []; + } + + const tableNamesIntersection: Set = new Set(); + for (const [index, tableDefinition] of tableDefinitionsPerEnsembleIdent.entries()) { + if (index === 0) { + // Initialize intersection of table names + tableDefinition.tableDefinitions.forEach((el) => tableNamesIntersection.add(el.tableName)); + continue; + } + + // If intersection is empty, there is no need to continue + if (tableNamesIntersection.size === 0) { + return []; + } + + // Update intersection of table names + const newTableNames = new Set(tableDefinition.tableDefinitions.map((el) => el.tableName)); + for (const tableName of tableNamesIntersection) { + if (!newTableNames.has(tableName)) { + tableNamesIntersection.delete(tableName); + } + } + } + + return Array.from(tableNamesIntersection); +} + +export class TableDefinitionsAccessor { + private _tableDefinitions: InplaceVolumetricsTableDefinition_api[]; + private _tableNamesFilter: string[]; + private _uniqueEnsembleIdents: EnsembleIdent[]; + private _tableNamesIntersection: string[]; + private _fluidZonesIntersection: 
FluidZone_api[] = []; + private _resultNamesIntersection: InplaceVolumetricResultName_api[] = []; + private _identifiersWithIntersectionValues: InplaceVolumetricsIdentifierWithValues_api[] = []; + private _tablesNotComparable: boolean = false; + + constructor(tableDefinitionsPerEnsembleIdent: TableDefinitionsForEnsembleIdent[], tableNamesFilter?: string[]) { + this._tableDefinitions = tableDefinitionsPerEnsembleIdent.flatMap((data) => data.tableDefinitions); + this._tableNamesFilter = tableNamesFilter ?? []; + this._uniqueEnsembleIdents = tableDefinitionsPerEnsembleIdent.map((data) => data.ensembleIdent); + this._tableNamesIntersection = makeUniqueTableNamesIntersection(tableDefinitionsPerEnsembleIdent); + this.makeIntersections(); + } + + private makeIntersections(): void { + const fluidZones: Set = new Set(); + const resultNames: Set = new Set(); + const identifiersWithValuesIntersection: InplaceVolumetricsIdentifierWithValues_api[] = []; + + let index = 0; + for (const tableDefinition of this._tableDefinitions) { + if (this._tableNamesFilter && !this._tableNamesFilter.includes(tableDefinition.tableName)) { + continue; + } + + if (index === 0) { + // Initialize sets and arrays with the first valid tableDefinition + tableDefinition.fluidZones.forEach((fluidZone) => fluidZones.add(fluidZone)); + tableDefinition.resultNames.forEach((resultName) => resultNames.add(resultName)); + + for (const identifierWithValues of tableDefinition.identifiersWithValues) { + const existingIdentifierWithValues = identifiersWithValuesIntersection.find( + (el) => el.identifier === identifierWithValues.identifier + ); + if (existingIdentifierWithValues) { + throw new Error(`Duplicate identifier ${identifierWithValues.identifier}`); + } + + identifiersWithValuesIntersection.push(identifierWithValues); + } + index++; + continue; + } + + for (const fluidZone of fluidZones) { + if (!tableDefinition.fluidZones.includes(fluidZone)) { + fluidZones.delete(fluidZone); + } + } + + for (const resultName of resultNames) { + if (!tableDefinition.resultNames.includes(resultName)) { + resultNames.delete(resultName); + } + } + + for (const identifierWithIntersectionValues of identifiersWithValuesIntersection) { + const currentIdentifierWithValues = tableDefinition.identifiersWithValues.find( + (item) => item.identifier === identifierWithIntersectionValues.identifier + ); + + if (!currentIdentifierWithValues) { + // Identifier is not present in the current tableDefinition, an intersection is not possible + this._tablesNotComparable = true; + break; + } + + // Update values of the identifier + identifierWithIntersectionValues.values = identifierWithIntersectionValues.values.filter((value) => + currentIdentifierWithValues.values.includes(value) + ); + + if (identifierWithIntersectionValues.values.length === 0) { + // Intersection is empty, an intersection is not possible + this._tablesNotComparable = true; + break; + } + } + index++; + } + + this._fluidZonesIntersection = Array.from(fluidZones).sort(); + this._resultNamesIntersection = sortResultNames(Array.from(resultNames)); + this._identifiersWithIntersectionValues = identifiersWithValuesIntersection.sort(); + } + + getUniqueEnsembleIdents(): EnsembleIdent[] { + return this._uniqueEnsembleIdents; + } + + getTableNamesIntersection(): string[] { + return this._tableNamesIntersection; + } + + getFluidZonesIntersection(): FluidZone_api[] { + return this._fluidZonesIntersection; + } + + getResultNamesIntersection(): InplaceVolumetricResultName_api[] { + return 
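// Sketch of reading the cross-ensemble intersections (the input data and the "Geogrid" table-name
// filter are assumptions):
declare const tableDefinitionsPerEnsembleIdent: TableDefinitionsForEnsembleIdent[];

const accessor = new TableDefinitionsAccessor(tableDefinitionsPerEnsembleIdent, ["Geogrid"]);
if (accessor.getAreTablesComparable()) {
    accessor.getTableNamesIntersection(); // table names present in every ensemble
    accessor.getFluidZonesIntersection(); // fluid zones present in every filtered table definition
    accessor.getResultNamesIntersection(); // result names present everywhere, ordered via sortResultNames
}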
this._resultNamesIntersection; + } + + getIdentifiersWithIntersectionValues(): InplaceVolumetricsIdentifierWithValues_api[] { + return this._identifiersWithIntersectionValues; + } + + getAreTablesComparable(): boolean { + return !this._tablesNotComparable; + } + + hasEnsembleIdents(ensembleIdents: EnsembleIdent[]): boolean { + for (const ensembleIdent of ensembleIdents) { + if (!this._uniqueEnsembleIdents.includes(ensembleIdent)) { + return false; + } + } + + return true; + } + + hasTableNames(tableNames: string[]): boolean { + for (const tableName of tableNames) { + if (!this._tableNamesIntersection.includes(tableName)) { + return false; + } + } + + return true; + } + + hasFluidZones(fluidZones: FluidZone_api[]): boolean { + for (const fluidZone of fluidZones) { + if (!this._fluidZonesIntersection.includes(fluidZone)) { + return false; + } + } + + return true; + } + + hasResultNames(resultNames: InplaceVolumetricResultName_api[]): boolean { + for (const resultName of resultNames) { + if (!this._resultNamesIntersection.includes(resultName)) { + return false; + } + } + + return true; + } + + hasResultName(resultName: InplaceVolumetricResultName_api): boolean { + return this._resultNamesIntersection.includes(resultName); + } + + hasIdentifiersWithValues(identifiersWithValues: InplaceVolumetricsIdentifierWithValues_api[]): boolean { + for (const identifierValue of identifiersWithValues) { + const identifier = identifierValue.identifier; + const tableDefinitionsIdentifier = this._identifiersWithIntersectionValues.find( + (el) => el.identifier === identifier + ); + if (!tableDefinitionsIdentifier) { + return false; + } + + for (const value of identifierValue.values) { + if (!tableDefinitionsIdentifier.values.includes(value)) { + return false; + } + } + } + + return true; + } +} diff --git a/frontend/src/modules/_shared/InplaceVolumetrics/fixupUserSelectedIdentifierValues.ts b/frontend/src/modules/_shared/InplaceVolumetrics/fixupUserSelectedIdentifierValues.ts new file mode 100644 index 000000000..2f058c432 --- /dev/null +++ b/frontend/src/modules/_shared/InplaceVolumetrics/fixupUserSelectedIdentifierValues.ts @@ -0,0 +1,50 @@ +import { InplaceVolumetricsIdentifierWithValues_api } from "@api"; +import { fixupUserSelection } from "@lib/utils/fixupUserSelection"; + +export function fixupUserSelectedIdentifierValues( + userSelectedIdentifierValues: InplaceVolumetricsIdentifierWithValues_api[] | null, + uniqueIdentifierValues: InplaceVolumetricsIdentifierWithValues_api[], + selectAllOnFixup: boolean +): InplaceVolumetricsIdentifierWithValues_api[] { + const fixedUpIdentifierValues: InplaceVolumetricsIdentifierWithValues_api[] = []; + if (!userSelectedIdentifierValues) { + for (const entry of uniqueIdentifierValues) { + fixedUpIdentifierValues.push({ + identifier: entry.identifier, + values: fixupUserSelection( + entry.values, + uniqueIdentifierValues.find((el) => el.identifier === entry.identifier)?.values ?? [], + selectAllOnFixup + ), + }); + } + return fixedUpIdentifierValues; + } + + for (const entry of userSelectedIdentifierValues) { + if (!uniqueIdentifierValues.find((el) => el.identifier === entry.identifier)) { + continue; + } + fixedUpIdentifierValues.push({ + identifier: entry.identifier, + values: fixupUserSelection( + entry.values, + uniqueIdentifierValues.find((el) => el.identifier === entry.identifier)?.values ?? 
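// Sketch of a possible call site (the inputs are assumptions): a null user selection yields an entry
// per available identifier, while an existing selection is reconciled against the currently available values.
declare const userSelection: InplaceVolumetricsIdentifierWithValues_api[] | null;
declare const availableIdentifierValues: InplaceVolumetricsIdentifierWithValues_api[];

const selectAllOnFixup = true; // forwarded to fixupUserSelection
const fixedUpSelection = fixupUserSelectedIdentifierValues(userSelection, availableIdentifierValues, selectAllOnFixup);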
[], + selectAllOnFixup + ), + }); + } + + if (userSelectedIdentifierValues.length !== uniqueIdentifierValues.length) { + for (const entry of uniqueIdentifierValues) { + if (fixedUpIdentifierValues.find((el) => el.identifier === entry.identifier)) { + continue; + } + fixedUpIdentifierValues.push({ + identifier: entry.identifier, + values: uniqueIdentifierValues.find((el) => el.identifier === entry.identifier)?.values ?? [], + }); + } + } + return fixedUpIdentifierValues; +} diff --git a/frontend/src/modules/_shared/InplaceVolumetrics/queryHooks.ts b/frontend/src/modules/_shared/InplaceVolumetrics/queryHooks.ts new file mode 100644 index 000000000..31d9e1340 --- /dev/null +++ b/frontend/src/modules/_shared/InplaceVolumetrics/queryHooks.ts @@ -0,0 +1,232 @@ +import { + FluidZone_api, + InplaceStatisticalVolumetricTableDataPerFluidSelection_api, + InplaceVolumetricResultName_api, + InplaceVolumetricTableDataPerFluidSelection_api, + InplaceVolumetricsIdentifierWithValues_api, + InplaceVolumetricsIdentifier_api, +} from "@api"; +import { apiService } from "@framework/ApiService"; +import { EnsembleIdent } from "@framework/EnsembleIdent"; +import { + InplaceVolumetricsStatisticalTableData, + InplaceVolumetricsTableData, +} from "@modules/_shared/InplaceVolumetrics/types"; +import { UseQueryResult } from "@tanstack/react-query"; + +export type EnsembleIdentWithRealizations = { + ensembleIdent: EnsembleIdent; + realizations: readonly number[]; +}; + +const STALE_TIME = 60 * 1000; +const CACHE_TIME = 60 * 1000; + +export type AggregatedTableDataResults = { + tablesData: InplaceVolumetricsTableData[]; + isFetching: boolean; + someQueriesFailed: boolean; + allQueriesFailed: boolean; + errors: Error[]; +}; + +export type AggregatedStatisticalTableDataResults = { + tablesData: InplaceVolumetricsStatisticalTableData[]; + isFetching: boolean; + someQueriesFailed: boolean; + allQueriesFailed: boolean; + errors: Error[]; +}; + +export function useGetAggregatedStatisticalTableDataQueries( + ensembleIdentsWithRealizations: EnsembleIdentWithRealizations[], + tableNames: string[], + resultNames: InplaceVolumetricResultName_api[], + fluidZones: FluidZone_api[], + groupByIdentifiers: InplaceVolumetricsIdentifier_api[], + accumulateFluidZones: boolean, + identifiersWithValues: InplaceVolumetricsIdentifierWithValues_api[], + allowEnable: boolean +) { + const uniqueSources: { ensembleIdent: EnsembleIdent; realizations: readonly number[]; tableName: string }[] = []; + for (const el of ensembleIdentsWithRealizations) { + for (const tableName of tableNames) { + uniqueSources.push({ ensembleIdent: el.ensembleIdent, realizations: el.realizations, tableName }); + } + } + + const eachIdentifierHasValues = identifiersWithValues.every((identifier) => identifier.values.length > 0); + const validGroupByIdentifiers = groupByIdentifiers.length === 0 ? null : groupByIdentifiers; + + const queries = uniqueSources.map((source) => { + const validRealizations = source.realizations.length === 0 ? 
null : [...source.realizations]; + return () => ({ + queryKey: [ + "postGetAggregatedStatisticalTableData", + source.ensembleIdent.toString(), + source.tableName, + source.realizations, + fluidZones, + groupByIdentifiers, + accumulateFluidZones, + resultNames, + identifiersWithValues, + ], + queryFn: () => + apiService.inplaceVolumetrics.postGetAggregatedStatisticalTableData( + source.ensembleIdent.getCaseUuid(), + source.ensembleIdent.getEnsembleName(), + source.tableName, + resultNames, + fluidZones, + accumulateFluidZones, + { + identifiers_with_values: identifiersWithValues, + }, + validGroupByIdentifiers, + validRealizations + ), + staleTime: STALE_TIME, + cacheTime: CACHE_TIME, + enabled: Boolean( + allowEnable && + source.ensembleIdent && + source.tableName && + validRealizations && + validRealizations.length && + fluidZones.length && + resultNames.length && + eachIdentifierHasValues + ), + }); + }); + + function combine( + results: UseQueryResult[] + ): AggregatedStatisticalTableDataResults { + const tablesData: InplaceVolumetricsStatisticalTableData[] = []; + const errors: Error[] = []; + for (const [index, result] of results.entries()) { + if (result.data) { + tablesData.push({ + ensembleIdent: uniqueSources[index].ensembleIdent, + tableName: uniqueSources[index].tableName, + data: result.data, + }); + } + if (result.error) { + errors.push(result.error); + } + } + + return { + tablesData: tablesData, + isFetching: results.some((result) => result.isFetching), + someQueriesFailed: results.some((result) => result.isError), + allQueriesFailed: results.length > 0 && results.every((result) => result.isError), + errors: errors, + }; + } + + return { + queries, + combine, + }; +} + +export function useGetAggregatedPerRealizationTableDataQueries( + ensembleIdentsWithRealizations: EnsembleIdentWithRealizations[], + tableNames: string[], + resultNames: InplaceVolumetricResultName_api[], + fluidZones: FluidZone_api[], + groupByIdentifiers: InplaceVolumetricsIdentifier_api[], + accumulateFluidZones: boolean, + identifiersWithValues: InplaceVolumetricsIdentifierWithValues_api[], + allowEnable: boolean +) { + const uniqueSources: { ensembleIdent: EnsembleIdent; realizations: readonly number[]; tableName: string }[] = []; + for (const el of ensembleIdentsWithRealizations) { + for (const tableName of tableNames) { + uniqueSources.push({ ensembleIdent: el.ensembleIdent, realizations: el.realizations, tableName }); + } + } + + const eachIdentifierHasValues = identifiersWithValues.every((identifier) => identifier.values.length > 0); + const validGroupByIdentifiers = groupByIdentifiers.length === 0 ? null : groupByIdentifiers; + + const queries = uniqueSources.map((source) => { + const validRealizations = source.realizations.length === 0 ? 
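// Sketch of one way a consumer might use the { queries, combine } pair returned above: map the option
// factories into TanStack Query's useQueries and feed the results through combine (the consuming
// component is an assumption, not defined here).
import { useQueries } from "@tanstack/react-query";

declare const hookResult: ReturnType<typeof useGetAggregatedStatisticalTableDataQueries>;

const queryResults = useQueries({ queries: hookResult.queries.map((makeQueryOptions) => makeQueryOptions()) });
const aggregated = hookResult.combine(queryResults);
// aggregated.tablesData, aggregated.isFetching, aggregated.someQueriesFailed, aggregated.errors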
null : [...source.realizations]; + return () => ({ + queryKey: [ + "postGetAggregatedPerRealizationTableData", + source.ensembleIdent.toString(), + source.tableName, + source.realizations, + fluidZones, + groupByIdentifiers, + accumulateFluidZones, + resultNames, + identifiersWithValues, + ], + queryFn: () => + apiService.inplaceVolumetrics.postGetAggregatedPerRealizationTableData( + source.ensembleIdent.getCaseUuid(), + source.ensembleIdent.getEnsembleName(), + source.tableName, + resultNames, + fluidZones, + accumulateFluidZones, + + { + identifiers_with_values: identifiersWithValues, + }, + validGroupByIdentifiers, + validRealizations + ), + staleTime: STALE_TIME, + cacheTime: CACHE_TIME, + enabled: Boolean( + allowEnable && + source.ensembleIdent && + source.tableName && + validRealizations && + validRealizations.length && + fluidZones.length && + resultNames.length && + eachIdentifierHasValues + ), + }); + }); + + function combine( + results: UseQueryResult[] + ): AggregatedTableDataResults { + const tablesData: InplaceVolumetricsTableData[] = []; + const errors: Error[] = []; + for (const [index, result] of results.entries()) { + if (result.data) { + tablesData.push({ + ensembleIdent: uniqueSources[index].ensembleIdent, + tableName: uniqueSources[index].tableName, + data: result.data, + }); + } + if (result.error) { + errors.push(result.error); + } + } + + return { + tablesData: tablesData, + isFetching: results.some((result) => result.isFetching), + someQueriesFailed: results.some((result) => result.isError), + allQueriesFailed: results.length > 0 && results.every((result) => result.isError), + errors: errors, + }; + } + + return { + queries, + combine, + }; +} diff --git a/frontend/src/modules/_shared/InplaceVolumetrics/sortResultNames.ts b/frontend/src/modules/_shared/InplaceVolumetrics/sortResultNames.ts new file mode 100644 index 000000000..ff9fc25e9 --- /dev/null +++ b/frontend/src/modules/_shared/InplaceVolumetrics/sortResultNames.ts @@ -0,0 +1,25 @@ +import { InplaceVolumetricResultName_api } from "@api"; +import { ORDERED_VOLUME_DEFINITIONS } from "@assets/volumeDefinitions"; + +function sortResultNamesGeneric(resultNames: T[]): T[] { + const sortedResultNames: T[] = []; + const resultNamesSet = new Set(resultNames); + + for (const volumeDefinition in ORDERED_VOLUME_DEFINITIONS) { + const volumeDefinitionAbbreviation = volumeDefinition as T; + if (resultNamesSet.has(volumeDefinitionAbbreviation)) { + sortedResultNames.push(volumeDefinitionAbbreviation); + resultNamesSet.delete(volumeDefinitionAbbreviation); + } + } + + return sortedResultNames.concat(Array.from(resultNamesSet)); +} + +export function sortResultNames(resultNames: InplaceVolumetricResultName_api[]): InplaceVolumetricResultName_api[] { + return sortResultNamesGeneric(resultNames); +} + +export function sortResultNameStrings(resultNames: string[]): string[] { + return sortResultNamesGeneric(resultNames); +} diff --git a/frontend/src/modules/_shared/InplaceVolumetrics/tableUtils.ts b/frontend/src/modules/_shared/InplaceVolumetrics/tableUtils.ts new file mode 100644 index 000000000..8f33eae87 --- /dev/null +++ b/frontend/src/modules/_shared/InplaceVolumetrics/tableUtils.ts @@ -0,0 +1,236 @@ +import { InplaceVolumetricStatistic_api } from "@api"; + +import { Column, ColumnType, Table } from "./Table"; +import { + InplaceVolumetricStatisticEnumToStringMapping, + InplaceVolumetricsStatisticalTableData, + InplaceVolumetricsTableData, + SourceIdentifier, + StatisticalColumns, + StatisticalTableColumnData, +} from 
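// Example of the resulting order, assuming "BULK" and "STOIIP" are keys of ORDERED_VOLUME_DEFINITIONS
// and are declared in that order:
//   sortResultNameStrings(["STOIIP", "MY_CUSTOM_RESULT", "BULK"]) -> ["BULK", "STOIIP", "MY_CUSTOM_RESULT"]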
"./types"; + +export function makeTableFromApiData(data: InplaceVolumetricsTableData[]): Table { + const columns: Map> = new Map(); + columns.set("ensemble", new Column(SourceIdentifier.ENSEMBLE, ColumnType.ENSEMBLE)); + columns.set("table", new Column(SourceIdentifier.TABLE_NAME, ColumnType.TABLE)); + columns.set("fluid-zone", new Column(SourceIdentifier.FLUID_ZONE, ColumnType.FLUID_ZONE)); + + // First, collect all columns + for (const tableSet of data) { + for (const fluidZoneTable of tableSet.data.tableDataPerFluidSelection) { + for (const selectorColumn of fluidZoneTable.selectorColumns) { + if (!columns.has(selectorColumn.columnName)) { + let type = ColumnType.IDENTIFIER; + if (selectorColumn.columnName === "REAL") { + type = ColumnType.REAL; + } + columns.set(selectorColumn.columnName, new Column(selectorColumn.columnName, type)); + } + } + for (const resultColumn of fluidZoneTable.resultColumns) { + if (!columns.has(resultColumn.columnName)) { + columns.set(resultColumn.columnName, new Column(resultColumn.columnName, ColumnType.RESULT)); + } + } + } + } + + // Then, add the values to the columns + for (const tableSet of data) { + for (const fluidZoneTable of tableSet.data.tableDataPerFluidSelection) { + let mainColumnsAdded = false; + for (const selectorColumn of fluidZoneTable.selectorColumns) { + for (let i = 0; i < selectorColumn.indices.length; i++) { + columns + .get(selectorColumn.columnName) + ?.addRowValue(selectorColumn.uniqueValues[selectorColumn.indices[i]]); + + if (!mainColumnsAdded) { + columns.get("ensemble")?.addRowValue(tableSet.ensembleIdent); + columns.get("table")?.addRowValue(tableSet.tableName); + columns.get("fluid-zone")?.addRowValue(fluidZoneTable.fluidSelectionName); + } + } + mainColumnsAdded = true; + } + + let numAddedRows = 0; + for (const [index, resultColumn] of fluidZoneTable.resultColumns.entries()) { + for (const value of resultColumn.columnValues) { + columns.get(resultColumn.columnName)?.addRowValue(value); + + if (index === 0) { + numAddedRows++; + } + + if (!mainColumnsAdded) { + columns.get("ensemble")?.addRowValue(tableSet.ensembleIdent); + columns.get("table")?.addRowValue(tableSet.tableName); + columns.get("fluid-zone")?.addRowValue(fluidZoneTable.fluidSelectionName); + } + } + mainColumnsAdded = true; + } + if (numAddedRows > 0) { + const untouchedColumns = Array.from(columns.values()).filter( + (column) => + !fluidZoneTable.selectorColumns.some( + (selectorColumn) => selectorColumn.columnName === column.getName() + ) && + !fluidZoneTable.resultColumns.some( + (resultColumn) => resultColumn.columnName === column.getName() + ) && + column.getType() !== ColumnType.ENSEMBLE && + column.getType() !== ColumnType.TABLE && + column.getType() !== ColumnType.FLUID_ZONE + ); + for (const column of untouchedColumns) { + for (let i = 0; i < numAddedRows; i++) { + column.addRowValue(null); + } + } + } + } + } + + return new Table(Array.from(columns.values())); +} + +export function makeStatisticalTableColumnDataFromApiData( + data: InplaceVolumetricsStatisticalTableData[], + statisticOptions: InplaceVolumetricStatistic_api[] +): StatisticalTableColumnData { + // Result statistical tables + const resultStatisticalColumns: Map = new Map(); + + // Non-statistical columns + const allSelectorColumns: Set = new Set(); + const nonStatisticalColumns: Map> = new Map(); + + // Columns to always exist (non-statistical, but no selector columns) + nonStatisticalColumns.set("ensemble", new Column(SourceIdentifier.ENSEMBLE, ColumnType.ENSEMBLE)); + 
nonStatisticalColumns.set("table", new Column(SourceIdentifier.TABLE_NAME, ColumnType.TABLE)); + nonStatisticalColumns.set("fluid-zone", new Column(SourceIdentifier.FLUID_ZONE, ColumnType.FLUID_ZONE)); + + // Find union of selector columns and result columns + for (const tableSet of data) { + for (const fluidZoneTableData of tableSet.data.tableDataPerFluidSelection) { + // Selector columns + for (const selectorColumn of fluidZoneTableData.selectorColumns) { + allSelectorColumns.add(selectorColumn.columnName); + if (!nonStatisticalColumns.has(selectorColumn.columnName)) { + const type = ColumnType.IDENTIFIER; + if (selectorColumn.columnName === "REAL") { + throw new Error("REAL column should not be present in statistical tables"); + } + nonStatisticalColumns.set(selectorColumn.columnName, new Column(selectorColumn.columnName, type)); + } + } + + // Result statistical tables + for (const resultColumn of fluidZoneTableData.resultColumnStatistics) { + if (resultStatisticalColumns.has(resultColumn.columnName)) { + continue; + } + + // Add statistical columns for each result column based on the selected statistic options + const statisticalColumns: StatisticalColumns = {}; + for (const statistic of statisticOptions) { + const columnName = InplaceVolumetricStatisticEnumToStringMapping[statistic]; + statisticalColumns[statistic] = new Column(columnName, ColumnType.RESULT); + } + resultStatisticalColumns.set(resultColumn.columnName, statisticalColumns); + } + } + } + + // Add row values to the tables + for (const tableSet of data) { + for (const fluidZoneTableData of tableSet.data.tableDataPerFluidSelection) { + const hasNoResultColumnStatistics = + fluidZoneTableData.resultColumnStatistics.length === 0 || + Object.keys(fluidZoneTableData.resultColumnStatistics[0].statisticValues).length === 0; + if (hasNoResultColumnStatistics) { + continue; + } + + // Number of rows from the first result statistic column + const numRows = Object.values(fluidZoneTableData.resultColumnStatistics[0].statisticValues)[0].length; + for (let i = 0; i < numRows; i++) { + nonStatisticalColumns.get("ensemble")?.addRowValue(tableSet.ensembleIdent); + nonStatisticalColumns.get("table")?.addRowValue(tableSet.tableName); + nonStatisticalColumns.get("fluid-zone")?.addRowValue(fluidZoneTableData.fluidSelectionName); + } + + // Build selector columns + const selectorColumnsInTable = fluidZoneTableData.selectorColumns.map( + (selectorColumn) => selectorColumn.columnName + ); + const untouchedSelectorColumns = Array.from(allSelectorColumns).filter( + (elm) => !selectorColumnsInTable.includes(elm) + ); + for (const selectorColumn of fluidZoneTableData.selectorColumns) { + for (const valueIndex of selectorColumn.indices) { + const rowValue = selectorColumn.uniqueValues.at(valueIndex); + if (!rowValue) { + throw new Error( + `Expected value at index ${valueIndex} for ${selectorColumn.columnName} not found` + ); + } + + nonStatisticalColumns.get(selectorColumn.columnName)?.addRowValue(rowValue); + } + } + + // Fill in untouched selector columns with null + for (const untouchedColumn of untouchedSelectorColumns) { + for (let i = 0; i < numRows; i++) { + nonStatisticalColumns.get(untouchedColumn)?.addRowValue(null); + } + } + + // Build statistical columns per result across each unique table set + const resultStatisticsInTableData = fluidZoneTableData.resultColumnStatistics.map( + (resultColumn) => resultColumn.columnName + ); + const untouchedResultStatistics = Array.from(resultStatisticalColumns.keys()).filter( + (elm) => 
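// Sketch of turning fetched per-realization table data into a Table for the plot and table views
// (tablesData is assumed to come from useGetAggregatedPerRealizationTableDataQueries):
declare const tablesData: InplaceVolumetricsTableData[];

const volumeTable = makeTableFromApiData(tablesData);
volumeTable.getColumn("REAL"); // realization column, if present in the source tables
volumeTable.getNumRows();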
!resultStatisticsInTableData.includes(elm) + ); + for (const resultColumn of fluidZoneTableData.resultColumnStatistics) { + const statisticalColumns = resultStatisticalColumns.get(resultColumn.columnName); + + if (!statisticalColumns) { + throw new Error(`Expected statistical columns for ${resultColumn.columnName} not found`); + } + + statisticalColumns.mean?.addRowValues(resultColumn.statisticValues["mean"]); + statisticalColumns.stddev?.addRowValues(resultColumn.statisticValues["stddev"]); + statisticalColumns.p90?.addRowValues(resultColumn.statisticValues["p90"]); + statisticalColumns.p10?.addRowValues(resultColumn.statisticValues["p10"]); + statisticalColumns.min?.addRowValues(resultColumn.statisticValues["min"]); + statisticalColumns.max?.addRowValues(resultColumn.statisticValues["max"]); + } + + // Fill in untouched results with null for statistics + const nullArray = Array(numRows).fill(null); + for (const untouchedResult of untouchedResultStatistics) { + const statisticalColumns = resultStatisticalColumns.get(untouchedResult); + + if (!statisticalColumns) { + throw new Error(`Expected statistical columns for ${untouchedResult} not found`); + } + + for (const keyStr of Object.keys(statisticalColumns)) { + const key = keyStr as InplaceVolumetricStatistic_api; + statisticalColumns[key]?.addRowValues(nullArray); + } + } + } + } + + return { + nonStatisticalColumns: Array.from(nonStatisticalColumns.values()), + resultStatisticalColumns: resultStatisticalColumns, + }; +} diff --git a/frontend/src/modules/_shared/InplaceVolumetrics/types.ts b/frontend/src/modules/_shared/InplaceVolumetrics/types.ts new file mode 100644 index 000000000..880f3bc9d --- /dev/null +++ b/frontend/src/modules/_shared/InplaceVolumetrics/types.ts @@ -0,0 +1,73 @@ +import { + InplaceStatisticalVolumetricTableDataPerFluidSelection_api, + InplaceVolumetricStatistic_api, + InplaceVolumetricTableDataPerFluidSelection_api, + InplaceVolumetricsIdentifier_api, +} from "@api"; +import { EnsembleIdent } from "@framework/EnsembleIdent"; + +import { Column } from "./Table"; + +export type InplaceVolumetricsTableData = { + ensembleIdent: EnsembleIdent; + tableName: string; + data: InplaceVolumetricTableDataPerFluidSelection_api; +}; + +export type InplaceVolumetricsStatisticalTableData = { + ensembleIdent: EnsembleIdent; + tableName: string; + data: InplaceStatisticalVolumetricTableDataPerFluidSelection_api; +}; + +export enum TableType { + PER_REALIZATION = "PER_REALIZATION", + STATISTICAL = "STATISTICAL", +} + +export const TableTypeToStringMapping = { + [TableType.PER_REALIZATION]: "Per realization", + [TableType.STATISTICAL]: "Statistical", +}; + +export enum SourceIdentifier { + ENSEMBLE = "ENSEMBLE", + TABLE_NAME = "TABLE_NAME", + FLUID_ZONE = "FLUID_ZONE", +} + +const sourceAndTableIdentifiersUnion = { ...SourceIdentifier, ...InplaceVolumetricsIdentifier_api }; +export type SourceAndTableIdentifierUnion = + (typeof sourceAndTableIdentifiersUnion)[keyof typeof sourceAndTableIdentifiersUnion]; + +export enum RealSelector { + REAL = "REAL", +} +export const selectorColumns = { ...RealSelector, ...InplaceVolumetricsIdentifier_api } as const; +export type SelectorColumn = (typeof selectorColumns)[keyof typeof selectorColumns]; + +export const AccumulationOption = { + FLUID_ZONE: "FLUID_ZONE", + ...InplaceVolumetricsIdentifier_api, +}; + +export type StatisticalColumns = Partial<{ + [key in InplaceVolumetricStatistic_api]: Column; +}>; + +export type StatisticalTableColumnData = { + // Statistical tables has two types of 
columns: + // - Non statistical columns: Column with name and row values (e.g. ensemble, table, fluid zone, etc.) + // - Statistical columns: Map with result name as key, and its statistical columns as value. One column per statistical type (e.g. mean, min, max, etc.) + nonStatisticalColumns: Column[]; + resultStatisticalColumns: Map; +}; + +export const InplaceVolumetricStatisticEnumToStringMapping = { + [InplaceVolumetricStatistic_api.MEAN]: "Mean", + [InplaceVolumetricStatistic_api.MIN]: "Min", + [InplaceVolumetricStatistic_api.MAX]: "Max", + [InplaceVolumetricStatistic_api.STDDEV]: "Stddev", + [InplaceVolumetricStatistic_api.P10]: "P10", + [InplaceVolumetricStatistic_api.P90]: "P90", +}; diff --git a/frontend/src/modules/_shared/InplaceVolumetrics/volumetricStringUtils.ts b/frontend/src/modules/_shared/InplaceVolumetrics/volumetricStringUtils.ts new file mode 100644 index 000000000..1bd1b76cf --- /dev/null +++ b/frontend/src/modules/_shared/InplaceVolumetrics/volumetricStringUtils.ts @@ -0,0 +1,28 @@ +import { ORDERED_VOLUME_DEFINITIONS, VolumeDefinition } from "@assets/volumeDefinitions"; + +/** + * Returns volume definition for vector if it exists, otherwise returns null. + * + * @param volumeName - Volume name to get definition for. + * @returns Volume definition for vector if it exists, otherwise returns null. + */ +export function getVolumeDefinition(volumeName: string): VolumeDefinition | null { + if (volumeName in ORDERED_VOLUME_DEFINITIONS) { + return ORDERED_VOLUME_DEFINITIONS[volumeName]; + } + return null; +} + +/** + * Create hover text for requested volume name. + * + * @param volumeName - Volume name to create hover text for. + * @returns Hover text for requested volume name. + */ +export function createHoverTextForVolume(volumeName: string): string { + const volumeDefinition = getVolumeDefinition(volumeName); + if (volumeDefinition) { + return `${volumeDefinition.description}${volumeDefinition.unit ? 
` [${volumeDefinition.unit}]` : ""}`; + } + return volumeName; +} diff --git a/frontend/src/modules/_shared/components/InplaceVolumetricsFilterComponent/index.ts b/frontend/src/modules/_shared/components/InplaceVolumetricsFilterComponent/index.ts new file mode 100644 index 000000000..c941bb65e --- /dev/null +++ b/frontend/src/modules/_shared/components/InplaceVolumetricsFilterComponent/index.ts @@ -0,0 +1 @@ +export { InplaceVolumetricsFilterComponent } from "./inplaceVolumetricsFilterComponent"; diff --git a/frontend/src/modules/_shared/components/InplaceVolumetricsFilterComponent/inplaceVolumetricsFilterComponent.tsx b/frontend/src/modules/_shared/components/InplaceVolumetricsFilterComponent/inplaceVolumetricsFilterComponent.tsx new file mode 100644 index 000000000..6846512c3 --- /dev/null +++ b/frontend/src/modules/_shared/components/InplaceVolumetricsFilterComponent/inplaceVolumetricsFilterComponent.tsx @@ -0,0 +1,290 @@ +import React from "react"; + +import { FluidZone_api, InplaceVolumetricsIdentifierWithValues_api, InplaceVolumetricsIdentifier_api } from "@api"; +import { EnsembleIdent } from "@framework/EnsembleIdent"; +import { EnsembleSet } from "@framework/EnsembleSet"; +import { SettingsContext } from "@framework/ModuleContext"; +import { SyncSettingKey, SyncSettingsHelper } from "@framework/SyncSettings"; +import { WorkbenchServices } from "@framework/WorkbenchServices"; +import { EnsembleSelect } from "@framework/components/EnsembleSelect"; +import { InplaceVolumetricsFilter } from "@framework/types/inplaceVolumetricsFilter"; +import { CollapsibleGroup } from "@lib/components/CollapsibleGroup"; +import { PendingWrapper } from "@lib/components/PendingWrapper"; +import { Select } from "@lib/components/Select"; + +import { cloneDeep, isEqual } from "lodash"; + +export type InplaceVolumetricsFilterComponentProps = { + ensembleSet: EnsembleSet; + settingsContext: SettingsContext; + workbenchServices: WorkbenchServices; + availableTableNames: string[]; + availableFluidZones: FluidZone_api[]; + availableIdentifiersWithValues: InplaceVolumetricsIdentifierWithValues_api[]; + selectedEnsembleIdents: EnsembleIdent[]; + selectedTableNames: string[]; + selectedFluidZones: FluidZone_api[]; + selectedIdentifiersValues: InplaceVolumetricsIdentifierWithValues_api[]; + onChange: (filter: InplaceVolumetricsFilter) => void; + isPending?: boolean; + errorMessage?: string; + additionalSettings?: React.ReactNode; + areCurrentlySelectedTablesComparable?: boolean; + debounceMs?: number; +}; + +export function InplaceVolumetricsFilterComponent(props: InplaceVolumetricsFilterComponentProps): React.ReactNode { + const [ensembleIdents, setEnsembleIdents] = React.useState(props.selectedEnsembleIdents); + const [tableNames, setTableNames] = React.useState(props.selectedTableNames); + const [fluidZones, setFluidZones] = React.useState(props.selectedFluidZones); + const [identifiersValues, setIdentifiersValues] = React.useState( + props.selectedIdentifiersValues + ); + + const [prevEnsembleIdents, setPrevEnsembleIdents] = React.useState(props.selectedEnsembleIdents); + const [prevTableNames, setPrevTableNames] = React.useState(props.selectedTableNames); + const [prevFluidZones, setPrevFluidZones] = React.useState(props.selectedFluidZones); + const [prevIdentifiersValues, setPrevIdentifiersValues] = React.useState< + InplaceVolumetricsIdentifierWithValues_api[] + >(props.selectedIdentifiersValues); + const [prevSyncedFilter, setPrevSyncedFilter] = React.useState(null); + + const debounceTimeoutRef = 
React.useRef | null>(null); + + if (!isEqual(props.selectedEnsembleIdents, prevEnsembleIdents)) { + setEnsembleIdents(props.selectedEnsembleIdents); + setPrevEnsembleIdents(props.selectedEnsembleIdents); + } + + if (!isEqual(props.selectedTableNames, prevTableNames)) { + setTableNames(props.selectedTableNames); + setPrevTableNames(props.selectedTableNames); + } + + if (!isEqual(props.selectedFluidZones, prevFluidZones)) { + setFluidZones(props.selectedFluidZones); + setPrevFluidZones(props.selectedFluidZones); + } + + if (!isEqual(props.selectedIdentifiersValues, prevIdentifiersValues)) { + setIdentifiersValues((prev) => { + const newIdentifiersValues = [...prev]; + for (const [index, identifier] of props.selectedIdentifiersValues.entries()) { + if ( + !isEqual( + prevIdentifiersValues.find((filter) => filter.identifier === identifier.identifier)?.values, + identifier.values + ) + ) { + newIdentifiersValues[index] = { ...identifier }; + } + } + return newIdentifiersValues; + }); + setPrevIdentifiersValues(props.selectedIdentifiersValues); + } + + const syncedSettingKeys = props.settingsContext.useSyncedSettingKeys(); + const syncHelper = new SyncSettingsHelper(syncedSettingKeys, props.workbenchServices); + + const syncedFilter = syncHelper.useValue( + SyncSettingKey.INPLACE_VOLUMETRICS_FILTER, + "global.syncValue.inplaceVolumetricsFilter" + ); + + if (!isEqual(syncedFilter, prevSyncedFilter)) { + if (syncedFilter) { + const filter = { ensembleIdents, tableNames, fluidZones, identifiersValues }; + + if (!isEqual(syncedFilter.ensembleIdents, ensembleIdents)) { + filter.ensembleIdents = [...syncedFilter.ensembleIdents]; + } + + if (!isEqual(syncedFilter.tableNames, tableNames)) { + filter.tableNames = [...syncedFilter.tableNames]; + } + + if (!isEqual(syncedFilter.fluidZones, fluidZones)) { + filter.fluidZones = [...syncedFilter.fluidZones]; + } + + if (!isEqual(syncedFilter.identifiersValues, identifiersValues)) { + const newIdentifiersValues = cloneDeep(identifiersValues); + + for (const identifier of syncedFilter.identifiersValues) { + const identifierValues = newIdentifiersValues.find( + (filter) => filter.identifier === identifier.identifier + ); + if (!identifierValues) { + newIdentifiersValues.push({ ...identifier }); + } else { + identifierValues.values = [...identifier.values]; + } + } + setIdentifiersValues(newIdentifiersValues); + filter.identifiersValues = newIdentifiersValues; + } + + props.onChange(filter); + } + + setPrevSyncedFilter(syncedFilter); + } + + React.useEffect(function mountEffect() { + const currentDebounceTimeoutRef = debounceTimeoutRef.current; + return function unmountEffect() { + if (currentDebounceTimeoutRef) { + clearTimeout(currentDebounceTimeoutRef); + } + }; + }, []); + + function callOnChangeAndMaybePublish(filter: InplaceVolumetricsFilter, publish: boolean): void { + if (debounceTimeoutRef.current) { + clearTimeout(debounceTimeoutRef.current); + } + props.onChange(filter); + if (publish) { + syncHelper.publishValue( + SyncSettingKey.INPLACE_VOLUMETRICS_FILTER, + "global.syncValue.inplaceVolumetricsFilter", + filter + ); + } + } + + function maybeDebounceOnChange(filter: InplaceVolumetricsFilter, publish: boolean): void { + if (debounceTimeoutRef.current) { + clearTimeout(debounceTimeoutRef.current); + } + + if (!props.debounceMs) { + callOnChangeAndMaybePublish(filter, publish); + return; + } + + debounceTimeoutRef.current = setTimeout(() => { + callOnChangeAndMaybePublish(filter, publish); + }, props.debounceMs); + } + + function 
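// Note: ensemble and table-name changes below call callOnChangeAndMaybePublish directly, while
// fluid-zone and identifier-value changes go through maybeDebounceOnChange, so rapid clicks only
// propagate after props.debounceMs when it is set.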
handleEnsembleIdentsChange(newEnsembleIdents: EnsembleIdent[], publish = true): void { + setEnsembleIdents(newEnsembleIdents); + const filter = { + ensembleIdents: newEnsembleIdents, + tableNames: tableNames, + fluidZones, + identifiersValues: identifiersValues, + }; + callOnChangeAndMaybePublish(filter, publish); + } + + function handleTableNamesChange(newTableNames: string[], publish = true): void { + setTableNames(newTableNames); + const filter = { ensembleIdents, tableNames: newTableNames, fluidZones, identifiersValues: identifiersValues }; + callOnChangeAndMaybePublish(filter, publish); + } + + function handleFluidZoneChange(newFluidZones: FluidZone_api[], publish = true): void { + setFluidZones(newFluidZones); + const filter = { + ensembleIdents, + tableNames: tableNames, + fluidZones: newFluidZones, + identifiersValues: identifiersValues, + }; + maybeDebounceOnChange(filter, publish); + } + + function handleIdentifierValuesChange( + identifier: InplaceVolumetricsIdentifier_api, + values: (string | number)[], + publish = true + ): void { + const newIdentifiersValues = cloneDeep(identifiersValues); + const identifierValues = newIdentifiersValues.find((filter) => filter.identifier === identifier); + if (!identifierValues) { + newIdentifiersValues.push({ identifier: identifier, values }); + } else { + identifierValues.values = [...values]; + } + setIdentifiersValues(newIdentifiersValues); + const filter = { ensembleIdents, tableNames: tableNames, fluidZones, identifiersValues: newIdentifiersValues }; + maybeDebounceOnChange(filter, publish); + } + + const tableSourceOptions = props.availableTableNames.map((source) => ({ value: source, label: source })); + const fluidZoneOptions = props.availableFluidZones.map((zone) => ({ value: zone, label: zone })); + + let errorMessage: string | undefined = undefined; + if (props.areCurrentlySelectedTablesComparable === false) { + errorMessage = "Selected tables are not comparable"; + } + + return ( + <> + + + + +
{props.additionalSettings}
+
+ + + + + +
+ {props.availableIdentifiersWithValues.map((identifier) => ( + + ); // Click on first element and expect selection - let options = await select.locator("div").first().locator("div").first().locator("div"); + let options = await select.locator("div").first().locator("div").nth(1).locator("div"); await options.first().click(); expect(selection.includes(selectOptions1[0].value)).toBeTruthy(); @@ -81,7 +82,7 @@ test.describe("Select", () => { expect(select).toContainText(selectOptions1[0].value); // Click on fourth element and expect selection - options = await select.locator("div").first().locator("div").first().locator("div"); + options = await select.locator("div").first().locator("div").nth(1).locator("div"); await options.nth(3).click(); expect(selection.includes(selectOptions1[3].value)).toBeTruthy(); }); @@ -95,7 +96,7 @@ test.describe("Select", () => { const select = await mount( + ); + + // Find the "Select all" button and click it + const selectAllButton = select.locator("button[title='Select all']"); + await selectAllButton.click(); + + // Expect all options to be selected + expect(selection.length === selectOptions1.length).toBeTruthy(); + expect( + arrayContainsOtherArray( + selection, + selectOptions1.map((option) => option.value) + ) + ).toBeTruthy(); + + // Find the "Unselect all" button and click it + const unselectAllButton = select.locator("button[title='Unselect all']"); + await unselectAllButton.click(); + + // Expect no options to be selected + expect(selection.length === 0).toBeTruthy(); + }); }); From 2642c568069f914818aebfef05511695d8354e35 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B8rgen=20Herje?= <82032112+jorgenherje@users.noreply.github.com> Date: Wed, 18 Sep 2024 12:54:48 +0200 Subject: [PATCH 2/3] Remove seismic intersection module (#719) --- .../modules/SeismicIntersection/interfaces.ts | 51 -- .../SeismicIntersection/loadModule.tsx | 10 - .../SeismicIntersection/queryHooks.tsx | 115 ---- .../SeismicIntersection/registerModule.ts | 18 - .../settings/atoms/baseAtoms.ts | 12 - .../settings/hooks/queryHooks.tsx | 19 - .../SeismicIntersection/settings/settings.tsx | 585 ------------------ .../SeismicIntersection/typesAndEnums.ts | 28 - .../utils/esvIntersectionControllerUtils.ts | 171 ----- .../utils/esvIntersectionDataConversion.ts | 269 -------- .../utils/esvIntersectionHooks.ts | 65 -- .../utils/esvIntersectionTypes.ts | 40 -- .../utils/queryDataTransforms.ts | 22 - .../utils/seismicCubeDirectory.ts | 68 -- .../src/modules/SeismicIntersection/view.tsx | 331 ---------- frontend/src/modules/registerAllModules.ts | 1 - 16 files changed, 1805 deletions(-) delete mode 100644 frontend/src/modules/SeismicIntersection/interfaces.ts delete mode 100644 frontend/src/modules/SeismicIntersection/loadModule.tsx delete mode 100644 frontend/src/modules/SeismicIntersection/queryHooks.tsx delete mode 100644 frontend/src/modules/SeismicIntersection/registerModule.ts delete mode 100644 frontend/src/modules/SeismicIntersection/settings/atoms/baseAtoms.ts delete mode 100644 frontend/src/modules/SeismicIntersection/settings/hooks/queryHooks.tsx delete mode 100644 frontend/src/modules/SeismicIntersection/settings/settings.tsx delete mode 100644 frontend/src/modules/SeismicIntersection/typesAndEnums.ts delete mode 100644 frontend/src/modules/SeismicIntersection/utils/esvIntersectionControllerUtils.ts delete mode 100644 frontend/src/modules/SeismicIntersection/utils/esvIntersectionDataConversion.ts delete mode 100644 
frontend/src/modules/SeismicIntersection/utils/esvIntersectionHooks.ts delete mode 100644 frontend/src/modules/SeismicIntersection/utils/esvIntersectionTypes.ts delete mode 100644 frontend/src/modules/SeismicIntersection/utils/queryDataTransforms.ts delete mode 100644 frontend/src/modules/SeismicIntersection/utils/seismicCubeDirectory.ts delete mode 100644 frontend/src/modules/SeismicIntersection/view.tsx diff --git a/frontend/src/modules/SeismicIntersection/interfaces.ts b/frontend/src/modules/SeismicIntersection/interfaces.ts deleted file mode 100644 index aee98b07c..000000000 --- a/frontend/src/modules/SeismicIntersection/interfaces.ts +++ /dev/null @@ -1,51 +0,0 @@ -import { InterfaceInitialization } from "@framework/UniDirectionalModuleComponentsInterface"; -import { Wellbore } from "@framework/types/wellbore"; - -import { - extensionAtom, - seismicAddressAtom, - surfaceAddressAtom, - wellboreAddressAtom, - wellborePickCaseUuidAtom, - wellborePickSelectionAtom, - zScaleAtom, -} from "./settings/atoms/baseAtoms"; -import { SeismicAddress, SurfaceAddress, WellborePickSelectionType } from "./typesAndEnums"; - -type SettingsToViewInterface = { - wellboreAddress: Wellbore | null; - seismicAddress: SeismicAddress | null; - surfaceAddress: SurfaceAddress | null; - wellborePickCaseUuid: string | null; - wellborePickSelection: WellborePickSelectionType; - extension: number; - zScale: number; -}; - -export type Interfaces = { - settingsToView: SettingsToViewInterface; -}; - -export const settingsToViewInterfaceInitialization: InterfaceInitialization = { - wellboreAddress: (get) => { - return get(wellboreAddressAtom); - }, - seismicAddress: (get) => { - return get(seismicAddressAtom); - }, - surfaceAddress: (get) => { - return get(surfaceAddressAtom); - }, - wellborePickCaseUuid: (get) => { - return get(wellborePickCaseUuidAtom); - }, - wellborePickSelection: (get) => { - return get(wellborePickSelectionAtom); - }, - extension: (get) => { - return get(extensionAtom); - }, - zScale: (get) => { - return get(zScaleAtom); - }, -}; diff --git a/frontend/src/modules/SeismicIntersection/loadModule.tsx b/frontend/src/modules/SeismicIntersection/loadModule.tsx deleted file mode 100644 index 68b75e9d5..000000000 --- a/frontend/src/modules/SeismicIntersection/loadModule.tsx +++ /dev/null @@ -1,10 +0,0 @@ -import { ModuleRegistry } from "@framework/ModuleRegistry"; - -import { Interfaces, settingsToViewInterfaceInitialization } from "./interfaces"; -import { Settings } from "./settings/settings"; -import { View } from "./view"; - -const module = ModuleRegistry.initModule("SeismicIntersection", { settingsToViewInterfaceInitialization }); - -module.viewFC = View; -module.settingsFC = Settings; diff --git a/frontend/src/modules/SeismicIntersection/queryHooks.tsx b/frontend/src/modules/SeismicIntersection/queryHooks.tsx deleted file mode 100644 index 5ece3012c..000000000 --- a/frontend/src/modules/SeismicIntersection/queryHooks.tsx +++ /dev/null @@ -1,115 +0,0 @@ -import { - Body_post_get_seismic_fence_api, - Body_post_get_surface_intersection_api, - SeismicFencePolyline_api, - SurfaceIntersectionCumulativeLengthPolyline_api, - SurfaceIntersectionData_api, -} from "@api"; -import { apiService } from "@framework/ApiService"; -import { UseQueryResult, useQueries, useQuery } from "@tanstack/react-query"; - -import { SeismicFenceData_trans, transformSeismicFenceData } from "./utils/queryDataTransforms"; - -const STALE_TIME = 60 * 1000; -const CACHE_TIME = 60 * 1000; - -export function useSeismicFenceDataQuery( - 
caseUuid: string | null, - ensembleName: string | null, - realizationNum: number | null, - seismicAttribute: string | null, - timeOrIntervalStr: string | null, - observed: boolean | null, - polyline: SeismicFencePolyline_api | null, - allowEnable: boolean -): UseQueryResult { - const bodyPolyline: Body_post_get_seismic_fence_api = { polyline: polyline ?? { x_points: [], y_points: [] } }; - return useQuery({ - queryKey: [ - "postGetSeismicFence", - caseUuid, - ensembleName, - realizationNum, - seismicAttribute, - timeOrIntervalStr, - observed, - bodyPolyline, - ], - queryFn: () => - apiService.seismic.postGetSeismicFence( - caseUuid ?? "", - ensembleName ?? "", - realizationNum ?? 0, - seismicAttribute ?? "", - timeOrIntervalStr ?? "", - observed ?? false, - bodyPolyline - ), - select: transformSeismicFenceData, - staleTime: STALE_TIME, - gcTime: CACHE_TIME, - enabled: !!( - allowEnable && - caseUuid && - ensembleName && - realizationNum !== null && - seismicAttribute && - timeOrIntervalStr && - observed !== null && - polyline !== null - ), - }); -} - -export function useSurfaceIntersectionQueries( - caseUuid: string | null, - ensembleName: string | null, - realizationNum: number | null, - surfaceNames: string[] | null, - attribute: string | null, - timeOrIntervalStr: string | null, - cumulativeLengthPolyline: SurfaceIntersectionCumulativeLengthPolyline_api | null, - allowEnable: boolean -): UseQueryResult[] { - const bodyPolyline: Body_post_get_surface_intersection_api = { - cumulative_length_polyline: cumulativeLengthPolyline ?? { x_points: [], y_points: [], cum_lengths: [] }, - }; - - return useQueries({ - queries: (surfaceNames ?? []).map((surfaceName) => { - return { - queryKey: [ - "getSurfaceIntersection", - caseUuid, - ensembleName, - realizationNum, - surfaceName, - attribute, - timeOrIntervalStr, - bodyPolyline, - ], - queryFn: () => - apiService.surface.postGetSurfaceIntersection( - caseUuid ?? "", - ensembleName ?? "", - realizationNum ?? 0, - surfaceName ?? "", - attribute ?? 
"", - bodyPolyline, - timeOrIntervalStr // Can be null - ), - staleTime: STALE_TIME, - gcTime: CACHE_TIME, - enabled: !!( - allowEnable && - caseUuid && - ensembleName && - realizationNum !== null && - surfaceName && - attribute && - cumulativeLengthPolyline !== null - ), - }; - }), - }); -} diff --git a/frontend/src/modules/SeismicIntersection/registerModule.ts b/frontend/src/modules/SeismicIntersection/registerModule.ts deleted file mode 100644 index ddf8b5f8b..000000000 --- a/frontend/src/modules/SeismicIntersection/registerModule.ts +++ /dev/null @@ -1,18 +0,0 @@ -import { ModuleCategory, ModuleDevState } from "@framework/Module"; -import { ModuleDataTagId } from "@framework/ModuleDataTags"; -import { ModuleRegistry } from "@framework/ModuleRegistry"; -import { SyncSettingKey } from "@framework/SyncSettings"; - -import { Interfaces } from "./interfaces"; - -const description = "Visualization of intersection data with a wellbore and seismic fence."; - -ModuleRegistry.registerModule({ - moduleName: "SeismicIntersection", - defaultTitle: "Seismic Intersection", - category: ModuleCategory.MAIN, - devState: ModuleDevState.DEPRECATED, - dataTagIds: [ModuleDataTagId.SEISMIC, ModuleDataTagId.DRILLED_WELLS], - syncableSettingKeys: [SyncSettingKey.ENSEMBLE], - description, -}); diff --git a/frontend/src/modules/SeismicIntersection/settings/atoms/baseAtoms.ts b/frontend/src/modules/SeismicIntersection/settings/atoms/baseAtoms.ts deleted file mode 100644 index 33d41c1ff..000000000 --- a/frontend/src/modules/SeismicIntersection/settings/atoms/baseAtoms.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Wellbore } from "@framework/types/wellbore"; -import { SeismicAddress, SurfaceAddress, WellborePickSelectionType } from "@modules/SeismicIntersection/typesAndEnums"; - -import { atom } from "jotai"; - -export const wellboreAddressAtom = atom(null); -export const seismicAddressAtom = atom(null); -export const surfaceAddressAtom = atom(null); -export const wellborePickCaseUuidAtom = atom(null); -export const wellborePickSelectionAtom = atom(WellborePickSelectionType.NONE); -export const extensionAtom = atom(1000); -export const zScaleAtom = atom(5); diff --git a/frontend/src/modules/SeismicIntersection/settings/hooks/queryHooks.tsx b/frontend/src/modules/SeismicIntersection/settings/hooks/queryHooks.tsx deleted file mode 100644 index 1f16594a0..000000000 --- a/frontend/src/modules/SeismicIntersection/settings/hooks/queryHooks.tsx +++ /dev/null @@ -1,19 +0,0 @@ -import { SeismicCubeMeta_api } from "@api"; -import { apiService } from "@framework/ApiService"; -import { UseQueryResult, useQuery } from "@tanstack/react-query"; - -const STALE_TIME = 60 * 1000; -const CACHE_TIME = 60 * 1000; - -export function useSeismicCubeMetaListQuery( - caseUuid: string | undefined, - ensembleName: string | undefined -): UseQueryResult { - return useQuery({ - queryKey: ["getSeismicCubeMetaList", caseUuid, ensembleName], - queryFn: () => apiService.seismic.getSeismicCubeMetaList(caseUuid ?? "", ensembleName ?? 
""), - staleTime: STALE_TIME, - gcTime: CACHE_TIME, - enabled: !!(caseUuid && ensembleName), - }); -} diff --git a/frontend/src/modules/SeismicIntersection/settings/settings.tsx b/frontend/src/modules/SeismicIntersection/settings/settings.tsx deleted file mode 100644 index 52bfc0dce..000000000 --- a/frontend/src/modules/SeismicIntersection/settings/settings.tsx +++ /dev/null @@ -1,585 +0,0 @@ -import React from "react"; - -import { SurfaceAttributeType_api } from "@api"; -import { EnsembleIdent } from "@framework/EnsembleIdent"; -import { ModuleSettingsProps } from "@framework/Module"; -import { useSettingsStatusWriter } from "@framework/StatusWriter"; -import { SyncSettingKey, SyncSettingsHelper } from "@framework/SyncSettings"; -import { useEnsembleSet } from "@framework/WorkbenchSession"; -import { EnsembleDropdown } from "@framework/components/EnsembleDropdown"; -import { Wellbore } from "@framework/types/wellbore"; -import { fixupEnsembleIdent, maybeAssignFirstSyncedEnsemble } from "@framework/utils/ensembleUiHelpers"; -import { CircularProgress } from "@lib/components/CircularProgress"; -import { CollapsibleGroup } from "@lib/components/CollapsibleGroup"; -import { Dropdown } from "@lib/components/Dropdown"; -import { Input } from "@lib/components/Input"; -import { Label } from "@lib/components/Label"; -import { QueryStateWrapper } from "@lib/components/QueryStateWrapper"; -import { RadioGroup } from "@lib/components/RadioGroup"; -import { Select, SelectOption } from "@lib/components/Select"; -import { useValidArrayState } from "@lib/hooks/useValidArrayState"; -import { useValidState } from "@lib/hooks/useValidState"; -import { SurfaceDirectory, SurfaceTimeType } from "@modules/_shared/Surface"; -import { useRealizationSurfacesMetadataQuery } from "@modules/_shared/Surface"; -import { useDrilledWellboreHeadersQuery } from "@modules/_shared/WellBore"; -import { usePropagateApiErrorToStatusWriter } from "@modules/_shared/hooks/usePropagateApiErrorToStatusWriter"; - -import { useAtom, useSetAtom } from "jotai"; -import { isEqual } from "lodash"; - -import { - extensionAtom, - seismicAddressAtom, - surfaceAddressAtom, - wellboreAddressAtom, - wellborePickCaseUuidAtom, - wellborePickSelectionAtom, - zScaleAtom, -} from "./atoms/baseAtoms"; -import { useSeismicCubeMetaListQuery } from "./hooks/queryHooks"; - -import { Interfaces } from "../interfaces"; -import { - SeismicAddress, - SurfaceAddress, - WellborePickSelectionType, - WellborePickSelectionTypeEnumToStringMapping, -} from "../typesAndEnums"; -import { SeismicCubeMetaDirectory, SeismicTimeType } from "../utils/seismicCubeDirectory"; - -const SeismicTimeTypeEnumToSurveyTypeStringMapping = { - [SeismicTimeType.TimePoint]: "3D", - [SeismicTimeType.Interval]: "4D", -}; -const SeismicTimeTypeEnumToSeismicTimeTypeStringMapping = { - [SeismicTimeType.TimePoint]: "Seismic timestamps", - [SeismicTimeType.Interval]: "Seismic intervals", -}; - -enum SeismicDataSource { - SIMULATED = "Simulated", - OBSERVED = "Observed", -} - -const SeismicDataSourceTypeToStringMapping = { - [SeismicDataSource.SIMULATED]: "Simulated", - [SeismicDataSource.OBSERVED]: "Observed", -}; - -// To be a variable in the future? 
-const WELLBORE_TYPE = "smda"; - -// Hardcoded min/max limits for input elements -const EXTENSION_LIMITS = { min: 100, max: 100000 }; // Min/max extension in meters outside both sides of the well path [m] -const Z_SCALE_LIMITS = { min: 1, max: 100 }; // Minimum z-scale factor - -// Hardcoded surface time type - no surface as function of time -const SURFACE_TIME_TYPE = SurfaceTimeType.None; - -export function Settings({ - settingsContext, - workbenchSession, - workbenchServices, -}: ModuleSettingsProps): React.ReactNode { - const syncedSettingKeys = settingsContext.useSyncedSettingKeys(); - const syncHelper = new SyncSettingsHelper(syncedSettingKeys, workbenchServices); - const syncedValueEnsembles = syncHelper.useValue(SyncSettingKey.ENSEMBLE, "global.syncValue.ensembles"); - const ensembleSet = useEnsembleSet(workbenchSession); - const statusWriter = useSettingsStatusWriter(settingsContext); - - const setSeismicAddress = useSetAtom(seismicAddressAtom); - const setSurfaceAddress = useSetAtom(surfaceAddressAtom); - const [wellboreAddress, setWellboreAddress] = useAtom(wellboreAddressAtom); - const setWellborePickCaseUuid = useSetAtom(wellborePickCaseUuidAtom); - const [wellborePickSelection, setWellborePickSelection] = useAtom(wellborePickSelectionAtom); - const [extension, setExtension] = useAtom(extensionAtom); - const [zScale, setZScale] = useAtom(zScaleAtom); - - const [fetchedSurfaceNames, setFetchedSurfaceNames] = React.useState([]); - const [fetchedSurfaceAttributes, setFetchedSurfaceAttributes] = React.useState([]); - - const [selectedEnsembleIdent, setSelectedEnsembleIdent] = React.useState(null); - const [realizationNumber, setRealizationNumber] = React.useState(0); - const [isObserved, setIsObserved] = React.useState(false); - - const [seismicTimeType, setSeismicTimeType] = React.useState(SeismicTimeType.TimePoint); - const [selectedWellboreAddress, setSelectedWellboreAddress] = React.useState(wellboreAddress); - const [selectedWellborePickSelection, setSelectedWellborePickSelection] = - React.useState(wellborePickSelection); - - const candidateEnsembleIdent = maybeAssignFirstSyncedEnsemble(selectedEnsembleIdent, syncedValueEnsembles); - const computedEnsembleIdent = fixupEnsembleIdent(candidateEnsembleIdent, ensembleSet); - if (computedEnsembleIdent && !computedEnsembleIdent.equals(selectedEnsembleIdent)) { - setSelectedEnsembleIdent(computedEnsembleIdent); - } - - const isValidRealizationNumber = selectedEnsembleIdent - ? ensembleSet.findEnsemble(selectedEnsembleIdent)?.getRealizations().includes(realizationNumber) ?? 
false - : false; - if (!isValidRealizationNumber) { - statusWriter.addError("Realization number does not exist in ensemble"); - } - - // Queries - const wellHeadersQuery = useDrilledWellboreHeadersQuery(computedEnsembleIdent?.getCaseUuid()); - const seismicCubeMetaListQuery = useSeismicCubeMetaListQuery( - computedEnsembleIdent?.getCaseUuid(), - computedEnsembleIdent?.getEnsembleName() - ); - const surfaceMetadataQuery = useRealizationSurfacesMetadataQuery( - computedEnsembleIdent?.getCaseUuid(), - computedEnsembleIdent?.getEnsembleName() - ); - usePropagateApiErrorToStatusWriter(wellHeadersQuery, statusWriter); - usePropagateApiErrorToStatusWriter(seismicCubeMetaListQuery, statusWriter); - usePropagateApiErrorToStatusWriter(surfaceMetadataQuery, statusWriter); - - if (seismicCubeMetaListQuery.data && seismicCubeMetaListQuery.data.length === 0) { - statusWriter.addWarning("No seismic cubes found for ensemble"); - } - if (surfaceMetadataQuery.data && surfaceMetadataQuery.data.surfaces.length === 0) { - statusWriter.addWarning("No surfaces found for ensemble"); - } - - // Handling well headers query - const syncedWellBore = syncHelper.useValue(SyncSettingKey.WELLBORE, "global.syncValue.wellBore"); - const availableWellboreList: Wellbore[] = - wellHeadersQuery.data?.map((wellbore) => ({ - type: WELLBORE_TYPE, - uwi: wellbore.uniqueWellboreIdentifier, - uuid: wellbore.wellboreUuid, - })) || []; - const computedWellboreAddress = fixupSyncedOrSelectedOrFirstWellbore( - syncedWellBore || null, - selectedWellboreAddress || null, - availableWellboreList - ); - - if (!isEqual(computedWellboreAddress, selectedWellboreAddress)) { - setSelectedWellboreAddress(computedWellboreAddress); - } - - // Create surface directory (depth and time to match attributes for seismic cube) - const surfaceDirectory = new SurfaceDirectory({ - realizationMetaSet: surfaceMetadataQuery.data, - timeType: SURFACE_TIME_TYPE, - includeAttributeTypes: [SurfaceAttributeType_api.DEPTH, SurfaceAttributeType_api.TIME], - }); - - // Get attributes for available surfaces and set valid state hook - let computedSurfaceAttributes: string[] = fetchedSurfaceAttributes; - const noSurfaceNameFilter = null; // No filter for surface attributes - const candidateSurfaceAttributes = surfaceDirectory.getAttributeNames(noSurfaceNameFilter); - if (surfaceMetadataQuery.data && !isEqual(computedSurfaceAttributes, candidateSurfaceAttributes)) { - computedSurfaceAttributes = candidateSurfaceAttributes; - setFetchedSurfaceAttributes(candidateSurfaceAttributes); - } - const [selectedSurfaceAttribute, setSelectedSurfaceAttribute] = useValidState({ - initialState: null, - validStates: computedSurfaceAttributes, - }); - - // Find surface names which has selected attribute and set valid state hook - let computedSurfaceNames: string[] = fetchedSurfaceNames; - const candidateSurfaceNames = surfaceDirectory.getSurfaceNames(selectedSurfaceAttribute); - if (surfaceMetadataQuery.data && !isEqual(computedSurfaceNames, candidateSurfaceNames)) { - computedSurfaceNames = candidateSurfaceNames; - setFetchedSurfaceNames(candidateSurfaceNames); - } - const [selectedSurfaceNames, setSelectedSurfaceNames] = useValidArrayState({ - initialState: [], - validStateArray: computedSurfaceNames, - }); - - // Create seismic cube directory - const seismicCubeMetaDirectory = seismicCubeMetaListQuery.data - ? 
new SeismicCubeMetaDirectory({ - seismicCubeMetaList: seismicCubeMetaListQuery.data, - timeType: seismicTimeType, - useObservedSeismicCubes: isObserved, - }) - : null; - - const [selectedSeismicAttribute, setSelectedSeismicAttribute] = useValidState({ - initialState: null, - validStates: seismicCubeMetaDirectory?.getAttributeNames() ?? [], - }); - const [selectedSeismicTime, setSelectedSeismicTime] = useValidState({ - initialState: null, - validStates: seismicCubeMetaDirectory?.getTimeOrIntervalStrings() ?? [], - }); - - const seismicAttributeOptions = seismicCubeMetaDirectory - ? seismicCubeMetaDirectory.getAttributeNames().map((attribute) => { - return { label: attribute, value: attribute }; - }) - : []; - const seismicTimeOptions = seismicCubeMetaDirectory - ? createOptionsFromTimeOrIntervalStrings(seismicCubeMetaDirectory.getTimeOrIntervalStrings()) - : []; - - React.useEffect( - function propagateSeismicAddressToView() { - let seismicAddress: SeismicAddress | null = null; - if (computedEnsembleIdent && selectedSeismicAttribute && selectedSeismicTime && isValidRealizationNumber) { - seismicAddress = { - caseUuid: computedEnsembleIdent.getCaseUuid(), - ensemble: computedEnsembleIdent.getEnsembleName(), - realizationNumber: realizationNumber, - attribute: selectedSeismicAttribute, - timeString: selectedSeismicTime, - observed: isObserved, - }; - } - setSeismicAddress(seismicAddress); - }, - [ - computedEnsembleIdent, - selectedSeismicAttribute, - selectedSeismicTime, - isObserved, - isValidRealizationNumber, - realizationNumber, - setSeismicAddress, - ] - ); - - React.useEffect( - function propagateSurfaceAddressToView() { - let surfaceAddress: SurfaceAddress | null = null; - if ( - computedEnsembleIdent && - selectedSurfaceAttribute && - selectedSurfaceNames.length !== 0 && - isValidRealizationNumber - ) { - surfaceAddress = { - caseUuid: computedEnsembleIdent.getCaseUuid(), - ensemble: computedEnsembleIdent.getEnsembleName(), - realizationNumber: realizationNumber, - surfaceNames: selectedSurfaceNames, - attribute: selectedSurfaceAttribute, - }; - } - setSurfaceAddress(surfaceAddress); - }, - [ - computedEnsembleIdent, - selectedSurfaceAttribute, - selectedSurfaceNames, - isValidRealizationNumber, - realizationNumber, - setSurfaceAddress, - ] - ); - - React.useEffect( - function propagateWellBoreAddressToView() { - setWellboreAddress(selectedWellboreAddress); - }, - [selectedWellboreAddress, setWellboreAddress] - ); - - React.useEffect( - function propagateWellborePickCaseUuidToView() { - setWellborePickCaseUuid(computedEnsembleIdent?.getCaseUuid() ?? 
null); - }, - [computedEnsembleIdent, setWellborePickCaseUuid] - ); - - React.useEffect( - function propagateWellborePickSelectionToView() { - setWellborePickSelection(selectedWellborePickSelection); - }, - [selectedWellborePickSelection, setWellborePickSelection] - ); - - function handleEnsembleSelectionChange(newEnsembleIdent: EnsembleIdent | null) { - setSelectedEnsembleIdent(newEnsembleIdent); - if (newEnsembleIdent) { - syncHelper.publishValue(SyncSettingKey.ENSEMBLE, "global.syncValue.ensembles", [newEnsembleIdent]); - } - } - - function handleRealizationTextChanged(event: React.ChangeEvent) { - const base10 = 10; - const realNum = Math.max(0, parseInt(event.target.value, base10)); - setRealizationNumber(realNum); - } - - function handleSurfaceNameChange(values: string[]) { - setSelectedSurfaceNames(values); - } - - function handleSurfaceAttributeChange(values: string[]) { - if (values.length === 0) { - setSelectedSurfaceAttribute(null); - return; - } - setSelectedSurfaceAttribute(values[0]); - } - - function handleSeismicAttributeChange(values: string[]) { - if (values.length === 0) { - setSelectedSeismicAttribute(null); - return; - } - setSelectedSeismicAttribute(values[0]); - } - - function handleSeismicTimeChange(values: string[]) { - if (values.length === 0) { - setSelectedSeismicTime(null); - return; - } - setSelectedSeismicTime(values[0]); - } - - function handleWellChange(selectedWellboreUuids: string[], validWellboreList: Wellbore[]) { - if (selectedWellboreUuids.length === 0) { - setSelectedWellboreAddress(null); - return; - } - - // Use only first wellbore - const wellboreUuid = selectedWellboreUuids[0]; - const wellUwi = validWellboreList.find((wellbore) => wellbore.uuid === wellboreUuid)?.uwi; - - if (!wellUwi) return; - - const newWellboreAddress: Wellbore = { type: WELLBORE_TYPE, uuid: wellboreUuid, uwi: wellUwi }; - setSelectedWellboreAddress(newWellboreAddress); - syncHelper.publishValue(SyncSettingKey.WELLBORE, "global.syncValue.wellBore", newWellboreAddress); - } - - function handleExtensionChange(event: React.ChangeEvent) { - const newExtension = parseInt(event.target.value, 10); - setExtension(newExtension); - } - - function handleZScaleChange(event: React.ChangeEvent) { - const newZScale = parseInt(event.target.value, 10); - setZScale(newZScale); - } - - return ( -
- -
- - -
-
- - } - > - - - - - -
- ); -} - -function fixupSyncedOrSelectedOrFirstWellbore( - syncedWellbore: Wellbore | null, - selectedWellbore: Wellbore | null, - legalWellbores: Wellbore[] -): Wellbore | null { - const allUuids = legalWellbores.map((elm) => elm.uuid); - if (syncedWellbore && allUuids.includes(syncedWellbore.uuid)) { - return syncedWellbore; - } - if (selectedWellbore && allUuids.includes(selectedWellbore.uuid)) { - return selectedWellbore; - } - if (legalWellbores.length !== 0) { - return legalWellbores[0]; - } - return null; -} - -function createOptionsFromTimeOrIntervalStrings(timeOrIntervalStrings: string[]): SelectOption[] { - if (timeOrIntervalStrings.length == 0) { - return []; - } - - // '2018-01-01T00:00:00.000/2019-07-01T00:00:00.000' to '2018-01-01/2019-07-01' - const options = timeOrIntervalStrings.map((elm) => { - const isInterval = elm.includes("/"); - return { value: elm, label: isInterval ? isoIntervalStringToDateLabel(elm) : isoStringToDateLabel(elm) }; - }); - return options; -} - -/** - * Extracts the date substring from an ISO string - * - * Input ISO string format: '2018-01-01T00:00:00.000' - * Returns: '2018-01-01' - */ -function isoStringToDateLabel(inputIsoString: string): string { - const date = inputIsoString.split("T")[0]; - return `${date}`; -} - -/** - * Extracts interval date substring from an ISO string - * - * Input ISO string format: '2018-01-01T00:00:00.000/2019-07-01T00:00:00.000' - * Returns: '2018-01-01/2019-07-01' - */ -function isoIntervalStringToDateLabel(inputIsoIntervalString: string): string { - const [start, end] = inputIsoIntervalString.split("/"); - const startDate = start.split("T")[0]; - const endDate = end.split("T")[0]; - return `${startDate}/${endDate}`; -} diff --git a/frontend/src/modules/SeismicIntersection/typesAndEnums.ts b/frontend/src/modules/SeismicIntersection/typesAndEnums.ts deleted file mode 100644 index b528075a6..000000000 --- a/frontend/src/modules/SeismicIntersection/typesAndEnums.ts +++ /dev/null @@ -1,28 +0,0 @@ -export type SeismicAddress = { - caseUuid: string; - ensemble: string; - realizationNumber: number; - attribute: string; - observed: boolean; - timeString?: string; -}; - -export type SurfaceAddress = { - caseUuid: string; - ensemble: string; - realizationNumber: number; - surfaceNames: string[]; - attribute: string; -}; - -export enum WellborePickSelectionType { - NONE = "None", - ALL = "All", - SELECTED_SURFACES = "SelectedSurfaces", -} - -export const WellborePickSelectionTypeEnumToStringMapping = { - [WellborePickSelectionType.NONE]: "None", - [WellborePickSelectionType.ALL]: "All", - [WellborePickSelectionType.SELECTED_SURFACES]: "Selected Surfaces", -}; diff --git a/frontend/src/modules/SeismicIntersection/utils/esvIntersectionControllerUtils.ts b/frontend/src/modules/SeismicIntersection/utils/esvIntersectionControllerUtils.ts deleted file mode 100644 index 4bc2ced08..000000000 --- a/frontend/src/modules/SeismicIntersection/utils/esvIntersectionControllerUtils.ts +++ /dev/null @@ -1,171 +0,0 @@ -import { - Annotation, - CalloutCanvasLayer, - Controller, - GeomodelCanvasLayer, - GeomodelLabelsLayer, - OverlayMouseExitEvent, - OverlayMouseMoveEvent, - SeismicCanvasLayer, - SurfaceData, - WellborepathLayer, - getPicksData, - getSeismicInfo, - getSeismicOptions, - transformFormationData, -} from "@equinor/esv-intersection"; - -import { makeReferenceSystemFromTrajectoryXyzPoints } from "./esvIntersectionDataConversion"; -import { Pick, Unit } from "./esvIntersectionTypes"; - -/** - * Utility to add md overlay for hover to esv 
intersection controller - */ -export function addMDOverlay(controller: Controller) { - const elm = controller.overlay.create("md", { - onMouseMove: (event: OverlayMouseMoveEvent) => { - const { target, caller, x } = event; - const newX = caller.currentStateAsEvent.xScale.invert(x); - const referenceSystem = caller.referenceSystem; - - if (!referenceSystem || !(target instanceof HTMLElement)) return; - - const md = referenceSystem.unproject(newX); - target.textContent = Number.isFinite(md) ? `MD: ${md?.toFixed(1)}` : "-"; - if (md && (md < 0 || referenceSystem.length < md)) { - target.classList.replace("visible", "invisible"); - } else { - target.classList.replace("invisible", "visible"); - } - }, - onMouseExit: (event: OverlayMouseExitEvent) => { - if (event.target instanceof HTMLElement) { - event.target.classList.replace("visible", "invisible"); - } - }, - }); - - if (elm) { - elm.classList.add( - "invisible", - "inline-block", - "p-1", - "rounded", - "text-right", - "absolute", - "bg-black", - "bg-opacity-20", - "text-white", - "z-100" - ); - } -} - -/** - * Utility to add well bore trajectory to esv intersection controller - * - * Sets reference system with trajectory 3D coordinates, controller reference system must be handled outside - */ -export function addWellborePathLayer(controller: Controller, wellboreTrajectoryXyzPoints: number[][]): void { - const referenceSystem = makeReferenceSystemFromTrajectoryXyzPoints(wellboreTrajectoryXyzPoints); - controller.addLayer( - new WellborepathLayer("wellborepath", { - order: 3, - strokeWidth: "4px", - stroke: "black", - referenceSystem: referenceSystem, - }) - ); -} - -export type SeismicLayerOptions = { - curtain: number[][]; - xAxisOffset: number; - image: ImageBitmap; - dataValues: number[][]; - yAxisValues: number[]; -}; -/** - * Utility to add seismic layer to esv intersection controller - */ -export function addSeismicLayer( - controller: Controller, - { curtain, xAxisOffset, image, dataValues, yAxisValues }: SeismicLayerOptions -): void { - const info = getSeismicInfo({ datapoints: dataValues, yAxisValues }, curtain); - if (info) { - // Adjust x axis offset to account for curtain - info.minX = info.minX - xAxisOffset; - info.maxX = info.maxX - xAxisOffset; - } - const layer = new SeismicCanvasLayer("seismic", { - order: 1, - layerOpacity: 1, - }); - layer.data = { image: image, options: getSeismicOptions(info) }; - controller.addLayer(layer); -} - -export type SurfaceIntersectionData = { - name: string; - xyPoints: number[][]; // [x, y] points for surface intersection line in reference system -}; - -export type SurfacesLayerOptions = { - surfaceIntersectionDataList: SurfaceIntersectionData[]; - layerName: string; - surfaceColor: string; - surfaceWidth: number; -}; -export function addSurfacesLayer( - controller: Controller, - { surfaceIntersectionDataList, layerName, surfaceColor, surfaceWidth }: SurfacesLayerOptions -): void { - const surfaceIndicesWithLabels: { label: string; idx: number }[] = []; - surfaceIntersectionDataList.forEach((surface, idx) => { - if (surface.name !== surfaceIndicesWithLabels[surfaceIndicesWithLabels.length - 1]?.label) { - surfaceIndicesWithLabels.push({ label: surface.name, idx: idx }); - } - }); - - // Create surface intersection lines - const surfaceIntersectionLines: SurfaceData = { - areas: [], - lines: surfaceIntersectionDataList.map((surface) => { - return { - data: surface.xyPoints, - color: surfaceColor, - id: `${surface.name}-id`, - label: surface.name, - width: surfaceWidth, - }; - }), - }; - - 
const geomodelLayer = new GeomodelCanvasLayer(`${layerName}`, { - order: 3, - layerOpacity: 0.6, - data: surfaceIntersectionLines, - }); - const geomodelLabelsLayer = new GeomodelLabelsLayer(`${layerName}labels`, { - order: 3, - data: surfaceIntersectionLines, - maxFontSize: 16, - minFontSize: 10, - }); - controller.addLayer(geomodelLayer); - controller.addLayer(geomodelLabelsLayer); -} - -export function addWellborePicksLayer(controller: Controller, wellborePicks: Pick[], stratigraphicUnits: Unit[]) { - const picksData = transformFormationData(wellborePicks, stratigraphicUnits); - - const layer = new CalloutCanvasLayer("callout", { - order: 100, - data: getPicksData(picksData), - referenceSystem: controller.referenceSystem, - minFontSize: 12, - maxFontSize: 16, - }); - controller.addLayer(layer); -} diff --git a/frontend/src/modules/SeismicIntersection/utils/esvIntersectionDataConversion.ts b/frontend/src/modules/SeismicIntersection/utils/esvIntersectionDataConversion.ts deleted file mode 100644 index a6218e464..000000000 --- a/frontend/src/modules/SeismicIntersection/utils/esvIntersectionDataConversion.ts +++ /dev/null @@ -1,269 +0,0 @@ -import { SurfaceIntersectionData_api, WellborePicksAndStratigraphicUnits_api, WellboreTrajectory_api } from "@api"; -import { IntersectionReferenceSystem, Trajectory } from "@equinor/esv-intersection"; - -import { SurfaceIntersectionData } from "./esvIntersectionControllerUtils"; -import { Pick, Unit } from "./esvIntersectionTypes"; -import { SeismicFenceData_trans } from "./queryDataTransforms"; - -/** - * Utility to make extended trajectory object from array of 3D trajectory coordinates [x,y,z] and extension - * - * TODO: Number of samples. Needs some thought for future, perhaps detect num samples based on seismic metadata? - */ -export function makeExtendedTrajectoryFromTrajectoryXyzPoints( - trajectoryXyzPoints: number[][], - extension: number, - samplingIncrementMeters = 5 -): Trajectory { - const isVertical = isVerticalTrajectory(trajectoryXyzPoints); - if (isVertical) { - // Adds extension to top and bottom of vertical line - trajectoryXyzPoints = addStartAndEndPointsToTrajectoryForVerticalLine(trajectoryXyzPoints, extension); - } - - const referenceSystem = new IntersectionReferenceSystem(trajectoryXyzPoints); - - // Offset: md at start of well path - referenceSystem.offset = trajectoryXyzPoints[0][2]; - - const displacement = referenceSystem.displacement ?? 1; - const numPoints = Math.min(1000, Math.floor((displacement + extension * 2) / samplingIncrementMeters)); - const extendedTrajectory = isVertical - ? 
referenceSystem.getTrajectory(numPoints) - : referenceSystem.getExtendedTrajectory(numPoints, extension, extension); - - extendedTrajectory.points.forEach((point) => { - point[0] = parseFloat(point[0].toFixed(3)); - point[1] = parseFloat(point[1].toFixed(3)); - }); - - return extendedTrajectory; -} - -/** - * Helper function to check if a trajectory made of 3D coordinates [x,y,z] is a vertical line - * - * Checks for first coordinate with different x and y coordinates than the first point - */ -function isVerticalTrajectory(trajectoryXyzPoints: number[][]): boolean { - if (trajectoryXyzPoints.length === 0) return false; - - const firstPoint = trajectoryXyzPoints[0]; - - if (firstPoint.length !== 3) { - throw new Error("First coordinates of trajectory must be 3D coordinates of length 3"); - } - - // Detect first 3D point which is not on the same x and y coordinates as the first point and return false - for (let i = 1; i < trajectoryXyzPoints.length; ++i) { - const point = trajectoryXyzPoints[i]; - if (point.length !== 3) { - throw new Error("Trajectory points must be 3D coordinates of length 3"); - } - if (point[0] !== firstPoint[0] || point[1] !== firstPoint[1]) { - return false; - } - } - - return true; -} - -/** - * Helper function to add start and end points to array of 3D trajectory coordinates [x,y,z] to prevent pure vertical line - * - * This function assumes check of vertical line beforehand, and only performs adding of start and end points - * - * @param trajectoryXyzPoints - Array of 3D coordinates [x,y,z] - */ -function addStartAndEndPointsToTrajectoryForVerticalLine( - trajectoryXyzPoints: number[][], - extension: number -): number[][] { - if (trajectoryXyzPoints.length === 0) return []; - - const firstCoordinates = trajectoryXyzPoints[0]; - const lastCoordinates = trajectoryXyzPoints[trajectoryXyzPoints.length - 1]; - - if (firstCoordinates.length !== 3 || lastCoordinates.length !== 3) { - throw new Error("First and last coordinates of trajectory must be 3D coordinates of length 3"); - } - - const modifiedTrajectoryXyzPoints = [...trajectoryXyzPoints]; - - // Compare x (index 0) and y (index 1) coordinates of first and last points - // Add start and end coordinates to trajectory - // NOTE: Should be consider to create a 3D vector with length = extension, i.e. extension = sqrt(x^2 + y^2 + z^2), with z constant, - // i.e. -> x = sqrt(extension) and y = sqrt(extension)? 
- const firstXCoord = firstCoordinates[0] - extension; - const firstYCoord = firstCoordinates[1]; - const firstZCoord = firstCoordinates[2]; - - const lastXCoord = lastCoordinates[0] + extension; - const lastYCoord = lastCoordinates[1]; - const lastZCoord = lastCoordinates[2]; - - modifiedTrajectoryXyzPoints.unshift([firstXCoord, firstYCoord, firstZCoord]); - modifiedTrajectoryXyzPoints.push([lastXCoord, lastYCoord, lastZCoord]); - - return modifiedTrajectoryXyzPoints; -} - -/** - * Make an array of 3D coordinates [x,y,z] from a wellbore trajectory - * - * @param wellboreTrajectory - Wellbore trajectory object - * @returns Array of 3D coordinates [x,y,z] - with [x,y,z] = [easting, northing, tvd_msl] - */ -export function makeTrajectoryXyzPointsFromWellboreTrajectory(wellboreTrajectory: WellboreTrajectory_api): number[][] { - const eastingArr = wellboreTrajectory.eastingArr; - const northingArr = wellboreTrajectory.northingArr; - const tvdArr = wellboreTrajectory.tvdMslArr; - - if (eastingArr.length !== northingArr.length && northingArr.length !== tvdArr.length) { - throw new Error("Wellbore trajectory coordinate arrays are not of same length"); - } - - // Trajectory points: array of 3D coordinates [x,y,z] - const trajectoryXyzPoints = eastingArr.map((easting: number, idx: number) => [ - parseFloat(easting.toFixed(3)), - parseFloat(northingArr[idx].toFixed(3)), - parseFloat(tvdArr[idx].toFixed(3)), - ]); - - return trajectoryXyzPoints; -} - -/** - * Make a reference system from array of 3D coordinates [x,y,z] defined for a trajectory - */ -export function makeReferenceSystemFromTrajectoryXyzPoints( - trajectoryXyzPoints: number[][] -): IntersectionReferenceSystem { - const referenceSystem = new IntersectionReferenceSystem(trajectoryXyzPoints); - return referenceSystem; -} - -/** - * Utility function to convert the 1D array of values from the fence data to a 2D array of values - * for the seismic slice image. - * - * For the bit map image, the values are provided s.t. a seismic trace is a column in the image, - * thus the data will be transposed. - * - * trace a,b,c and d - * - * num_traces = 4 - * num_samples_per_trace = 3 - * fence_traces = [a1, a2, a3, b1, b2, b3, c1, c2, c3, d1, d2, d3] - * - * Image: - * - * a1 b1 c1 d1 - * a2 b2 c2 d2 - * a3 b3 c3 d3 - */ -export function createSeismicSliceImageDataArrayFromFenceData( - fenceData: SeismicFenceData_trans, - fillValue = 0 -): number[][] { - const imageArray: number[][] = []; - - const numTraces = fenceData.num_traces; - const numSamples = fenceData.num_samples_per_trace; - const fenceValues = fenceData.fenceTracesFloat32Arr; - - for (let i = 0; i < numSamples; ++i) { - const row: number[] = []; - for (let j = 0; j < numTraces; ++j) { - const index = i + j * numSamples; - const fenceValue = fenceValues[index]; - const validFenceValue = Number.isNaN(fenceValue) ? fillValue : fenceValue; - row.push(validFenceValue); - } - imageArray.push(row); - } - return imageArray; -} - -/** - * Utility to create an array of values for the Y axis of the seismic slice image. I.e. depth values - * for the seismic depth axis. 
- */ -export function createSeismicSliceImageYAxisValuesArrayFromFenceData(fenceData: SeismicFenceData_trans): number[] { - const yAxisValues: number[] = []; - - const numSamples = fenceData.num_samples_per_trace; - const minDepth = fenceData.min_fence_depth; - const maxDepth = fenceData.max_fence_depth; - - for (let i = 0; i < numSamples; ++i) { - yAxisValues.push(minDepth + ((maxDepth - minDepth) / numSamples) * i); - } - return yAxisValues; -} - -/** - * Utility to create an array of surface intersection data for the esv intersection layer. - * - * Takes the surface intersection data from API and converts it to an array of [x,y] points for each surface intersection line. - */ -export function createEsvSurfaceIntersectionDataArrayFromSurfaceIntersectionDataApiArray( - surfaceIntersectionData: SurfaceIntersectionData_api[] -): SurfaceIntersectionData[] { - const surfaceIntersectionDataArray: SurfaceIntersectionData[] = []; - - for (const surfaceIntersection of surfaceIntersectionData) { - if (surfaceIntersection.z_points.length !== surfaceIntersection.cum_lengths.length) { - throw new Error( - `Surface intersection data for ${surfaceIntersection.name} has different number of z_points and cum_lengths` - ); - } - - const xyPoints = surfaceIntersection.z_points.map((z: number, idx) => { - return [surfaceIntersection.cum_lengths[idx], z]; - }); - surfaceIntersectionDataArray.push({ name: surfaceIntersection.name, xyPoints: xyPoints }); - } - - return surfaceIntersectionDataArray; -} - -/** - * Utility to create an object of wellbore picks and stratigraphic units for the esv intersection layer. - * - * Converts the API data to the format required by the esv intersection layer. - */ -export function createEsvWellborePicksAndStratigraphicUnits( - wellborePicksAndStratigraphicUnits_api: WellborePicksAndStratigraphicUnits_api -): { wellborePicks: Pick[]; stratigraphicUnits: Unit[] } { - const wellborePicks: Pick[] = wellborePicksAndStratigraphicUnits_api.wellbore_picks.map((pick) => { - return { - pickIdentifier: pick.pickIdentifier, - confidence: pick.confidence, - depthReferencePoint: pick.depthReferencePoint, - md: pick.md, - mdUnit: pick.mdUnit, - tvd: pick.tvd, - }; - }); - - // lithologyType and stratUnitParent are defined as number in esv intersection layer, but is retrieved as string - // from back-end - const stratigraphicUnits: Unit[] = wellborePicksAndStratigraphicUnits_api.stratigraphic_units.map((unit) => { - return { - identifier: unit.identifier, - top: unit.top, - base: unit.base, - baseAge: unit.baseAge, - topAge: unit.topAge, - colorR: unit.colorR, - colorG: unit.colorG, - colorB: unit.colorB, - stratUnitLevel: unit.stratUnitLevel, - lithologyType: unit.lithologyType as number, - stratUnitParent: unit.stratUnitParent as unknown as number, - }; - }); - - return { wellborePicks: wellborePicks, stratigraphicUnits: stratigraphicUnits }; -} diff --git a/frontend/src/modules/SeismicIntersection/utils/esvIntersectionHooks.ts b/frontend/src/modules/SeismicIntersection/utils/esvIntersectionHooks.ts deleted file mode 100644 index 5adc79d0e..000000000 --- a/frontend/src/modules/SeismicIntersection/utils/esvIntersectionHooks.ts +++ /dev/null @@ -1,65 +0,0 @@ -import React from "react"; - -import { generateSeismicSliceImage } from "@equinor/esv-intersection"; - -import { isEqual } from "lodash"; - -export type SeismicSliceImageOptions = { - dataValues: number[][]; // Array of seismic image data values - yAxisValues: number[]; // Array of seismic image y axis values - trajectoryXyPoints: 
number[][]; // Array of 2D projected points [x, y] - colormap: string[]; - extension: number; // Needed to keep synched extension -}; - -export enum SeismicSliceImageStatus { - SUCCESS = "success", - LOADING = "loading", - ERROR = "error", -} - -export type SeismicSliceImageData = { - image: ImageBitmap | null; - synchedOptions: SeismicSliceImageOptions | null; - status: SeismicSliceImageStatus; -}; - -/** - * Hook to generate seismic slice image for async utility. - * - * Returns image, synched image options used to generate the image, and image status. - */ -export function useGenerateSeismicSliceImageData(imageOptions: SeismicSliceImageOptions | null): SeismicSliceImageData { - const [prevData, setPrevData] = React.useState(null); - const [image, setImage] = React.useState(null); - const [imageStatus, setImageStatus] = React.useState(SeismicSliceImageStatus.SUCCESS); - const [synchedImageOptions, setSynchedImageOptions] = React.useState(null); - - if (imageOptions !== null && !isEqual(imageOptions, prevData)) { - setPrevData(imageOptions); - setImageStatus(SeismicSliceImageStatus.LOADING); - - // Async generation of seismic slice image - generateSeismicSliceImage( - { datapoints: imageOptions.dataValues, yAxisValues: imageOptions.yAxisValues }, - imageOptions.trajectoryXyPoints, - imageOptions.colormap, - { - isLeftToRight: true, - } - ) - .then((result) => { - setImage(result ?? null); - setImageStatus(SeismicSliceImageStatus.SUCCESS); - setSynchedImageOptions(imageOptions); - }) - .catch(() => { - setImage(null); - setImageStatus(SeismicSliceImageStatus.ERROR); - setSynchedImageOptions(imageOptions); - }); - } - - // Slice image data - return { image: image, synchedOptions: synchedImageOptions, status: imageStatus }; -} diff --git a/frontend/src/modules/SeismicIntersection/utils/esvIntersectionTypes.ts b/frontend/src/modules/SeismicIntersection/utils/esvIntersectionTypes.ts deleted file mode 100644 index 6a94cde78..000000000 --- a/frontend/src/modules/SeismicIntersection/utils/esvIntersectionTypes.ts +++ /dev/null @@ -1,40 +0,0 @@ -/** - * Types for ESV Intersection component, as they are noe exported - * - * Version: "@equinor/esv-intersection@3.0.12" - * - * - */ - -/** - * Pick type from esv-intersection version 3.0.12 - * - * Compare this snippet from node_modules/@equinor/esv-intersection/src/datautils/picks.ts/Pick - */ -export type Pick = { - pickIdentifier?: string; - confidence: string | null; - depthReferencePoint: string; - md: number; - mdUnit: string; - tvd: number; -}; - -/** - * Unit type from esv-intersection version 3.0.12 - * - * Compare this snippet from node_modules/@equinor/esv-intersection/src/datautils/picks.ts/Unit - */ -export type Unit = { - identifier: string; - top: string; - base: string; - baseAge: number; - topAge: number; - colorR: number; - colorG: number; - colorB: number; - stratUnitLevel: number; - lithologyType: number; - stratUnitParent: number; -}; diff --git a/frontend/src/modules/SeismicIntersection/utils/queryDataTransforms.ts b/frontend/src/modules/SeismicIntersection/utils/queryDataTransforms.ts deleted file mode 100644 index cc640dee8..000000000 --- a/frontend/src/modules/SeismicIntersection/utils/queryDataTransforms.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { SeismicFenceData_api } from "@api"; -import { b64DecodeFloatArrayToFloat32 } from "@modules_shared/base64"; - -// Data structure for transformed data -// Remove the base64 encoded data and replace with a Float32Array -export type SeismicFenceData_trans = Omit & { - 
fenceTracesFloat32Arr: Float32Array; -}; - -export function transformSeismicFenceData(apiData: SeismicFenceData_api): SeismicFenceData_trans { - const startTS = performance.now(); - - const { fence_traces_b64arr, ...untransformedData } = apiData; - const dataFloat32Arr = b64DecodeFloatArrayToFloat32(fence_traces_b64arr); - - console.debug(`transformSurfaceData() took: ${(performance.now() - startTS).toFixed(1)}ms`); - - return { - ...untransformedData, - fenceTracesFloat32Arr: dataFloat32Arr, - }; -} diff --git a/frontend/src/modules/SeismicIntersection/utils/seismicCubeDirectory.ts b/frontend/src/modules/SeismicIntersection/utils/seismicCubeDirectory.ts deleted file mode 100644 index cbec52284..000000000 --- a/frontend/src/modules/SeismicIntersection/utils/seismicCubeDirectory.ts +++ /dev/null @@ -1,68 +0,0 @@ -import { SeismicCubeMeta_api } from "@api"; -import { isIsoStringInterval } from "@framework/utils/timestampUtils"; - -// Time type for seismic cubes. -export enum SeismicTimeType { - TimePoint = "TimePoint", - Interval = "Interval", -} - -export type SeismicCubeMetaDirectoryOptions = { - seismicCubeMetaList: SeismicCubeMeta_api[]; - timeType: SeismicTimeType; - useObservedSeismicCubes?: boolean; -}; - -// Class responsible for managing a list of seismic cube meta. -export class SeismicCubeMetaDirectory { - private _seismicCubeList: SeismicCubeMeta_api[] = []; - - // Constructs a SeismicCubeDirectory with optional content filter criteria. - constructor(options: SeismicCubeMetaDirectoryOptions) { - if (!options) return; - - let filteredList = filterSeismicCubeMetaListOnTimeType(options.seismicCubeMetaList, options.timeType); - - if (options.useObservedSeismicCubes) { - filteredList = filteredList.filter((cube) => cube.is_observation); - } else { - filteredList = filteredList.filter((cube) => !cube.is_observation); - } - - this._seismicCubeList = filteredList; - } - - public getAttributeNames(): string[] { - return [...new Set(this._seismicCubeList.map((cube) => cube.seismic_attribute))].sort(); - } - - public getTimeOrIntervalStrings(requireAttributeName?: string): string[] { - if (requireAttributeName) { - const attributeDateOrIntervalStrings = this._seismicCubeList - .filter((cube) => cube.seismic_attribute === requireAttributeName) - .map((cube) => cube.iso_date_or_interval); - return [...new Set(attributeDateOrIntervalStrings)].sort(); - } - - return [...new Set(this._seismicCubeList.map((cube) => cube.iso_date_or_interval))].sort(); - } -} - -// Internal utility to filter directory based on time type. 
-function filterSeismicCubeMetaListOnTimeType( - seismicCubeMetaList: SeismicCubeMeta_api[], - timeType: SeismicTimeType -): SeismicCubeMeta_api[] { - switch (timeType) { - case SeismicTimeType.TimePoint: - return seismicCubeMetaList.filter( - (cube) => cube.iso_date_or_interval && !isIsoStringInterval(cube.iso_date_or_interval) - ); - case SeismicTimeType.Interval: - return seismicCubeMetaList.filter( - (cube) => cube.iso_date_or_interval && isIsoStringInterval(cube.iso_date_or_interval) - ); - default: - throw new Error("Invalid TimeType"); - } -} diff --git a/frontend/src/modules/SeismicIntersection/view.tsx b/frontend/src/modules/SeismicIntersection/view.tsx deleted file mode 100644 index f07dbf651..000000000 --- a/frontend/src/modules/SeismicIntersection/view.tsx +++ /dev/null @@ -1,331 +0,0 @@ -import React from "react"; - -import { - SeismicFencePolyline_api, - SurfaceIntersectionCumulativeLengthPolyline_api, - SurfaceIntersectionData_api, - WellborePicksAndStratigraphicUnits_api, -} from "@api"; -import { Controller, IntersectionReferenceSystem, Trajectory } from "@equinor/esv-intersection"; -import { ModuleViewProps } from "@framework/Module"; -import { useViewStatusWriter } from "@framework/StatusWriter"; -import { useElementSize } from "@lib/hooks/useElementSize"; -import { ColorScaleGradientType } from "@lib/utils/ColorScale"; -import { - useWellborePicksAndStratigraphicUnitsQuery, - useWellboreTrajectoriesQuery, -} from "@modules/_shared/WellBore/queryHooks"; -import { ContentError } from "@modules/_shared/components/ContentMessage"; -import { usePropagateApiErrorToStatusWriter } from "@modules/_shared/hooks/usePropagateApiErrorToStatusWriter"; - -import { isEqual } from "lodash"; - -import { Interfaces } from "./interfaces"; -import { useSeismicFenceDataQuery, useSurfaceIntersectionQueries } from "./queryHooks"; -import { WellborePickSelectionType } from "./typesAndEnums"; -import { - addMDOverlay, - addSeismicLayer, - addSurfacesLayer, - addWellborePathLayer, - addWellborePicksLayer, -} from "./utils/esvIntersectionControllerUtils"; -import { - createEsvSurfaceIntersectionDataArrayFromSurfaceIntersectionDataApiArray, - createEsvWellborePicksAndStratigraphicUnits, - createSeismicSliceImageDataArrayFromFenceData, - createSeismicSliceImageYAxisValuesArrayFromFenceData, - makeExtendedTrajectoryFromTrajectoryXyzPoints, - makeReferenceSystemFromTrajectoryXyzPoints, - makeTrajectoryXyzPointsFromWellboreTrajectory, -} from "./utils/esvIntersectionDataConversion"; -import { - SeismicSliceImageOptions, - SeismicSliceImageStatus, - useGenerateSeismicSliceImageData, -} from "./utils/esvIntersectionHooks"; - -export function View({ viewContext, workbenchSettings }: ModuleViewProps): React.ReactNode { - const wrapperDivRef = React.useRef(null); - const wrapperDivSize = useElementSize(wrapperDivRef); - const esvIntersectionContainerRef = React.useRef(null); - const esvIntersectionControllerRef = React.useRef(null); - - const statusWriter = useViewStatusWriter(viewContext); - - const seismicAddress = viewContext.useSettingsToViewInterfaceValue("seismicAddress"); - const surfaceAddress = viewContext.useSettingsToViewInterfaceValue("surfaceAddress"); - const wellboreAddress = viewContext.useSettingsToViewInterfaceValue("wellboreAddress"); - const wellborePickCaseUuid = viewContext.useSettingsToViewInterfaceValue("wellborePickCaseUuid"); - const wellborePickSelection = viewContext.useSettingsToViewInterfaceValue("wellborePickSelection"); - const extension = 
viewContext.useSettingsToViewInterfaceValue("extension"); - const zScale = viewContext.useSettingsToViewInterfaceValue("zScale"); - - const seismicColorScale = workbenchSettings.useDiscreteColorScale({ - gradientType: ColorScaleGradientType.Diverging, - }); - - const [seismicColors, setSeismicColors] = React.useState(seismicColorScale.getColorPalette().getColors()); - if (!isEqual(seismicColorScale.getColorPalette().getColors(), seismicColors)) { - setSeismicColors(seismicColorScale.getColorPalette().getColors()); - } - - // Extended wellbore trajectory for creating intersection/fence extended on both sides of wellbore - const [extendedWellboreTrajectory, setExtendedWellboreTrajectory] = React.useState(null); - - // Array of 3D points [x,y,z] for well trajectory layer in esv-intersection (to be in synch with seismic fence layer) - const [renderWellboreTrajectoryXyzPoints, setRenderWellboreTrajectoryXyzPoints] = React.useState( - null - ); - - // States to fetch seismic fence and surface intersection - const [seismicFencePolyline, setSeismicFencePolyline] = React.useState(null); - const [surfaceIntersectionCumulativeLengthPolyline, setSurfaceIntersectionCumulativeLengthPolyline] = - React.useState(null); - - // Async generating seismic slice image - const [generateSeismicSliceImageOptions, setGenerateSeismicSliceImageOptions] = - React.useState(null); - const generatedSeismicSliceImageData = useGenerateSeismicSliceImageData(generateSeismicSliceImageOptions); - - React.useEffect(function initializeEsvIntersectionController() { - if (esvIntersectionContainerRef.current) { - const axisOptions = { xLabel: "x", yLabel: "y", unitOfMeasure: "m" }; - esvIntersectionControllerRef.current = new Controller({ - container: esvIntersectionContainerRef.current, - axisOptions, - }); - - // Initialize/configure controller - addMDOverlay(esvIntersectionControllerRef.current); - esvIntersectionControllerRef.current.setBounds([10, 1000], [0, 3000]); - esvIntersectionControllerRef.current.setViewport(1000, 1650, 6000); - } - return () => { - esvIntersectionControllerRef.current?.destroy(); - }; - }, []); - - // Get well trajectories query - const wellTrajectoriesQuery = useWellboreTrajectoriesQuery(wellboreAddress ? [wellboreAddress.uuid] : undefined); - usePropagateApiErrorToStatusWriter(wellTrajectoriesQuery, statusWriter); - - // Use first trajectory and create polyline for seismic fence query, and extended wellbore trajectory for generating seismic fence image - let candidateSeismicFencePolyline = seismicFencePolyline; - let candidateSurfaceIntersectionCumulativeLengthPolyline = surfaceIntersectionCumulativeLengthPolyline; - if (wellTrajectoriesQuery.data && wellTrajectoriesQuery.data.length !== 0) { - const trajectoryXyzPoints = makeTrajectoryXyzPointsFromWellboreTrajectory(wellTrajectoriesQuery.data[0]); - const newExtendedWellboreTrajectory = makeExtendedTrajectoryFromTrajectoryXyzPoints( - trajectoryXyzPoints, - extension - ); - - const referenceSystem = makeReferenceSystemFromTrajectoryXyzPoints(trajectoryXyzPoints); - if (esvIntersectionControllerRef.current) { - esvIntersectionControllerRef.current.setReferenceSystem(referenceSystem); - } - - // If the new extended trajectory is different, update the polyline, but keep the seismic fence image - if (!isEqual(newExtendedWellboreTrajectory, extendedWellboreTrajectory)) { - setExtendedWellboreTrajectory(newExtendedWellboreTrajectory); - - const x_points = newExtendedWellboreTrajectory?.points.map((coord) => coord[0]) ?? 
[]; - const y_points = newExtendedWellboreTrajectory?.points.map((coord) => coord[1]) ?? []; - candidateSeismicFencePolyline = { x_points, y_points }; - setSeismicFencePolyline(candidateSeismicFencePolyline); - - const cum_lengths = newExtendedWellboreTrajectory - ? IntersectionReferenceSystem.toDisplacement( - newExtendedWellboreTrajectory.points, - newExtendedWellboreTrajectory.offset - ).map((coord) => coord[0] - extension) - : []; - - candidateSurfaceIntersectionCumulativeLengthPolyline = { x_points, y_points, cum_lengths }; - setSurfaceIntersectionCumulativeLengthPolyline(candidateSurfaceIntersectionCumulativeLengthPolyline); - } - - // When new well trajectory 3D points are loaded, update the render trajectory and clear the seismic fence image - if (!isEqual(trajectoryXyzPoints, renderWellboreTrajectoryXyzPoints)) { - setRenderWellboreTrajectoryXyzPoints(trajectoryXyzPoints); - setGenerateSeismicSliceImageOptions(null); - } - } - - // Get seismic fence data from polyline - const seismicFenceDataQuery = useSeismicFenceDataQuery( - seismicAddress?.caseUuid ?? null, - seismicAddress?.ensemble ?? null, - seismicAddress?.realizationNumber ?? null, - seismicAddress?.attribute ?? null, - seismicAddress?.timeString ?? null, - seismicAddress?.observed ?? null, - candidateSeismicFencePolyline, - seismicAddress !== null - ); - - usePropagateApiErrorToStatusWriter(seismicFenceDataQuery, statusWriter); - - // Get surface intersection data from polyline - const surfaceIntersectionDataQueries = useSurfaceIntersectionQueries( - surfaceAddress?.caseUuid ?? null, - surfaceAddress?.ensemble ?? null, - surfaceAddress?.realizationNumber ?? null, - surfaceAddress?.surfaceNames ?? null, - surfaceAddress?.attribute ?? null, - null, // Time string not used for surface intersection - candidateSurfaceIntersectionCumulativeLengthPolyline, - surfaceAddress !== null - ); - for (const query of surfaceIntersectionDataQueries) { - if (!query.isError) continue; - - const queryIndex = surfaceIntersectionDataQueries.indexOf(query); - const surfaceName = surfaceAddress?.surfaceNames ? surfaceAddress?.surfaceNames[queryIndex] : "unknown"; - statusWriter.addWarning(`Error loading surface intersection data for "${surfaceName}"`); - } - - // Get all well bore picks - const wellborePicksAndStratigraphicUnitsQuery = useWellborePicksAndStratigraphicUnitsQuery( - wellborePickCaseUuid ?? undefined, - wellboreAddress ? wellboreAddress.uuid : undefined, - wellborePickSelection !== WellborePickSelectionType.NONE - ); - usePropagateApiErrorToStatusWriter(wellborePicksAndStratigraphicUnitsQuery, statusWriter); - - // Filter wellbore picks and stratigraphic units based on selected surface names - const selectedWellborePicksAndStratigraphicUnits: WellborePicksAndStratigraphicUnits_api | null = - React.useMemo(() => { - if ( - !wellborePicksAndStratigraphicUnitsQuery.data || - wellborePickSelection === WellborePickSelectionType.NONE - ) { - return null; - } - - if (wellborePickSelection === WellborePickSelectionType.ALL) { - return wellborePicksAndStratigraphicUnitsQuery.data; - } - - if (wellborePickSelection === WellborePickSelectionType.SELECTED_SURFACES) { - const selectedSurfaceNames = surfaceAddress?.surfaceNames ?? 
[]; - return { - wellbore_picks: wellborePicksAndStratigraphicUnitsQuery.data.wellbore_picks.filter((pick) => - selectedSurfaceNames.includes(pick.pickIdentifier) - ), - stratigraphic_units: wellborePicksAndStratigraphicUnitsQuery.data.stratigraphic_units, - }; - } - - return wellborePicksAndStratigraphicUnitsQuery.data; - }, [wellborePicksAndStratigraphicUnitsQuery.data, wellborePickSelection, surfaceAddress?.surfaceNames]); - - if (seismicFenceDataQuery.data) { - // Get an array of projected 2D points [x, y], as 2D curtain projection from a set of trajectory 3D points and offset - const newExtendedWellboreTrajectoryXyProjection: number[][] = extendedWellboreTrajectory - ? IntersectionReferenceSystem.toDisplacement( - extendedWellboreTrajectory.points, - extendedWellboreTrajectory.offset - ) - : []; - - const newSeismicImageDataArray = createSeismicSliceImageDataArrayFromFenceData(seismicFenceDataQuery.data); - const newSeismicImageYAxisValues = createSeismicSliceImageYAxisValuesArrayFromFenceData( - seismicFenceDataQuery.data - ); - - const newGenerateSeismicSliceImageOptions: SeismicSliceImageOptions = { - dataValues: newSeismicImageDataArray, - yAxisValues: newSeismicImageYAxisValues, - trajectoryXyPoints: newExtendedWellboreTrajectoryXyProjection, - colormap: seismicColors, - extension: extension, - }; - - if (!isEqual(generateSeismicSliceImageOptions, newGenerateSeismicSliceImageOptions)) { - setGenerateSeismicSliceImageOptions(newGenerateSeismicSliceImageOptions); - } - } - - // Update esv-intersection controller when data is ready - keep old data to prevent blank view when fetching new data - if (esvIntersectionControllerRef.current && renderWellboreTrajectoryXyzPoints) { - esvIntersectionControllerRef.current.removeAllLayers(); - esvIntersectionControllerRef.current.clearAllData(); - - addWellborePathLayer(esvIntersectionControllerRef.current, renderWellboreTrajectoryXyzPoints); - - if ( - seismicAddress && - generateSeismicSliceImageOptions && - generatedSeismicSliceImageData.synchedOptions && - generatedSeismicSliceImageData.image && - generatedSeismicSliceImageData.status === SeismicSliceImageStatus.SUCCESS - ) { - addSeismicLayer(esvIntersectionControllerRef.current, { - curtain: generatedSeismicSliceImageData.synchedOptions.trajectoryXyPoints, - xAxisOffset: generatedSeismicSliceImageData.synchedOptions.extension, - image: generatedSeismicSliceImageData.image, - dataValues: generatedSeismicSliceImageData.synchedOptions.dataValues, - yAxisValues: generatedSeismicSliceImageData.synchedOptions.yAxisValues, - }); - } - - const fetchedSurfaceIntersections: SurfaceIntersectionData_api[] = []; - for (const surfaceIntersectionDataQuery of surfaceIntersectionDataQueries) { - if (!surfaceIntersectionDataQuery.data) continue; - - fetchedSurfaceIntersections.push(surfaceIntersectionDataQuery.data); - } - if (fetchedSurfaceIntersections.length !== 0) { - const convertedSurfaceIntersectionDataList = - createEsvSurfaceIntersectionDataArrayFromSurfaceIntersectionDataApiArray(fetchedSurfaceIntersections); - addSurfacesLayer(esvIntersectionControllerRef.current, { - surfaceIntersectionDataList: convertedSurfaceIntersectionDataList, - layerName: "Surface intersection", - surfaceColor: "red", - surfaceWidth: 10, - }); - } - - if (selectedWellborePicksAndStratigraphicUnits) { - const { wellborePicks, stratigraphicUnits } = createEsvWellborePicksAndStratigraphicUnits( - selectedWellborePicksAndStratigraphicUnits - ); - addWellborePicksLayer(esvIntersectionControllerRef.current, wellborePicks, 
stratigraphicUnits); - } - - // Update layout - esvIntersectionControllerRef.current.zoomPanHandler.zFactor = Math.max(1.0, zScale); // Prevent scaling to zero - esvIntersectionControllerRef.current.adjustToSize( - Math.max(0, wrapperDivSize.width), - Math.max(0, wrapperDivSize.height - 100) - ); - } - - statusWriter.setLoading( - wellTrajectoriesQuery.isFetching || - seismicFenceDataQuery.isFetching || - surfaceIntersectionDataQueries.some((query) => query.isFetching) - ); - - const hasErrorForEverySurfaceIntersectionQuery = - surfaceIntersectionDataQueries.length > 0 && surfaceIntersectionDataQueries.every((query) => query.isError); - - return ( -
- {seismicFenceDataQuery.isError && wellTrajectoriesQuery.isError ? ( - Error loading well trajectories and seismic fence data - ) : seismicFenceDataQuery.isError ? ( - Error loading seismic fence data - ) : wellTrajectoriesQuery.isError ? ( - Error loading well trajectories - ) : hasErrorForEverySurfaceIntersectionQuery ? ( - Error loading surface intersection data - ) : generatedSeismicSliceImageData.status === SeismicSliceImageStatus.ERROR ? ( - Error generating seismic slice image - ) : ( -
- )} -
- ); -} diff --git a/frontend/src/modules/registerAllModules.ts b/frontend/src/modules/registerAllModules.ts index 78096c46c..f16f380d4 100644 --- a/frontend/src/modules/registerAllModules.ts +++ b/frontend/src/modules/registerAllModules.ts @@ -10,7 +10,6 @@ import "./Map/registerModule"; import "./ParameterDistributionMatrix/registerModule"; import "./Pvt/registerModule"; import "./Rft/registerModule"; -import "./SeismicIntersection/registerModule"; import "./SimulationTimeSeries/registerModule"; import "./SimulationTimeSeriesSensitivity/registerModule"; import "./SubsurfaceMap/registerModule"; From 3ac446c5c33343b86f8f8d41a655007a42e94a68 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B8rgen=20Herje?= <82032112+jorgenherje@users.noreply.github.com> Date: Wed, 18 Sep 2024 13:03:16 +0200 Subject: [PATCH 3/3] Update well-completions-plot version (#723) --- frontend/package-lock.json | 54 +++++++++++++++++++++----------------- frontend/package.json | 2 +- 2 files changed, 31 insertions(+), 25 deletions(-) diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 8a93b7938..4d40595bf 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -18,7 +18,7 @@ "@types/geojson": "^7946.0.14", "@webviz/group-tree-plot": "^1.1.14", "@webviz/subsurface-viewer": "^0.25.2", - "@webviz/well-completions-plot": "^0.0.1-alpha.1", + "@webviz/well-completions-plot": "^1.4.1", "animate.css": "^4.1.1", "axios": "^1.6.5", "culori": "^3.2.0", @@ -5047,12 +5047,24 @@ } }, "node_modules/@webviz/well-completions-plot": { - "version": "0.0.1-alpha.1", - "resolved": "https://registry.npmjs.org/@webviz/well-completions-plot/-/well-completions-plot-0.0.1-alpha.1.tgz", - "integrity": "sha512-Hjslr5zpjxj6aXwgwWrIm0Rg/ABithEZKYB2JxXtccP14xf33hmYAS8t70/s3FmlBqn+kl+QXrnQH44A0wNLFg==", + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/@webviz/well-completions-plot/-/well-completions-plot-1.4.1.tgz", + "integrity": "sha512-Cr2tRS9QLiDgWt7+EMwDolZXkORlGHinMnnFnnR2nSMJb/5di20bbuA71/ew7iah4bnQX5UXCKttewgk7PCOHA==", "dependencies": { - "react-resize-detector": "^9.1.0", - "react-tooltip": "^4.2.21" + "react-resize-detector": "^10.0.1", + "react-tooltip": "^5.27.0" + } + }, + "node_modules/@webviz/well-completions-plot/node_modules/react-resize-detector": { + "version": "10.0.1", + "resolved": "https://registry.npmjs.org/react-resize-detector/-/react-resize-detector-10.0.1.tgz", + "integrity": "sha512-CR2EdP83ycGlWkhhrd6+hhZVhPJO4xnzClFCTBXlODVTHOgiDJQu77sBt67J7P3gfU4ec/kOuf2c5EcyTUNLXQ==", + "dependencies": { + "lodash": "^4.17.21" + }, + "peerDependencies": { + "react": "^18.0.0", + "react-dom": "^18.0.0" } }, "node_modules/@webviz/wsc-common": { @@ -5740,6 +5752,11 @@ "integrity": "sha512-kgMuFyE78OC6Dyu3Dy7vcx4uy97EIbVxJB/B0eJ3bUNAkwdNcxYzgKltnyADiYwsR7SEqkkUPsEUT//OVS6XMA==", "peer": true }, + "node_modules/classnames": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/classnames/-/classnames-2.5.1.tgz", + "integrity": "sha512-saHYOzhIQs6wy2sVxTM6bUDsQO4F50V9RQ22qBpEdCW+I+/Wmke2HOl6lS6dTpdxVhb88/I6+Hs+438c3lfUow==" + }, "node_modules/client-only": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/client-only/-/client-only-0.0.1.tgz", @@ -11693,27 +11710,16 @@ } }, "node_modules/react-tooltip": { - "version": "4.5.1", - "resolved": "https://registry.npmjs.org/react-tooltip/-/react-tooltip-4.5.1.tgz", - "integrity": "sha512-Zo+CSFUGXar1uV+bgXFFDe7VeS2iByeIp5rTgTcc2HqtuOS5D76QapejNNfx320MCY91TlhTQat36KGFTqgcvw==", + "version": "5.28.0", + "resolved": 
"https://registry.npmjs.org/react-tooltip/-/react-tooltip-5.28.0.tgz", + "integrity": "sha512-R5cO3JPPXk6FRbBHMO0rI9nkUG/JKfalBSQfZedZYzmqaZQgq7GLzF8vcCWx6IhUCKg0yPqJhXIzmIO5ff15xg==", "dependencies": { - "prop-types": "^15.8.1", - "uuid": "^7.0.3" - }, - "engines": { - "npm": ">=6.13" + "@floating-ui/dom": "^1.6.1", + "classnames": "^2.3.0" }, "peerDependencies": { - "react": ">=16.0.0", - "react-dom": ">=16.0.0" - } - }, - "node_modules/react-tooltip/node_modules/uuid": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-7.0.3.tgz", - "integrity": "sha512-DPSke0pXhTZgoF/d+WSt2QaKMCFSfx7QegxEWT+JOuHF5aWrKEn0G+ztjuJg/gG8/ItK+rbPCD/yNv8yyih6Cg==", - "bin": { - "uuid": "dist/bin/uuid" + "react": ">=16.14.0", + "react-dom": ">=16.14.0" } }, "node_modules/react-transition-group": { diff --git a/frontend/package.json b/frontend/package.json index 9225d5e0d..c817cbfa3 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -29,7 +29,7 @@ "@types/geojson": "^7946.0.14", "@webviz/group-tree-plot": "^1.1.14", "@webviz/subsurface-viewer": "^0.25.2", - "@webviz/well-completions-plot": "^0.0.1-alpha.1", + "@webviz/well-completions-plot": "^1.4.1", "animate.css": "^4.1.1", "axios": "^1.6.5", "culori": "^3.2.0",