diff --git a/backend_py/primary/poetry.lock b/backend_py/primary/poetry.lock
index 05846eed7..18ce8620e 100644
--- a/backend_py/primary/poetry.lock
+++ b/backend_py/primary/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
 
 [[package]]
 name = "annotated-types"
@@ -793,13 +793,13 @@ files = [
 
 [[package]]
 name = "fmu-sumo"
-version = "1.2.5"
+version = "1.2.6"
 description = "Python package for interacting with Sumo in an FMU setting"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "fmu_sumo-1.2.5-py3-none-any.whl", hash = "sha256:11e39d548a220a891892b5af4f94bf059a7db36bf345665bcf18a61c935d396f"},
-    {file = "fmu_sumo-1.2.5.tar.gz", hash = "sha256:8d9c5f65df070fd90b737d78f9a7e9c1590a1af0ec57d54645b682dd5ece9737"},
+    {file = "fmu_sumo-1.2.6-py3-none-any.whl", hash = "sha256:6dc54da158eed11c4a39df0c168d4355363ccc052bb868c4b0f6ac90c629e62a"},
+    {file = "fmu_sumo-1.2.6.tar.gz", hash = "sha256:02a5ea4337e74ff9acba3b3482a1301684c9574e466b3d839fc715631237df75"},
 ]
 
 [package.dependencies]
@@ -938,39 +938,40 @@ dev = ["sphinx", "sphinx-rtd-theme"]
 
 [[package]]
 name = "httpcore"
-version = "0.17.0"
+version = "1.0.5"
 description = "A minimal low-level HTTP client."
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "httpcore-0.17.0-py3-none-any.whl", hash = "sha256:0fdfea45e94f0c9fd96eab9286077f9ff788dd186635ae61b312693e4d943599"},
-    {file = "httpcore-0.17.0.tar.gz", hash = "sha256:cc045a3241afbf60ce056202301b4d8b6af08845e3294055eb26b09913ef903c"},
+    {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"},
+    {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"},
 ]
 
 [package.dependencies]
-anyio = ">=3.0,<5.0"
 certifi = "*"
 h11 = ">=0.13,<0.15"
-sniffio = "==1.*"
 
 [package.extras]
+asyncio = ["anyio (>=4.0,<5.0)"]
 http2 = ["h2 (>=3,<5)"]
 socks = ["socksio (==1.*)"]
+trio = ["trio (>=0.22.0,<0.26.0)"]
 
 [[package]]
 name = "httpx"
-version = "0.24.1"
+version = "0.27.2"
 description = "The next generation HTTP client."
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "httpx-0.24.1-py3-none-any.whl", hash = "sha256:06781eb9ac53cde990577af654bd990a4949de37a28bdb4a230d434f3a30b9bd"},
-    {file = "httpx-0.24.1.tar.gz", hash = "sha256:5853a43053df830c20f8110c5e69fe44d035d850b2dfe795e196f00fdb774bdd"},
+    {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"},
+    {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"},
 ]
 
 [package.dependencies]
+anyio = "*"
 certifi = "*"
-httpcore = ">=0.15.0,<0.18.0"
+httpcore = "==1.*"
 idna = "*"
 sniffio = "*"
 
@@ -979,6 +980,7 @@ brotli = ["brotli", "brotlicffi"]
 cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"]
 http2 = ["h2 (>=3,<5)"]
 socks = ["socksio (==1.*)"]
+zstd = ["zstandard (>=0.18.0)"]
 
 [[package]]
 name = "idna"
@@ -3187,13 +3189,13 @@ pbr = ">=2.0.0,<2.1.0 || >2.1.0"
 
 [[package]]
 name = "sumo-wrapper-python"
-version = "1.0.9"
+version = "1.0.12"
 description = "Python wrapper for the Sumo API"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "sumo_wrapper_python-1.0.9-py3-none-any.whl", hash = "sha256:43f380722933bf67c4192dbfa2fd0d693b1efa0b4a1500b9d73ea6d5ebc7b55f"},
-    {file = "sumo_wrapper_python-1.0.9.tar.gz", hash = "sha256:7007bbf9b5895bd56bc5a58f86638181912f3017f9d56907454430d943417328"},
+    {file = "sumo_wrapper_python-1.0.12-py3-none-any.whl", hash = "sha256:ca402e3443553c0c19abd7582572cfdc08000f6b0c88f0c4942ecfc7faa5d792"},
+    {file = "sumo_wrapper_python-1.0.12.tar.gz", hash = "sha256:a13913ab72bda079f04a0d7b805336929c25aebcb41999917da62ee7c2613135"},
 ]
 
 [package.dependencies]
@@ -3202,10 +3204,10 @@ httpx = ">=0.24.1"
 msal = ">=1.20.0"
 msal-extensions = ">=1.0.0"
 pyjwt = ">=2.4.0"
-tenacity = ">=8.2.2"
+tenacity = ">=8.2.2,<8.4.0 || >8.4.0"
 
 [package.extras]
-docs = ["autoapi", "sphinx", "sphinx-autodoc-typehints", "sphinx-rtd-theme", "sphinxcontrib-apidoc"]
+docs = ["autoapi", "sphinx (==7.1.2)", "sphinx-autodoc-typehints", "sphinx-rtd-theme", "sphinxcontrib-apidoc"]
 test = ["PyYAML", "pytest"]
 
 [[package]]
@@ -3553,4 +3555,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.11"
-content-hash = "d1d506c6d9e81b2679b2e300c32f18e5f8109e69cb924f18a641c4522f070a0f"
+content-hash = "705992a6990219b9c5bafc1936fc28c2ef58d6b90a5d77518d056b71071ca02e"
diff --git a/backend_py/primary/primary/main.py b/backend_py/primary/primary/main.py
index af023774d..f661d6a11 100644
--- a/backend_py/primary/primary/main.py
+++ b/backend_py/primary/primary/main.py
@@ -47,6 +47,7 @@
 logging.getLogger("primary.services.sumo_access").setLevel(logging.DEBUG)
 logging.getLogger("primary.services.user_session_manager").setLevel(logging.DEBUG)
 logging.getLogger("primary.services.user_grid3d_service").setLevel(logging.DEBUG)
+logging.getLogger("primary.routers.surface").setLevel(logging.DEBUG)
 logging.getLogger("primary.routers.grid3d").setLevel(logging.DEBUG)
 logging.getLogger("primary.routers.dev").setLevel(logging.DEBUG)
 
diff --git a/backend_py/primary/primary/routers/surface/converters.py b/backend_py/primary/primary/routers/surface/converters.py
index 6580fad97..ec6762a77 100644
--- a/backend_py/primary/primary/routers/surface/converters.py
+++ b/backend_py/primary/primary/routers/surface/converters.py
@@ -15,7 +15,24 @@
 from . import schemas
 
 
-def resample_to_surface_def(
+def extract_surface_def_from_surface(xtgeo_surf: xtgeo.RegularSurface) -> schemas.SurfaceDef:
+    """
+    Extract properties from xtgeo regular surface and populate new surface definition
+    """
+    surface_def = schemas.SurfaceDef(
+        npoints_x=xtgeo_surf.ncol,
+        npoints_y=xtgeo_surf.nrow,
+        inc_x=xtgeo_surf.xinc,
+        inc_y=xtgeo_surf.yinc,
+        origin_utm_x=xtgeo_surf.xori,
+        origin_utm_y=xtgeo_surf.yori,
+        rot_deg=xtgeo_surf.rotation,
+    )
+
+    return surface_def
+
+
+def resampled_to_surface_def_if_needed(
     source_surface: xtgeo.RegularSurface, target_surface_def: schemas.SurfaceDef
 ) -> xtgeo.RegularSurface:
     """
@@ -32,7 +49,9 @@
         rotation=target_surface_def.rot_deg,
     )
 
-    if target_surface.compare_topology(source_surface):
+    # Ignore mask and compare only the grid definitions since target_surface will never have a mask set
+    if target_surface.compare_topology(source_surface, strict=False):
+        # Grid definitions are equal so no need to resample
         return source_surface
 
     target_surface.resample(source_surface)
@@ -47,15 +66,7 @@ def to_api_surface_data_float(xtgeo_surf: xtgeo.RegularSurface) -> schemas.Surfa
     float32_np_arr: NDArray[np.float32] = surface_to_float32_numpy_array(xtgeo_surf)
     values_b64arr = b64_encode_float_array_as_float32(float32_np_arr)
 
-    surface_def = schemas.SurfaceDef(
-        npoints_x=xtgeo_surf.ncol,
-        npoints_y=xtgeo_surf.nrow,
-        inc_x=xtgeo_surf.xinc,
-        inc_y=xtgeo_surf.yinc,
-        origin_utm_x=xtgeo_surf.xori,
-        origin_utm_y=xtgeo_surf.yori,
-        rot_deg=xtgeo_surf.rotation,
-    )
+    surface_def = extract_surface_def_from_surface(xtgeo_surf)
 
     trans_bb_utm = schemas.BoundingBox2d(
         min_x=xtgeo_surf.xmin, min_y=xtgeo_surf.ymin, max_x=xtgeo_surf.xmax, max_y=xtgeo_surf.ymax
@@ -79,15 +90,7 @@
     png_bytes: bytes = surface_to_png_bytes_optimized(xtgeo_surf)
     png_bytes_base64 = base64.b64encode(png_bytes).decode("ascii")
 
-    surface_def = schemas.SurfaceDef(
-        npoints_x=xtgeo_surf.ncol,
-        npoints_y=xtgeo_surf.nrow,
-        inc_x=xtgeo_surf.xinc,
-        inc_y=xtgeo_surf.yinc,
-        origin_utm_x=xtgeo_surf.xori,
-        origin_utm_y=xtgeo_surf.yori,
-        rot_deg=xtgeo_surf.rotation,
-    )
+    surface_def = extract_surface_def_from_surface(xtgeo_surf)
 
     trans_bb_utm = schemas.BoundingBox2d(
         min_x=xtgeo_surf.xmin, min_y=xtgeo_surf.ymin, max_x=xtgeo_surf.xmax, max_y=xtgeo_surf.ymax
diff --git a/backend_py/primary/primary/routers/surface/router.py b/backend_py/primary/primary/routers/surface/router.py
index 02d74bab0..00e783aec 100644
--- a/backend_py/primary/primary/routers/surface/router.py
+++ b/backend_py/primary/primary/routers/surface/router.py
@@ -2,14 +2,17 @@
 import logging
 from typing import Annotated, List, Optional, Literal
 
+import xtgeo
 from fastapi import APIRouter, Depends, HTTPException, Query, Response, Body, status
 from webviz_pkg.core_utils.perf_metrics import PerfMetrics
+from fmu.sumo.explorer.explorer import SumoClient
 
 from primary.services.sumo_access.case_inspector import CaseInspector
 from primary.services.sumo_access.surface_access import SurfaceAccess
 from primary.services.smda_access.stratigraphy_access import StratigraphyAccess, StratigraphicUnit
 from primary.services.smda_access.stratigraphy_utils import sort_stratigraphic_names_by_hierarchy
 from primary.services.smda_access.mocked_drogon_smda_access import _mocked_stratigraphy_access
+from primary.services.sumo_access._helpers import create_sumo_client
 from primary.services.utils.statistic_function import StatisticFunction
 from primary.services.utils.surface_intersect_with_polyline import intersect_surface_with_polyline
 from primary.services.utils.authenticated_user import AuthenticatedUser
@@ -134,51 +137,17 @@ async def get_surface_data(
     perf_metrics = ResponsePerfMetrics(response)
 
     access_token = authenticated_user.get_sumo_access_token()
+    sumo_client = create_sumo_client(access_token)
 
     addr = decode_surf_addr_str(surf_addr_str)
     if not isinstance(addr, RealizationSurfaceAddress | ObservedSurfaceAddress | StatisticalSurfaceAddress):
         raise HTTPException(status_code=404, detail="Endpoint only supports address types REAL, OBS and STAT")
 
-    if addr.address_type == "REAL":
-        access = SurfaceAccess.from_case_uuid(access_token, addr.case_uuid, addr.ensemble_name)
-        xtgeo_surf = await access.get_realization_surface_data_async(
-            real_num=addr.realization,
-            name=addr.name,
-            attribute=addr.attribute,
-            time_or_interval_str=addr.iso_time_or_interval,
-        )
-        perf_metrics.record_lap("get-surf")
-        if not xtgeo_surf:
-            raise HTTPException(status_code=404, detail="Could not get realization surface")
-
-    elif addr.address_type == "STAT":
-        service_stat_func_to_compute = StatisticFunction.from_string_value(addr.stat_function)
-        if service_stat_func_to_compute is None:
-            raise HTTPException(status_code=404, detail="Invalid statistic requested")
-
-        access = SurfaceAccess.from_case_uuid(access_token, addr.case_uuid, addr.ensemble_name)
-        xtgeo_surf = await access.get_statistical_surface_data_async(
-            statistic_function=service_stat_func_to_compute,
-            name=addr.name,
-            attribute=addr.attribute,
-            realizations=addr.stat_realizations,
-            time_or_interval_str=addr.iso_time_or_interval,
-        )
-        perf_metrics.record_lap("sumo-calc")
-        if not xtgeo_surf:
-            raise HTTPException(status_code=404, detail="Could not get or compute statistical surface")
-
-    elif addr.address_type == "OBS":
-        access = SurfaceAccess.from_case_uuid_no_iteration(access_token, addr.case_uuid)
-        xtgeo_surf = await access.get_observed_surface_data_async(
-            name=addr.name, attribute=addr.attribute, time_or_interval_str=addr.iso_time_or_interval
-        )
-        perf_metrics.record_lap("get-surf")
-        if not xtgeo_surf:
-            raise HTTPException(status_code=404, detail="Could not get observed surface")
+    # Does its own error handling by throwing HTTPException
+    xtgeo_surf = await _get_fully_addressed_surf_async(sumo_client, addr, perf_metrics, None)
 
     if resample_to is not None:
-        xtgeo_surf = converters.resample_to_surface_def(xtgeo_surf, resample_to)
+        xtgeo_surf = converters.resampled_to_surface_def_if_needed(xtgeo_surf, resample_to)
         perf_metrics.record_lap("resample")
 
     surf_data_response: schemas.SurfaceDataFloat | schemas.SurfaceDataPng
@@ -275,8 +244,80 @@ async def get_delta_surface_data(
     data_format: Annotated[Literal["float", "png"], Query(description="Format of binary data in the response")] = "float",
     resample_to: Annotated[schemas.SurfaceDef | None, Depends(dependencies.get_resample_to_param_from_keyval_str)] = None,
     # fmt:on
-) -> list[schemas.SurfaceDataFloat]:
-    raise HTTPException(status.HTTP_501_NOT_IMPLEMENTED)
+) -> schemas.SurfaceDataFloat | schemas.SurfaceDataPng:
+    perf_metrics = ResponsePerfMetrics(response)
+
+    access_token = authenticated_user.get_sumo_access_token()
+    perf_metrics.record_lap("get-token")
+    sumo_client = create_sumo_client(access_token)
+    perf_metrics.record_lap("create-sumo-client")
+
+    addr_a = decode_surf_addr_str(surf_a_addr_str)
+    addr_b = decode_surf_addr_str(surf_b_addr_str)
+    if not isinstance(addr_a, RealizationSurfaceAddress | ObservedSurfaceAddress | StatisticalSurfaceAddress):
+        raise HTTPException(status_code=404, detail="Endpoint only supports address types REAL, OBS and STAT")
+    if not isinstance(addr_b, RealizationSurfaceAddress | ObservedSurfaceAddress | StatisticalSurfaceAddress):
+        raise HTTPException(status_code=404, detail="Endpoint only supports address types REAL, OBS and STAT")
+
+    async with asyncio.TaskGroup() as tg:
+        surf_a_task = tg.create_task(
+            _get_fully_addressed_surf_async(sumo_client, addr_a, perf_metrics.create_sub_metrics_object(), "A")
+        )
+        surf_b_task = tg.create_task(
+            _get_fully_addressed_surf_async(sumo_client, addr_b, perf_metrics.create_sub_metrics_object(), "B")
+        )
+
+    xtgeo_surf_a = surf_a_task.result()
+    xtgeo_surf_b = surf_b_task.result()
+    perf_metrics.record_lap("obtain-both-surfs")
+
+    # xtgeo_surf_a = await _get_fully_addressed_surf_async(access_token, addr_a, perf_metrics, "A")
+    # xtgeo_surf_b = await _get_fully_addressed_surf_async(access_token, addr_b, perf_metrics, "B")
+
+    LOGGER.debug("===============================")
+    LOGGER.debug(f"SURF_A rotation = {xtgeo_surf_a.rotation}")
+    LOGGER.debug(f"SURF_A xori,yori = {xtgeo_surf_a.xori}, {xtgeo_surf_a.yori}")
+    LOGGER.debug(f"SURF_A xinc,yinc = {xtgeo_surf_a.xinc}, {xtgeo_surf_a.yinc}")
+    LOGGER.debug(f"SURF_A ncol,nrow = {xtgeo_surf_a.ncol}, {xtgeo_surf_a.nrow}")
+    LOGGER.debug("-------------------------------")
+    LOGGER.debug(f"SURF_B rotation = {xtgeo_surf_b.rotation}")
+    LOGGER.debug(f"SURF_B xori,yori = {xtgeo_surf_b.xori}, {xtgeo_surf_b.yori}")
+    LOGGER.debug(f"SURF_B xinc,yinc = {xtgeo_surf_b.xinc}, {xtgeo_surf_b.yinc}")
+    LOGGER.debug(f"SURF_B ncol,nrow = {xtgeo_surf_b.ncol}, {xtgeo_surf_b.nrow}")
+    LOGGER.debug("===============================")
+
+    # From the xtgeo code it seems the subtract operation on the surfaces will automatically resample
+    # the data to the target surface (surface A), if needed.
+    # If the two surfaces differ only in their mask, it seems the subtract operation will do an unnecessary
+    # resampling, but for now this is probably not critical, so just utilize the xtgeo subtract method.
+    xtgeo_surf_a.subtract(xtgeo_surf_b)
+    perf_metrics.record_lap("calc-delta")
+
+    # It could probably be debated whether the resampling should be done after computing the delta
+    # surface or if we should resample the B surface first. For now, leave the resampling to last.
+    if resample_to is not None:
+        xtgeo_surf_a = converters.resampled_to_surface_def_if_needed(xtgeo_surf_a, resample_to)
+        perf_metrics.record_lap("resample")
+
+    LOGGER.debug("===============================")
+    LOGGER.debug(f"SURF_RESULT rotation = {xtgeo_surf_a.rotation}")
+    LOGGER.debug(f"SURF_RESULT xori,yori = {xtgeo_surf_a.xori}, {xtgeo_surf_a.yori}")
+    LOGGER.debug(f"SURF_RESULT xinc,yinc = {xtgeo_surf_a.xinc}, {xtgeo_surf_a.yinc}")
+    LOGGER.debug(f"SURF_RESULT ncol,nrow = {xtgeo_surf_a.ncol}, {xtgeo_surf_a.nrow}")
+    LOGGER.debug("===============================")
+    LOGGER.debug(f"\n{str(xtgeo_surf_a)}")
+
+    surf_data_response: schemas.SurfaceDataFloat | schemas.SurfaceDataPng
+    if data_format == "float":
+        surf_data_response = converters.to_api_surface_data_float(xtgeo_surf_a)
+    elif data_format == "png":
+        surf_data_response = converters.to_api_surface_data_png(xtgeo_surf_a)
+
+    perf_metrics.record_lap("convert")
+
+    LOGGER.info(f"Created delta surface in: {perf_metrics.to_string()}")
+
+    return surf_data_response
 
 
 @router.get("/misfit_surface_data")
@@ -316,3 +357,53 @@
     LOGGER.info(f"Got stratigraphic units for case in : {perf_metrics.to_string()}")
 
     return strat_units
+
+
+async def _get_fully_addressed_surf_async(
+    sumo_client: SumoClient,
+    addr: RealizationSurfaceAddress | ObservedSurfaceAddress | StatisticalSurfaceAddress,
+    perf_metrics: ResponsePerfMetrics,
+    suffix_str: str | None,
+) -> xtgeo.RegularSurface:
+    metrics_suffix = f"-{suffix_str}" if suffix_str else ""
+    exception_suffix = f" {suffix_str}" if suffix_str else ""
+
+    if addr.address_type == "REAL":
+        access = SurfaceAccess.from_case_uuid_using_sumo_client(sumo_client, addr.case_uuid, addr.ensemble_name)
+        xtgeo_surf = await access.get_realization_surface_data_async(
+            real_num=addr.realization,
+            name=addr.name,
+            attribute=addr.attribute,
+            time_or_interval_str=addr.iso_time_or_interval,
+        )
+        perf_metrics.record_lap("get-surf" + metrics_suffix)
+        if not xtgeo_surf:
+            raise HTTPException(status_code=404, detail="Could not get realization surface" + exception_suffix)
+
+    elif addr.address_type == "STAT":
+        service_stat_func_to_compute = StatisticFunction.from_string_value(addr.stat_function)
+        if service_stat_func_to_compute is None:
+            raise HTTPException(status_code=404, detail="Invalid statistic requested for surface" + exception_suffix)
+
+        access = SurfaceAccess.from_case_uuid_using_sumo_client(sumo_client, addr.case_uuid, addr.ensemble_name)
+        xtgeo_surf = await access.get_statistical_surface_data_async(
+            statistic_function=service_stat_func_to_compute,
+            name=addr.name,
+            attribute=addr.attribute,
+            realizations=addr.stat_realizations,
+            time_or_interval_str=addr.iso_time_or_interval,
+        )
+        perf_metrics.record_lap("sumo-calc" + metrics_suffix)
+        if not xtgeo_surf:
+            raise HTTPException(status_code=404, detail="Could not compute statistical surface" + exception_suffix)
+
+    elif addr.address_type == "OBS":
+        access = SurfaceAccess.from_case_uuid_no_iteration_using_sumo_client(sumo_client, addr.case_uuid)
+        xtgeo_surf = await access.get_observed_surface_data_async(
+            name=addr.name, attribute=addr.attribute, time_or_interval_str=addr.iso_time_or_interval
+        )
+        perf_metrics.record_lap("get-surf" + metrics_suffix)
+        if not xtgeo_surf:
+            raise HTTPException(status_code=404, detail="Could not get observed surface" + exception_suffix)
+
+    return xtgeo_surf
diff --git a/backend_py/primary/primary/services/sumo_access/surface_access.py b/backend_py/primary/primary/services/sumo_access/surface_access.py
index f781aaffe..6995ae6ec 100644
--- a/backend_py/primary/primary/services/sumo_access/surface_access.py
+++ b/backend_py/primary/primary/services/sumo_access/surface_access.py
@@ -38,6 +38,16 @@ def from_case_uuid_no_iteration(cls, access_token: str, case_uuid: str) -> "Surf
         sumo_client = create_sumo_client(access_token)
         return SurfaceAccess(sumo_client=sumo_client, case_uuid=case_uuid, iteration_name=None)
 
+    @classmethod
+    def from_case_uuid_using_sumo_client(
+        cls, sumo_client: SumoClient, case_uuid: str, iteration_name: str
+    ) -> "SurfaceAccess":
+        return SurfaceAccess(sumo_client=sumo_client, case_uuid=case_uuid, iteration_name=iteration_name)
+
+    @classmethod
+    def from_case_uuid_no_iteration_using_sumo_client(cls, sumo_client: SumoClient, case_uuid: str) -> "SurfaceAccess":
+        return SurfaceAccess(sumo_client=sumo_client, case_uuid=case_uuid, iteration_name=None)
+
     async def get_realization_surfaces_metadata_async(self) -> SurfaceMetaSet:
         if not self._iteration_name:
             raise InvalidParameterError(
diff --git a/backend_py/primary/primary/utils/response_perf_metrics.py b/backend_py/primary/primary/utils/response_perf_metrics.py
index b1ff2fa5d..d48a0200a 100644
--- a/backend_py/primary/primary/utils/response_perf_metrics.py
+++ b/backend_py/primary/primary/utils/response_perf_metrics.py
@@ -15,6 +15,12 @@ def __init__(self, target_response_for_metrics: Response | None = None):
         if target_response_for_metrics is not None:
             self._headers = target_response_for_metrics.headers
 
+    def create_sub_metrics_object(self) -> "ResponsePerfMetrics":
+        sub_metrics = ResponsePerfMetrics(None)
+        sub_metrics._metrics_dict = self._metrics_dict
+        sub_metrics._headers = self._headers
+        return sub_metrics
+
     def set_metric(self, metric_name: str, duration_ms: int | float) -> None:
         int_duration_ms = int(duration_ms)
         self._metrics_dict[metric_name] = int_duration_ms
diff --git a/backend_py/primary/pyproject.toml b/backend_py/primary/pyproject.toml
index 240326c1e..027bbae3e 100644
--- a/backend_py/primary/pyproject.toml
+++ b/backend_py/primary/pyproject.toml
@@ -19,9 +19,9 @@ pydantic = "^2.3.0"
 numpy = "^1.24.1"
 orjson = "^3.8.10"
 pandas = {version = "2.0.1", extras = ["performance"]}
-httpx = "^0.24.0"
-fmu-sumo = "1.2.5"
-sumo-wrapper-python = "1.0.9"
+httpx = "^0.27.2"
+fmu-sumo = "1.2.6"
+sumo-wrapper-python = "1.0.12"
 azure-monitor-opentelemetry = "^1.1.0"
 requests-toolbelt = "^1.0.0"
 pottery = "^3.0.0"
diff --git a/frontend/src/api/services/SurfaceService.ts b/frontend/src/api/services/SurfaceService.ts
index 5dd76c84f..294ace696 100644
--- a/frontend/src/api/services/SurfaceService.ts
+++ b/frontend/src/api/services/SurfaceService.ts
@@ -191,7 +191,7 @@
     * @param surfBAddrStr Address string of surface B, supported types: *REAL*, *OBS* and *STAT*
     * @param dataFormat Format of binary data in the response
     * @param resampleToDefStr Definition of the surface onto which the data should be resampled. *SurfaceDef* object properties encoded as a `KeyValStr` string.
-     * @returns SurfaceDataFloat Successful Response
+     * @returns any Successful Response
     * @throws ApiError
     */
    public getDeltaSurfaceData(
@@ -199,7 +199,7 @@
        surfBAddrStr: string,
        dataFormat: 'float' | 'png' = 'float',
        resampleToDefStr?: (string | null),
-    ): CancelablePromise<Array<SurfaceDataFloat>> {
+    ): CancelablePromise<(SurfaceDataFloat | SurfaceDataPng)> {
        return this.httpRequest.request({
            method: 'GET',
            url: '/surface/delta_surface_data',
diff --git a/frontend/src/modules/Map/view.tsx b/frontend/src/modules/Map/view.tsx
index e82842f58..2c5907ba7 100644
--- a/frontend/src/modules/Map/view.tsx
+++ b/frontend/src/modules/Map/view.tsx
@@ -6,7 +6,8 @@ import { useViewStatusWriter } from "@framework/StatusWriter";
 import { Vec2, rotatePoint2Around } from "@lib/utils/vec2";
 import { ContentError, ContentInfo } from "@modules/_shared/components/ContentMessage";
 import { usePropagateApiErrorToStatusWriter } from "@modules/_shared/hooks/usePropagateApiErrorToStatusWriter";
-import { useSurfaceDataQueryByAddress } from "@modules_shared/Surface";
+//import { useSurfaceDataQueryByAddress } from "@modules_shared/Surface";
+import { useDeltaSurfaceDataQueryByAddress } from "@modules_shared/Surface/queryHooks";
 import SubsurfaceViewer from "@webviz/subsurface-viewer";
 
 import { Interfaces } from "./interfaces";
@@ -17,7 +18,14 @@ export function MapView(props: ModuleViewProps<Interfaces>): React.ReactNode {
     const statusWriter = useViewStatusWriter(props.viewContext);
 
     //const surfDataQuery = useSurfaceDataQueryByAddress(surfaceAddress, "png", null, true);
-    const surfDataQuery = useSurfaceDataQueryByAddress(surfaceAddress, "float", null, true);
+    //const surfDataQuery = useSurfaceDataQueryByAddress(surfaceAddress, "float", null, true);
+
+    const surfAddrA = surfaceAddress;
+    const surfAddrB: any = surfaceAddress ? {...surfaceAddress} : null;
+    if (surfAddrB) {
+        surfAddrB.realizationNum += 1;
+    }
+    const surfDataQuery = useDeltaSurfaceDataQueryByAddress(surfAddrA, surfAddrB, "float", null, true);
 
     const isLoading = surfDataQuery.isFetching;
     statusWriter.setLoading(isLoading);
@@ -27,6 +35,15 @@
 
     const surfData = surfDataQuery.data;
 
+    if (surfData?.valuesFloat32Arr) {
+        // Hack since MapLayer seems to freak out if all values are equal (probably chokes if min/max range is zero)
+        if (surfData.valuesFloat32Arr.every((v) => v === 0 || isNaN(v))) {
+            console.debug("All numeric values are zero, setting first value to 1");
+            const firstZeroVal = surfData.valuesFloat32Arr.indexOf(0);
+            surfData.valuesFloat32Arr[firstZeroVal] = 1;
+        }
+    }
+
     return (
             {hasError ? (
@@ -51,8 +68,8 @@
                         rotDeg: surfData.surface_def.rot_deg,
                     },
-                    contours: [0, 100],
-                    isContoursDepth: true,
+                    contours: false,
+                    isContoursDepth: false,
                     gridLines: false,
                     material: true,
                     smoothShading: true,
diff --git a/frontend/src/modules/_shared/Surface/queryHooks.ts b/frontend/src/modules/_shared/Surface/queryHooks.ts
index ee531521c..59ed72c28 100644
--- a/frontend/src/modules/_shared/Surface/queryHooks.ts
+++ b/frontend/src/modules/_shared/Surface/queryHooks.ts
@@ -76,3 +76,48 @@
     const surfAddrStr = surfAddr ? encodeSurfAddrStr(surfAddr) : null;
     return useSurfaceDataQuery(surfAddrStr, format, resampleTo, allowEnable);
 }
+
+
+import { SurfaceDataFloat_api } from "@api";
+
+function transformSurfaceData_HACKED(
+    apiData: SurfaceDataFloat_api | SurfaceDataPng_api
+): SurfaceDataFloat_trans {
+    return transformSurfaceData(apiData) as SurfaceDataFloat_trans
+}
+
+export function useDeltaSurfaceDataQuery(surfAddrStrA: string | null, surfAddrStrB: string | null, format: "float", resampleTo: SurfaceDef_api | null, allowEnable: boolean): UseQueryResult<SurfaceDataFloat_trans> {
+    if (surfAddrStrA) {
+        const surfAddrType = peekSurfaceAddressType(surfAddrStrA);
+        if (surfAddrType !== "OBS" && surfAddrType !== "REAL" && surfAddrType !== "STAT") {
+            throw new Error("Invalid surface address type for surface A in delta surface data query");
+        }
+    }
+
+    if (surfAddrStrB) {
+        const surfAddrType = peekSurfaceAddressType(surfAddrStrB);
+        if (surfAddrType !== "OBS" && surfAddrType !== "REAL" && surfAddrType !== "STAT") {
+            throw new Error("Invalid surface address type for surface B in delta surface data query");
+        }
+    }
+
+    let resampleToKeyValStr: string | null = null;
+    if (resampleTo) {
+        resampleToKeyValStr = encodePropertiesAsKeyValStr(resampleTo);
+    }
+
+    return useQuery({
+        queryKey: ["getDeltaSurfaceData", surfAddrStrA, surfAddrStrB, resampleToKeyValStr, format],
+        queryFn: () => apiService.surface.getDeltaSurfaceData(surfAddrStrA ?? "", surfAddrStrB ?? "", format, resampleToKeyValStr),
+        select: transformSurfaceData_HACKED,
+        staleTime: STALE_TIME,
+        gcTime: CACHE_TIME,
+        enabled: allowEnable && Boolean(surfAddrStrA) && Boolean(surfAddrStrB),
+    });
+}
+
+export function useDeltaSurfaceDataQueryByAddress(surfAddrA: FullSurfaceAddress | null, surfAddrB: FullSurfaceAddress | null, format: "float", resampleTo: SurfaceDef_api | null, allowEnable: boolean): UseQueryResult<SurfaceDataFloat_trans> {
+    const surfAddrStrA = surfAddrA ? encodeSurfAddrStr(surfAddrA) : null;
+    const surfAddrStrB = surfAddrB ? encodeSurfAddrStr(surfAddrB) : null;
+    return useDeltaSurfaceDataQuery(surfAddrStrA, surfAddrStrB, format, resampleTo, allowEnable);
+}