diff --git a/.github/workflows/subsurface.yml b/.github/workflows/subsurface.yml
index edc14a09aa..761ca28ea3 100644
--- a/.github/workflows/subsurface.yml
+++ b/.github/workflows/subsurface.yml
@@ -74,10 +74,10 @@ jobs:
         env:
           # If you want the CI to (temporarily) run against your fork of the testdata,
           # change the value here from "equinor" to your username.
-          TESTDATA_REPO_OWNER: equinor
+          TESTDATA_REPO_OWNER: hanskallekleiv
           # If you want the CI to (temporarily) run against another branch than master,
           # change the value here from "master" to the relevant branch name.
-          TESTDATA_REPO_BRANCH: master
+          TESTDATA_REPO_BRANCH: update-to-support-new-map-viewer
         run: |
           git clone --depth 1 --branch $TESTDATA_REPO_BRANCH https://github.com/$TESTDATA_REPO_OWNER/webviz-subsurface-testdata.git
           # Copy any clientside script to the test folder before running tests
diff --git a/setup.py b/setup.py
index 765e11ff2b..d0a5bd861e 100644
--- a/setup.py
+++ b/setup.py
@@ -45,6 +45,7 @@
         "InplaceVolumes = webviz_subsurface.plugins:InplaceVolumes",
         "InplaceVolumesOneByOne = webviz_subsurface.plugins:InplaceVolumesOneByOne",
         "LinePlotterFMU = webviz_subsurface.plugins:LinePlotterFMU",
+        "MapViewerFMU = webviz_subsurface.plugins:MapViewerFMU",
         "MorrisPlot = webviz_subsurface.plugins:MorrisPlot",
         "ParameterAnalysis = webviz_subsurface.plugins:ParameterAnalysis",
         "ParameterCorrelation = webviz_subsurface.plugins:ParameterCorrelation",
@@ -86,11 +87,13 @@
         "ecl2df>=0.15.0; sys_platform=='linux'",
         "fmu-ensemble>=1.2.3",
         "fmu-tools>=1.8",
+        "geojson>=2.5.0",
         "jsonschema>=3.2.0",
         "opm>=2020.10.1; sys_platform=='linux'",
         "pandas>=1.1.5",
         "pillow>=6.1",
         "pyarrow>=5.0.0",
+        "pydeck>=0.6.2",
         "pyscal>=0.7.5",
         "scipy>=1.2",
         "statsmodels>=0.12.1",  # indirect dependency through https://plotly.com/python/linear-fits/
diff --git a/webviz_subsurface/_components/deckgl_map/__init__.py b/webviz_subsurface/_components/deckgl_map/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/webviz_subsurface/_components/deckgl_map/deckgl_map_layers_model.py b/webviz_subsurface/_components/deckgl_map/deckgl_map_layers_model.py
new file mode 100644
index 0000000000..412221338a
--- /dev/null
+++ b/webviz_subsurface/_components/deckgl_map/deckgl_map_layers_model.py
@@ -0,0 +1,96 @@
+import warnings
+from enum import Enum
+from typing import Dict, List
+
+from .types.deckgl_props import LayerTypes
+
+
+class DeckGLMapLayersModel:
+    """Handles updates to the DeckGLMap layers prop"""
+
+    def __init__(self, layers: List[Dict]) -> None:
+        self._layers = layers
+
+    def _update_layer_by_type(self, layer_type: Enum, layer_data: Dict) -> None:
+        """Update a layer specification by the layer type. If multiple layers are found,
+        no update is performed."""
+        layers = list(filter(lambda x: x["@@type"] == layer_type, self._layers))
+        if not layers:
+            warnings.warn(f"No {layer_type} found in layer specification!")
+        if len(layers) > 1:
+            warnings.warn(
+                f"Multiple layers of type {layer_type} found in layer specification!"
+            )
+        if len(layers) == 1:
+            layer_idx = self._layers.index(layers[0])
+            self._layers[layer_idx].update(layer_data)
+
+    def update_layer_by_id(self, layer_id: str, layer_data: Dict) -> None:
+        """Update a layer specification by the layer id."""
+        layers = list(filter(lambda x: x["id"] == layer_id, self._layers))
+        if not layers:
+            warnings.warn(f"No layer with id {layer_id} found in layer specification!")
+        if len(layers) > 1:
+            warnings.warn(
+                f"Multiple layers with id {layer_id} found in layer specification!"
+ ) + if len(layers) == 1: + layer_idx = self._layers.index(layers[0]) + self._layers[layer_idx].update(layer_data) + + def set_propertymap( + self, + image_url: str, + bounds: List[float], + value_range: List[float], + ) -> None: + """Set the property map image url, bounds and value range in the + Colormap and Hillshading layer""" + self._update_layer_by_type( + layer_type=LayerTypes.HILLSHADING, + layer_data={ + "image": image_url, + "bounds": bounds, + "valueRange": value_range, + }, + ) + self._update_layer_by_type( + layer_type=LayerTypes.COLORMAP, + layer_data={ + "image": image_url, + "bounds": bounds, + "valueRange": value_range, + }, + ) + + def set_colormap_image(self, colormap: str) -> None: + """Set the colormap image url in the ColormapLayer""" + self._update_layer_by_type( + layer_type=LayerTypes.COLORMAP, + layer_data={ + "colormap": colormap, + }, + ) + + def set_colormap_range(self, colormap_range: List[float]) -> None: + """Set the colormap range in the ColormapLayer""" + self._update_layer_by_type( + layer_type=LayerTypes.COLORMAP, + layer_data={ + "colorMapRange": colormap_range, + }, + ) + + def set_well_data(self, well_data: List[Dict]) -> None: + """Set the well data json url in the WellsLayer""" + self._update_layer_by_type( + layer_type=LayerTypes.WELL, + layer_data={ + "data": well_data, + }, + ) + + @property + def layers(self) -> List[Dict]: + """Returns the full layers specification""" + return self._layers diff --git a/webviz_subsurface/_components/deckgl_map/types/__init__.py b/webviz_subsurface/_components/deckgl_map/types/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/webviz_subsurface/_components/deckgl_map/types/deckgl_props.py b/webviz_subsurface/_components/deckgl_map/types/deckgl_props.py new file mode 100644 index 0000000000..bc57318102 --- /dev/null +++ b/webviz_subsurface/_components/deckgl_map/types/deckgl_props.py @@ -0,0 +1,218 @@ +from dataclasses import dataclass +from enum import Enum +from typing import Any, Dict, List, Optional + +import pydeck +from geojson.feature import FeatureCollection +from pydeck.types import String +from typing_extensions import Literal + + +class LayerTypes(str, Enum): + HILLSHADING = "Hillshading2DLayer" + MAP3D = "Map3DLayer" + COLORMAP = "ColormapLayer" + WELL = "WellsLayer" + DRAWING = "DrawingLayer" + FAULTPOLYGONS = "FaultPolygonsLayer" + + +class LayerIds(str, Enum): + HILLSHADING = "hillshading-layer" + MAP3D = "map3d-layer" + COLORMAP = "colormap-layer" + WELL = "wells-layer" + DRAWING = "drawing-layer" + FAULTPOLYGONS = "fault-polygons-layer" + + +class LayerNames(str, Enum): + HILLSHADING = "Hillshading" + MAP3D = "Map" + COLORMAP = "Colormap" + WELL = "Wells" + DRAWING = "Drawings" + FAULTPOLYGONS = "Fault polygons" + + +@dataclass +class Bounds: + x_min: int = 0 + y_min: int = 0 + x_max: int = 10 + y_max: int = 10 + + +# pylint: disable=too-few-public-methods +class DeckGLMapProps: + """Default prop settings for DeckGLMap""" + + bounds: List[float] = [0, 0, 10000, 10000] + value_range: List[float] = [0, 1] + image: str = "/surface/UNDEF.png" + color_map_name: str = "Physics" + edited_data: Dict[str, Any] = { + "data": {"type": "FeatureCollection", "features": []}, + "selectedWell": "", + "selectedFeatureIndexes": [], + } + resources: Dict[str, Any] = {} + + +class Hillshading2DLayer(pydeck.Layer): + def __init__( + self, + image: str = DeckGLMapProps.image, + name: str = LayerNames.HILLSHADING, + bounds: List[float] = None, + value_range: List[float] = None, + uuid: 
Optional[str] = None, + **kwargs: Any, + ) -> None: + super().__init__( + type=LayerTypes.HILLSHADING, + id=uuid if uuid is not None else LayerIds.HILLSHADING, + image=String(image), + name=String(name), + bounds=bounds if bounds is not None else DeckGLMapProps.bounds, + valueRange=value_range if value_range is not None else [0, 1], + **kwargs, + ) + + +class Map3DLayer(pydeck.Layer): + # pylint: disable=too-many-arguments + def __init__( + self, + mesh: str = DeckGLMapProps.image, + property_texture: str = DeckGLMapProps.image, + color_map_name: str = DeckGLMapProps.color_map_name, + name: str = LayerNames.MAP3D, + bounds: List[float] = None, + mesh_value_range: List[float] = None, + mesh_max_error: int = 5, + property_value_range: List[float] = None, + color_map_range: List[float] = None, + contours: List[float] = None, + rot_deg: float = 0.0, + uuid: Optional[str] = None, + **kwargs: Any, + ) -> None: + super().__init__( + type=LayerTypes.MAP3D, + id=uuid if uuid is not None else LayerIds.MAP3D, + mesh=String(mesh), + propertyTexture=String(property_texture), + colorMapName=String(color_map_name), + name=String(name), + bounds=bounds if bounds is not None else DeckGLMapProps.bounds, + meshValueRange=mesh_value_range if mesh_value_range is not None else [0, 1], + propertyValueRange=property_value_range + if property_value_range is not None + else [0, 1], + colorMapRange=color_map_range if color_map_range is not None else [0, 1], + meshMaxError=mesh_max_error, + contours=contours if contours is not None else [0, 100], + rotDeg=rot_deg, + **kwargs, + ) + + +class ColormapLayer(pydeck.Layer): + def __init__( + self, + image: str = DeckGLMapProps.image, + color_map_name: str = DeckGLMapProps.color_map_name, + name: str = LayerNames.COLORMAP, + bounds: List[float] = None, + value_range: List[float] = None, + color_map_range: List[float] = None, + uuid: Optional[str] = None, + **kwargs: Any, + ) -> None: + super().__init__( + type=LayerTypes.COLORMAP, + id=uuid if uuid is not None else LayerIds.COLORMAP, + image=String(image), + colorMapName=String(color_map_name), + name=String(name), + bounds=bounds if bounds is not None else DeckGLMapProps.bounds, + valueRange=value_range if value_range is not None else [0, 1], + colorMapRange=color_map_range if color_map_range is not None else [0, 1], + **kwargs, + ) + + +class WellsLayer(pydeck.Layer): + def __init__( + self, + data: FeatureCollection = None, + name: str = LayerNames.WELL, + uuid: Optional[str] = None, + **kwargs: Any, + ) -> None: + super().__init__( + type="GeoJsonLayer", + id=uuid if uuid is not None else LayerIds.WELL, + name=String(name), + data={"type": "FeatureCollection", "features": []} + if data is None + else data, + get_text="properties.attribute", + get_text_size=12, + get_text_anchor=String("start"), + # logData=log_data, + # logrunName=log_run, + # logName=log_name, + # selectedWell=String(selected_well), + pointType=String("circle+text"), + lineWidthMinPixels=2, + pointRadiusMinPixels=2, + pickable=True, + **kwargs, + ) + + +class DrawingLayer(pydeck.Layer): + def __init__( + self, + mode: Literal[ # Use Enum? 
+ "view", "modify", "transform", "drawPoint", "drawLineString", "drawPolygon" + ] = "view", + uuid: Optional[str] = None, + **kwargs: Any, + ): + super().__init__( + type=LayerTypes.DRAWING, + id=uuid if uuid is not None else LayerIds.DRAWING, + name=LayerNames.DRAWING, + mode=String(mode), + **kwargs, + ) + + +class FaultPolygonsLayer(pydeck.Layer): + def __init__( + self, + data: FeatureCollection = None, + name: str = LayerNames.FAULTPOLYGONS, + uuid: Optional[str] = None, + **kwargs: Any, + ) -> None: + super().__init__( + type=LayerTypes.FAULTPOLYGONS, + id=uuid if uuid is not None else LayerIds.FAULTPOLYGONS, + name=String(name), + data={ + "type": "FeatureCollection", + "features": [], + } + if data is None + else data, + **kwargs, + ) + + +class CustomLayer(pydeck.Layer): + def __init__(self, layer_type: str, uuid: str, name: str, **kwargs: Any) -> None: + super().__init__(type=layer_type, id=String(uuid), name=String(name), **kwargs) diff --git a/webviz_subsurface/_providers/__init__.py b/webviz_subsurface/_providers/__init__.py index ab3c0b687f..a5ae863593 100644 --- a/webviz_subsurface/_providers/__init__.py +++ b/webviz_subsurface/_providers/__init__.py @@ -1,3 +1,10 @@ +from .ensemble_fault_polygons_provider import ( + EnsembleFaultPolygonsProvider, + EnsembleFaultPolygonsProviderFactory, + FaultPolygonsAddress, + FaultPolygonsServer, + SimulatedFaultPolygonsAddress, +) from .ensemble_summary_provider.ensemble_summary_provider import ( EnsembleSummaryProvider, Frequency, @@ -6,5 +13,18 @@ from .ensemble_summary_provider.ensemble_summary_provider_factory import ( EnsembleSummaryProviderFactory, ) +from .ensemble_surface_provider import ( + EnsembleSurfaceProvider, + EnsembleSurfaceProviderFactory, + ObservedSurfaceAddress, + QualifiedDiffSurfaceAddress, + QualifiedSurfaceAddress, + SimulatedSurfaceAddress, + StatisticalSurfaceAddress, + SurfaceAddress, + SurfaceMeta, + SurfaceServer, +) from .ensemble_table_provider import EnsembleTableProvider, EnsembleTableProviderSet from .ensemble_table_provider_factory import EnsembleTableProviderFactory +from .well_provider import WellProvider, WellProviderFactory, WellServer diff --git a/webviz_subsurface/_providers/ensemble_fault_polygons_provider/__init__.py b/webviz_subsurface/_providers/ensemble_fault_polygons_provider/__init__.py new file mode 100644 index 0000000000..3da0bf53ab --- /dev/null +++ b/webviz_subsurface/_providers/ensemble_fault_polygons_provider/__init__.py @@ -0,0 +1,8 @@ +from .ensemble_fault_polygons_provider import ( + EnsembleFaultPolygonsProvider, + SimulatedFaultPolygonsAddress, +) +from .ensemble_fault_polygons_provider_factory import ( + EnsembleFaultPolygonsProviderFactory, +) +from .fault_polygons_server import FaultPolygonsAddress, FaultPolygonsServer diff --git a/webviz_subsurface/_providers/ensemble_fault_polygons_provider/_fault_polygons_discovery.py b/webviz_subsurface/_providers/ensemble_fault_polygons_provider/_fault_polygons_discovery.py new file mode 100644 index 0000000000..5cd88fc2c3 --- /dev/null +++ b/webviz_subsurface/_providers/ensemble_fault_polygons_provider/_fault_polygons_discovery.py @@ -0,0 +1,90 @@ +import glob +import os +import re +from dataclasses import dataclass +from pathlib import Path +from typing import Dict, List, Optional + +from fmu.ensemble import ScratchEnsemble + + +@dataclass(frozen=True) +class FaultPolygonsFileInfo: + path: str + real: int + name: str + attribute: str + + +def _discover_ensemble_realizations_fmu(ens_path: str) -> Dict[int, str]: + """Returns dict 
indexed by realization number and with runpath as value""" + scratch_ensemble = ScratchEnsemble("dummyEnsembleName", paths=ens_path).filter("OK") + real_dict = {i: r.runpath() for i, r in scratch_ensemble.realizations.items()} + return real_dict + + +def _discover_ensemble_realizations(ens_path: str) -> Dict[int, str]: + # Much faster than FMU impl above, but is it risky? + # Do we need to check for OK-file? + real_dict: Dict[int, str] = {} + + realidxregexp = re.compile(r"realization-(\d+)") + globbed_real_dirs = sorted(glob.glob(str(ens_path))) + for real_dir in globbed_real_dirs: + realnum: Optional[int] = None + for path_comp in reversed(real_dir.split(os.path.sep)): + realmatch = re.match(realidxregexp, path_comp) + if realmatch: + realnum = int(realmatch.group(1)) + break + + if realnum is not None: + real_dict[realnum] = real_dir + + return real_dict + + +@dataclass(frozen=True) +class FaultPolygonsIdent: + name: str + attribute: str + + +def _fault_polygons_ident_from_filename(filename: str) -> Optional[FaultPolygonsIdent]: + """Split the stem part of the fault polygons filename into fault polygons name and attribute""" + delimiter: str = "--" + parts = Path(filename).stem.split(delimiter) + if len(parts) != 2: + return None + + return FaultPolygonsIdent(name=parts[0], attribute=parts[1]) + + +def discover_per_realization_fault_polygons_files( + ens_path: str, +) -> List[FaultPolygonsFileInfo]: + rel_fault_polygons_folder: str = "share/results/polygons" + suffix: str = "*.pol" + + fault_polygons_files: List[FaultPolygonsFileInfo] = [] + + real_dict = _discover_ensemble_realizations_fmu(ens_path) + for realnum, runpath in sorted(real_dict.items()): + globbed_filenames = glob.glob( + str(Path(runpath) / rel_fault_polygons_folder / suffix) + ) + for fault_polygons_filename in sorted(globbed_filenames): + fault_polygons_ident = _fault_polygons_ident_from_filename( + fault_polygons_filename + ) + if fault_polygons_ident: + fault_polygons_files.append( + FaultPolygonsFileInfo( + path=fault_polygons_filename, + real=realnum, + name=fault_polygons_ident.name, + attribute=fault_polygons_ident.attribute, + ) + ) + + return fault_polygons_files diff --git a/webviz_subsurface/_providers/ensemble_fault_polygons_provider/_provider_impl_file.py b/webviz_subsurface/_providers/ensemble_fault_polygons_provider/_provider_impl_file.py new file mode 100644 index 0000000000..6907b328ee --- /dev/null +++ b/webviz_subsurface/_providers/ensemble_fault_polygons_provider/_provider_impl_file.py @@ -0,0 +1,232 @@ +import logging +import shutil +from enum import Enum +from pathlib import Path +from typing import List, Optional + +import pandas as pd +import xtgeo + +from webviz_subsurface._utils.perf_timer import PerfTimer + +from ._fault_polygons_discovery import FaultPolygonsFileInfo +from .ensemble_fault_polygons_provider import ( + EnsembleFaultPolygonsProvider, + FaultPolygonsAddress, + SimulatedFaultPolygonsAddress, +) + +LOGGER = logging.getLogger(__name__) + +REL_SIM_DIR = "sim" + + +# pylint: disable=too-few-public-methods +class Col: + TYPE = "type" + REAL = "real" + ATTRIBUTE = "attribute" + NAME = "name" + ORIGINAL_PATH = "original_path" + REL_PATH = "rel_path" + + +class FaultPolygonsType(str, Enum): + SIMULATED = "simulated" + + +class ProviderImplFile(EnsembleFaultPolygonsProvider): + def __init__( + self, + provider_id: str, + provider_dir: Path, + fault_polygons_inventory_df: pd.DataFrame, + ) -> None: + self._provider_id = provider_id + self._provider_dir = provider_dir + self._inventory_df = 
fault_polygons_inventory_df + + @staticmethod + # pylint: disable=too-many-locals + def write_backing_store( + storage_dir: Path, + storage_key: str, + sim_fault_polygons: List[FaultPolygonsFileInfo], + ) -> None: + + timer = PerfTimer() + + # All data for this provider will be stored inside a sub-directory + # given by the storage key + provider_dir = storage_dir / storage_key + LOGGER.debug(f"Writing fault polygons backing store to: {provider_dir}") + provider_dir.mkdir(parents=True, exist_ok=True) + (provider_dir / REL_SIM_DIR).mkdir(parents=True, exist_ok=True) + + type_arr: List[FaultPolygonsType] = [] + real_arr: List[int] = [] + attribute_arr: List[str] = [] + name_arr: List[str] = [] + rel_path_arr: List[str] = [] + original_path_arr: List[str] = [] + + for fault_polygons_info in sim_fault_polygons: + rel_path_in_store = _compose_rel_sim_fault_polygons_path( + real=fault_polygons_info.real, + attribute=fault_polygons_info.attribute, + name=fault_polygons_info.name, + extension=Path(fault_polygons_info.path).suffix, + ) + type_arr.append(FaultPolygonsType.SIMULATED) + real_arr.append(fault_polygons_info.real) + attribute_arr.append(fault_polygons_info.attribute) + name_arr.append(fault_polygons_info.name) + rel_path_arr.append(str(rel_path_in_store)) + original_path_arr.append(fault_polygons_info.path) + + LOGGER.debug( + f"Copying {len(original_path_arr)} fault polygons into backing store..." + ) + timer.lap_s() + _copy_fault_polygons_into_provider_dir( + original_path_arr, rel_path_arr, provider_dir + ) + et_copy_s = timer.lap_s() + + fault_polygons_inventory_df = pd.DataFrame( + { + Col.TYPE: type_arr, + Col.REAL: real_arr, + Col.ATTRIBUTE: attribute_arr, + Col.NAME: name_arr, + Col.REL_PATH: rel_path_arr, + Col.ORIGINAL_PATH: original_path_arr, + } + ) + + parquet_file_name = provider_dir / "fault_polygons_inventory.parquet" + fault_polygons_inventory_df.to_parquet(path=parquet_file_name) + + LOGGER.debug( + f"Wrote fault polygons backing store in: {timer.elapsed_s():.2f}s (" + f"copy={et_copy_s:.2f}s)" + ) + + @staticmethod + def from_backing_store( + storage_dir: Path, + storage_key: str, + ) -> Optional["ProviderImplFile"]: + + provider_dir = storage_dir / storage_key + parquet_file_name = provider_dir / "fault_polygons_inventory.parquet" + + try: + fault_polygons_inventory_df = pd.read_parquet(path=parquet_file_name) + return ProviderImplFile( + storage_key, provider_dir, fault_polygons_inventory_df + ) + except FileNotFoundError: + return None + + def provider_id(self) -> str: + return self._provider_id + + def attributes(self) -> List[str]: + return sorted(list(self._inventory_df[Col.ATTRIBUTE].unique())) + + def fault_polygons_names_for_attribute( + self, fault_polygons_attribute: str + ) -> List[str]: + return sorted( + list( + self._inventory_df.loc[ + self._inventory_df[Col.ATTRIBUTE] == fault_polygons_attribute + ][Col.NAME].unique() + ) + ) + + def realizations(self) -> List[int]: + unique_reals = self._inventory_df[Col.REAL].unique() + + # Sort and strip out any entries with real == -1 + return sorted([r for r in unique_reals if r >= 0]) + + def get_fault_polygons( + self, + address: FaultPolygonsAddress, + ) -> Optional[xtgeo.Polygons]: + + if isinstance(address, SimulatedFaultPolygonsAddress): + return self._get_simulated_fault_polygons(address) + + raise TypeError("Unknown type of fault polygons address") + + def _get_simulated_fault_polygons( + self, address: SimulatedFaultPolygonsAddress + ) -> Optional[xtgeo.Polygons]: + """Returns a Xtgeo fault polygons 
+        instance for a single realization."""
+
+        timer = PerfTimer()
+
+        fault_polygons_fns: List[str] = self._locate_simulated_fault_polygons(
+            attribute=address.attribute,
+            name=address.name,
+            realizations=[address.realization],
+        )
+
+        if len(fault_polygons_fns) == 0:
+            LOGGER.warning(f"No simulated fault polygons found for {address}")
+            return None
+        if len(fault_polygons_fns) > 1:
+            LOGGER.warning(
+                f"Multiple simulated fault polygons found for: {address}. "
+                "Returning first fault polygons."
+            )
+
+        fault_polygons = xtgeo.polygons_from_file(fault_polygons_fns[0])
+
+        LOGGER.debug(f"Loaded simulated fault polygons in: {timer.elapsed_s():.2f}s")
+
+        return fault_polygons
+
+    def _locate_simulated_fault_polygons(
+        self, attribute: str, name: str, realizations: List[int]
+    ) -> List[str]:
+        """Returns list of file names matching the specified filter criteria"""
+        df = self._inventory_df.loc[
+            self._inventory_df[Col.TYPE] == FaultPolygonsType.SIMULATED
+        ]
+
+        df = df.loc[
+            (df[Col.ATTRIBUTE] == attribute)
+            & (df[Col.NAME] == name)
+            & (df[Col.REAL].isin(realizations))
+        ]
+
+        return [self._provider_dir / rel_path for rel_path in df[Col.REL_PATH]]
+
+
+def _copy_fault_polygons_into_provider_dir(
+    original_path_arr: List[str],
+    rel_path_arr: List[str],
+    provider_dir: Path,
+) -> None:
+    for src_path, dst_rel_path in zip(original_path_arr, rel_path_arr):
+        # LOGGER.debug(f"copying fault polygons from: {src_path}")
+        shutil.copyfile(src_path, provider_dir / dst_rel_path)
+
+    # full_dst_path_arr = [storage_dir / dst_rel_path for dst_rel_path in store_path_arr]
+    # with ProcessPoolExecutor() as executor:
+    #     executor.map(shutil.copyfile, original_path_arr, full_dst_path_arr)
+
+
+def _compose_rel_sim_fault_polygons_path(
+    real: int,
+    attribute: str,
+    name: str,
+    extension: str,
+) -> Path:
+    """Compose path to simulated fault polygons file, relative to provider's directory"""
+    fname = f"{real}--{name}--{attribute}{extension}"
+    return Path(REL_SIM_DIR) / fname
diff --git a/webviz_subsurface/_providers/ensemble_fault_polygons_provider/ensemble_fault_polygons_provider.py b/webviz_subsurface/_providers/ensemble_fault_polygons_provider/ensemble_fault_polygons_provider.py
new file mode 100644
index 0000000000..ef8c87e687
--- /dev/null
+++ b/webviz_subsurface/_providers/ensemble_fault_polygons_provider/ensemble_fault_polygons_provider.py
@@ -0,0 +1,67 @@
+import abc
+from dataclasses import dataclass
+from typing import List, Optional
+
+import xtgeo
+
+
+@dataclass(frozen=True)
+class SimulatedFaultPolygonsAddress:
+    """Specifies a unique simulated fault polygon set for a given ensemble realization"""
+
+    attribute: str
+    name: str
+    realization: int
+
+
+# Type aliases used for signature readability
+FaultPolygonsAddress = SimulatedFaultPolygonsAddress
+
+
+# Class provides data for ensemble fault polygons
+class EnsembleFaultPolygonsProvider(abc.ABC):
+    @abc.abstractmethod
+    def provider_id(self) -> str:
+        """Returns string ID of the provider."""
+        ...
+
+    @abc.abstractmethod
+    def attributes(self) -> List[str]:
+        """Returns list of all available attributes."""
+        ...
+
+    @abc.abstractmethod
+    def fault_polygons_names_for_attribute(
+        self, fault_polygons_attribute: str
+    ) -> List[str]:
+        """Returns list of all available fault polygons names for a given attribute."""
+        ...
+
+    @abc.abstractmethod
+    def realizations(self) -> List[int]:
+        """Returns list of all available realizations."""
+        ...
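+    # Hypothetical usage sketch (the ensemble path and the attribute/name values
+    # below are illustrative only; the factory and address types are the ones
+    # defined in this package):
+    #
+    #     factory = EnsembleFaultPolygonsProviderFactory.instance()
+    #     provider = factory.create_from_ensemble_fault_polygons_files(
+    #         "/some/path/realization-*/iter-0"
+    #     )
+    #     polygons = provider.get_fault_polygons(
+    #         SimulatedFaultPolygonsAddress(
+    #             attribute="depth", name="topvolantis", realization=0
+    #         )
+    #     )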
+ + @abc.abstractmethod + def get_fault_polygons( + self, + address: FaultPolygonsAddress, + ) -> Optional[xtgeo.Polygons]: + """Returns fault polygons for a given fault polygons address""" + ... + + # @abc.abstractmethod + # def get_surface_bounds(self, surface: EnsembleSurfaceContext) -> List[float]: + # """Returns the bounds for a surface [xmin,ymin, xmax,ymax]""" + # ... + + # @abc.abstractmethod + # def get_surface_value_range(self, surface: EnsembleSurfaceContext) -> List[float]: + # """Returns the value range for a given surface context [zmin, zmax]""" + # ... + + # @abc.abstractmethod + # def get_surface_as_rgba(self, surface: EnsembleSurfaceContext) -> io.BytesIO: + # """Returns surface as a greyscale png RGBA with encoded elevation values + # in a bytestream""" + # ... diff --git a/webviz_subsurface/_providers/ensemble_fault_polygons_provider/ensemble_fault_polygons_provider_factory.py b/webviz_subsurface/_providers/ensemble_fault_polygons_provider/ensemble_fault_polygons_provider_factory.py new file mode 100644 index 0000000000..1fb84772d6 --- /dev/null +++ b/webviz_subsurface/_providers/ensemble_fault_polygons_provider/ensemble_fault_polygons_provider_factory.py @@ -0,0 +1,102 @@ +import hashlib +import logging +import os +from pathlib import Path + +from webviz_config.webviz_factory import WebvizFactory +from webviz_config.webviz_factory_registry import WEBVIZ_FACTORY_REGISTRY +from webviz_config.webviz_instance_info import WebvizRunMode + +from webviz_subsurface._utils.perf_timer import PerfTimer + +from ._fault_polygons_discovery import discover_per_realization_fault_polygons_files +from ._provider_impl_file import ProviderImplFile +from .ensemble_fault_polygons_provider import EnsembleFaultPolygonsProvider + +LOGGER = logging.getLogger(__name__) + + +class EnsembleFaultPolygonsProviderFactory(WebvizFactory): + def __init__(self, root_storage_folder: Path, allow_storage_writes: bool) -> None: + self._storage_dir = Path(root_storage_folder) / __name__ + self._allow_storage_writes = allow_storage_writes + + LOGGER.info( + f"EnsembleFaultPolygonsProviderFactory init: storage_dir={self._storage_dir}" + ) + + if self._allow_storage_writes: + os.makedirs(self._storage_dir, exist_ok=True) + + @staticmethod + def instance() -> "EnsembleFaultPolygonsProviderFactory": + """Static method to access the singleton instance of the factory.""" + + factory = WEBVIZ_FACTORY_REGISTRY.get_factory( + EnsembleFaultPolygonsProviderFactory + ) + if not factory: + app_instance_info = WEBVIZ_FACTORY_REGISTRY.app_instance_info + storage_folder = app_instance_info.storage_folder + allow_writes = app_instance_info.run_mode != WebvizRunMode.PORTABLE + + factory = EnsembleFaultPolygonsProviderFactory(storage_folder, allow_writes) + + # Store the factory object in the global factory registry + WEBVIZ_FACTORY_REGISTRY.set_factory( + EnsembleFaultPolygonsProviderFactory, factory + ) + + return factory + + def create_from_ensemble_fault_polygons_files( + self, ens_path: str + ) -> EnsembleFaultPolygonsProvider: + timer = PerfTimer() + + storage_key = f"ens__{_make_hash_string(ens_path)}" + provider = ProviderImplFile.from_backing_store(self._storage_dir, storage_key) + if provider: + LOGGER.info( + f"Loaded fault polygons provider from backing store in {timer.elapsed_s():.2f}s (" + f"ens_path={ens_path})" + ) + return provider + + # We can only import data from data source if storage writes are allowed + if not self._allow_storage_writes: + raise ValueError(f"Failed to load fault polygons provider for 
{ens_path}") + + LOGGER.info(f"Importing/copying fault polygons data for: {ens_path}") + + timer.lap_s() + sim_fault_polygons_files = discover_per_realization_fault_polygons_files( + ens_path + ) + + et_discover_s = timer.lap_s() + + ProviderImplFile.write_backing_store( + self._storage_dir, + storage_key, + sim_fault_polygons=sim_fault_polygons_files, + ) + et_write_s = timer.lap_s() + + provider = ProviderImplFile.from_backing_store(self._storage_dir, storage_key) + if not provider: + raise ValueError( + f"Failed to load/create fault polygons provider for {ens_path}" + ) + + LOGGER.info( + f"Saved fault polygons provider to backing store in {timer.elapsed_s():.2f}s (" + f"discover={et_discover_s:.2f}s, write={et_write_s:.2f}s, ens_path={ens_path})" + ) + + return provider + + +def _make_hash_string(string_to_hash: str) -> str: + # There is no security risk here and chances of collision should be very slim + return hashlib.md5(string_to_hash.encode()).hexdigest() # nosec diff --git a/webviz_subsurface/_providers/ensemble_fault_polygons_provider/fault_polygons_server.py b/webviz_subsurface/_providers/ensemble_fault_polygons_provider/fault_polygons_server.py new file mode 100644 index 0000000000..c1088e100f --- /dev/null +++ b/webviz_subsurface/_providers/ensemble_fault_polygons_provider/fault_polygons_server.py @@ -0,0 +1,129 @@ +import json +import logging +from dataclasses import asdict, dataclass +from typing import Dict, Optional +from urllib.parse import quote + +import flask +import geojson +import xtgeo +from dash import Dash + +from .ensemble_fault_polygons_provider import ( + EnsembleFaultPolygonsProvider, + FaultPolygonsAddress, +) + +LOGGER = logging.getLogger(__name__) + +_ROOT_URL_PATH = "/FaultPolygonsServer" + +_FAULT_POLYGONS_SERVER_INSTANCE: Optional["FaultPolygonsServer"] = None + + +@dataclass(frozen=True) +class QualifiedAddress: + provider_id: str + address: FaultPolygonsAddress + + +class FaultPolygonsServer: + def __init__(self, app: Dash) -> None: + + self._setup_url_rule(app) + self._id_to_provider_dict: Dict[str, EnsembleFaultPolygonsProvider] = {} + + @staticmethod + def instance(app: Dash) -> "FaultPolygonsServer": + # pylint: disable=global-statement + global _FAULT_POLYGONS_SERVER_INSTANCE + if not _FAULT_POLYGONS_SERVER_INSTANCE: + LOGGER.debug("Initializing FaultPolygonsServer instance") + _FAULT_POLYGONS_SERVER_INSTANCE = FaultPolygonsServer(app) + + return _FAULT_POLYGONS_SERVER_INSTANCE + + def add_provider(self, provider: EnsembleFaultPolygonsProvider) -> None: + + provider_id = provider.provider_id() + LOGGER.debug(f"Adding provider with id={provider_id}") + + existing_provider = self._id_to_provider_dict.get(provider_id) + if existing_provider: + # Issue a warning if there already is a provider registered with the same + # id AND if the actual provider instance is different. + # This should not be a problem, but will happen until the provider factory + # gets caching. 
+            if existing_provider is not provider:
+                LOGGER.warning(
+                    f"Provider with id={provider_id} ignored, the id is already present"
+                )
+                return
+
+        self._id_to_provider_dict[provider_id] = provider
+
+    def encode_partial_url(
+        self,
+        provider_id: str,
+        fault_polygons_address: FaultPolygonsAddress,
+    ) -> str:
+        if provider_id not in self._id_to_provider_dict:
+            raise ValueError("Could not find provider")
+
+        url_path: str = (
+            f"{_ROOT_URL_PATH}/{quote(provider_id)}"
+            f"/{quote(json.dumps(asdict(fault_polygons_address)))}"
+        )
+
+        return url_path
+
+    def _setup_url_rule(self, app: Dash) -> None:
+        @app.server.route(_ROOT_URL_PATH + "/<provider_id>/<fault_polygons_address>")
+        def _handle_fault_polygons_request(
+            provider_id: str,
+            fault_polygons_address: str,
+        ) -> flask.Response:
+            LOGGER.debug(
+                f"Handling fault_polygons_request: "
+                f"full_fault_polygons_address={fault_polygons_address} "
+            )
+
+            fault_polygons_geojson = None
+            # try:
+
+            address = FaultPolygonsAddress(**json.loads(fault_polygons_address))
+            provider = self._id_to_provider_dict[provider_id]
+            fault_polygons = provider.get_fault_polygons(address)
+            if fault_polygons is not None:
+                fault_polygons_geojson = _create_fault_polygons_geojson(
+                    polygons=fault_polygons
+                )
+
+            # except Exception as e:
+            #     LOGGER.error("Error decoding fault polygons address")
+            #     print(e)
+            #     # flask.abort(404)
+            featurecoll = (
+                fault_polygons_geojson
+                if fault_polygons_geojson is not None
+                else {
+                    "type": "FeatureCollection",
+                    "features": [],
+                }
+            )
+
+            return flask.Response(
+                geojson.dumps(featurecoll), mimetype="application/geo+json"
+            )
+
+
+def _create_fault_polygons_geojson(polygons: xtgeo.Polygons) -> Dict:
+    feature_arr = []
+    for name, polygon in polygons.dataframe.groupby("POLY_ID"):
+        coords = [list(zip(polygon.X_UTME, polygon.Y_UTMN))]
+        feature = geojson.Feature(
+            geometry=geojson.Polygon(coords),
+            properties={"name": f"id:{name}", "color": [0, 0, 0, 255]},
+        )
+        feature_arr.append(feature)
+    return geojson.FeatureCollection(features=feature_arr)
diff --git a/webviz_subsurface/_providers/ensemble_surface_provider/__init__.py b/webviz_subsurface/_providers/ensemble_surface_provider/__init__.py
new file mode 100644
index 0000000000..7f54193ef0
--- /dev/null
+++ b/webviz_subsurface/_providers/ensemble_surface_provider/__init__.py
@@ -0,0 +1,14 @@
+from .ensemble_surface_provider import (
+    EnsembleSurfaceProvider,
+    ObservedSurfaceAddress,
+    SimulatedSurfaceAddress,
+    StatisticalSurfaceAddress,
+    SurfaceAddress,
+)
+from .ensemble_surface_provider_factory import EnsembleSurfaceProviderFactory
+from .surface_server import (
+    QualifiedDiffSurfaceAddress,
+    QualifiedSurfaceAddress,
+    SurfaceMeta,
+    SurfaceServer,
+)
diff --git a/webviz_subsurface/_providers/ensemble_surface_provider/_provider_impl_file.py b/webviz_subsurface/_providers/ensemble_surface_provider/_provider_impl_file.py
new file mode 100644
index 0000000000..cef8c3a686
--- /dev/null
+++ b/webviz_subsurface/_providers/ensemble_surface_provider/_provider_impl_file.py
@@ -0,0 +1,474 @@
+import logging
+import shutil
+import warnings
+from enum import Enum
+from pathlib import Path
+from typing import List, Optional, Set
+
+import numpy as np
+import pandas as pd
+import xtgeo
+
+from webviz_subsurface._utils.perf_timer import PerfTimer
+
+from ._stat_surf_cache import StatSurfCache
+from ._surface_discovery import SurfaceFileInfo
+from .ensemble_surface_provider import (
+    EnsembleSurfaceProvider,
+    ObservedSurfaceAddress,
+    SimulatedSurfaceAddress,
+    StatisticalSurfaceAddress,
+    SurfaceAddress,
+ SurfaceStatistic, +) + +LOGGER = logging.getLogger(__name__) + +REL_SIM_DIR = "sim" +REL_OBS_DIR = "obs" +REL_STAT_CACHE_DIR = "stat_cache" + +# pylint: disable=too-few-public-methods +class Col: + TYPE = "type" + REAL = "real" + ATTRIBUTE = "attribute" + NAME = "name" + DATESTR = "datestr" + ORIGINAL_PATH = "original_path" + REL_PATH = "rel_path" + + +class SurfaceType(str, Enum): + OBSERVED = "observed" + SIMULATED = "simulated" + + +class ProviderImplFile(EnsembleSurfaceProvider): + def __init__( + self, provider_id: str, provider_dir: Path, surface_inventory_df: pd.DataFrame + ) -> None: + self._provider_id = provider_id + self._provider_dir = provider_dir + self._inventory_df = surface_inventory_df + + self._stat_surf_cache = StatSurfCache(self._provider_dir / REL_STAT_CACHE_DIR) + + @staticmethod + # pylint: disable=too-many-locals + def write_backing_store( + storage_dir: Path, + storage_key: str, + sim_surfaces: List[SurfaceFileInfo], + obs_surfaces: List[SurfaceFileInfo], + ) -> None: + + timer = PerfTimer() + + # All data for this provider will be stored inside a sub-directory + # given by the storage key + provider_dir = storage_dir / storage_key + LOGGER.debug(f"Writing surface backing store to: {provider_dir}") + provider_dir.mkdir(parents=True, exist_ok=True) + (provider_dir / REL_SIM_DIR).mkdir(parents=True, exist_ok=True) + (provider_dir / REL_OBS_DIR).mkdir(parents=True, exist_ok=True) + + type_arr: List[SurfaceType] = [] + real_arr: List[int] = [] + attribute_arr: List[str] = [] + name_arr: List[str] = [] + datestr_arr: List[str] = [] + rel_path_arr: List[str] = [] + original_path_arr: List[str] = [] + + for surfinfo in sim_surfaces: + rel_path_in_store = _compose_rel_sim_surf_path( + real=surfinfo.real, + attribute=surfinfo.attribute, + name=surfinfo.name, + datestr=surfinfo.datestr, + extension=Path(surfinfo.path).suffix, + ) + type_arr.append(SurfaceType.SIMULATED) + real_arr.append(surfinfo.real) + attribute_arr.append(surfinfo.attribute) + name_arr.append(surfinfo.name) + datestr_arr.append(surfinfo.datestr if surfinfo.datestr else "") + rel_path_arr.append(str(rel_path_in_store)) + original_path_arr.append(surfinfo.path) + + # We want to strip out observed surfaces without a matching simulated surface + valid_obs_surfaces = _find_observed_surfaces_corresponding_to_simulated( + obs_surfaces=obs_surfaces, sim_surfaces=sim_surfaces + ) + + for surfinfo in valid_obs_surfaces: + rel_path_in_store = _compose_rel_obs_surf_path( + attribute=surfinfo.attribute, + name=surfinfo.name, + datestr=surfinfo.datestr, + extension=Path(surfinfo.path).suffix, + ) + type_arr.append(SurfaceType.OBSERVED) + real_arr.append(-1) + attribute_arr.append(surfinfo.attribute) + name_arr.append(surfinfo.name) + datestr_arr.append(surfinfo.datestr if surfinfo.datestr else "") + rel_path_arr.append(str(rel_path_in_store)) + original_path_arr.append(surfinfo.path) + + LOGGER.debug(f"Copying {len(original_path_arr)} surfaces into backing store...") + timer.lap_s() + _copy_surfaces_into_provider_dir(original_path_arr, rel_path_arr, provider_dir) + et_copy_s = timer.lap_s() + + surface_inventory_df = pd.DataFrame( + { + Col.TYPE: type_arr, + Col.REAL: real_arr, + Col.ATTRIBUTE: attribute_arr, + Col.NAME: name_arr, + Col.DATESTR: datestr_arr, + Col.REL_PATH: rel_path_arr, + Col.ORIGINAL_PATH: original_path_arr, + } + ) + + parquet_file_name = provider_dir / "surface_inventory.parquet" + surface_inventory_df.to_parquet(path=parquet_file_name) + + LOGGER.debug( + f"Wrote surface backing store in: 
{timer.elapsed_s():.2f}s (" + f"copy={et_copy_s:.2f}s)" + ) + + @staticmethod + def from_backing_store( + storage_dir: Path, + storage_key: str, + ) -> Optional["ProviderImplFile"]: + + provider_dir = storage_dir / storage_key + parquet_file_name = provider_dir / "surface_inventory.parquet" + + try: + surface_inventory_df = pd.read_parquet(path=parquet_file_name) + return ProviderImplFile(storage_key, provider_dir, surface_inventory_df) + except FileNotFoundError: + return None + + def provider_id(self) -> str: + return self._provider_id + + def attributes(self) -> List[str]: + return sorted(list(self._inventory_df[Col.ATTRIBUTE].unique())) + + def surface_names_for_attribute(self, surface_attribute: str) -> List[str]: + return sorted( + list( + self._inventory_df.loc[ + self._inventory_df[Col.ATTRIBUTE] == surface_attribute + ][Col.NAME].unique() + ) + ) + + def surface_dates_for_attribute( + self, surface_attribute: str + ) -> Optional[List[str]]: + dates = sorted( + list( + self._inventory_df.loc[ + self._inventory_df[Col.ATTRIBUTE] == surface_attribute + ][Col.DATESTR].unique() + ) + ) + + if len(dates) == 1 and dates[0] is None: + return None + + return dates + + def realizations(self) -> List[int]: + unique_reals = self._inventory_df[Col.REAL].unique() + + # Sort and strip out any entries with real == -1 + return sorted([r for r in unique_reals if r >= 0]) + + def get_surface( + self, + address: SurfaceAddress, + ) -> Optional[xtgeo.RegularSurface]: + if isinstance(address, StatisticalSurfaceAddress): + return self._get_or_create_statistical_surface(address) + # return self._create_statistical_surface(address) + if isinstance(address, SimulatedSurfaceAddress): + return self._get_simulated_surface(address) + if isinstance(address, ObservedSurfaceAddress): + return self._get_observed_surface(address) + + raise TypeError("Unknown type of surface address") + + def _get_or_create_statistical_surface( + self, address: StatisticalSurfaceAddress + ) -> Optional[xtgeo.RegularSurface]: + + timer = PerfTimer() + + surf = self._stat_surf_cache.fetch(address) + if surf: + LOGGER.debug( + f"Fetched statistical surface from cache in: {timer.elapsed_s():.2f}s" + ) + return surf + + surf = self._create_statistical_surface(address) + et_create_s = timer.lap_s() + + self._stat_surf_cache.store(address, surf) + et_write_cache_s = timer.lap_s() + + LOGGER.debug( + f"Created and wrote statistical surface to cache in: {timer.elapsed_s():.2f}s (" + f"create={et_create_s:.2f}s, store={et_write_cache_s:.2f}s), " + f"[stat={address.statistic}, " + f"attr={address.attribute}, name={address.name}, date={address.datestr}]" + ) + + return surf + + def _create_statistical_surface( + self, address: StatisticalSurfaceAddress + ) -> Optional[xtgeo.RegularSurface]: + surf_fns: List[str] = self._locate_simulated_surfaces( + attribute=address.attribute, + name=address.name, + datestr=address.datestr if address.datestr is not None else "", + realizations=address.realizations, + ) + + if len(surf_fns) == 0: + LOGGER.warning(f"No input surfaces found for statistical surface {address}") + return None + + timer = PerfTimer() + + surfaces = xtgeo.Surfaces(surf_fns) + et_load_s = timer.lap_s() + + surf_count = len(surfaces.surfaces) + if surf_count == 0: + LOGGER.warning( + f"Could not load input surfaces for statistical surface {address}" + ) + return None + + # print("########################################################") + # first_surf = surfaces.surfaces[0] + # for surf in surfaces.surfaces: + # print( + # 
+        #         surf.dimensions,
+        #         surf.xinc,
+        #         surf.yinc,
+        #         surf.xori,
+        #         surf.yori,
+        #         surf.rotation,
+        #         surf.filesrc,
+        #     )
+        # print("########################################################")
+
+        # Suppress numpy warnings when surfaces have undefined z-values
+        with warnings.catch_warnings():
+            warnings.filterwarnings("ignore", "All-NaN slice encountered")
+            warnings.filterwarnings("ignore", "Mean of empty slice")
+            warnings.filterwarnings("ignore", "Degrees of freedom <= 0 for slice")
+
+            stat_surface = _calc_statistic_across_surfaces(address.statistic, surfaces)
+        et_calc_s = timer.lap_s()
+
+        LOGGER.debug(
+            f"Created statistical surface in: {timer.elapsed_s():.2f}s ("
+            f"load={et_load_s:.2f}s, calc={et_calc_s:.2f}s), "
+            f"[#surfaces={surf_count}, stat={address.statistic}, "
+            f"attr={address.attribute}, name={address.name}, date={address.datestr}]"
+        )
+
+        return stat_surface
+
+    def _get_simulated_surface(
+        self, address: SimulatedSurfaceAddress
+    ) -> Optional[xtgeo.RegularSurface]:
+        """Returns an xtgeo surface instance for a single realization surface"""
+
+        timer = PerfTimer()
+
+        surf_fns: List[str] = self._locate_simulated_surfaces(
+            attribute=address.attribute,
+            name=address.name,
+            datestr=address.datestr if address.datestr is not None else "",
+            realizations=[address.realization],
+        )
+
+        if len(surf_fns) == 0:
+            LOGGER.warning(f"No simulated surface found for {address}")
+            return None
+        if len(surf_fns) > 1:
+            LOGGER.warning(
+                f"Multiple simulated surfaces found for: {address}. "
+                "Returning first surface."
+            )
+
+        surf = xtgeo.surface_from_file(surf_fns[0])
+
+        LOGGER.debug(f"Loaded simulated surface in: {timer.elapsed_s():.2f}s")
+
+        return surf
+
+    def _get_observed_surface(
+        self, address: ObservedSurfaceAddress
+    ) -> Optional[xtgeo.RegularSurface]:
+        """Returns an xtgeo surface instance for an observed surface"""
+
+        timer = PerfTimer()
+
+        surf_fns: List[str] = self._locate_observed_surfaces(
+            attribute=address.attribute,
+            name=address.name,
+            datestr=address.datestr if address.datestr is not None else "",
+        )
+
+        if len(surf_fns) == 0:
+            LOGGER.warning(f"No observed surface found for {address}")
+            return None
+        if len(surf_fns) > 1:
+            LOGGER.warning(
+                f"Multiple observed surfaces found for: {address}. "
+                "Returning first surface."
+            )
+
+        surf = xtgeo.surface_from_file(surf_fns[0])
+
+        LOGGER.debug(f"Loaded observed surface in: {timer.elapsed_s():.2f}s")
+
+        return surf
+
+    def _locate_simulated_surfaces(
+        self, attribute: str, name: str, datestr: str, realizations: List[int]
+    ) -> List[str]:
+        """Returns list of file names matching the specified filter criteria"""
+        df = self._inventory_df.loc[
+            self._inventory_df[Col.TYPE] == SurfaceType.SIMULATED
+        ]
+
+        df = df.loc[
+            (df[Col.ATTRIBUTE] == attribute)
+            & (df[Col.NAME] == name)
+            & (df[Col.DATESTR] == datestr)
+            & (df[Col.REAL].isin(realizations))
+        ]
+
+        return [self._provider_dir / rel_path for rel_path in df[Col.REL_PATH]]
+
+    def _locate_observed_surfaces(
+        self, attribute: str, name: str, datestr: str
+    ) -> List[str]:
+        """Returns file names of observed surfaces matching the criteria"""
+        df = self._inventory_df.loc[
+            self._inventory_df[Col.TYPE] == SurfaceType.OBSERVED
+        ]
+
+        df = df.loc[
+            (df[Col.ATTRIBUTE] == attribute)
+            & (df[Col.NAME] == name)
+            & (df[Col.DATESTR] == datestr)
+        ]
+
+        return [self._provider_dir / rel_path for rel_path in df[Col.REL_PATH]]
+
+
+def _find_observed_surfaces_corresponding_to_simulated(
+    obs_surfaces: List[SurfaceFileInfo], sim_surfaces: List[SurfaceFileInfo]
+) -> List[SurfaceFileInfo]:
+    """Returns only the observed surfaces that have a matching simulated surface"""
+
+    unique_sim_surf_ids: Set[str] = set()
+    for surfinfo in sim_surfaces:
+        surf_id = f"{surfinfo.name}_{surfinfo.attribute}_{surfinfo.datestr}"
+        unique_sim_surf_ids.add(surf_id)
+
+    valid_obs_surfaces: List[SurfaceFileInfo] = []
+    for surfinfo in obs_surfaces:
+        surf_id = f"{surfinfo.name}_{surfinfo.attribute}_{surfinfo.datestr}"
+        if surf_id in unique_sim_surf_ids:
+            valid_obs_surfaces.append(surfinfo)
+        else:
+            LOGGER.debug(
+                f"Discarding observed surface without matching simulation surface {surfinfo.path}"
+            )
+
+    return valid_obs_surfaces
+
+
+def _copy_surfaces_into_provider_dir(
+    original_path_arr: List[str],
+    rel_path_arr: List[str],
+    provider_dir: Path,
+) -> None:
+    for src_path, dst_rel_path in zip(original_path_arr, rel_path_arr):
+        # LOGGER.debug(f"copying surface from: {src_path}")
+        shutil.copyfile(src_path, provider_dir / dst_rel_path)
+
+    # full_dst_path_arr = [storage_dir / dst_rel_path for dst_rel_path in store_path_arr]
+    # with ProcessPoolExecutor() as executor:
+    #     executor.map(shutil.copyfile, original_path_arr, full_dst_path_arr)
+
+
+def _compose_rel_sim_surf_path(
+    real: int,
+    attribute: str,
+    name: str,
+    datestr: Optional[str],
+    extension: str,
+) -> Path:
+    """Compose path to simulated surface file, relative to provider's directory"""
+    if datestr:
+        fname = f"{real}--{name}--{attribute}--{datestr}{extension}"
+    else:
+        fname = f"{real}--{name}--{attribute}{extension}"
+    return Path(REL_SIM_DIR) / fname
+
+
+def _compose_rel_obs_surf_path(
+    attribute: str,
+    name: str,
+    datestr: Optional[str],
+    extension: str,
+) -> Path:
+    """Compose path to observed surface file, relative to provider's directory"""
+    if datestr:
+        fname = f"{name}--{attribute}--{datestr}{extension}"
+    else:
+        fname = f"{name}--{attribute}{extension}"
+    return Path(REL_OBS_DIR) / fname
+
+
+def _calc_statistic_across_surfaces(
+    statistic: SurfaceStatistic, surfaces: xtgeo.Surfaces
+) -> xtgeo.RegularSurface:
+    """Calculates a statistical surface from a list of Xtgeo surface instances"""
+
+    stat_surf: xtgeo.RegularSurface
+
+    if statistic == SurfaceStatistic.MEAN:
+        stat_surf = surfaces.apply(np.mean, axis=0)
+    elif statistic == SurfaceStatistic.STDDEV:
+        stat_surf = surfaces.apply(np.std, axis=0)
+    elif statistic == SurfaceStatistic.MINIMUM:
+        stat_surf = surfaces.apply(np.min, axis=0)
+    elif statistic == SurfaceStatistic.MAXIMUM:
+        stat_surf = surfaces.apply(np.max, axis=0)
+    elif statistic == SurfaceStatistic.P10:
+        stat_surf = surfaces.apply(np.percentile, 10, axis=0)
+    elif statistic == SurfaceStatistic.P90:
+        stat_surf = surfaces.apply(np.percentile, 90, axis=0)
+
+    return stat_surf
diff --git a/webviz_subsurface/_providers/ensemble_surface_provider/_stat_surf_cache.py b/webviz_subsurface/_providers/ensemble_surface_provider/_stat_surf_cache.py
new file mode 100644
index 0000000000..63dc7b14c5
--- /dev/null
+++ b/webviz_subsurface/_providers/ensemble_surface_provider/_stat_surf_cache.py
@@ -0,0 +1,93 @@
+import datetime
+import hashlib
+import logging
+import os
+import pickle  # nosec
+import uuid
+from pathlib import Path
+from typing import Optional
+
+import xtgeo
+
+from .ensemble_surface_provider import StatisticalSurfaceAddress
+
+LOGGER = logging.getLogger(__name__)
+
+# For some obscure reason, reading of a non-existent irap file segfaults,
+# so use asymmetric file formats for read and write
+FILE_FORMAT_WRITE = "irap_binary"
+FILE_FORMAT_READ = "guess"
+FILE_EXTENSION = ".gri"
+
+# FILE_FORMAT_WRITE = "xtgregsurf"
+# FILE_FORMAT_READ = FILE_FORMAT_WRITE
+# FILE_EXTENSION = ".xtgregsurf"
+
+
+class StatSurfCache:
+    def __init__(self, cache_dir: Path) -> None:
+        self.cache_dir = cache_dir
+
+        self.cache_dir.mkdir(parents=True, exist_ok=True)
+        placeholder_file = self.cache_dir / "placeholder.txt"
+        placeholder_file.write_text(
+            f"Placeholder -- {datetime.datetime.now()} -- {os.getpid()}"
+        )
+
+    def fetch(
+        self, address: StatisticalSurfaceAddress
+    ) -> Optional[xtgeo.RegularSurface]:
+
+        full_surf_path = self.cache_dir / _compose_stat_surf_file_name(
+            address, FILE_EXTENSION
+        )
+
+        try:
+            surf = xtgeo.surface_from_file(full_surf_path, fformat=FILE_FORMAT_READ)
+            return surf
+        # pylint: disable=bare-except
+        except:
+            return None
+
+    def store(
+        self, address: StatisticalSurfaceAddress, surface: xtgeo.RegularSurface
+    ) -> None:
+
+        surf_fn = _compose_stat_surf_file_name(address, FILE_EXTENSION)
+        full_surf_path = self.cache_dir / surf_fn
+
+        # Try to go via a temporary file which we don't rename until writing is
+        # finished, to make the cache writing more concurrency-friendly.
+        # One problem here is that we don't control the file handle (xtgeo does) so can't
+        # enforce flush and sync of the file to disk before the rename :-(
+        # Still, we probably need a more robust way of sharing the cached surfaces...
+        tmp_surf_path = self.cache_dir / (surf_fn + f"__{uuid.uuid4().hex}.tmp")
+        try:
+            surface.to_file(tmp_surf_path, fformat=FILE_FORMAT_WRITE)
+            os.replace(tmp_surf_path, full_surf_path)
+        # pylint: disable=bare-except
+        except:
+            os.remove(tmp_surf_path)
+
+        # surface.to_file(full_surf_path, fformat=FILE_FORMAT_WRITE)
+
+
+def _compose_stat_surf_file_name(
+    address: StatisticalSurfaceAddress, extension: str
+) -> str:
+
+    # Should probably sort the realization list
+    # Also, what about duplicates
+    # And further, handling of missing realizations...
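+    # For illustration, the composed name has the shape
+    #   <statistic>--<name>--<attribute>--<datestr>--<md5 of pickled realizations><extension>
+    # so one statistical surface address maps to exactly one cache file.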
+ + pickled = pickle.dumps(address.realizations, pickle.HIGHEST_PROTOCOL) + real_hash = hashlib.md5(pickled).hexdigest() # nosec + return "--".join( + [ + f"{address.statistic}", + f"{address.name}", + f"{address.attribute}", + f"{address.datestr}", + f"{real_hash}{extension}", + ] + ) diff --git a/webviz_subsurface/_providers/ensemble_surface_provider/_surface_discovery.py b/webviz_subsurface/_providers/ensemble_surface_provider/_surface_discovery.py new file mode 100644 index 0000000000..a9ff8a3e13 --- /dev/null +++ b/webviz_subsurface/_providers/ensemble_surface_provider/_surface_discovery.py @@ -0,0 +1,130 @@ +import glob +import os +import re +from dataclasses import dataclass +from pathlib import Path +from typing import Dict, List, Optional + +from fmu.ensemble import ScratchEnsemble + + +@dataclass(frozen=True) +class SurfaceFileInfo: + path: str + real: int + name: str + attribute: str + datestr: Optional[str] + + +def _discover_ensemble_realizations_fmu(ens_path: str) -> Dict[int, str]: + """Returns dict indexed by realization number and with runpath as value""" + scratch_ensemble = ScratchEnsemble("dummyEnsembleName", paths=ens_path).filter("OK") + real_dict = {i: r.runpath() for i, r in scratch_ensemble.realizations.items()} + return real_dict + + +def _discover_ensemble_realizations(ens_path: str) -> Dict[int, str]: + # Much faster than FMU impl above, but is it risky? + # Do we need to check for OK-file? + real_dict: Dict[int, str] = {} + + realidxregexp = re.compile(r"realization-(\d+)") + globbed_real_dirs = sorted(glob.glob(str(ens_path))) + for real_dir in globbed_real_dirs: + realnum: Optional[int] = None + for path_comp in reversed(real_dir.split(os.path.sep)): + realmatch = re.match(realidxregexp, path_comp) + if realmatch: + realnum = int(realmatch.group(1)) + break + + if realnum is not None: + real_dict[realnum] = real_dir + + return real_dict + + +@dataclass(frozen=True) +class SurfaceIdent: + name: str + attribute: str + datestr: Optional[str] + + +def _surface_ident_from_filename(filename: str) -> Optional[SurfaceIdent]: + """Split the stem part of the surface filename into surface name, attribute and + optionally date part""" + delimiter: str = "--" + parts = Path(filename).stem.split(delimiter) + if len(parts) < 2: + return None + + return SurfaceIdent( + name=parts[0], attribute=parts[1], datestr=parts[2] if len(parts) >= 3 else None + ) + + +def discover_per_realization_surface_files( + ens_path: str, attribute_filter: List[str] = None +) -> List[SurfaceFileInfo]: + rel_surface_folder: str = "share/results/maps" + suffix: str = "*.gri" + + surface_files: List[SurfaceFileInfo] = [] + + real_dict = _discover_ensemble_realizations_fmu(ens_path) + for realnum, runpath in sorted(real_dict.items()): + globbed_filenames = glob.glob(str(Path(runpath) / rel_surface_folder / suffix)) + for surf_filename in sorted(globbed_filenames): + surf_ident = _surface_ident_from_filename(surf_filename) + if surf_ident: + if ( + attribute_filter is not None + and surf_ident.attribute not in attribute_filter + ): + continue + surface_files.append( + SurfaceFileInfo( + path=surf_filename, + real=realnum, + name=surf_ident.name, + attribute=surf_ident.attribute, + datestr=surf_ident.datestr, + ) + ) + + return surface_files + + +def discover_observed_surface_files( + ens_path: str, attribute_filter: List[str] = None +) -> List[SurfaceFileInfo]: + observed_surface_folder: str = "share/observations/maps" + suffix: str = "*.gri" + + surface_files: List[SurfaceFileInfo] = [] + + 
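+    # Observed surfaces live above the realization folders, so derive the
+    # ensemble root directory by stripping everything from "realization" onwards.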
ens_root_path = ens_path.split("realization")[0] + globbed_filenames = glob.glob( + str(Path(ens_root_path) / observed_surface_folder / suffix) + ) + for surf_filename in sorted(globbed_filenames): + surf_ident = _surface_ident_from_filename(surf_filename) + if surf_ident: + if ( + attribute_filter is not None + and surf_ident.attribute not in attribute_filter + ): + continue + surface_files.append( + SurfaceFileInfo( + path=surf_filename, + real=-1, + name=surf_ident.name, + attribute=surf_ident.attribute, + datestr=surf_ident.datestr, + ) + ) + + return surface_files diff --git a/webviz_subsurface/_providers/ensemble_surface_provider/_surface_to_image.py b/webviz_subsurface/_providers/ensemble_surface_provider/_surface_to_image.py new file mode 100644 index 0000000000..6bb52ed110 --- /dev/null +++ b/webviz_subsurface/_providers/ensemble_surface_provider/_surface_to_image.py @@ -0,0 +1,145 @@ +import io +import logging + +import numpy as np +import xtgeo +from PIL import Image + +from webviz_subsurface._utils.perf_timer import PerfTimer + +# !!!!!!! +# This is basically a copy of surface_to_rgba() from _ensemble_surface_plugin._make_rgba.py +# with a slight change in signature + +LOGGER = logging.getLogger(__name__) + + +def surface_to_png_bytes(surface: xtgeo.RegularSurface) -> bytes: + """Converts a xtgeo Surface to RGBA array. Used to set the image when used in a + DeckGLMap component""" + + timer = PerfTimer() + + # surface.unrotate() + LOGGER.debug(f"unrotate: {timer.lap_s():.2f}s") + + surface.fill(np.nan) + values = surface.values + values = np.flip(values.transpose(), axis=0) + + # If all values are masked set to zero + if values.mask.all(): + values = np.zeros(values.shape) + + LOGGER.debug(f"fill/flip/mask: {timer.lap_s():.2f}s") + + min_val = np.nanmin(surface.values) + max_val = np.nanmax(surface.values) + if min_val == 0.0 and max_val == 0.0: + scale_factor = 1.0 + else: + scale_factor = (256 * 256 * 256 - 1) / (max_val - min_val) + + LOGGER.debug(f"minmax: {timer.lap_s():.2f}s") + + z_array = (values.copy() - min_val) * scale_factor + z_array = z_array.copy() + shape = z_array.shape + + LOGGER.debug(f"scale and copy: {timer.lap_s():.2f}s") + + z_array = np.repeat(z_array, 4) # This will flatten the array + + z_array[0::4][np.isnan(z_array[0::4])] = 0 # Red + z_array[1::4][np.isnan(z_array[1::4])] = 0 # Green + z_array[2::4][np.isnan(z_array[2::4])] = 0 # Blue + + z_array[0::4] = np.floor((z_array[0::4] / (256 * 256)) % 256) # Red + z_array[1::4] = np.floor((z_array[1::4] / 256) % 256) # Green + z_array[2::4] = np.floor(z_array[2::4] % 256) # Blue + z_array[3::4] = np.where(np.isnan(z_array[3::4]), 0, 255) # Alpha + + LOGGER.debug(f"bytestuff: {timer.lap_s():.2f}s") + + # Back to 2d shape + 1 dimension for the rgba values. 
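+    # Note on the encoding: the elevation ends up as a 24-bit fixed-point value
+    # split across the R, G and B channels. A client that knows min_val and
+    # scale_factor can recover it as (derived from the encoding above, not part
+    # of the original code):
+    #   value = (r * 256 * 256 + g * 256 + b) / scale_factor + min_val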
+ + z_array = z_array.reshape((shape[0], shape[1], 4)) + + image = Image.fromarray(np.uint8(z_array), "RGBA") + LOGGER.debug(f"create: {timer.lap_s():.2f}s") + + byte_io = io.BytesIO() + # Huge speed benefit from reducing compression level + image.save(byte_io, format="png", compress_level=1) + LOGGER.debug(f"save png to bytes: {timer.lap_s():.2f}s") + + byte_io.seek(0) + ret_bytes = byte_io.read() + LOGGER.debug(f"read bytes: {timer.lap_s():.2f}s") + + LOGGER.debug(f"Total time: {timer.elapsed_s():.2f}s") + + return ret_bytes + + +# pylint: disable=too-many-locals +def surface_to_png_bytes_optimized(surface: xtgeo.RegularSurface) -> bytes: + + timer = PerfTimer() + # Note that returned values array is a 2d masked array + surf_values_ma: np.ma.MaskedArray = surface.values + + surf_values_ma = np.flip(surf_values_ma.transpose(), axis=0) # type: ignore + LOGGER.debug(f"flip/transpose: {timer.lap_s():.2f}s") + + # This will be a flat bool array with true for all valid entries + valid_arr = np.invert(np.ma.getmaskarray(surf_values_ma).flatten()) + LOGGER.debug(f"get valid_arr: {timer.lap_s():.2f}s") + + shape = surf_values_ma.shape + min_val = surf_values_ma.min() + max_val = surf_values_ma.max() + LOGGER.debug(f"minmax: {timer.lap_s():.2f}s") + + if min_val == 0.0 and max_val == 0.0: + scale_factor = 1.0 + else: + scale_factor = (256 * 256 * 256 - 1) / (max_val - min_val) + + # Scale the values into the wanted range + scaled_values_ma = (surf_values_ma - min_val) * scale_factor + + # Get a NON-masked array with all undefined entries filled with 0 + scaled_values = scaled_values_ma.filled(0) + + LOGGER.debug(f"scale and fill: {timer.lap_s():.2f}s") + + val_arr = scaled_values.astype(np.uint32).ravel() + LOGGER.debug(f"cast and flatten: {timer.lap_s():.2f}s") + + val = val_arr.view(dtype=np.uint8) + rgba_arr = np.empty(4 * len(val_arr), dtype=np.uint8) + rgba_arr[0::4] = val[2::4] + rgba_arr[1::4] = val[1::4] + rgba_arr[2::4] = val[0::4] + rgba_arr[3::4] = np.multiply(valid_arr, 255).astype(np.uint8) + + LOGGER.debug(f"rgba combine: {timer.lap_s():.2f}s") + + # Back to 2d shape + 1 dimension for the rgba values. 
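+    # Same packed layout as in surface_to_png_bytes above: on a little-endian
+    # platform (which this uint8 view relies on), byte 2 (bits 16-23) of each
+    # scaled value lands in R, byte 1 in G and byte 0 in B.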
+ rgba_arr_reshaped = rgba_arr.reshape((shape[0], shape[1], 4)) + + image = Image.fromarray(rgba_arr_reshaped, "RGBA") + LOGGER.debug(f"create: {timer.lap_s():.2f}s") + + byte_io = io.BytesIO() + image.save(byte_io, format="png", compress_level=1) + LOGGER.debug(f"save png to bytes: {timer.lap_s():.2f}s") + + byte_io.seek(0) + ret_bytes = byte_io.read() + LOGGER.debug(f"read bytes: {timer.lap_s():.2f}s") + + LOGGER.debug(f"Total time: {timer.elapsed_s():.2f}s") + + return ret_bytes diff --git a/webviz_subsurface/_providers/ensemble_surface_provider/dev_experiments.py b/webviz_subsurface/_providers/ensemble_surface_provider/dev_experiments.py new file mode 100644 index 0000000000..863c6b372e --- /dev/null +++ b/webviz_subsurface/_providers/ensemble_surface_provider/dev_experiments.py @@ -0,0 +1,120 @@ +import logging +from pathlib import Path + +from .ensemble_surface_provider import ( + EnsembleSurfaceProvider, + ObservedSurfaceAddress, + SimulatedSurfaceAddress, + StatisticalSurfaceAddress, + SurfaceStatistic, +) +from .ensemble_surface_provider_factory import EnsembleSurfaceProviderFactory + + +def main() -> None: + print() + print("## Running EnsembleSurfaceProvider experiments") + print("## =================================================") + + logging.basicConfig( + level=logging.WARNING, + format="%(asctime)s %(levelname)-3s [%(name)s]: %(message)s", + ) + logging.getLogger("webviz_subsurface").setLevel(level=logging.DEBUG) + + root_storage_dir = Path("/home/sigurdp/buf/webviz_storage_dir") + + # fmt:off + # ensemble_path = "../webviz-subsurface-testdata/01_drogon_ahm/realization-*/iter-0" + ensemble_path = "../hk-webviz-subsurface-testdata/01_drogon_ahm/realization-*/iter-0" + # fmt:on + + # WEBVIZ_FACTORY_REGISTRY.initialize( + # WebvizInstanceInfo(WebvizRunMode.NON_PORTABLE, root_storage_dir), None + # ) + # factory = EnsembleSurfaceProviderFactory.instance() + + factory = EnsembleSurfaceProviderFactory( + root_storage_dir, allow_storage_writes=True + ) + + provider: EnsembleSurfaceProvider = factory.create_from_ensemble_surface_files( + ensemble_path + ) + + all_attributes = provider.attributes() + print() + print("all_attributes:") + print("------------------------") + print(*all_attributes, sep="\n") + + print() + print("attributes for names:") + print("------------------------") + for attr in all_attributes: + print(f"attr={attr}:") + print(f" surf_names={provider.surface_names_for_attribute(attr)}") + print(f" surf_dates={provider.surface_dates_for_attribute(attr)}") + + print() + all_realizations = provider.realizations() + print(f"all_realizations={all_realizations}") + + surf = provider.get_surface( + SimulatedSurfaceAddress( + attribute="oilthickness", + name="therys", + datestr="20200701_20180101", + realization=1, + ) + ) + print(surf) + + surf = provider.get_surface( + ObservedSurfaceAddress( + attribute="amplitude_mean", + name="basevolantis", + datestr="20180701_20180101", + ) + ) + print(surf) + + # surf = provider.get_surface( + # StatisticalSurfaceAddress( + # attribute="amplitude_mean", + # name="basevolantis", + # datestr="20180701_20180101", + # statistic=SurfaceStatistic.P10, + # realizations=[0, 1], + # ) + # ) + # print(surf) + + # surf = provider.get_surface( + # StatisticalSurfaceAddress( + # attribute="amplitude_mean", + # name="basevolantis", + # datestr="20180701_20180101", + # statistic=SurfaceStatistic.P10, + # realizations=all_realizations, + # ) + # ) + # print(surf) + + surf = provider.get_surface( + StatisticalSurfaceAddress( + 
attribute="ds_extract_postprocess-refined8", + name="topvolantis", + datestr=None, + statistic=SurfaceStatistic.P10, + realizations=all_realizations, + ) + ) + print(surf) + + +# Running: +# python -m webviz_subsurface._providers.ensemble_surface_provider.dev_experiments +# ------------------------------------------------------------------------- +if __name__ == "__main__": + main() diff --git a/webviz_subsurface/_providers/ensemble_surface_provider/dev_surface_server_lazy.py b/webviz_subsurface/_providers/ensemble_surface_provider/dev_surface_server_lazy.py new file mode 100644 index 0000000000..8cf7ad7cee --- /dev/null +++ b/webviz_subsurface/_providers/ensemble_surface_provider/dev_surface_server_lazy.py @@ -0,0 +1,199 @@ +# pylint: skip-file +import io +import json +import logging +from dataclasses import asdict +from typing import Dict, Optional, Union +from urllib.parse import quote_plus, unquote_plus +from uuid import uuid4 + +import flask +import flask_caching +from dash import Dash + +from webviz_subsurface._utils.perf_timer import PerfTimer + +from ._surface_to_image import surface_to_png_bytes +from .ensemble_surface_provider import ( + EnsembleSurfaceProvider, + ObservedSurfaceAddress, + SimulatedSurfaceAddress, + StatisticalSurfaceAddress, +) + +LOGGER = logging.getLogger(__name__) +ROOT_URL_PATH = "/SurfaceServerLazy" + + +class SurfaceServerLazy: + def __init__(self, app: Dash) -> None: + self._dash_app: Dash = app + self._id_to_provider_dict: Dict[str, EnsembleSurfaceProvider] = {} + + self._image_cache = None + # self._image_cache = flask_caching.Cache( + # config={ + # "CACHE_TYPE": "RedisCache", + # "CACHE_KEY_PREFIX": f"SurfaceServer_{uuid4()}", + # "CACHE_REDIS_HOST": "localhost", + # "CACHE_REDIS_PORT": 6379, + # "CACHE_REDIS_URL": "redis://localhost:6379", + # } + # ) + # self._image_cache = flask_caching.Cache( + # config={ + # "CACHE_TYPE": "FileSystemCache", + # "CACHE_DIR": "/home/sigurdp/buf/flask_filesys_cache", + # } + # ) + # self._image_cache.init_app(app.server) + + @staticmethod + def instance(app: Dash) -> "SurfaceServerLazy": + global SURFACE_SERVER_INSTANCE + if not SURFACE_SERVER_INSTANCE: + LOGGER.debug("Initializing SurfaceServerLazy instance") + SURFACE_SERVER_INSTANCE = SurfaceServerLazy(app) + + return SURFACE_SERVER_INSTANCE + + def add_provider(self, provider: EnsembleSurfaceProvider) -> None: + # Setup the url rule (our route) when the first provider is added + if not self._id_to_provider_dict: + self._setup_url_rule() + + provider_id = provider.provider_id() + LOGGER.debug(f"Adding provider with id={provider_id}") + + existing_provider = self._id_to_provider_dict.get(provider_id) + if existing_provider: + # Issue a warning if there already is a provider registered with the same + # id AND if the actual provider instance is different. + # This should not be a problem, but will happen until the provider factory + # gets caching. 
+            if existing_provider is not provider:
+                LOGGER.warning(
+                    f"Provider with id={provider_id} ignored, the id is already present"
+                )
+                return
+
+        self._id_to_provider_dict[provider_id] = provider
+
+        # routes = []
+        # for rule in self._dash_app.server.url_map.iter_rules():
+        #     routes.append("%s" % rule)
+
+        # for route in routes:
+        #     print(route)
+
+    def encode_partial_url(
+        self,
+        provider_id: str,
+        address: Union[
+            StatisticalSurfaceAddress, SimulatedSurfaceAddress, ObservedSurfaceAddress
+        ],
+    ) -> str:
+        if provider_id not in self._id_to_provider_dict:
+            raise ValueError("Could not find provider")
+
+        if isinstance(address, StatisticalSurfaceAddress):
+            addr_type_str = "sta"
+        elif isinstance(address, SimulatedSurfaceAddress):
+            addr_type_str = "sim"
+        elif isinstance(address, ObservedSurfaceAddress):
+            addr_type_str = "obs"
+
+        surf_address_str = quote_plus(json.dumps(asdict(address)))
+
+        url_path: str = (
+            f"{ROOT_URL_PATH}/{provider_id}/{addr_type_str}/{surf_address_str}"
+        )
+        return url_path
+
+    def _setup_url_rule(self) -> None:
+        @self._dash_app.server.route(
+            ROOT_URL_PATH + "/<provider_id>/<addr_type_str>/<surf_address_str>"
+        )
+        def _handle_request(
+            provider_id: str, addr_type_str: str, surf_address_str: str
+        ) -> flask.Response:
+            LOGGER.debug(
+                f"Handling request: "
+                f"provider_id={provider_id} "
+                f"addr_type_str={addr_type_str} "
+                f"surf_address_str={surf_address_str}"
+            )
+
+            timer = PerfTimer()
+
+            try:
+                provider = self._id_to_provider_dict[provider_id]
+                surf_address_dict = json.loads(unquote_plus(surf_address_str))
+                address: Union[
+                    StatisticalSurfaceAddress,
+                    SimulatedSurfaceAddress,
+                    ObservedSurfaceAddress,
+                ]
+                if addr_type_str == "sta":
+                    address = StatisticalSurfaceAddress(**surf_address_dict)
+                if addr_type_str == "sim":
+                    address = SimulatedSurfaceAddress(**surf_address_dict)
+                if addr_type_str == "obs":
+                    address = ObservedSurfaceAddress(**surf_address_dict)
+            except:
+                LOGGER.error("Error decoding surface address")
+                flask.abort(404)
+
+            if self._image_cache:
+                img_cache_key = (
+                    f"provider_id={provider_id} "
+                    f"addr_type={addr_type_str} address={surf_address_str}"
+                )
+                LOGGER.debug(
+                    f"Looking for image in cache (key={img_cache_key}, "
+                    f"cache_type={self._image_cache.config['CACHE_TYPE']})"
+                )
+                cached_img_bytes = self._image_cache.get(img_cache_key)
+                if cached_img_bytes:
+                    response = flask.send_file(
+                        io.BytesIO(cached_img_bytes), mimetype="image/png"
+                    )
+                    LOGGER.debug(
+                        f"Request handled from image cache in: {timer.elapsed_s():.2f}s"
+                    )
+                    return response
+
+            LOGGER.debug("Getting surface from provider...")
+            timer.lap_s()
+            surface = provider.get_surface(address)
+            if not surface:
+                LOGGER.error(f"Error getting surface for address: {address}")
+                flask.abort(404)
+            et_get_s = timer.lap_s()
+            LOGGER.debug(
+                f"Got surface (dimensions={surface.dimensions}, #cells={surface.ncol*surface.nrow})"
+            )
+
+            LOGGER.debug("Converting to PNG image...")
+            png_bytes: bytes = surface_to_png_bytes(surface)
+            LOGGER.debug(
+                f"Got PNG image, size={(len(png_bytes) / (1024 * 1024)):.2f}MB"
+            )
+            et_to_image_s = timer.lap_s()
+
+            LOGGER.debug("Sending image")
+            response = flask.send_file(io.BytesIO(png_bytes), mimetype="image/png")
+            et_send_s = timer.lap_s()
+
+            if self._image_cache and img_cache_key:
+                self._image_cache.add(img_cache_key, png_bytes)
+
+            LOGGER.debug(
+                f"Request handled in: {timer.elapsed_s():.2f}s ("
+                f"get={et_get_s:.2f}s, to_image={et_to_image_s:.2f}s, send={et_send_s:.2f}s)"
+            )
+
+            return response
+
+
+SURFACE_SERVER_INSTANCE: Optional[SurfaceServerLazy] = None
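Note for reviewers (not part of the diff): the lazy server above is wired up roughly as in this minimal sketch. The Dash app, storage directory and ensemble path are illustrative assumptions; the address values are taken from dev_experiments.py below.

    from pathlib import Path

    from dash import Dash

    from webviz_subsurface._providers.ensemble_surface_provider.dev_surface_server_lazy import (
        SurfaceServerLazy,
    )
    from webviz_subsurface._providers.ensemble_surface_provider.ensemble_surface_provider import (
        SimulatedSurfaceAddress,
    )
    from webviz_subsurface._providers.ensemble_surface_provider.ensemble_surface_provider_factory import (
        EnsembleSurfaceProviderFactory,
    )

    app = Dash(__name__)  # assumed: the running Dash app
    factory = EnsembleSurfaceProviderFactory(
        Path("/tmp/webviz_storage"), allow_storage_writes=True  # illustrative dir
    )
    provider = factory.create_from_ensemble_surface_files(
        "path/to/realization-*/iter-0"  # illustrative ensemble path
    )

    surf_server = SurfaceServerLazy.instance(app)
    surf_server.add_provider(provider)

    # The partial URL maps onto the Flask route above; the surface is only read
    # and converted to PNG when the browser requests it (hence "lazy").
    img_url = surf_server.encode_partial_url(
        provider_id=provider.provider_id(),
        address=SimulatedSurfaceAddress(
            attribute="oilthickness",
            name="therys",
            datestr="20200701_20180101",
            realization=1,
        ),
    )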
diff --git a/webviz_subsurface/_providers/ensemble_surface_provider/ensemble_surface_provider.py b/webviz_subsurface/_providers/ensemble_surface_provider/ensemble_surface_provider.py new file mode 100644 index 0000000000..d835f8a1dc --- /dev/null +++ b/webviz_subsurface/_providers/ensemble_surface_provider/ensemble_surface_provider.py @@ -0,0 +1,104 @@ +import abc +from dataclasses import dataclass +from enum import Enum +from typing import List, Optional, Union + +import xtgeo + + +class SurfaceStatistic(str, Enum): + MEAN = "Mean" + STDDEV = "StdDev" + MINIMUM = "Minimum" + MAXIMUM = "Maximum" + P10 = "P10" + P90 = "P90" + + +@dataclass(frozen=True) +class StatisticalSurfaceAddress: + """Specifies a unique statistical surface in an ensemble""" + + attribute: str + name: str + datestr: Optional[str] + statistic: SurfaceStatistic + realizations: List[int] + + +@dataclass(frozen=True) +class SimulatedSurfaceAddress: + """Specifies a unique simulated surface for a given ensemble realization""" + + attribute: str + name: str + datestr: Optional[str] + realization: int + + +@dataclass(frozen=True) +class ObservedSurfaceAddress: + """Represents a unique observed surface""" + + attribute: str + name: str + datestr: Optional[str] + + +# Type aliases used for signature readability +SurfaceAddress = Union[ + StatisticalSurfaceAddress, SimulatedSurfaceAddress, ObservedSurfaceAddress +] + +# Class provides data for ensemble surfaces +class EnsembleSurfaceProvider(abc.ABC): + @abc.abstractmethod + def provider_id(self) -> str: + """Returns string ID of the provider.""" + ... + + @abc.abstractmethod + def attributes(self) -> List[str]: + """Returns list of all available attributes.""" + ... + + @abc.abstractmethod + def surface_names_for_attribute(self, surface_attribute: str) -> List[str]: + """Returns list of all available surface names for a given attribute.""" + ... + + @abc.abstractmethod + def surface_dates_for_attribute( + self, surface_attribute: str + ) -> Optional[List[str]]: + """Returns list of all available surface dates for a given attribute.""" + ... + + @abc.abstractmethod + def realizations(self) -> List[int]: + """Returns list of all available realizations.""" + ... + + @abc.abstractmethod + def get_surface( + self, + address: SurfaceAddress, + ) -> Optional[xtgeo.RegularSurface]: + """Returns a surface for a given surface address""" + ... + + # @abc.abstractmethod + # def get_surface_bounds(self, surface: EnsembleSurfaceContext) -> List[float]: + # """Returns the bounds for a surface [xmin,ymin, xmax,ymax]""" + # ... + + # @abc.abstractmethod + # def get_surface_value_range(self, surface: EnsembleSurfaceContext) -> List[float]: + # """Returns the value range for a given surface context [zmin, zmax]""" + # ... + + # @abc.abstractmethod + # def get_surface_as_rgba(self, surface: EnsembleSurfaceContext) -> io.BytesIO: + # """Returns surface as a greyscale png RGBA with encoded elevation values + # in a bytestream""" + # ... 
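As a reading aid (not part of the change set): the three frozen address dataclasses correspond one-to-one to the address types handled by the servers above. A small sketch, assuming a `provider` instance from the factory in the next file; attribute/name/date strings are taken from dev_experiments.py.

    stat_addr = StatisticalSurfaceAddress(
        attribute="amplitude_mean",
        name="basevolantis",
        datestr="20180701_20180101",
        statistic=SurfaceStatistic.MEAN,
        realizations=[0, 1],
    )
    sim_addr = SimulatedSurfaceAddress(
        attribute="oilthickness",
        name="therys",
        datestr="20200701_20180101",
        realization=1,
    )
    obs_addr = ObservedSurfaceAddress(
        attribute="amplitude_mean",
        name="basevolantis",
        datestr="20180701_20180101",
    )

    # All three address types go through the same entry point:
    surf = provider.get_surface(sim_addr)  # -> xtgeo.RegularSurface or None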
diff --git a/webviz_subsurface/_providers/ensemble_surface_provider/ensemble_surface_provider_factory.py b/webviz_subsurface/_providers/ensemble_surface_provider/ensemble_surface_provider_factory.py new file mode 100644 index 0000000000..e18d2ebbf7 --- /dev/null +++ b/webviz_subsurface/_providers/ensemble_surface_provider/ensemble_surface_provider_factory.py @@ -0,0 +1,105 @@ +import hashlib +import logging +import os +from pathlib import Path +from typing import List + +from webviz_config.webviz_factory import WebvizFactory +from webviz_config.webviz_factory_registry import WEBVIZ_FACTORY_REGISTRY +from webviz_config.webviz_instance_info import WebvizRunMode + +from webviz_subsurface._utils.perf_timer import PerfTimer + +from ._provider_impl_file import ProviderImplFile +from ._surface_discovery import ( + discover_observed_surface_files, + discover_per_realization_surface_files, +) +from .ensemble_surface_provider import EnsembleSurfaceProvider + +LOGGER = logging.getLogger(__name__) + + +class EnsembleSurfaceProviderFactory(WebvizFactory): + def __init__(self, root_storage_folder: Path, allow_storage_writes: bool) -> None: + self._storage_dir = Path(root_storage_folder) / __name__ + self._allow_storage_writes = allow_storage_writes + + LOGGER.info( + f"EnsembleSurfaceProviderFactory init: storage_dir={self._storage_dir}" + ) + + if self._allow_storage_writes: + os.makedirs(self._storage_dir, exist_ok=True) + + @staticmethod + def instance() -> "EnsembleSurfaceProviderFactory": + """Static method to access the singleton instance of the factory.""" + + factory = WEBVIZ_FACTORY_REGISTRY.get_factory(EnsembleSurfaceProviderFactory) + if not factory: + app_instance_info = WEBVIZ_FACTORY_REGISTRY.app_instance_info + storage_folder = app_instance_info.storage_folder + allow_writes = app_instance_info.run_mode != WebvizRunMode.PORTABLE + + factory = EnsembleSurfaceProviderFactory(storage_folder, allow_writes) + + # Store the factory object in the global factory registry + WEBVIZ_FACTORY_REGISTRY.set_factory(EnsembleSurfaceProviderFactory, factory) + + return factory + + def create_from_ensemble_surface_files( + self, ens_path: str, attribute_filter: List[str] = None + ) -> EnsembleSurfaceProvider: + timer = PerfTimer() + string_to_hash = ( + f"{ens_path}" + if attribute_filter is None + else f"{ens_path}_{'_'.join([str(attr) for attr in attribute_filter])}" + ) + storage_key = f"ens__{_make_hash_string(string_to_hash)}" + provider = ProviderImplFile.from_backing_store(self._storage_dir, storage_key) + if provider: + LOGGER.info( + f"Loaded surface provider from backing store in {timer.elapsed_s():.2f}s (" + f"ens_path={ens_path})" + ) + return provider + + # We can only import data from data source if storage writes are allowed + if not self._allow_storage_writes: + raise ValueError(f"Failed to load surface provider for {ens_path}") + + LOGGER.info(f"Importing/copying surface data for: {ens_path}") + + timer.lap_s() + sim_surface_files = discover_per_realization_surface_files( + ens_path, attribute_filter + ) + obs_surface_files = discover_observed_surface_files(ens_path, attribute_filter) + et_discover_s = timer.lap_s() + + ProviderImplFile.write_backing_store( + self._storage_dir, + storage_key, + sim_surfaces=sim_surface_files, + obs_surfaces=obs_surface_files, + ) + et_write_s = timer.lap_s() + + provider = ProviderImplFile.from_backing_store(self._storage_dir, storage_key) + if not provider: + raise ValueError(f"Failed to load/create surface provider for {ens_path}") + + LOGGER.info( + 
f"Saved surface provider to backing store in {timer.elapsed_s():.2f}s (" + f"discover={et_discover_s:.2f}s, write={et_write_s:.2f}s, ens_path={ens_path})" + ) + + return provider + + +def _make_hash_string(string_to_hash: str) -> str: + # There is no security risk here and chances of collision should be very slim + return hashlib.md5(string_to_hash.encode()).hexdigest() # nosec diff --git a/webviz_subsurface/_providers/ensemble_surface_provider/surface_server.py b/webviz_subsurface/_providers/ensemble_surface_provider/surface_server.py new file mode 100644 index 0000000000..3b3f414957 --- /dev/null +++ b/webviz_subsurface/_providers/ensemble_surface_provider/surface_server.py @@ -0,0 +1,300 @@ +import hashlib +import io +import json +import logging +import math +from dataclasses import asdict, dataclass +from typing import List, Optional, Tuple, Union +from urllib.parse import quote +from uuid import uuid4 + +import flask +import flask_caching +import xtgeo +from dash import Dash +from webviz_config.webviz_instance_info import WEBVIZ_INSTANCE_INFO + +from webviz_subsurface._utils.perf_timer import PerfTimer + +from ._surface_to_image import surface_to_png_bytes_optimized +from .ensemble_surface_provider import ( + ObservedSurfaceAddress, + SimulatedSurfaceAddress, + StatisticalSurfaceAddress, + SurfaceAddress, +) + +LOGGER = logging.getLogger(__name__) + +_ROOT_URL_PATH = "/SurfaceServer" + +_SURFACE_SERVER_INSTANCE: Optional["SurfaceServer"] = None + + +@dataclass(frozen=True) +class QualifiedSurfaceAddress: + provider_id: str + address: SurfaceAddress + + +@dataclass(frozen=True) +class QualifiedDiffSurfaceAddress: + provider_id_a: str + address_a: SurfaceAddress + provider_id_b: str + address_b: SurfaceAddress + + +@dataclass(frozen=True) +class SurfaceMeta: + x_min: float + x_max: float + y_min: float + y_max: float + val_min: float + val_max: float + deckgl_bounds: List[float] + deckgl_rot_deg: float # Around upper left corner + + +class SurfaceServer: + def __init__(self, app: Dash) -> None: + cache_dir = ( + WEBVIZ_INSTANCE_INFO.storage_folder / f"SurfaceServer_filecache_{uuid4()}" + ) + LOGGER.debug(f"Setting up file cache in: {cache_dir}") + self._image_cache = flask_caching.Cache( + config={ + "CACHE_TYPE": "FileSystemCache", + "CACHE_DIR": cache_dir, + "CACHE_DEFAULT_TIMEOUT": 0, + } + ) + self._image_cache.init_app(app.server) + + self._setup_url_rule(app) + + @staticmethod + def instance(app: Dash) -> "SurfaceServer": + # pylint: disable=global-statement + global _SURFACE_SERVER_INSTANCE + if not _SURFACE_SERVER_INSTANCE: + LOGGER.debug("Initializing SurfaceServer instance") + _SURFACE_SERVER_INSTANCE = SurfaceServer(app) + + return _SURFACE_SERVER_INSTANCE + + def publish_surface( + self, + qualified_address: Union[QualifiedSurfaceAddress, QualifiedDiffSurfaceAddress], + surface: xtgeo.RegularSurface, + ) -> None: + timer = PerfTimer() + + if isinstance(qualified_address, QualifiedSurfaceAddress): + base_cache_key = _address_to_str( + qualified_address.provider_id, qualified_address.address + ) + else: + base_cache_key = _diff_address_to_str( + qualified_address.provider_id_a, + qualified_address.address_a, + qualified_address.provider_id_b, + qualified_address.address_b, + ) + + LOGGER.debug( + f"Publishing surface (dim={surface.dimensions}, #cells={surface.ncol*surface.nrow}), " + f"[base_cache_key={base_cache_key}]" + ) + + self._create_and_store_image_in_cache(base_cache_key, surface) + + LOGGER.debug(f"Surface published in: {timer.elapsed_s():.2f}s") + + def 
get_surface_metadata(
+        self,
+        qualified_address: Union[QualifiedSurfaceAddress, QualifiedDiffSurfaceAddress],
+    ) -> Optional[SurfaceMeta]:
+
+        if isinstance(qualified_address, QualifiedSurfaceAddress):
+            base_cache_key = _address_to_str(
+                qualified_address.provider_id, qualified_address.address
+            )
+        else:
+            base_cache_key = _diff_address_to_str(
+                qualified_address.provider_id_a,
+                qualified_address.address_a,
+                qualified_address.provider_id_b,
+                qualified_address.address_b,
+            )
+
+        meta_cache_key = "META:" + base_cache_key
+        meta: Optional[SurfaceMeta] = self._image_cache.get(meta_cache_key)
+        if not meta:
+            return None
+
+        if not isinstance(meta, SurfaceMeta):
+            LOGGER.error("Error loading SurfaceMeta from cache")
+            return None
+
+        return meta
+
+    @staticmethod
+    def encode_partial_url(
+        qualified_address: Union[QualifiedSurfaceAddress, QualifiedDiffSurfaceAddress],
+    ) -> str:
+
+        if isinstance(qualified_address, QualifiedSurfaceAddress):
+            address_str = _address_to_str(
+                qualified_address.provider_id, qualified_address.address
+            )
+        else:
+            address_str = _diff_address_to_str(
+                qualified_address.provider_id_a,
+                qualified_address.address_a,
+                qualified_address.provider_id_b,
+                qualified_address.address_b,
+            )
+
+        url_path: str = f"{_ROOT_URL_PATH}/{quote(address_str)}"
+        return url_path
+
+    def _setup_url_rule(self, app: Dash) -> None:
+        @app.server.route(_ROOT_URL_PATH + "/<full_surf_address_str>")
+        def _handle_surface_request(full_surf_address_str: str) -> flask.Response:
+            LOGGER.debug(
+                f"Handling surface_request: "
+                f"full_surf_address_str={full_surf_address_str} "
+            )
+
+            timer = PerfTimer()
+
+            img_cache_key = "IMG:" + full_surf_address_str
+            LOGGER.debug(f"Looking for image in cache (key={img_cache_key})")
+
+            cached_img_bytes = self._image_cache.get(img_cache_key)
+            if not cached_img_bytes:
+                LOGGER.error(
+                    f"Error getting image for address: {full_surf_address_str}"
+                )
+                flask.abort(404)
+
+            response = flask.send_file(
+                io.BytesIO(cached_img_bytes), mimetype="image/png"
+            )
+            LOGGER.debug(
+                f"Request handled from image cache in: {timer.elapsed_s():.2f}s"
+            )
+            return response
+
+    def _create_and_store_image_in_cache(
+        self,
+        base_cache_key: str,
+        surface: xtgeo.RegularSurface,
+    ) -> None:
+
+        timer = PerfTimer()
+
+        LOGGER.debug("Converting surface to PNG image...")
+        png_bytes: bytes = surface_to_png_bytes_optimized(surface)
+        LOGGER.debug(f"Got PNG image, size={(len(png_bytes) / (1024 * 1024)):.2f}MB")
+        et_to_image_s = timer.lap_s()
+
+        img_cache_key = "IMG:" + base_cache_key
+        meta_cache_key = "META:" + base_cache_key
+
+        self._image_cache.add(img_cache_key, png_bytes)
+
+        # For debugging rotations
+        # unrot_surf = surface.copy()
+        # unrot_surf.unrotate()
+        # unrot_surf.quickplot("/home/sigurdp/gitRoot/hk-webviz-subsurface/quickplot.png")
+
+        deckgl_bounds, deckgl_rot = _calc_map_component_bounds_and_rot(surface)
+
+        meta = SurfaceMeta(
+            x_min=surface.xmin,
+            x_max=surface.xmax,
+            y_min=surface.ymin,
+            y_max=surface.ymax,
+            val_min=surface.values.min(),
+            val_max=surface.values.max(),
+            deckgl_bounds=deckgl_bounds,
+            deckgl_rot_deg=deckgl_rot,
+        )
+        self._image_cache.add(meta_cache_key, meta)
+        et_write_cache_s = timer.lap_s()
+
+        LOGGER.debug(
+            f"Created image and wrote to cache in: {timer.elapsed_s():.2f}s ("
+            f"to_image={et_to_image_s:.2f}s, write_cache={et_write_cache_s:.2f}s), "
+            f"[base_cache_key={base_cache_key}]"
+        )
+
+
+def _address_to_str(
+    provider_id: str,
+    address: SurfaceAddress,
+) -> str:
+    if isinstance(address, StatisticalSurfaceAddress):
addr_type_str = "sta" + elif isinstance(address, SimulatedSurfaceAddress): + addr_type_str = "sim" + elif isinstance(address, ObservedSurfaceAddress): + addr_type_str = "obs" + + addr_hash = hashlib.md5( # nosec + json.dumps(asdict(address), sort_keys=True).encode() + ).hexdigest() + + return f"{provider_id}___{addr_type_str}___{address.name}___{address.attribute}___{addr_hash}" + + +def _diff_address_to_str( + provider_id_a: str, + address_a: SurfaceAddress, + provider_id_b: str, + address_b: SurfaceAddress, +) -> str: + return ( + "diff~~~" + + _address_to_str(provider_id_a, address_a) + + "~~~" + + _address_to_str(provider_id_b, address_b) + ) + + +def _calc_map_component_bounds_and_rot( + surface: xtgeo.RegularSurface, +) -> Tuple[List[float], float]: + surf_corners = surface.get_map_xycorners() + rptx = surf_corners[2][0] + rpty = surf_corners[2][1] + min_x = math.inf + max_x = -math.inf + min_y = math.inf + max_y = -math.inf + angle = -surface.rotation * math.pi / 180 + for coord in surf_corners: + xpos = coord[0] + ypos = coord[1] + x_rotated = ( + rptx + ((xpos - rptx) * math.cos(angle)) - ((ypos - rpty) * math.sin(angle)) + ) + y_rotated = ( + rpty + ((xpos - rptx) * math.sin(angle)) + ((ypos - rpty) * math.cos(angle)) + ) + min_x = min(min_x, x_rotated) + max_x = max(max_x, x_rotated) + min_y = min(min_y, y_rotated) + max_y = max(max_y, y_rotated) + + bounds = [ + min_x, + min_y, + max_x, + max_y, + ] + + return bounds, surface.rotation diff --git a/webviz_subsurface/_providers/well_provider/__init__.py b/webviz_subsurface/_providers/well_provider/__init__.py new file mode 100644 index 0000000000..0066c742ac --- /dev/null +++ b/webviz_subsurface/_providers/well_provider/__init__.py @@ -0,0 +1,3 @@ +from .well_provider import WellProvider +from .well_provider_factory import WellProviderFactory +from .well_server import WellServer diff --git a/webviz_subsurface/_providers/well_provider/_provider_impl_file.py b/webviz_subsurface/_providers/well_provider/_provider_impl_file.py new file mode 100644 index 0000000000..f1f006ebe4 --- /dev/null +++ b/webviz_subsurface/_providers/well_provider/_provider_impl_file.py @@ -0,0 +1,133 @@ +import json +import logging +from pathlib import Path +from typing import Dict, List, Optional + +import xtgeo + +from webviz_subsurface._utils.perf_timer import PerfTimer + +from .well_provider import WellPath, WellProvider + +LOGGER = logging.getLogger(__name__) + + +INV_KEY_REL_PATH = "rel_path" +INV_KEY_MD_LOGNAME = "md_logname" + + +class ProviderImplFile(WellProvider): + def __init__( + self, provider_id: str, provider_dir: Path, inventory: Dict[str, dict] + ) -> None: + self._provider_id = provider_id + self._provider_dir = provider_dir + self._inventory = inventory + + @staticmethod + def write_backing_store( + storage_dir: Path, + storage_key: str, + well_file_names: List[str], + md_logname: Optional[str], + ) -> None: + + timer = PerfTimer() + + # All data for this provider will be stored inside a sub-directory + # given by the storage key + provider_dir = storage_dir / storage_key + LOGGER.debug(f"Writing well backing store to: {provider_dir}") + provider_dir.mkdir(parents=True, exist_ok=True) + + inventory_dict: Dict[str, dict] = {} + + LOGGER.debug(f"Writing {len(well_file_names)} wells into backing store...") + + timer.lap_s() + for file_name in well_file_names: + well = xtgeo.well_from_file(wfile=file_name, mdlogname=md_logname) + + if well.mdlogname is None: + try: + well.geometrics() + except ValueError: + LOGGER.debug(f"Ignoring {well.name} 
as MD cannot be calculated") + continue + + print("well.mdlogname=", well.mdlogname) + + well_name = well.name + rel_path = f"{well_name}.rmswell" + # rel_path = f"{well_name}.hdf" + + dst_file = provider_dir / rel_path + print("dst_file=", dst_file) + well.to_file(wfile=dst_file, fformat="rmswell") + # well.to_hdf(wfile=dst_file) + + inventory_dict[well_name] = { + INV_KEY_REL_PATH: rel_path, + INV_KEY_MD_LOGNAME: well.mdlogname, + } + + et_copy_s = timer.lap_s() + + json_fn = provider_dir / "inventory.json" + with open(json_fn, "w") as file: + json.dump(inventory_dict, file) + + LOGGER.debug( + f"Wrote well backing store in: {timer.elapsed_s():.2f}s (" + f"copy={et_copy_s:.2f}s)" + ) + + @staticmethod + def from_backing_store( + storage_dir: Path, + storage_key: str, + ) -> Optional["ProviderImplFile"]: + + provider_dir = storage_dir / storage_key + json_fn = provider_dir / "inventory.json" + + try: + with open(json_fn, "r") as file: + inventory = json.load(file) + except FileNotFoundError: + return None + + return ProviderImplFile(storage_key, provider_dir, inventory) + + def provider_id(self) -> str: + return self._provider_id + + def well_names(self) -> List[str]: + return sorted(list(self._inventory.keys())) + + def get_well_path(self, well_name: str) -> WellPath: + well = self.get_well_xtgeo_obj(well_name) + df = well.dataframe + md_logname = well.mdlogname + + x_arr = df["X_UTME"].to_numpy() + y_arr = df["Y_UTMN"].to_numpy() + z_arr = df["Z_TVDSS"].to_numpy() + md_arr = df[md_logname].to_numpy() + + return WellPath(x_arr=x_arr, y_arr=y_arr, z_arr=z_arr, md_arr=md_arr) + + def get_well_xtgeo_obj(self, well_name: str) -> xtgeo.Well: + well_entry = self._inventory.get(well_name) + if not well_entry: + raise ValueError(f"Requested well name {well_name} not found") + + rel_fn = well_entry[INV_KEY_REL_PATH] + md_logname = well_entry[INV_KEY_MD_LOGNAME] + + full_file_name = self._provider_dir / rel_fn + well = xtgeo.well_from_file( + wfile=full_file_name, fformat="rmswell", mdlogname=md_logname + ) + + return well diff --git a/webviz_subsurface/_providers/well_provider/dev_experiments.py b/webviz_subsurface/_providers/well_provider/dev_experiments.py new file mode 100644 index 0000000000..9af528a145 --- /dev/null +++ b/webviz_subsurface/_providers/well_provider/dev_experiments.py @@ -0,0 +1,70 @@ +# pylint: skip-file +import logging +import time +from pathlib import Path + +from .well_provider import WellProvider +from .well_provider_factory import WellProviderFactory + + +def main() -> None: + print() + print("## Running WellProvider experiments") + print("## =================================================") + + logging.basicConfig( + level=logging.WARNING, + format="%(asctime)s %(levelname)-3s [%(name)s]: %(message)s", + ) + logging.getLogger("webviz_subsurface").setLevel(level=logging.DEBUG) + + root_storage_dir = Path("/home/sigurdp/buf/webviz_storage_dir") + + well_folder = "../hk-webviz-subsurface-testdata/01_drogon_ahm/realization-0/iter-0/share/results/wells" + well_suffix = ".rmswell" + md_logname = None + + factory = WellProviderFactory(root_storage_dir, allow_storage_writes=True) + + provider: WellProvider = factory.create_from_well_files( + well_folder=well_folder, + well_suffix=well_suffix, + md_logname=md_logname, + ) + + all_well_names = provider.well_names() + print() + print("all_well_names:") + print("------------------------") + print(*all_well_names, sep="\n") + + start_tim = time.perf_counter() + + for name in all_well_names: + # w = 
provider.get_well_xtgeo_obj(name) + wp = provider.get_well_path(name) + + elapsed_time_ms = int(1000 * (time.perf_counter() - start_tim)) + print(f"## get all wells took: {elapsed_time_ms}ms") + + well_name = "55_33-A-4" + + w = provider.get_well_xtgeo_obj(well_name) + print(w.describe()) + print("w.mdlogname=", w.mdlogname) + + wp = provider.get_well_path(well_name) + # print(wp) + + # comparewell = xtgeo.well_from_file( + # wfile=Path(well_folder) / "55_33-A-4.rmswell", mdlogname=md_logname + # ) + # print(comparewell.describe()) + # print("comparewell.mdlogname=", comparewell.mdlogname) + + +# Running: +# python -m webviz_subsurface._providers.well_provider.dev_experiments +# ------------------------------------------------------------------------- +if __name__ == "__main__": + main() diff --git a/webviz_subsurface/_providers/well_provider/well_provider.py b/webviz_subsurface/_providers/well_provider/well_provider.py new file mode 100644 index 0000000000..cbbd13660f --- /dev/null +++ b/webviz_subsurface/_providers/well_provider/well_provider.py @@ -0,0 +1,36 @@ +import abc +from dataclasses import dataclass +from typing import List + +import numpy as np +import xtgeo + + +@dataclass(frozen=True) +class WellPath: + x_arr: np.ndarray + y_arr: np.ndarray + z_arr: np.ndarray + md_arr: np.ndarray + + +# Class provides data for wells +class WellProvider(abc.ABC): + @abc.abstractmethod + def provider_id(self) -> str: + """Returns string ID of the provider.""" + ... + + @abc.abstractmethod + def well_names(self) -> List[str]: + """Returns list of all available well names.""" + ... + + @abc.abstractmethod + def get_well_path(self, well_name: str) -> WellPath: + """Returns the coordinates for the well path along with MD for the well.""" + ... + + @abc.abstractmethod + def get_well_xtgeo_obj(self, well_name: str) -> xtgeo.Well: + ... 
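For illustration (not part of the diff): the plain numpy arrays in WellPath make simple per-well calculations cheap, without round-tripping through xtgeo. A sketch assuming a `provider` created via the WellProviderFactory in the next file, with a well name from the Drogon test data used in dev_experiments.py.

    import numpy as np

    well_path = provider.get_well_path("55_33-A-4")

    # Total measured depth is the last entry of the MD log; the TVD span
    # can be read directly off the z array.
    total_md = float(well_path.md_arr[-1])
    tvd_span = float(np.nanmax(well_path.z_arr) - np.nanmin(well_path.z_arr))

    # (x, y, z) triplets, e.g. for building GeoJSON geometries as in well_server.py
    coords = np.column_stack([well_path.x_arr, well_path.y_arr, well_path.z_arr])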
diff --git a/webviz_subsurface/_providers/well_provider/well_provider_factory.py b/webviz_subsurface/_providers/well_provider/well_provider_factory.py new file mode 100644 index 0000000000..c56082426c --- /dev/null +++ b/webviz_subsurface/_providers/well_provider/well_provider_factory.py @@ -0,0 +1,96 @@ +import hashlib +import logging +import os +from pathlib import Path +from typing import Optional + +from webviz_config.webviz_factory import WebvizFactory +from webviz_config.webviz_factory_registry import WEBVIZ_FACTORY_REGISTRY +from webviz_config.webviz_instance_info import WebvizRunMode + +from webviz_subsurface._utils.perf_timer import PerfTimer + +from ._provider_impl_file import ProviderImplFile +from .well_provider import WellProvider + +LOGGER = logging.getLogger(__name__) + + +class WellProviderFactory(WebvizFactory): + def __init__(self, root_storage_folder: Path, allow_storage_writes: bool) -> None: + self._storage_dir = Path(root_storage_folder) / __name__ + self._allow_storage_writes = allow_storage_writes + + LOGGER.info(f"WellProviderFactory init: storage_dir={self._storage_dir}") + + if self._allow_storage_writes: + os.makedirs(self._storage_dir, exist_ok=True) + + @staticmethod + def instance() -> "WellProviderFactory": + """Static method to access the singleton instance of the factory.""" + + factory = WEBVIZ_FACTORY_REGISTRY.get_factory(WellProviderFactory) + if not factory: + app_instance_info = WEBVIZ_FACTORY_REGISTRY.app_instance_info + storage_folder = app_instance_info.storage_folder + allow_writes = app_instance_info.run_mode != WebvizRunMode.PORTABLE + + factory = WellProviderFactory(storage_folder, allow_writes) + + # Store the factory object in the global factory registry + WEBVIZ_FACTORY_REGISTRY.set_factory(WellProviderFactory, factory) + + return factory + + def create_from_well_files( + self, well_folder: str, well_suffix: str, md_logname: Optional[str] + ) -> WellProvider: + timer = PerfTimer() + + file_pattern = str(Path(well_folder) / f"*{well_suffix}") + storage_key = f"from_files__{_make_hash_string(f'{file_pattern}_{md_logname}')}" + + provider = ProviderImplFile.from_backing_store(self._storage_dir, storage_key) + if provider: + LOGGER.info( + f"Loaded well provider from backing store in {timer.elapsed_s():.2f}s (" + f"file_pattern={file_pattern})" + ) + return provider + + # We can only import data from data source if storage writes are allowed + if not self._allow_storage_writes: + raise ValueError(f"Failed to load well provider for {file_pattern}") + + LOGGER.info(f"Importing/writing well data for: {file_pattern}") + + timer.lap_s() + src_file_names = sorted( + [str(filename) for filename in Path(well_folder).glob(f"*{well_suffix}")] + ) + et_discover_s = timer.lap_s() + + ProviderImplFile.write_backing_store( + self._storage_dir, + storage_key, + well_file_names=src_file_names, + md_logname=md_logname, + ) + et_write_s = timer.lap_s() + + provider = ProviderImplFile.from_backing_store(self._storage_dir, storage_key) + if not provider: + raise ValueError(f"Failed to load/create well provider for {file_pattern}") + + LOGGER.info( + f"Saved well provider to backing store in {timer.elapsed_s():.2f}s (" + f"discover={et_discover_s:.2f}s, write={et_write_s:.2f}s, file_pattern={file_pattern})" + ) + + return provider + + +def _make_hash_string(string_to_hash: str) -> str: + # There is no security risk here and chances of collision should be very slim + return hashlib.md5(string_to_hash.encode()).hexdigest() # nosec diff --git 
a/webviz_subsurface/_providers/well_provider/well_server.py b/webviz_subsurface/_providers/well_provider/well_server.py
new file mode 100644
index 0000000000..7063855cb9
--- /dev/null
+++ b/webviz_subsurface/_providers/well_provider/well_server.py
@@ -0,0 +1,115 @@
+import logging
+from typing import Dict, List, Optional
+from urllib.parse import quote
+
+import flask
+import geojson
+from dash import Dash
+
+from webviz_subsurface._providers.well_provider.well_provider import WellProvider
+from webviz_subsurface._utils.perf_timer import PerfTimer
+
+LOGGER = logging.getLogger(__name__)
+
+_ROOT_URL_PATH = "/WellServer"
+
+_WELL_SERVER_INSTANCE: Optional["WellServer"] = None
+
+
+class WellServer:
+    def __init__(self, app: Dash) -> None:
+        self._setup_url_rule(app)
+        self._id_to_provider_dict: Dict[str, WellProvider] = {}
+
+    @staticmethod
+    def instance(app: Dash) -> "WellServer":
+        # pylint: disable=global-statement
+        global _WELL_SERVER_INSTANCE
+        if not _WELL_SERVER_INSTANCE:
+            LOGGER.debug("Initializing WellServer instance")
+            _WELL_SERVER_INSTANCE = WellServer(app)
+
+        return _WELL_SERVER_INSTANCE
+
+    def add_provider(self, provider: WellProvider) -> None:
+
+        provider_id = provider.provider_id()
+        LOGGER.debug(f"Adding provider with id={provider_id}")
+
+        existing_provider = self._id_to_provider_dict.get(provider_id)
+        if existing_provider:
+            # Issue a warning if there already is a provider registered with the same
+            # id AND if the actual provider instance is different.
+            # This should not be a problem, but will happen until the provider factory
+            # gets caching.
+            if existing_provider is not provider:
+                LOGGER.warning(
+                    f"Provider with id={provider_id} ignored, the id is already present"
+                )
+                return
+
+        self._id_to_provider_dict[provider_id] = provider
+
+    def encode_partial_url(
+        self,
+        provider_id: str,
+        well_names: List[str],
+    ) -> str:
+
+        if provider_id not in self._id_to_provider_dict:
+            raise ValueError("Could not find provider")
+
+        sorted_well_names_str = "~".join(sorted(well_names))
+
+        url_path: str = (
+            f"{_ROOT_URL_PATH}/{quote(provider_id)}/{quote(sorted_well_names_str)}"
+        )
+
+        return url_path
+
+    def _setup_url_rule(self, app: Dash) -> None:
+        @app.server.route(_ROOT_URL_PATH + "/<provider_id>/<well_names_str>")
+        def _handle_wells_request(
+            provider_id: str, well_names_str: str
+        ) -> flask.Response:
+            LOGGER.debug(
+                f"Handling well request: "
+                f"provider_id={provider_id} "
+                f"well_names_str={well_names_str} "
+            )
+
+            timer = PerfTimer()
+
+            try:
+                provider = self._id_to_provider_dict[provider_id]
+                well_names_arr = well_names_str.split("~")
+            # pylint: disable=bare-except
+            except:
+                LOGGER.error("Error decoding wells address")
+                flask.abort(404)
+
+            validate_geometry = True
+            feature_arr = []
+            for wname in well_names_arr:
+                well_path = provider.get_well_path(wname)
+
+                coords = list(zip(well_path.x_arr, well_path.y_arr, well_path.z_arr))
+                # coords = coords[0::20]
+                point = geojson.Point(
+                    coordinates=[coords[0][0], coords[0][1]], validate=validate_geometry
+                )
+
+                geocoll = geojson.GeometryCollection(geometries=[point])
+
+                feature = geojson.Feature(
+                    id=wname, geometry=geocoll, properties={"name": wname}
+                )
+                feature_arr.append(feature)
+
+            featurecoll = geojson.FeatureCollection(features=feature_arr)
+            response = flask.Response(
+                geojson.dumps(featurecoll), mimetype="application/geo+json"
+            )
+
+            LOGGER.debug(f"Request handled in: {timer.elapsed_s():.2f}s")
+            return response
diff --git a/webviz_subsurface/_utils/webvizstore_functions.py
b/webviz_subsurface/_utils/webvizstore_functions.py index 00207a2f9a..c7e2f341f4 100644 --- a/webviz_subsurface/_utils/webvizstore_functions.py +++ b/webviz_subsurface/_utils/webvizstore_functions.py @@ -2,6 +2,7 @@ import json from pathlib import Path +import pandas as pd from webviz_config.webviz_store import webvizstore @@ -10,6 +11,11 @@ def get_path(path: str) -> Path: return Path(path) +@webvizstore +def read_csv(csv_file: Path) -> pd.DataFrame: + return pd.read_csv(csv_file) + + @webvizstore def find_files(folder: Path, suffix: str) -> io.BytesIO: return io.BytesIO( diff --git a/webviz_subsurface/plugins/__init__.py b/webviz_subsurface/plugins/__init__.py index d59f061c34..7912a08304 100644 --- a/webviz_subsurface/plugins/__init__.py +++ b/webviz_subsurface/plugins/__init__.py @@ -29,6 +29,7 @@ from ._inplace_volumes import InplaceVolumes from ._inplace_volumes_onebyone import InplaceVolumesOneByOne from ._line_plotter_fmu.line_plotter_fmu import LinePlotterFMU +from ._map_viewer_fmu import MapViewerFMU from ._morris_plot import MorrisPlot from ._parameter_analysis import ParameterAnalysis from ._parameter_correlation import ParameterCorrelation diff --git a/webviz_subsurface/plugins/_map_viewer_fmu/__init__.py b/webviz_subsurface/plugins/_map_viewer_fmu/__init__.py new file mode 100644 index 0000000000..5207b4df22 --- /dev/null +++ b/webviz_subsurface/plugins/_map_viewer_fmu/__init__.py @@ -0,0 +1 @@ +from .map_viewer_fmu import MapViewerFMU diff --git a/webviz_subsurface/plugins/_map_viewer_fmu/_tmp_well_pick_provider.py b/webviz_subsurface/plugins/_map_viewer_fmu/_tmp_well_pick_provider.py new file mode 100644 index 0000000000..0acc2da2d2 --- /dev/null +++ b/webviz_subsurface/plugins/_map_viewer_fmu/_tmp_well_pick_provider.py @@ -0,0 +1,80 @@ +from enum import Enum +from typing import Dict, List, Optional + +import geojson +import pandas as pd + + +class WellPickTableColumns(str, Enum): + X_UTME = "X_UTME" + Y_UTMN = "Y_UTMN" + Z_TVDSS = "Z_TVDSS" + MD = "MD" + WELL = "WELL" + HORIZON = "HORIZON" + + +class WellPickProvider: + def __init__( + self, + dframe: pd.DataFrame, + map_surface_names_to_well_pick_names: Optional[Dict[str, str]] = None, + ): + self.dframe = dframe.copy() + self.surface_name_mapper = ( + map_surface_names_to_well_pick_names + if map_surface_names_to_well_pick_names + else {} + ) + self._validate() + + def _validate(self) -> None: + for column in WellPickTableColumns: + if column not in self.dframe.columns: + raise KeyError(f"Well picks table is missing required column: {column}") + + def well_names(self) -> List[str]: + return list(self.dframe[WellPickTableColumns.WELL].unique()) + + def get_geojson( + self, + well_names: List[str], + surface_name: str, + attribute: str = WellPickTableColumns.WELL, + ) -> geojson.FeatureCollection: + dframe = self.dframe.loc[ + (self.dframe[WellPickTableColumns.WELL].isin(well_names)) + & ( + self.dframe[WellPickTableColumns.HORIZON] + == self.surface_name_mapper.get(surface_name, surface_name) + ) + ] + if dframe.empty: + return {} + validate_geometry = True + feature_arr = [] + for _, row in dframe.iterrows(): + + coords = [ + row[WellPickTableColumns.X_UTME], + row[WellPickTableColumns.Y_UTMN], + ] + + # coords = coords[0::20] + point = geojson.Point(coordinates=coords, validate=validate_geometry) + + geocoll = geojson.GeometryCollection(geometries=[point]) + + properties = { + "name": row[WellPickTableColumns.WELL], + "attribute": str(row[attribute]), + } + + feature = geojson.Feature( + 
id=row[WellPickTableColumns.WELL], + geometry=geocoll, + properties=properties, + ) + feature_arr.append(feature) + featurecoll = geojson.FeatureCollection(features=feature_arr) + return featurecoll diff --git a/webviz_subsurface/plugins/_map_viewer_fmu/_types.py b/webviz_subsurface/plugins/_map_viewer_fmu/_types.py new file mode 100644 index 0000000000..a7bcf7583b --- /dev/null +++ b/webviz_subsurface/plugins/_map_viewer_fmu/_types.py @@ -0,0 +1,40 @@ +# pylint: skip-file +from dataclasses import dataclass +from enum import Enum +from typing import List, Optional + + +# To be implemented +@dataclass +class ViewSetting: + ensemble: str + attribute: str + name: str + date: Optional[str] + mode: str + realizations: List[int] + wells: List[str] + surface_range: List[float] + colormap: str + color_range: List[float] + colormap_keep_range: bool = False + surf_type: Optional[str] = None + + def __post_init__(self) -> None: + self.ensemble = self.ensemble[0] + self.attribute = self.attribute[0] + self.name = self.name[0] + self.date = self.date[0] if self.date else None + self.mode = SurfaceMode(self.mode) + self.colormap_keep_range = True if self.colormap_keep_range else False + + +class SurfaceMode(str, Enum): + MEAN = "Mean" + REALIZATION = "Single realization" + OBSERVED = "Observed" + STDDEV = "StdDev" + MINIMUM = "Minimum" + MAXIMUM = "Maximum" + P10 = "P10" + P90 = "P90" diff --git a/webviz_subsurface/plugins/_map_viewer_fmu/callbacks.py b/webviz_subsurface/plugins/_map_viewer_fmu/callbacks.py new file mode 100644 index 0000000000..0037e642ea --- /dev/null +++ b/webviz_subsurface/plugins/_map_viewer_fmu/callbacks.py @@ -0,0 +1,845 @@ +import json +import math +from copy import deepcopy +from typing import Any, Callable, Dict, List, Optional, Tuple, Union + +import numpy as np +from dash import ALL, MATCH, Input, Output, State, callback, callback_context, no_update +from dash.exceptions import PreventUpdate +from webviz_config.utils._dash_component_utils import calculate_slider_step + +from webviz_subsurface._components.deckgl_map.deckgl_map_layers_model import ( + DeckGLMapLayersModel, +) +from webviz_subsurface._providers import ( + EnsembleFaultPolygonsProvider, + EnsembleSurfaceProvider, + FaultPolygonsServer, + ObservedSurfaceAddress, + QualifiedDiffSurfaceAddress, + QualifiedSurfaceAddress, + SimulatedFaultPolygonsAddress, + SimulatedSurfaceAddress, + StatisticalSurfaceAddress, + SurfaceAddress, + SurfaceServer, +) + +from ._tmp_well_pick_provider import WellPickProvider +from ._types import SurfaceMode +from .layout import ( + DefaultSettings, + LayoutElements, + LayoutLabels, + SideBySideSelectorFlex, + Tabs, + update_map_layers, +) + + +# pylint: disable=too-many-locals,too-many-statements +def plugin_callbacks( + get_uuid: Callable, + ensemble_surface_providers: Dict[str, EnsembleSurfaceProvider], + surface_server: SurfaceServer, + ensemble_fault_polygons_providers: Dict[str, EnsembleFaultPolygonsProvider], + fault_polygons_server: FaultPolygonsServer, + map_surface_names_to_fault_polygons: Dict[str, str], + well_picks_provider: Optional[WellPickProvider], + fault_polygon_attribute: Optional[str], +) -> None: + def selections(tab: str, colorselector: bool = False) -> Dict[str, str]: + uuid = get_uuid( + LayoutElements.SELECTIONS + if not colorselector + else LayoutElements.COLORSELECTIONS + ) + return {"view": ALL, "id": uuid, "tab": tab, "selector": ALL} + + def selector_wrapper(tab: str, colorselector: bool = False) -> Dict[str, str]: + uuid = get_uuid( + LayoutElements.WRAPPER 
if not colorselector else LayoutElements.COLORWRAPPER + ) + return {"id": uuid, "tab": tab, "selector": ALL} + + def links(tab: str, colorselector: bool = False) -> Dict[str, str]: + uuid = get_uuid( + LayoutElements.LINK if not colorselector else LayoutElements.COLORLINK + ) + return {"id": uuid, "tab": tab, "selector": ALL} + + @callback( + Output(get_uuid(LayoutElements.OPTIONS_DIALOG), "open"), + Input({"id": get_uuid("Button"), "tab": ALL}, "n_clicks"), + State(get_uuid(LayoutElements.OPTIONS_DIALOG), "open"), + ) + def open_close_options_dialog(_n_click: list, is_open: bool) -> bool: + if any(click is not None for click in _n_click): + return not is_open + raise PreventUpdate + + # 2nd callback + @callback( + Output({"id": get_uuid(LayoutElements.LINKED_VIEW_DATA), "tab": MATCH}, "data"), + Output(selector_wrapper(MATCH), "children"), + Input(selections(MATCH), "value"), + Input({"id": get_uuid(LayoutElements.VIEWS), "tab": MATCH}, "value"), + Input({"id": get_uuid(LayoutElements.MULTI), "tab": MATCH}, "value"), + Input(links(MATCH), "value"), + Input(get_uuid(LayoutElements.REALIZATIONS_FILTER), "value"), + Input(get_uuid("tabs"), "value"), + State(selections(MATCH), "id"), + State(selector_wrapper(MATCH), "id"), + ) + def _update_components_and_selected_data( + mapselector_values: List[Dict[str, Any]], + number_of_views: int, + multi_selector: str, + selectorlinks: List[List[str]], + filtered_reals: List[int], + tab_name: str, + selector_ids: List[dict], + wrapper_ids: List[dict], + ) -> Tuple[List[dict], list]: + """Reads stored raw selections, stores valid selections as a dcc.Store + and updates visible and valid selections in layout""" + + # Prevent update if the pattern matched components does not match the current tab + datatab = wrapper_ids[0]["tab"] + if datatab != tab_name or number_of_views is None: + raise PreventUpdate + + ctx = callback_context.triggered[0]["prop_id"] + + selector_values = [] + for idx in range(number_of_views): + view_selections = combine_selector_values_and_name( + mapselector_values, selector_ids, view=idx + ) + view_selections["multi"] = multi_selector + if tab_name == Tabs.STATS: + view_selections["mode"] = DefaultSettings.VIEW_LAYOUT_STATISTICS_TAB[ + idx + ] + selector_values.append(view_selections) + + linked_selector_names = [l[0] for l in selectorlinks if l] + + component_properties = _update_selector_component_properties_from_provider( + selector_values=selector_values, + linked_selectors=linked_selector_names, + multi=multi_selector, + multi_in_ctx=get_uuid(LayoutElements.MULTI) in ctx, + filtered_realizations=filtered_reals, + ) + # retrive the updated selector values from the component properties + selector_values = [ + {key: val["value"] for key, val in data.items()} + for idx, data in enumerate(component_properties) + ] + + if multi_selector is not None: + selector_values = update_selections_with_multi( + selector_values, multi_selector + ) + selector_values = remove_data_if_not_valid(selector_values) + if tab_name == Tabs.DIFF and len(selector_values) == 2: + selector_values = add_diff_surface_to_values(selector_values) + + return ( + selector_values, + [ + SideBySideSelectorFlex( + tab_name, + get_uuid, + selector=id_val["selector"], + view_data=[ + data[id_val["selector"]] for data in component_properties + ], + link=id_val["selector"] in linked_selector_names, + dropdown=id_val["selector"] in ["ensemble", "mode"], + ) + for id_val in wrapper_ids + ], + ) + + # 3rd callback + @callback( + Output( + {"id": 
get_uuid(LayoutElements.VERIFIED_VIEW_DATA), "tab": MATCH}, "data" + ), + Output(selector_wrapper(MATCH, colorselector=True), "children"), + Input({"id": get_uuid(LayoutElements.LINKED_VIEW_DATA), "tab": MATCH}, "data"), + Input(selections(MATCH, colorselector=True), "value"), + Input( + {"view": ALL, "id": get_uuid(LayoutElements.RANGE_RESET), "tab": MATCH}, + "n_clicks", + ), + Input(links(MATCH, colorselector=True), "value"), + State({"id": get_uuid(LayoutElements.MULTI), "tab": MATCH}, "value"), + State(selector_wrapper(MATCH, colorselector=True), "id"), + State(get_uuid(LayoutElements.STORED_COLOR_SETTINGS), "data"), + State(get_uuid("tabs"), "value"), + State(selections(MATCH, colorselector=True), "id"), + ) + def _update_color_components_and_value( + selector_values: List[dict], + colorvalues: List[Dict[str, Any]], + _n_click: int, + colorlinks: List[List[str]], + multi: str, + color_wrapper_ids: List[dict], + stored_color_settings: Dict, + tab: str, + colorval_ids: List[dict], + ) -> Tuple[List[dict], list]: + """Adds color settings to validated stored selections, updates color component in layout + and writes validated selectors with colors to a dcc.Store""" + ctx = callback_context.triggered[0]["prop_id"] + + if selector_values is None: + raise PreventUpdate + + reset_color_index = ( + json.loads(ctx.split(".")[0])["view"] + if get_uuid(LayoutElements.RANGE_RESET) in ctx + else None + ) + color_update_index = ( + json.loads(ctx.split(".")[0]).get("view") + if LayoutElements.COLORSELECTIONS in ctx + else None + ) + + # if a selector is set as multi in the "Maps per Selector" tab + # color_range should be linked and the min and max surface ranges + # from all the views should be used as range + use_range_from_all = tab == Tabs.SPLIT and multi != "attribute" + + links = [l[0] for l in colorlinks if l] + links = links if not use_range_from_all else links + ["color_range"] + + # update selector_values with current values from the color components + for idx, data in enumerate(selector_values): + data.update( + combine_selector_values_and_name(colorvalues, colorval_ids, view=idx) + ) + color_component_properties = _update_color_component_properties( + values=selector_values, + links=links if not use_range_from_all else links + ["color_range"], + stored_color_settings=stored_color_settings, + reset_color_index=reset_color_index, + color_update_index=color_update_index, + ) + + if use_range_from_all: + ranges = [data["surface_range"] for data in selector_values] + min_max_for_all = [min(r[0] for r in ranges), max(r[1] for r in ranges)] + for data in color_component_properties: + data["color_range"]["range"] = min_max_for_all + if reset_color_index is not None: + data["color_range"]["value"] = min_max_for_all + + for idx, data in enumerate(color_component_properties): + for key, val in data.items(): + selector_values[idx][key] = val["value"] + + return ( + selector_values, + [ + SideBySideSelectorFlex( + tab, + get_uuid, + selector=id_val["selector"], + view_data=[ + data[id_val["selector"]] for data in color_component_properties + ], + link=id_val["selector"] in links, + dropdown=id_val["selector"] in ["colormap"], + ) + for id_val in color_wrapper_ids + ], + ) + + # 4th callback + @callback( + Output(get_uuid(LayoutElements.STORED_COLOR_SETTINGS), "data"), + Input({"id": get_uuid(LayoutElements.VERIFIED_VIEW_DATA), "tab": ALL}, "data"), + State(get_uuid("tabs"), "value"), + State(get_uuid(LayoutElements.STORED_COLOR_SETTINGS), "data"), + State({"id": 
get_uuid(LayoutElements.VERIFIED_VIEW_DATA), "tab": ALL}, "id"), + ) + def _update_color_store( + selector_values: List[List[dict]], + tab: str, + stored_color_settings: Dict[str, dict], + data_id: List[dict], + ) -> Dict[str, dict]: + """Update the color store with chosen color range and colormap for surfaces""" + if selector_values is None: + raise PreventUpdate + index = [x["tab"] for x in data_id].index(tab) + + stored_color_settings = ( + stored_color_settings if stored_color_settings is not None else {} + ) + for data in selector_values[index]: + surfaceid = ( + get_surface_id_for_diff_surf(selector_values[index]) + if data.get("surf_type") == "diff" + else get_surface_id_from_data(data) + ) + stored_color_settings[surfaceid] = { + "colormap": data["colormap"], + "color_range": data["color_range"], + } + + return stored_color_settings + + # 5th callback + @callback( + Output({"id": get_uuid(LayoutElements.DECKGLMAP), "tab": MATCH}, "layers"), + Output({"id": get_uuid(LayoutElements.DECKGLMAP), "tab": MATCH}, "bounds"), + Output({"id": get_uuid(LayoutElements.DECKGLMAP), "tab": MATCH}, "views"), + Input( + {"id": get_uuid(LayoutElements.VERIFIED_VIEW_DATA), "tab": MATCH}, "data" + ), + Input(get_uuid(LayoutElements.WELLS), "value"), + Input(get_uuid(LayoutElements.VIEW_COLUMNS), "value"), + Input(get_uuid(LayoutElements.OPTIONS), "value"), + State(get_uuid("tabs"), "value"), + State({"id": get_uuid(LayoutElements.MULTI), "tab": MATCH}, "value"), + ) + def _update_map( + surface_elements: List[dict], + selected_wells: List[str], + view_columns: Optional[int], + options: List[str], + tab_name: str, + multi: str, + ) -> tuple: + """Updates the map component with the stored, validated selections""" + + # Prevent update if the pattern matched components does not match the current tab + state_list = callback_context.states_list + if state_list[1]["id"]["tab"] != tab_name or surface_elements is None: + raise PreventUpdate + + if tab_name == Tabs.DIFF: + view_columns = 3 if view_columns is None else view_columns + + layers = update_map_layers( + views=len(surface_elements), + include_well_layer=well_picks_provider is not None, + visible_well_layer=LayoutLabels.SHOW_WELLS in options, + visible_fault_polygons_layer=LayoutLabels.SHOW_FAULTPOLYGONS in options, + visible_hillshading_layer=LayoutLabels.SHOW_HILLSHADING in options, + ) + layer_model = DeckGLMapLayersModel(layers) + + for idx, data in enumerate(surface_elements): + diff_surf = data.get("surf_type") == "diff" + surf_meta, img_url = ( + get_surface_metadata_and_image(data) + if not diff_surf + else get_surface_metadata_and_image_for_diff_surface(surface_elements) + ) + viewport_bounds = [ + surf_meta.x_min, + surf_meta.y_min, + surf_meta.x_max, + surf_meta.y_max, + ] + + # Map3DLayer currently not implemented. 
Will replace + # ColormapLayer and HillshadingLayer + # layer_data = { + # "mesh": img_url, + # "propertyTexture": img_url, + # "bounds": surf_meta.deckgl_bounds, + # "rotDeg": surf_meta.deckgl_rot_deg, + # "meshValueRange": [surf_meta.val_min, surf_meta.val_max], + # "propertyValueRange": [surf_meta.val_min, surf_meta.val_max], + # "colorMapName": data["colormap"], + # "colorMapRange": data["color_range"], + # } + + # layer_model.update_layer_by_id( + # layer_id=f"{LayoutElements.MAP3D_LAYER}-{idx}", + # layer_data=layer_data, + # ) + layer_data = { + "image": img_url, + "bounds": surf_meta.deckgl_bounds, + "rotDeg": surf_meta.deckgl_rot_deg, + "valueRange": [surf_meta.val_min, surf_meta.val_max], + } + layer_model.update_layer_by_id( + layer_id=f"{LayoutElements.COLORMAP_LAYER}-{idx}", + layer_data=layer_data, + ) + + layer_model.update_layer_by_id( + layer_id=f"{LayoutElements.HILLSHADING_LAYER}-{idx}", + layer_data=layer_data, + ) + layer_model.update_layer_by_id( + layer_id=f"{LayoutElements.COLORMAP_LAYER}-{idx}", + layer_data={ + "colorMapName": data["colormap"], + "colorMapRange": data["color_range"], + }, + ) + if ( + LayoutLabels.SHOW_FAULTPOLYGONS in options + and fault_polygon_attribute is not None + ): + # if diff surface use polygons from first view + data_for_faultpolygons = data if not diff_surf else surface_elements[0] + fault_polygons_provider = ensemble_fault_polygons_providers[ + data_for_faultpolygons["ensemble"][0] + ] + horizon_name = data_for_faultpolygons["name"][0] + fault_polygons_address = SimulatedFaultPolygonsAddress( + attribute=fault_polygon_attribute, + name=map_surface_names_to_fault_polygons.get( + horizon_name, horizon_name + ), + realization=int(data_for_faultpolygons["realizations"][0]), + ) + layer_model.update_layer_by_id( + layer_id=f"{LayoutElements.FAULTPOLYGONS_LAYER}-{idx}", + layer_data={ + "data": fault_polygons_server.encode_partial_url( + provider_id=fault_polygons_provider.provider_id(), + fault_polygons_address=fault_polygons_address, + ), + }, + ) + if LayoutLabels.SHOW_WELLS in options and well_picks_provider is not None: + layer_model.update_layer_by_id( + layer_id=f"{LayoutElements.WELLS_LAYER}-{idx}", + layer_data={ + "data": well_picks_provider.get_geojson( + selected_wells, data["name"][0] + ) + }, + ) + return ( + layer_model.layers, + viewport_bounds if surface_elements else no_update, + { + "layout": view_layout(len(surface_elements), view_columns), + "showLabel": True, + "viewports": [ + { + "id": f"{view}_view", + "show3D": False, + "layerIds": [ + # f"{LayoutElements.MAP3D_LAYER}-{view}", + f"{LayoutElements.COLORMAP_LAYER}-{view}", + f"{LayoutElements.HILLSHADING_LAYER}-{view}", + f"{LayoutElements.FAULTPOLYGONS_LAYER}-{view}", + f"{LayoutElements.WELLS_LAYER}-{view}", + ], + "name": make_viewport_label( + surface_elements[view], tab_name, multi + ), + } + for view in range(len(surface_elements)) + ], + }, + ) + + def make_viewport_label(data: dict, tab: str, multi: Optional[str]) -> str: + """Return text-label for each viewport based on which tab is selected""" + # For the difference view + if tab == Tabs.DIFF and data.get("surf_type") == "diff": + return "Difference Map (View1 - View2)" + + # For the statistics view 'mode' is used as label + if tab == Tabs.STATS: + if data["mode"] == SurfaceMode.REALIZATION: + return f"REAL {data['realizations'][0]}" + return data["mode"] + + # For the "map per selector" the chosen multi selector is used as label + if tab == Tabs.SPLIT: + if multi == "realizations": + return f"REAL 
{data['realizations'][0]}"
+            # "mode" is stored as a scalar, the other selectors as lists
+            return data[multi] if multi == "mode" else data[multi][0]
+
+        return general_label(data)
+
+    def general_label(data: dict) -> str:
+        """Create a general map label with all available information"""
+        surfaceid = [data["ensemble"][0], data["attribute"][0], data["name"][0]]
+        if data["date"]:
+            surfaceid.append(data["date"][0])
+        if data["mode"] != SurfaceMode.REALIZATION:
+            surfaceid.append(data["mode"])
+        else:
+            surfaceid.append(f"REAL {data['realizations'][0]}")
+        return " ".join(surfaceid)
+
+    # pylint: disable=too-many-branches
+    def _update_selector_component_properties_from_provider(
+        selector_values: List[dict],
+        linked_selectors: List[str],
+        multi: str,
+        multi_in_ctx: bool,
+        filtered_realizations: List[int],
+    ) -> List[Dict[str, dict]]:
+        """Return updated options and values for the different selector components using
+        the provider. If the currently selected value for a selector is among the options,
+        it is kept as the new value"""
+        view_data = []
+        for idx, data in enumerate(selector_values):
+
+            if not ("ensemble" in linked_selectors and idx > 0):
+                ensembles = list(ensemble_surface_providers.keys())
+                ensemble = data.get("ensemble", [])
+                ensemble = [ensemble] if isinstance(ensemble, str) else ensemble
+                if not ensemble or multi_in_ctx:
+                    ensemble = ensembles if multi == "ensemble" else ensembles[:1]
+
+            if not ("attribute" in linked_selectors and idx > 0):
+                attributes = []
+                for ens in ensemble:
+                    provider = ensemble_surface_providers[ens]
+                    attributes.extend(
+                        [x for x in provider.attributes() if x not in attributes]
+                    )
+                    # only show attributes with date when multi is set to date
+                    if multi == "date":
+                        attributes = [
+                            x for x in attributes if attribute_has_date(x, provider)
+                        ]
+
+                attribute = [x for x in data.get("attribute", []) if x in attributes]
+                if not attribute or multi_in_ctx:
+                    attribute = attributes if multi == "attribute" else attributes[:1]
+
+            if not ("name" in linked_selectors and idx > 0):
+                names = []
+                for ens in ensemble:
+                    provider = ensemble_surface_providers[ens]
+                    for attr in attribute:
+                        attr_names = provider.surface_names_for_attribute(attr)
+                        names.extend([x for x in attr_names if x not in names])
+
+                name = [x for x in data.get("name", []) if x in names]
+                if not name or multi_in_ctx:
+                    name = names if multi == "name" else names[:1]
+
+            if not ("date" in linked_selectors and idx > 0):
+                dates = []
+                for ens in ensemble:
+                    provider = ensemble_surface_providers[ens]
+                    for attr in attribute:
+                        attr_dates = provider.surface_dates_for_attribute(attr)
+
+                        if attr_dates is not None:
+                            dates.extend([x for x in attr_dates if x not in dates])
+
+                interval_dates = [x for x in dates if "_" in x]
+                dates = [x for x in dates if x not in interval_dates] + interval_dates
+
+                date = [x for x in data.get("date", []) if x in dates]
+                if not date or multi_in_ctx:
+                    date = dates if multi == "date" else dates[:1]
+
+            if not ("mode" in linked_selectors and idx > 0):
+                modes = list(SurfaceMode)
+                mode = data.get("mode", SurfaceMode.REALIZATION)
+
+            if not ("realizations" in linked_selectors and idx > 0):
+                if mode == SurfaceMode.REALIZATION:
+                    real = data.get("realizations", [])
+                    if not real or real[0] not in filtered_realizations:
+                        real = filtered_realizations[:1]
+                else:
+                    real = filtered_realizations
+
+            view_data.append(
+                {
+                    "ensemble": {
+                        "value": ensemble,
+                        "options": ensembles,
+                        "multi": multi == "ensemble",
+                    },
+                    "attribute": {
+                        "value": attribute,
+                        "options": attributes,
+                        "multi": multi == "attribute",
+                    },
+                    "name": 
{"value": name, "options": names, "multi": multi == "name"}, + "date": {"value": date, "options": dates, "multi": multi == "date"}, + "mode": {"value": mode, "options": modes}, + "realizations": { + "value": real, + "disabled": mode != SurfaceMode.REALIZATION, + "options": filtered_realizations, + "multi": mode != SurfaceMode.REALIZATION + or multi == "realizations", + }, + } + ) + + return view_data + + def _update_color_component_properties( + values: List[dict], + links: List[str], + stored_color_settings: dict, + reset_color_index: Optional[int], + color_update_index: Optional[int], + ) -> List[dict]: + """Return updated options and values for the different color selector components. + If previous color settings are found it will be used, or set value to min and max + surface range unless the user has updated the component through interaction.""" + stored_color_settings = ( + stored_color_settings if stored_color_settings is not None else {} + ) + colormaps = DefaultSettings.COLORMAP_OPTIONS + + surfids: List[str] = [] + color_data: List[dict] = [] + for idx, data in enumerate(values): + surfaceid = ( + get_surface_id_for_diff_surf(values) + if data.get("surf_type") == "diff" + else get_surface_id_from_data(data) + ) + # if surfaceid exist in another view use the color settings + # from that view and disable the color component + if surfaceid in surfids: + index_of_first = surfids.index(surfaceid) + surfids.append(surfaceid) + view_data = deepcopy(color_data[index_of_first]) + view_data["colormap"].update(disabled=True) + view_data["color_range"].update(disabled=True) + color_data.append(view_data) + continue + + surfids.append(surfaceid) + + use_stored_color = ( + surfaceid in stored_color_settings and not color_update_index == idx + ) + if not ("colormap" in links and idx > 0): + colormap = ( + stored_color_settings[surfaceid]["colormap"] + if use_stored_color + else data.get("colormap", colormaps[0]) + ) + + if not ("color_range" in links and idx > 0): + value_range = data["surface_range"] + if data.get("colormap_keep_range", False): + color_range = data["color_range"] + elif reset_color_index == idx or surfaceid not in stored_color_settings: + color_range = value_range + else: + color_range = ( + stored_color_settings[surfaceid]["color_range"] + if use_stored_color + else data.get("color_range", value_range) + ) + + color_data.append( + { + "colormap": {"value": colormap, "options": colormaps}, + "color_range": { + "value": color_range, + "step": calculate_slider_step( + min_value=value_range[0], + max_value=value_range[1], + steps=100, + ) + if value_range[0] != value_range[1] + else 0, + "range": value_range, + }, + } + ) + + return color_data + + def get_surface_address_from_data( + data: dict, + ) -> Union[ + SimulatedSurfaceAddress, ObservedSurfaceAddress, StatisticalSurfaceAddress + ]: + """Return the SurfaceAddress based on view selection""" + has_date = ( + ensemble_surface_providers[data["ensemble"][0]].surface_dates_for_attribute( + data["attribute"][0] + ) + is not None + ) + + if data["mode"] == SurfaceMode.REALIZATION: + return SimulatedSurfaceAddress( + attribute=data["attribute"][0], + name=data["name"][0], + datestr=data["date"][0] if has_date else None, + realization=int(data["realizations"][0]), + ) + if data["mode"] == SurfaceMode.OBSERVED: + return ObservedSurfaceAddress( + attribute=data["attribute"][0], + name=data["name"][0], + datestr=data["date"][0] if has_date else None, + ) + return StatisticalSurfaceAddress( + attribute=data["attribute"][0], + 
name=data["name"][0], + datestr=data["date"][0] if has_date else None, + realizations=[int(real) for real in data["realizations"]], + statistic=data["mode"], + ) + + def publish_and_get_surface_metadata( + surface_provider: EnsembleSurfaceProvider, surface_address: SurfaceAddress + ) -> Tuple: + provider_id: str = surface_provider.provider_id() + qualified_address = QualifiedSurfaceAddress(provider_id, surface_address) + surf_meta = surface_server.get_surface_metadata(qualified_address) + if not surf_meta: + # This means we need to compute the surface + surface = surface_provider.get_surface(address=surface_address) + if not surface: + raise ValueError( + f"Could not get surface for address: {surface_address}" + ) + surface_server.publish_surface(qualified_address, surface) + surf_meta = surface_server.get_surface_metadata(qualified_address) + return surf_meta, surface_server.encode_partial_url(qualified_address) + + def publish_and_get_diff_surface_metadata( + surface_provider: EnsembleSurfaceProvider, + surface_address: SurfaceAddress, + sub_surface_provider: EnsembleSurfaceProvider, + sub_surface_address: SurfaceAddress, + ) -> Tuple: + provider_id: str = surface_provider.provider_id() + subprovider_id = sub_surface_provider.provider_id() + qualified_address: Union[QualifiedSurfaceAddress, QualifiedDiffSurfaceAddress] + qualified_address = QualifiedDiffSurfaceAddress( + provider_id, surface_address, subprovider_id, sub_surface_address + ) + + surf_meta = surface_server.get_surface_metadata(qualified_address) + if not surf_meta: + surface_a = surface_provider.get_surface(address=surface_address) + surface_b = sub_surface_provider.get_surface(address=sub_surface_address) + if surface_a is not None and surface_b is not None: + surface = surface_a - surface_b + surface_server.publish_surface(qualified_address, surface) + surf_meta = surface_server.get_surface_metadata(qualified_address) + return surf_meta, surface_server.encode_partial_url(qualified_address) + + def get_surface_id_from_data(data: dict) -> str: + """Retrieve surfaceid used for the colorstore""" + surfaceid = data["attribute"][0] + data["name"][0] + if data["date"]: + surfaceid += data["date"][0] + if data["mode"] == SurfaceMode.STDDEV: + surfaceid += data["mode"] + return surfaceid + + def get_surface_id_for_diff_surf(values: List[dict]) -> str: + """Retrieve surfaceid used for the colorstore, for the diff surface + this needs to be a combination of the two surfaces subtracted""" + return get_surface_id_from_data(values[0]) + get_surface_id_from_data(values[1]) + + def update_selections_with_multi(values: List[dict], multi: str) -> List[dict]: + """If a selector has been set as multi, the values selected in that component needs + to be divided between the views so that there is only one unique value in each""" + multi_values = values[0][multi] + new_values = [] + for val in multi_values: + updated_values = deepcopy(values[0]) + updated_values[multi] = [val] + new_values.append(updated_values) + return new_values + + def attribute_has_date(attribute: str, provider: EnsembleSurfaceProvider) -> bool: + """Check if an attribute has any dates""" + return provider.surface_dates_for_attribute(attribute) is not None + + def remove_data_if_not_valid(values: List[dict]) -> List[dict]: + """Checks if surfaces can be provided from the selections. 
+ Any invalid selections are removed.""" + updated_values = [] + for data in values: + surface_address = get_surface_address_from_data(data) + try: + provider = ensemble_surface_providers[data["ensemble"][0]] + surf_meta, _ = publish_and_get_surface_metadata( + surface_address=surface_address, + surface_provider=provider, + ) + except ValueError: + continue + if not isinstance( + surf_meta.val_min, np.ma.core.MaskedConstant + ) and not isinstance(surf_meta.val_max, np.ma.core.MaskedConstant): + data["surface_range"] = [surf_meta.val_min, surf_meta.val_max] + updated_values.append(data) + + return updated_values + + def get_surface_metadata_and_image(data: dict) -> Tuple: + surface_address = get_surface_address_from_data(data) + provider = ensemble_surface_providers[data["ensemble"][0]] + return publish_and_get_surface_metadata( + surface_address=surface_address, surface_provider=provider + ) + + def get_surface_metadata_and_image_for_diff_surface( + selector_values: List[dict], + ) -> Tuple: + surface_address = get_surface_address_from_data(selector_values[0]) + sub_surface_address = get_surface_address_from_data(selector_values[1]) + provider = ensemble_surface_providers[selector_values[0]["ensemble"][0]] + sub_provider = ensemble_surface_providers[selector_values[1]["ensemble"][0]] + return publish_and_get_diff_surface_metadata( + surface_address=surface_address, + surface_provider=provider, + sub_surface_address=sub_surface_address, + sub_surface_provider=sub_provider, + ) + + def add_diff_surface_to_values(selector_values: List[dict]) -> List[dict]: + surf_meta, _ = get_surface_metadata_and_image_for_diff_surface(selector_values) + selector_values.append( + { + "surface_range": [surf_meta.val_min, surf_meta.val_max], + "surf_type": "diff", + } + ) + return selector_values + + def combine_selector_values_and_name( + values: list, id_list: list, view: int + ) -> dict: + """Combine selector values with selector name for given view""" + return { + id_values["selector"]: values + for values, id_values in zip(values, id_list) + if id_values["view"] == view + } + + +def view_layout(views: int, columns: Optional[int] = None) -> List[int]: + """Function to set number of rows and columns for the map, if number + of columns is not specified, a square matrix layout is used""" + columns = ( + columns + if columns is not None + else min([x for x in range(20) if (x * x) >= views]) + ) + rows = math.ceil(views / columns) + return [rows, columns] diff --git a/webviz_subsurface/plugins/_map_viewer_fmu/layout.py b/webviz_subsurface/plugins/_map_viewer_fmu/layout.py new file mode 100644 index 0000000000..e9226c8737 --- /dev/null +++ b/webviz_subsurface/plugins/_map_viewer_fmu/layout.py @@ -0,0 +1,670 @@ +import json +from enum import Enum, unique +from typing import Any, Callable, List, Union + +import pydeck +import webviz_core_components as wcc +from dash import dcc, html +from webviz_subsurface_components import DeckGLMap # type: ignore + +from webviz_subsurface._components.deckgl_map.types.deckgl_props import ( + ColormapLayer, + FaultPolygonsLayer, + Hillshading2DLayer, + WellsLayer, +) + +from ._types import SurfaceMode + + +@unique +class LayoutElements(str, Enum): + """Contains all ids used in plugin. 
Note that some id's are + used as combinations of LEFT/RIGHT_VIEW together with other elements to + support pattern matching callbacks.""" + + MULTI = "multiselection" + MAINVIEW = "main-view" + SELECTIONS = "input-selections-from-layout" + COLORSELECTIONS = "input-color-selections-from-layout" + STORED_COLOR_SETTINGS = "cached-color-selections" + VIEW_DATA = "stored-combined-raw-selections" + LINKED_VIEW_DATA = "stored-selections-after-linking-set" + VERIFIED_VIEW_DATA = "stored-verified-selections" + VERIFIED_VIEW_DATA_WITH_COLORS = "stored-verified-selections-with-colors" + + LINK = "link-checkbox" + COLORLINK = "color-link-checkbox" + WELLS = "wells-selector" + LOG = "log-selector" + VIEWS = "number-of-views-input" + VIEW_COLUMNS = "number-of-views-in-column-input" + DECKGLMAP = "deckgl-component" + RANGE_RESET = "color-range-reset-button" + RESET_BUTTOM_CLICK = "color-range-reset-stored-state" + FAULTPOLYGONS = "fault-polygon-toggle" + WRAPPER = "wrapper-for-selector-component" + COLORWRAPPER = "wrapper-for-color-selector-component" + OPTIONS = "options" + + COLORMAP_LAYER = "deckglcolormaplayer" + HILLSHADING_LAYER = "deckglhillshadinglayer" + WELLS_LAYER = "deckglwelllayer" + MAP3D_LAYER = "deckglmap3dlayer" + FAULTPOLYGONS_LAYER = "deckglfaultpolygonslayer" + REALIZATIONS_FILTER = "realization-filter-selector" + OPTIONS_DIALOG = "options-dialog" + + +class LayoutLabels(str, Enum): + """Text labels used in layout components""" + + ATTRIBUTE = "Surface attribute" + NAME = "Surface name / zone" + DATE = "Surface time interval" + ENSEMBLE = "Ensemble" + MODE = "Aggregation/Simulation/Observation" + REALIZATIONS = "Realization(s)" + WELLS = "Wells" + LOG = "Log" + COLORMAP_WRAPPER = "Surface coloring" + COLORMAP_SELECT = "Colormap" + COLORMAP_RANGE = "Value range" + RANGE_RESET = "Reset" + COLORMAP_KEEP_RANGE = "Lock range" + LINK = "🔗 Link" + FAULTPOLYGONS = "Fault polygons" + SHOW_FAULTPOLYGONS = "Show fault polygons" + SHOW_WELLS = "Show wells" + SHOW_HILLSHADING = "Hillshading" + COMMON_SELECTIONS = "Options and global filters" + REAL_FILTER = "Realization filter" + WELL_FILTER = "Well filter" + + +# pylint: disable=too-few-public-methods +class LayoutStyle: + """CSS styling""" + + MAPHEIGHT = "87vh" + SIDEBAR = {"flex": 1, "height": "90vh", "overflow-x": "auto"} + MAINVIEW = {"flex": 3, "height": "90vh"} + DISABLED = {"opacity": 0.5, "pointerEvents": "none"} + RESET_BUTTON = { + "marginTop": "5px", + "width": "100%", + "height": "20px", + "line-height": "20px", + "background-color": "#7393B3", + "color": "#fff", + } + OPTIONS_BUTTON = { + "marginBottom": "10px", + "width": "100%", + "height": "30px", + "line-height": "30px", + "background-color": "lightgrey", + } + + +class Tabs(str, Enum): + CUSTOM = "custom" + STATS = "stats" + DIFF = "diff" + SPLIT = "split" + + +class TabsLabels(str, Enum): + CUSTOM = "Custom view" + STATS = "Map statistics" + DIFF = "Difference between two maps" + SPLIT = "Maps per selector" + + +class MapSelector(str, Enum): + ENSEMBLE = "ensemble" + ATTRIBUTE = "attribute" + NAME = "name" + DATE = "date" + MODE = "mode" + REALIZATIONS = "realizations" + + +class ColorSelector(str, Enum): + COLORMAP = "colormap" + COLOR_RANGE = "color_range" + + +# pylint: disable=too-few-public-methods +class DefaultSettings: + + NUMBER_OF_VIEWS = {Tabs.STATS: 4, Tabs.DIFF: 2, Tabs.SPLIT: 1} + LINKED_SELECTORS = { + Tabs.STATS: [ + MapSelector.ENSEMBLE, + MapSelector.ATTRIBUTE, + MapSelector.NAME, + MapSelector.DATE, + ColorSelector.COLORMAP, + ], + Tabs.SPLIT: [ + 
MapSelector.ENSEMBLE, + MapSelector.ATTRIBUTE, + MapSelector.NAME, + MapSelector.DATE, + MapSelector.MODE, + MapSelector.REALIZATIONS, + ColorSelector.COLORMAP, + ], + } + VIEW_LAYOUT_STATISTICS_TAB = [ + SurfaceMode.MEAN, + SurfaceMode.REALIZATION, + SurfaceMode.STDDEV, + SurfaceMode.OBSERVED, + ] + COLORMAP_OPTIONS = [ + "Physics", + "Rainbow", + "Porosity", + "Permeability", + "Seismic BlueWhiteRed", + "Time/Depth", + "Stratigraphy", + "Facies", + "Gas-Oil-Water", + "Gas-Water", + "Oil-Water", + "Accent", + ] + + +class FullScreen(wcc.WebvizPluginPlaceholder): + def __init__(self, children: List[Any]) -> None: + super().__init__(buttons=["expand"], children=children) + + +def main_layout( + get_uuid: Callable, + well_names: List[str], + realizations: List[int], + show_fault_polygons: bool = True, +) -> html.Div: + return html.Div( + children=[ + wcc.Tabs( + id=get_uuid("tabs"), + style={"width": "100%"}, + value=Tabs.CUSTOM, + children=[ + wcc.Tab( + label=TabsLabels[tab.name], + value=tab, + children=wcc.FlexBox( + children=[ + wcc.Frame( + style=LayoutStyle.SIDEBAR, + children=[ + DataStores(tab, get_uuid), + OpenDialogButton(tab, get_uuid), + NumberOfViewsSelector(tab, get_uuid), + MultiSelectorSelector(tab, get_uuid), + html.Div( + [ + MapSelectorLayout( + tab=tab, + get_uuid=get_uuid, + selector=selector, + label=LayoutLabels[selector.name], + ) + for selector in MapSelector + ] + ), + SurfaceColorSelector(tab, get_uuid), + ], + ), + wcc.Frame( + id=get_uuid(LayoutElements.MAINVIEW), + style=LayoutStyle.MAINVIEW, + color="white", + highlight=False, + children=MapViewLayout(tab, get_uuid), + ), + ] + ), + ) + for tab in Tabs + ], + ), + DialogLayout(get_uuid, show_fault_polygons, well_names, realizations), + ] + ) + + +class OpenDialogButton(html.Button): + def __init__(self, tab: Tabs, get_uuid: Callable) -> None: + super().__init__( + children=LayoutLabels.COMMON_SELECTIONS, + id={"id": get_uuid("Button"), "tab": tab}, + style=LayoutStyle.OPTIONS_BUTTON, + ) + + +class MapViewLayout(FullScreen): + """Layout for the main view containing the map""" + + def __init__(self, tab: Tabs, get_uuid: Callable) -> None: + super().__init__( + children=html.Div( + DeckGLMap( + id={"id": get_uuid(LayoutElements.DECKGLMAP), "tab": tab}, + layers=update_map_layers(1), + zoom=-4, + ), + style={"height": LayoutStyle.MAPHEIGHT}, + ), + ) + + +class DataStores(html.Div): + """Layout for the options in the sidebar""" + + def __init__(self, tab: Tabs, get_uuid: Callable) -> None: + super().__init__( + children=[ + dcc.Store(id={"id": get_uuid(element), "tab": tab}) + for element in [ + LayoutElements.VERIFIED_VIEW_DATA_WITH_COLORS, + LayoutElements.VERIFIED_VIEW_DATA, + LayoutElements.LINKED_VIEW_DATA, + LayoutElements.VIEW_DATA, + ] + ] + + [dcc.Store(id=get_uuid(LayoutElements.STORED_COLOR_SETTINGS))] + ) + + +class DialogLayout(wcc.Dialog): + """Layout for the options and filters dialog""" + + def __init__( + self, + get_uuid: Callable, + show_fault_polygons: bool, + well_names: List[str], + realizations: List[int], + ) -> None: + + checklist_options = [LayoutLabels.SHOW_HILLSHADING] + if show_fault_polygons: + checklist_options.append(LayoutLabels.SHOW_FAULTPOLYGONS) + if well_names: + checklist_options.append(LayoutLabels.SHOW_WELLS) + + super().__init__( + title=LayoutLabels.COMMON_SELECTIONS, + id=get_uuid(LayoutElements.OPTIONS_DIALOG), + draggable=True, + open=False, + children=[ + ViewsInRowSelector(get_uuid), + wcc.Checklist( + id=get_uuid(LayoutElements.OPTIONS), + options=[{"label": opt, 
"value": opt} for opt in checklist_options], + value=checklist_options, + ), + wcc.FlexBox( + children=[ + html.Div( + style={ + "flex": 3, + "minWidth": "20px", + "display": "block" if well_names else "none", + }, + children=WellFilter(get_uuid, well_names), + ), + html.Div( + RealizationFilter(get_uuid, realizations), + style={"flex": 2, "minWidth": "20px"}, + ), + ], + style={"width": "20vw"}, + ), + ], + ) + + +class LinkCheckBox(wcc.Checklist): + def __init__(self, tab: Tabs, get_uuid: Callable, selector: str) -> None: + clicked = selector in DefaultSettings.LINKED_SELECTORS.get(tab, []) + super().__init__( + id={ + "id": get_uuid(LayoutElements.LINK) + if selector not in ["color_range", "colormap"] + else get_uuid(LayoutElements.COLORLINK), + "tab": tab, + "selector": selector, + }, + options=[{"label": LayoutLabels.LINK, "value": selector}], + value=[selector] if clicked else [], + style={"display": "none" if clicked else "block"}, + ) + + +class SideBySideSelectorFlex(wcc.FlexBox): + def __init__( + self, + tab: str, + get_uuid: Callable, + view_data: List[dict], + selector: str, + link: bool = False, + dropdown: bool = False, + ) -> None: + super().__init__( + style={"flex-wrap": "nowrap"}, + children=[ + html.Div( + style={ + "flex": 1, + "minWidth": "33%", + "display": "none" if link and idx != 0 else "block", + **(LayoutStyle.DISABLED if data.get("disabled", False) else {}), + }, + children=[ + dropdown_vs_select( + value=data["value"], + options=data["options"], + component_id={ + "view": idx, + "id": get_uuid(LayoutElements.COLORSELECTIONS) + if selector in ["colormap", "color_range"] + else get_uuid(LayoutElements.SELECTIONS), + "tab": tab, + "selector": selector, + }, + multi=data.get("multi", False), + dropdown=dropdown, + ) + if selector != "color_range" + else color_range_selection_layout( + tab, + get_uuid, + value=data["value"], + value_range=data["range"], + step=data["step"], + view_idx=idx, + ) + ], + ) + for idx, data in enumerate(view_data) + ], + ) + + +class MultiSelectorSelector(html.Div): + def __init__(self, tab: Tabs, get_uuid: Callable) -> None: + super().__init__( + style={ + "margin-bottom": "15px", + "display": "block" if tab == Tabs.SPLIT else "none", + }, + children=wcc.Dropdown( + label="Create map for each:", + id={"id": get_uuid(LayoutElements.MULTI), "tab": tab}, + options=[ + { + "label": LayoutLabels[selector.name], + "value": selector, + } + for selector in [ + MapSelector.NAME, + MapSelector.DATE, + MapSelector.ENSEMBLE, + MapSelector.ATTRIBUTE, + MapSelector.REALIZATIONS, + ] + ], + value=MapSelector.NAME if tab == Tabs.SPLIT else None, + clearable=False, + ), + ) + + +class NumberOfViewsSelector(html.Div): + def __init__(self, tab: Tabs, get_uuid: Callable) -> None: + super().__init__( + children=[ + "Number of views", + html.Div( + dcc.Input( + id={"id": get_uuid(LayoutElements.VIEWS), "tab": tab}, + type="number", + min=1, + max=9, + step=1, + value=DefaultSettings.NUMBER_OF_VIEWS.get(tab, 1), + ), + style={"float": "right"}, + ), + ], + style={ + "display": "none" if tab in DefaultSettings.NUMBER_OF_VIEWS else "block" + }, + ) + + +class ViewsInRowSelector(html.Div): + def __init__(self, get_uuid: Callable) -> None: + super().__init__( + children=[ + "Views in row (optional)", + html.Div( + dcc.Input( + id=get_uuid(LayoutElements.VIEW_COLUMNS), + type="number", + min=1, + max=9, + step=1, + ), + style={"float": "right"}, + ), + ] + ) + + +class RealizationFilter(wcc.SelectWithLabel): + def __init__(self, get_uuid: Callable, realizations: 
List[int]) -> None: + super().__init__( + label=LayoutLabels.REAL_FILTER, + id=get_uuid(LayoutElements.REALIZATIONS_FILTER), + options=[{"label": i, "value": i} for i in realizations], + value=realizations, + size=min(20, len(realizations)), + ) + + +class WellFilter(html.Div): + def __init__(self, get_uuid: Callable, well_names: List[str]) -> None: + super().__init__( + style={"display": "block" if well_names else "none"}, + children=wcc.SelectWithLabel( + label=LayoutLabels.WELL_FILTER, + id=get_uuid(LayoutElements.WELLS), + options=[{"label": i, "value": i} for i in well_names], + value=well_names, + size=min(20, len(well_names)), + ), + ) + + +class MapSelectorLayout(html.Div): + def __init__( + self, + tab: Tabs, + get_uuid: Callable, + selector: MapSelector, + label: str, + ) -> None: + super().__init__( + style={ + "display": "none" + if tab == Tabs.STATS and selector == MapSelector.MODE + else "block" + }, + children=wcc.Selectors( + label=label, + children=[ + LinkCheckBox(tab, get_uuid, selector=selector), + html.Div( + id={ + "id": get_uuid(LayoutElements.WRAPPER), + "tab": tab, + "selector": selector, + } + ), + ], + ), + ) + + +class SurfaceColorSelector(wcc.Selectors): + def __init__(self, tab: Tabs, get_uuid: Callable) -> None: + super().__init__( + label=LayoutLabels.COLORMAP_WRAPPER, + open_details=False, + children=[ + html.Div( + style={"margin-bottom": "10px"}, + children=[ + LinkCheckBox(tab, get_uuid, selector), + html.Div( + id={ + "id": get_uuid(LayoutElements.COLORWRAPPER), + "tab": tab, + "selector": selector, + } + ), + ], + ) + for selector in ColorSelector + ], + ) + + +def color_range_selection_layout( + tab: str, + get_uuid: Callable, + value: List[float], + value_range: List[float], + step: float, + view_idx: int, +) -> html.Div: + return html.Div( + children=[ + f"{LayoutLabels.COLORMAP_RANGE}", + wcc.RangeSlider( + id={ + "view": view_idx, + "id": get_uuid(LayoutElements.COLORSELECTIONS), + "selector": ColorSelector.COLOR_RANGE, + "tab": tab, + }, + tooltip={"placement": "bottomLeft"}, + min=value_range[0], + max=value_range[1], + step=step, + marks={str(value): {"label": f"{value:.2f}"} for value in value_range}, + value=value, + ), + wcc.Checklist( + id={ + "view": view_idx, + "id": get_uuid(LayoutElements.COLORSELECTIONS), + "selector": "colormap_keep_range", + "tab": tab, + }, + options=[ + { + "label": LayoutLabels.COLORMAP_KEEP_RANGE, + "value": LayoutLabels.COLORMAP_KEEP_RANGE, + } + ], + value=[], + ), + html.Button( + children=LayoutLabels.RANGE_RESET, + style=LayoutStyle.RESET_BUTTON, + id={ + "view": view_idx, + "id": get_uuid(LayoutElements.RANGE_RESET), + "tab": tab, + }, + ), + ] + ) + + +def dropdown_vs_select( + value: Union[List[str], str], + options: List[str], + component_id: dict, + dropdown: bool = False, + multi: bool = False, +) -> Union[wcc.Dropdown, wcc.SelectWithLabel]: + if dropdown: + if isinstance(value, list) and not multi: + value = value[0] + return wcc.Dropdown( + id=component_id, + options=[{"label": opt, "value": opt} for opt in options], + value=value, + clearable=False, + multi=multi, + ) + return wcc.SelectWithLabel( + id=component_id, + options=[{"label": opt, "value": opt} for opt in options], + size=5, + value=value, + multi=multi, + ) + + +def update_map_layers( + views: int, + include_well_layer: bool = True, + include_faultpolygon_layer: bool = True, + visible_well_layer: bool = True, + visible_fault_polygons_layer: bool = True, + visible_hillshading_layer: bool = True, +) -> List[dict]: + layers: 
List[pydeck.Layer] = []
+    for idx in range(views):
+        layers.extend(
+            [
+                # Map3DLayer(uuid=f"{LayoutElements.MAP3D_LAYER}-{idx}"),
+                ColormapLayer(uuid=f"{LayoutElements.COLORMAP_LAYER}-{idx}"),
+                Hillshading2DLayer(
+                    uuid=f"{LayoutElements.HILLSHADING_LAYER}-{idx}",
+                    visible=visible_hillshading_layer,
+                ),
+            ]
+        )
+
+        if include_faultpolygon_layer:
+            layers.append(
+                FaultPolygonsLayer(
+                    uuid=f"{LayoutElements.FAULTPOLYGONS_LAYER}-{idx}",
+                    visible=visible_fault_polygons_layer,
+                )
+            )
+        if include_well_layer:
+            layers.append(
+                WellsLayer(
+                    uuid=f"{LayoutElements.WELLS_LAYER}-{idx}",
+                    visible=visible_well_layer,
+                )
+            )
+
+    return [json.loads(layer.to_json()) for layer in layers]
diff --git a/webviz_subsurface/plugins/_map_viewer_fmu/map_viewer_fmu.py b/webviz_subsurface/plugins/_map_viewer_fmu/map_viewer_fmu.py
new file mode 100644
index 0000000000..94c605b281
--- /dev/null
+++ b/webviz_subsurface/plugins/_map_viewer_fmu/map_viewer_fmu.py
@@ -0,0 +1,170 @@
+from pathlib import Path
+from typing import Callable, Dict, List, Optional, Tuple
+
+from dash import Dash, html
+from webviz_config import WebvizPluginABC, WebvizSettings
+
+from webviz_subsurface._providers import (
+    EnsembleFaultPolygonsProviderFactory,
+    EnsembleSurfaceProviderFactory,
+)
+from webviz_subsurface._providers.ensemble_fault_polygons_provider.fault_polygons_server import (
+    FaultPolygonsServer,
+)
+from webviz_subsurface._providers.ensemble_surface_provider.surface_server import (
+    SurfaceServer,
+)
+from webviz_subsurface._utils.webvizstore_functions import read_csv
+
+from ._tmp_well_pick_provider import WellPickProvider
+from .callbacks import plugin_callbacks
+from .layout import main_layout
+
+
+class MapViewerFMU(WebvizPluginABC):
+    """Surface visualizer for FMU ensembles.
+A dashboard to covisualize arbitrary surfaces generated by FMU.
+
+---
+
+* **`ensembles`:** Which ensembles in `shared_settings` to visualize.
+* **`attributes`:** List of surface attributes to include; if not given,
+    all surface attributes will be included.
+* **`well_pick_file`:** A csv file with well picks. See data input.
+* **`fault_polygon_attribute`:** Which set of fault polygons to use.
+* **`map_surface_names_to_well_pick_names`:** Allows mapping of file surface names
+    to the relevant well pick name.
+* **`map_surface_names_to_fault_polygons`:** Allows mapping of file surface names
+    to the relevant fault polygon set name.
+
+---
+The available maps are gathered from the `share/results/maps/` folder
+for each realization. Subfolders are not supported.
+
+Observed maps are gathered from the `share/observations/maps/` folder in the case folder.
+The filenames need to follow a fairly strict convention, as the filenames are used as metadata:
+`horizon_name--attribute--date` (`--date` is optional). The files should be in `irap binary`
+format with the suffix `.gri`. The date is of the form `YYYYMMDD` or
+`YYYYMMDD_YYYYMMDD`, the latter being used for a delta surface between two dates.
+
+See [this folder]\
+(https://github.com/equinor/webviz-subsurface-testdata/tree/master/01_drogon_ahm/\
+realization-0/iter-0/share/results/maps) \
+for examples of file naming conventions.
+
+Fault polygons are gathered from the `share/results/polygons` folder for each realization.
+The same file naming convention as for surfaces must be followed, and the suffix should be
+`.pol`, representing XYZ format usable by xtgeo.
+
+Well picks are provided as a csv file with columns `X_UTME,Y_UTMN,Z_TVDSS,MD,WELL,HORIZON`.
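+
+A minimal, purely illustrative sketch of the expected csv layout (the well name,
+coordinates and horizon names below are hypothetical placeholders, not taken from
+the test data):
+
+```csv
+X_UTME,Y_UTMN,Z_TVDSS,MD,WELL,HORIZON
+462500.0,5930100.0,1650.0,1700.0,A-1,TopHorizonA
+462800.0,5930250.0,1820.0,1905.0,A-1,TopHorizonB
+```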
+See [wellpicks.csv](https://github.com/equinor/webviz-subsurface-testdata/tree/master/\ + observed_data/drogon_well_picks/wellpicks.csv) for an example. +Well picks can be exported from RMS using this script: [extract_well_picks_from_rms.py]\ + (https://github.com/equinor/webviz-subsurface-testdata/tree/master/observed_data/\ + drogon_well_picks/extract_well_picks_from_rms.py) +""" + + # pylint: disable=too-many-arguments + def __init__( + self, + app: Dash, + webviz_settings: WebvizSettings, + ensembles: list, + attributes: list = None, + well_pick_file: Path = None, + fault_polygon_attribute: Optional[str] = None, + map_surface_names_to_fault_polygons: Dict[str, str] = None, + map_surface_names_to_well_pick_names: Dict[str, str] = None, + ): + + super().__init__() + + surface_provider_factory = EnsembleSurfaceProviderFactory.instance() + fault_polygons_provider_factory = ( + EnsembleFaultPolygonsProviderFactory.instance() + ) + + self._ensemble_surface_providers = { + ens: surface_provider_factory.create_from_ensemble_surface_files( + webviz_settings.shared_settings["scratch_ensembles"][ens], + attribute_filter=attributes, + ) + for ens in ensembles + } + self._surface_server = SurfaceServer.instance(app) + + self.well_pick_provider = None + self.well_pick_file = well_pick_file + if self.well_pick_file is not None: + well_pick_table = read_csv(self.well_pick_file) + self.well_pick_provider = WellPickProvider( + dframe=well_pick_table, + map_surface_names_to_well_pick_names=map_surface_names_to_well_pick_names, + ) + self.well_pick_provider.get_geojson( + self.well_pick_provider.well_names(), "TopVolantis" + ) + + self._ensemble_fault_polygons_providers = { + ens: fault_polygons_provider_factory.create_from_ensemble_fault_polygons_files( + webviz_settings.shared_settings["scratch_ensembles"][ens] + ) + for ens in ensembles + } + all_fault_polygon_attributes = self._ensemble_fault_polygons_providers[ + ensembles[0] + ].attributes() + self.fault_polygon_attribute: Optional[str] = None + if ( + fault_polygon_attribute is not None + and fault_polygon_attribute in all_fault_polygon_attributes + ): + self.fault_polygon_attribute = fault_polygon_attribute + elif all_fault_polygon_attributes: + self.fault_polygon_attribute = all_fault_polygon_attributes[0] + else: + self.fault_polygon_attribute = None + + self._fault_polygons_server = FaultPolygonsServer.instance(app) + for fault_polygons_provider in self._ensemble_fault_polygons_providers.values(): + self._fault_polygons_server.add_provider(fault_polygons_provider) + + self.map_surface_names_to_fault_polygons = ( + map_surface_names_to_fault_polygons + if map_surface_names_to_fault_polygons is not None + else {} + ) + + self.set_callbacks() + + @property + def layout(self) -> html.Div: + reals = [] + for provider in self._ensemble_surface_providers.values(): + reals.extend([x for x in provider.realizations() if x not in reals]) + return main_layout( + get_uuid=self.uuid, + well_names=self.well_pick_provider.well_names() + if self.well_pick_provider is not None + else [], + realizations=reals, + ) + + def set_callbacks(self) -> None: + + plugin_callbacks( + get_uuid=self.uuid, + ensemble_surface_providers=self._ensemble_surface_providers, + surface_server=self._surface_server, + ensemble_fault_polygons_providers=self._ensemble_fault_polygons_providers, + fault_polygon_attribute=self.fault_polygon_attribute, + fault_polygons_server=self._fault_polygons_server, + map_surface_names_to_fault_polygons=self.map_surface_names_to_fault_polygons, + 
well_picks_provider=self.well_pick_provider, + ) + + def add_webvizstore(self) -> List[Tuple[Callable, list]]: + store_functions = [] + if self.well_pick_file is not None: + store_functions.append((read_csv, [{"csv_file": self.well_pick_file}])) + return store_functions
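
For reference, a minimal webviz-config snippet enabling the new plugin might look
like the sketch below (the ensemble name, file path and attribute are illustrative
placeholders, not taken from this PR; the options mirror the plugin arguments
documented in the class docstring):

```yaml
# Hypothetical page entry in a webviz-config yaml file:
pages:
  - title: Map viewer
    content:
      - MapViewerFMU:
          ensembles:
            - iter-0
          well_pick_file: ./wellpicks.csv
          fault_polygon_attribute: dl_extracted_faultlines
```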