diff --git a/.github/workflows/subsurface.yml b/.github/workflows/subsurface.yml
index cd1e60c9e..01f478696 100644
--- a/.github/workflows/subsurface.yml
+++ b/.github/workflows/subsurface.yml
@@ -57,7 +57,6 @@ jobs:
           # Testing against our latest release (including pre-releases)
           pip install --pre --upgrade webviz-config webviz-core-components webviz-subsurface-components
-

       - name: 📦 Install test dependencies
         run: |
           pip install .[tests]
@@ -101,7 +100,7 @@ jobs:
           git clone --depth 1 --branch $TESTDATA_REPO_BRANCH https://github.com/$TESTDATA_REPO_OWNER/webviz-subsurface-testdata.git
           # Copy any clientside script to the test folder before running tests
           mkdir ./tests/assets && cp ./webviz_subsurface/_assets/js/* ./tests/assets
-          pytest ./tests --headless --forked --testdata-folder ./webviz-subsurface-testdata
+          pytest ./tests --headless --forked -s --testdata-folder ./webviz-subsurface-testdata
           rm -rf ./tests/assets

       - name: 🐳 Build Docker example image
diff --git a/setup.py b/setup.py
index 9688ea0b4..980d66393 100644
--- a/setup.py
+++ b/setup.py
@@ -107,9 +107,8 @@
         "statsmodels>=0.12.1",  # indirect dependency through https://plotly.com/python/linear-fits/
         "xtgeo>=2.20.0",
         "vtk>=9.2.2",
-        "webviz-config",
-        "webviz-core-components>=0.6",
-        "webviz-subsurface-components==1.0.2",
+        "webviz-config>=0.6.5",
+        "webviz-subsurface-components>=1.0.3",
     ],
     extras_require={"tests": TESTS_REQUIRE},
     setup_requires=["setuptools_scm~=3.2"],
diff --git a/tests/integration_tests/plugin_tests/__init__.py b/tests/integration_tests/plugin_tests/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/tests/integration_tests/plugin_tests/test_bhp_qc.py b/tests/integration_tests/plugin_tests/test_bhp_qc.py
deleted file mode 100644
index 5840f0626..000000000
--- a/tests/integration_tests/plugin_tests/test_bhp_qc.py
+++ /dev/null
@@ -1,12 +0,0 @@
-# pylint: disable=no-name-in-module
-from webviz_config.plugins import BhpQc
-from webviz_config.testing import WebvizComposite
-
-
-def test_bhp_qc(_webviz_duo: WebvizComposite, shared_settings: dict) -> None:
-    plugin = BhpQc(
-        shared_settings["HM_SETTINGS"], ensembles=shared_settings["HM_ENSEMBLES"]
-    )
-
-    _webviz_duo.start_server(plugin)
-    assert not _webviz_duo.get_logs()
diff --git a/tests/integration_tests/plugin_tests/test_history_match.py b/tests/integration_tests/plugin_tests/test_history_match.py
deleted file mode 100644
index 52b3be711..000000000
--- a/tests/integration_tests/plugin_tests/test_history_match.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# pylint: disable=no-name-in-module
-from webviz_config.plugins import HistoryMatch
-
-
-def test_history_match(dash_duo, app, testdata_folder, shared_settings) -> None:
-    plugin = HistoryMatch(
-        shared_settings["HM_SETTINGS"],
-        ensembles=shared_settings["HM_ENSEMBLES"],
-        observation_file=testdata_folder
-        / "01_drogon_ahm"
-        / "share"
-        / "observations"
-        / "tables"
-        / "ert_observations.yml",
-    )
-    app.layout = plugin.layout
-    dash_duo.start_server(app)
-    assert not dash_duo.get_logs()
diff --git a/tests/integration_tests/plugin_tests/test_line_plotter_fmu.py b/tests/integration_tests/plugin_tests/test_line_plotter_fmu.py
deleted file mode 100644
index a5c9ba697..000000000
--- a/tests/integration_tests/plugin_tests/test_line_plotter_fmu.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# pylint: disable=no-name-in-module
-from webviz_config.plugins import LinePlotterFMU
-
-
-def test_line_plotter_fmu(dash_duo, app, testdata_folder, shared_settings) -> None:
-    plugin = LinePlotterFMU(
-        app,
-
shared_settings["HM_SETTINGS"], - aggregated_csvfile=testdata_folder - / "reek_test_data" - / "aggregated_data" - / "smry_hm.csv", - aggregated_parameterfile=testdata_folder - / "reek_test_data" - / "aggregated_data" - / "parameters_hm.csv", - observation_file=testdata_folder / "reek_test_data" / "observations.yml", - observation_group="smry", - remap_observation_values={"DATE": "date"}, - initial_data={ - "x": "DATE", - "y": "FOPR", - "ensembles": ["iter-0", "iter-3"], - "colors": {"iter-0": "red", "iter-3": "blue"}, - }, - ) - app.layout = plugin.layout - dash_duo.start_server(app) - assert not dash_duo.get_logs() diff --git a/tests/integration_tests/plugin_tests/test_parameter_analysis.py b/tests/integration_tests/plugin_tests/test_parameter_analysis.py deleted file mode 100644 index db20565c5..000000000 --- a/tests/integration_tests/plugin_tests/test_parameter_analysis.py +++ /dev/null @@ -1,26 +0,0 @@ -import warnings - -# pylint: disable=no-name-in-module -from webviz_config.plugins import ParameterAnalysis -from webviz_config.testing import WebvizComposite - - -def test_parameter_analysis( - _webviz_duo: WebvizComposite, shared_settings: dict -) -> None: - plugin = ParameterAnalysis( - shared_settings["HM_SETTINGS"], - ensembles=shared_settings["HM_ENSEMBLES"], - column_keys=["WWCT:*"], - time_index="monthly", - drop_constants=True, - ) - _webviz_duo.start_server(plugin) - - logs = [] - for log in _webviz_duo.get_logs() or []: - if "dash_renderer" in log.get("message"): - warnings.warn(log.get("message")) - else: - logs.append(log) - assert not logs diff --git a/tests/integration_tests/plugin_tests/test_parameter_correlation.py b/tests/integration_tests/plugin_tests/test_parameter_correlation.py deleted file mode 100644 index 4b75d2b69..000000000 --- a/tests/integration_tests/plugin_tests/test_parameter_correlation.py +++ /dev/null @@ -1,25 +0,0 @@ -# pylint: disable=no-name-in-module -from webviz_config.plugins import ParameterCorrelation -from webviz_config.testing import WebvizComposite - - -def test_parameter_correlation(_webviz_duo: WebvizComposite, shared_settings) -> None: - parameter_correlation = ParameterCorrelation( - shared_settings["HM_SETTINGS"], - ensembles=shared_settings["HM_ENSEMBLES"], - ) - - _webviz_duo.start_server(parameter_correlation) - - _webviz_duo.toggle_webviz_settings_drawer() - _webviz_duo.toggle_webviz_settings_group( - parameter_correlation.view("paracorr").settings_group_unique_id("settings") - ) - # Using str literals directly, not IDs from the plugin as intended because - # the run test did not accept the imports - - my_component_id = _webviz_duo.view_settings_group_unique_component_id( - "paracorr", "settings", "shared-ensemble" - ) - _webviz_duo.wait_for_contains_text(my_component_id, "iter-0") - assert not _webviz_duo.get_logs() diff --git a/tests/integration_tests/plugin_tests/test_parameter_distribution.py b/tests/integration_tests/plugin_tests/test_parameter_distribution.py deleted file mode 100644 index d653d6fff..000000000 --- a/tests/integration_tests/plugin_tests/test_parameter_distribution.py +++ /dev/null @@ -1,13 +0,0 @@ -# pylint: disable=no-name-in-module -from webviz_config.plugins import ParameterDistribution - - -def test_parameter_distribution(dash_duo, app, shared_settings) -> None: - plugin = ParameterDistribution( - app, - shared_settings["HM_SETTINGS"], - ensembles=shared_settings["HM_ENSEMBLES"], - ) - app.layout = plugin.layout - dash_duo.start_server(app) - assert not dash_duo.get_logs() diff --git 
a/tests/integration_tests/plugin_tests/test_parameter_parallel_coordinates.py b/tests/integration_tests/plugin_tests/test_parameter_parallel_coordinates.py deleted file mode 100644 index f0597122d..000000000 --- a/tests/integration_tests/plugin_tests/test_parameter_parallel_coordinates.py +++ /dev/null @@ -1,13 +0,0 @@ -# pylint: disable=no-name-in-module -from webviz_config.plugins import ParameterParallelCoordinates - - -def test_parameter_parallel_coordinates(dash_duo, app, shared_settings) -> None: - plugin = ParameterParallelCoordinates( - app, - shared_settings["HM_SETTINGS"], - ensembles=shared_settings["HM_ENSEMBLES"], - ) - app.layout = plugin.layout - dash_duo.start_server(app) - assert not dash_duo.get_logs() diff --git a/tests/integration_tests/plugin_tests/test_parameter_response_correlation.py b/tests/integration_tests/plugin_tests/test_parameter_response_correlation.py deleted file mode 100644 index 97d8d61b5..000000000 --- a/tests/integration_tests/plugin_tests/test_parameter_response_correlation.py +++ /dev/null @@ -1,15 +0,0 @@ -# pylint: disable=no-name-in-module -from webviz_config.plugins import ParameterResponseCorrelation - - -def test_parameter_response_correlation(dash_duo, app, shared_settings) -> None: - plugin = ParameterResponseCorrelation( - app, - shared_settings["HM_SETTINGS"], - ensembles=shared_settings["HM_ENSEMBLES"], - response_file="share/results/volumes/geogrid--vol.csv", - response_filters={"ZONE": "multi", "REGION": "multi"}, - ) - app.layout = plugin.layout - dash_duo.start_server(app) - assert not dash_duo.get_logs() diff --git a/tests/integration_tests/plugin_tests/test_property_statistics.py b/tests/integration_tests/plugin_tests/test_property_statistics.py deleted file mode 100644 index 4fc314ade..000000000 --- a/tests/integration_tests/plugin_tests/test_property_statistics.py +++ /dev/null @@ -1,22 +0,0 @@ -import warnings - -# pylint: disable=no-name-in-module -from webviz_config.plugins import PropertyStatistics - - -def test_property_statistics(dash_duo, app, shared_settings) -> None: - plugin = PropertyStatistics( - app, - shared_settings["HM_SETTINGS"], - ensembles=shared_settings["HM_ENSEMBLES"], - statistics_file="share/results/tables/grid_property_statistics_geogrid.csv", - ) - app.layout = plugin.layout - dash_duo.start_server(app) - logs = [] - for log in dash_duo.get_logs() or []: - if "dash_renderer" in log.get("message"): - warnings.warn(log.get("message")) - else: - logs.append(log) - assert not logs diff --git a/tests/integration_tests/plugin_tests/test_pvt_plot.py b/tests/integration_tests/plugin_tests/test_pvt_plot.py deleted file mode 100644 index 29a73b5ed..000000000 --- a/tests/integration_tests/plugin_tests/test_pvt_plot.py +++ /dev/null @@ -1,15 +0,0 @@ -# pylint: disable=no-name-in-module -from webviz_config.plugins import PvtPlot -from webviz_config.testing import WebvizComposite - - -def test_pvt_plot(_webviz_duo: WebvizComposite, shared_settings: dict) -> None: - plugin = PvtPlot( - shared_settings["HM_SETTINGS"], - ensembles=shared_settings["HM_ENSEMBLES"], - pvt_relative_file_path="share/results/tables/pvt.csv", - ) - - _webviz_duo.start_server(plugin) - - assert not _webviz_duo.get_logs() diff --git a/tests/integration_tests/plugin_tests/test_relative_permeability.py b/tests/integration_tests/plugin_tests/test_relative_permeability.py deleted file mode 100644 index 2dddda5cb..000000000 --- a/tests/integration_tests/plugin_tests/test_relative_permeability.py +++ /dev/null @@ -1,14 +0,0 @@ -# pylint: 
disable=no-name-in-module -from webviz_config.plugins import RelativePermeability - - -def test_relative_permeability(dash_duo, app, shared_settings) -> None: - plugin = RelativePermeability( - app, - shared_settings["HM_SETTINGS"], - ensembles=shared_settings["HM_ENSEMBLES"], - relpermfile="share/results/tables/relperm.csv", - ) - app.layout = plugin.layout - dash_duo.start_server(app) - assert not dash_duo.get_logs() diff --git a/tests/integration_tests/plugin_tests/test_reservoir_simulation_timeseries.py b/tests/integration_tests/plugin_tests/test_reservoir_simulation_timeseries.py deleted file mode 100644 index 3ff1481b2..000000000 --- a/tests/integration_tests/plugin_tests/test_reservoir_simulation_timeseries.py +++ /dev/null @@ -1,30 +0,0 @@ -import warnings - -# pylint: disable=no-name-in-module -from webviz_config.plugins import ReservoirSimulationTimeSeries - - -def test_reservoir_simulation_timeseries( - dash_duo, app, shared_settings, testdata_folder -) -> None: - plugin = ReservoirSimulationTimeSeries( - app, - shared_settings["HM_SETTINGS"], - ensembles=shared_settings["HM_ENSEMBLES"], - obsfile=testdata_folder - / "01_drogon_ahm" - / "share" - / "observations" - / "tables" - / "ert_observations.yml", - ) - app.layout = plugin.layout - dash_duo.start_server(app) - - logs = [] - for log in dash_duo.get_logs() or []: - if "dash_renderer" in log.get("message"): - warnings.warn(log.get("message")) - else: - logs.append(log) - assert not logs diff --git a/tests/integration_tests/plugin_tests/test_reservoir_simulation_timeseries_onebyone.py b/tests/integration_tests/plugin_tests/test_reservoir_simulation_timeseries_onebyone.py deleted file mode 100644 index 698a205f7..000000000 --- a/tests/integration_tests/plugin_tests/test_reservoir_simulation_timeseries_onebyone.py +++ /dev/null @@ -1,16 +0,0 @@ -# pylint: disable=no-name-in-module -from webviz_config.plugins import ReservoirSimulationTimeSeriesOneByOne - - -def test_reservoir_simulation_timeseries_onebyone( - dash_duo, app, shared_settings -) -> None: - plugin = ReservoirSimulationTimeSeriesOneByOne( - app, - shared_settings["SENS_SETTINGS"], - ensembles=shared_settings["SENS_ENSEMBLES"], - initial_vector="FOPT", - ) - app.layout = plugin.layout - dash_duo.start_server(app) - assert not dash_duo.get_logs() diff --git a/tests/integration_tests/plugin_tests/test_reservoir_simulation_timeseries_regional.py b/tests/integration_tests/plugin_tests/test_reservoir_simulation_timeseries_regional.py deleted file mode 100644 index f4d740fda..000000000 --- a/tests/integration_tests/plugin_tests/test_reservoir_simulation_timeseries_regional.py +++ /dev/null @@ -1,23 +0,0 @@ -# pylint: disable=no-name-in-module -from webviz_config.plugins import ReservoirSimulationTimeSeriesRegional - - -def test_reservoir_simulation_timeseries_regional( - dash_duo, app, shared_settings, testdata_folder -) -> None: - plugin = ReservoirSimulationTimeSeriesRegional( - app, - shared_settings["HM_SETTINGS"], - ensembles=shared_settings["HM_ENSEMBLES"], - fipfile=testdata_folder - / "01_drogon_ahm" - / "realization-0" - / "iter-0" - / "share" - / "results" - / "tables" - / "fip.yml", - ) - app.layout = plugin.layout - dash_duo.start_server(app) - assert not dash_duo.get_logs() diff --git a/tests/integration_tests/plugin_tests/test_rft_plotter.py b/tests/integration_tests/plugin_tests/test_rft_plotter.py deleted file mode 100644 index 6019ce698..000000000 --- a/tests/integration_tests/plugin_tests/test_rft_plotter.py +++ /dev/null @@ -1,43 +0,0 @@ -# pylint: 
disable=no-name-in-module -from webviz_config.plugins import RftPlotter -from webviz_config.testing import WebvizComposite - - -def test_rft_plotter( - _webviz_duo: WebvizComposite, shared_settings, testdata_folder -) -> None: - plugin = RftPlotter( - shared_settings["HM_SETTINGS"], - ensembles=shared_settings["HM_ENSEMBLES"], - formations=testdata_folder - / "01_drogon_ahm" - / "realization-0" - / "iter-0" - / "share" - / "results" - / "tables" - / "formations_res_only.csv", - faultlines=testdata_folder - / "01_drogon_ahm" - / "realization-0" - / "iter-0" - / "share" - / "results" - / "polygons" - / "toptherys--gl_faultlines_extract_postprocess.csv", - ) - - _webviz_duo.start_server(plugin) - - _webviz_duo.toggle_webviz_settings_drawer() - _webviz_duo.toggle_webviz_settings_group( - plugin.view("map-view").settings_group_unique_id("map-settings") - ) - # Using str literals directly, not IDs from the plugin as intended because - # the run test did not accept the imports - - my_component_id = _webviz_duo.view_settings_group_unique_component_id( - "map-view", "map-settings", "map-ensemble" - ) - _webviz_duo.wait_for_contains_text(my_component_id, "iter-0") - assert not _webviz_duo.get_logs() diff --git a/tests/integration_tests/plugin_tests/test_segy_viewer.py b/tests/integration_tests/plugin_tests/test_segy_viewer.py deleted file mode 100644 index d867ac5e0..000000000 --- a/tests/integration_tests/plugin_tests/test_segy_viewer.py +++ /dev/null @@ -1,22 +0,0 @@ -# pylint: disable=no-name-in-module -from webviz_config.plugins import SegyViewer - - -def test_segy_viewer(dash_duo, app, shared_settings, testdata_folder) -> None: - plugin = SegyViewer( - app, - shared_settings["HM_SETTINGS"], - segyfiles=[ - testdata_folder - / "01_drogon_ahm" - / "realization-0" - / "iter-0" - / "share" - / "results" - / "seismic" - / "seismic--amplitude_depth--20180701_20180101.segy" - ], - ) - app.layout = plugin.layout - dash_duo.start_server(app) - assert not dash_duo.get_logs() diff --git a/tests/integration_tests/plugin_tests/test_simulation_timeseries_onebyone.py b/tests/integration_tests/plugin_tests/test_simulation_timeseries_onebyone.py deleted file mode 100644 index 644f972da..000000000 --- a/tests/integration_tests/plugin_tests/test_simulation_timeseries_onebyone.py +++ /dev/null @@ -1,23 +0,0 @@ -import warnings - -# pylint: disable=no-name-in-module -from webviz_config.plugins import SimulationTimeSeriesOneByOne -from webviz_config.testing import WebvizComposite - - -def test_simulation_timeseries_onebyone( - _webviz_duo: WebvizComposite, shared_settings: dict -) -> None: - plugin = SimulationTimeSeriesOneByOne( - webviz_settings=shared_settings["SENS_SETTINGS"], - ensembles=shared_settings["SENS_ENSEMBLES"], - initial_vector="FOPT", - ) - _webviz_duo.start_server(plugin) - logs = [] - for log in _webviz_duo.get_logs() or []: - if "dash_renderer" in log.get("message"): - warnings.warn(log.get("message")) - else: - logs.append(log) - assert not logs diff --git a/tests/integration_tests/plugin_tests/test_structural_uncertainty.py b/tests/integration_tests/plugin_tests/test_structural_uncertainty.py deleted file mode 100644 index 0b10f5546..000000000 --- a/tests/integration_tests/plugin_tests/test_structural_uncertainty.py +++ /dev/null @@ -1,244 +0,0 @@ -import json - -from dash import Input, Output, State, html -from webviz_config import WebvizSettings - -# pylint: disable=no-name-in-module -from webviz_config.plugins import StructuralUncertainty -from webviz_config.themes import default_theme - -# 
pylint: enable=no-name-in-module - - -def stringify_object_id(uuid) -> str: - """Object ids must be sorted and converted to - css strings to be recognized as dom elements""" - sorted_uuid_obj = json.loads( - json.dumps( - uuid, - sort_keys=True, - separators=(",", ":"), - ) - ) - string = ["{"] - for idx, (key, value) in enumerate(sorted_uuid_obj.items()): - string.append(f'\\"{key}\\"\\:\\"{value}\\"\\') - if idx == len(sorted_uuid_obj) - 1: - string.append("}") - else: - string.append(",") - return ("").join(string) - - -# pylint: disable=too-many-locals -def test_default_configuration(dash_duo, app, testdata_folder) -> None: - webviz_settings = WebvizSettings( - shared_settings={ - "scratch_ensembles": { - "iter-0": str(testdata_folder / "01_drogon_ahm/realization-*/iter-0") - } - }, - theme=default_theme, - ) - plugin = StructuralUncertainty( - app, - webviz_settings, - ensembles=["iter-0"], - surface_attributes=["ds_extract_postprocess"], - surface_name_filter=[ - "topvolon", - "toptherys", - "topvolantis", - "basevolantis", - ], - wellsuffix=".rmswell", - wellfolder=testdata_folder / "observed_data" / "wells", - ) - - app.layout = plugin.layout - dash_duo.start_server(app) - - intersection_data_id = plugin.uuid("intersection-data") - dialog_id = plugin.uuid("dialog") - # Check some initialization - # Check dropdowns - for element, return_val in zip( - ["well", "surface_attribute"], ["55_33-1", "ds_extract_postprocess"] - ): - uuid = stringify_object_id( - uuid={"element": element, "id": intersection_data_id} - ) - assert dash_duo.wait_for_element(f"#\\{uuid} .Select-value").text == return_val - - # Check Selects - for element, return_val in zip( - ["surface_names"], - [["topvolon", "toptherys", "topvolantis", "basevolantis"]], - ): - uuid = stringify_object_id( - uuid={"element": element, "id": intersection_data_id} - ) - assert ( - dash_duo.wait_for_element(f"#\\{uuid} select").text.splitlines() - == return_val - ) - - # Check Calculation checkbox - uuid = stringify_object_id( - uuid={"element": "calculation", "id": intersection_data_id} - ) - calculation_element = dash_duo.driver.find_elements_by_css_selector( - f"#\\{uuid} > label > input" - ) - assert len(calculation_element) == len( - ["Min", "Max", "Mean", "Realizations", "Uncertainty envelope"] - ) - for checkbox, selected in zip( - calculation_element, - ["true", "true", "true", None, None], - ): - assert checkbox.get_attribute("selected") == selected - - # Check realizations - real_filter_btn_uuid = stringify_object_id( - { - "id": dialog_id, - "dialog_id": "realization-filter", - "element": "button-open", - } - ) - real_uuid = stringify_object_id( - uuid={"element": "realizations", "id": intersection_data_id} - ) - - ### Open realization filter and check realizations - dash_duo.wait_for_element_by_id(real_filter_btn_uuid).click() - real_selector = dash_duo.wait_for_element_by_id(real_uuid) - assert real_selector.text.splitlines() == ["0", "1"] - - assert not dash_duo.get_logs(), "browser console should contain no error" - - -def test_full_configuration(dash_duo, app, testdata_folder) -> None: - webviz_settings = WebvizSettings( - shared_settings={ - "scratch_ensembles": { - "iter-0": str(testdata_folder / "01_drogon_ahm/realization-*/iter-0"), - } - }, - theme=default_theme, - ) - plugin = StructuralUncertainty( - app, - webviz_settings, - ensembles=["iter-0"], - surface_attributes=["ds_extract_postprocess"], - surface_name_filter=["topvolon", "toptherys", "topvolantis", "basevolantis"], - wellfolder=testdata_folder / 
"observed_data" / "wells", - wellsuffix=".rmswell", - zonelog="Zone", - initial_settings={ - "intersection_data": { - "surface_names": ["topvolon", "toptherys", "topvolantis"], - "surface_attribute": "ds_extract_postprocess", - "ensembles": [ - "iter-0", - ], - "calculation": ["Mean", "Min", "Max"], - # - Uncertainty envelope - "well": "55_33-1", - "realizations": [0, 1], - "colors": { - "topvolon": {"iter-0": "#2C82C9"}, - "toptherys": { - "iter-0": "#512E34", - }, - "topvolantis": { - "iter-0": "#EEE657", - }, - }, - }, - "intersection_layout": { - "yaxis": { - "range": [1700, 1550], - "title": "True vertical depth [m]", - }, - "xaxis": {"title": "Lateral distance [m]"}, - }, - }, - ) - - app.layout = plugin.layout - - # Injecting a div that will be updated when the plot data stores are - # changed. Since the plot data are stored in LocalStorage and Selenium - # has no functionality to wait for LocalStorage to equal some value we - # instead populate this injected div with some data before we check the content - # of Localstorage. - @app.callback( - Output(plugin.uuid("layout"), "children"), - Input(plugin.uuid("intersection-graph-layout"), "data"), - State(plugin.uuid("layout"), "children"), - ) - def _add_or_update_div(data, children): - plot_is_updated = html.Div( - id=plugin.uuid("plot_is_updated"), children=data.get("title") - ) - if len(children) == 6: - children[5] = plot_is_updated - else: - children.append(plot_is_updated) - - return children - - dash_duo.start_server(app) - - intersection_data_id = plugin.uuid("intersection-data") - - # Check some initialization - # Check dropdowns - for element, return_val in zip( - ["well", "surface_attribute"], ["55_33-1", "ds_extract_postprocess"] - ): - uuid = stringify_object_id( - uuid={"element": element, "id": intersection_data_id} - ) - assert dash_duo.wait_for_text_to_equal(f"#\\{uuid} .Select-value", return_val) - - # Wait for the callbacks to execute - dash_duo.wait_for_text_to_equal( - f'#{plugin.uuid("plot_is_updated")}', - "Intersection along well: 55_33-1", - timeout=30, - ) - - # Check that graph data is stored - graph_data = dash_duo.get_session_storage(plugin.uuid("intersection-graph-data")) - assert len(graph_data) == 14 - graph_layout = dash_duo.get_session_storage( - plugin.uuid("intersection-graph-layout") - ) - assert isinstance(graph_layout, dict) - assert graph_layout.get("title") == "Intersection along well: 55_33-1" - - ### Change well and check graph - well_uuid = stringify_object_id( - uuid={"element": "well", "id": intersection_data_id} - ) - - apply_btn = dash_duo.wait_for_element_by_id( - plugin.uuid("apply-intersection-data-selections") - ) - well_dropdown = dash_duo.wait_for_element_by_id(well_uuid) - dash_duo.select_dcc_dropdown(well_dropdown, value="55_33-2") - apply_btn.click() - - # dash_duo.wait_for_text_to_equal( - # f'#{plugin.uuid("plot_is_updated")}', - # "Intersection along well: 55_33-1", - # timeout=100, - # ) - graph_layout = dash_duo.get_session_storage( - plugin.uuid("intersection-graph-layout") - ) - # assert graph_layout.get("title") == "Intersection along well: 55_33-2" diff --git a/tests/integration_tests/plugin_tests/test_surface_viewer_fmu.py b/tests/integration_tests/plugin_tests/test_surface_viewer_fmu.py deleted file mode 100644 index 1876fe95f..000000000 --- a/tests/integration_tests/plugin_tests/test_surface_viewer_fmu.py +++ /dev/null @@ -1,21 +0,0 @@ -# pylint: disable=no-name-in-module -from webviz_config.plugins import SurfaceViewerFMU - - -def test_surface_viewer_fmu(dash_duo, 
app, shared_settings, testdata_folder) -> None: - plugin = SurfaceViewerFMU( - app, - shared_settings["HM_SETTINGS"], - ensembles=shared_settings["HM_ENSEMBLES"], - wellsuffix=".rmswell", - wellfolder=testdata_folder - / "01_drogon_ahm" - / "realization-0" - / "iter-0" - / "share" - / "results" - / "wells", - ) - app.layout = plugin.layout - dash_duo.start_server(app) - assert not dash_duo.get_logs() diff --git a/tests/integration_tests/plugin_tests/test_surface_with_grid_crossection.py b/tests/integration_tests/plugin_tests/test_surface_with_grid_crossection.py deleted file mode 100644 index efe038bc3..000000000 --- a/tests/integration_tests/plugin_tests/test_surface_with_grid_crossection.py +++ /dev/null @@ -1,45 +0,0 @@ -# pylint: disable=no-name-in-module -from webviz_config.plugins import SurfaceWithGridCrossSection - - -def test_surface_with_grid_crosssection( - dash_duo, app, shared_settings, testdata_folder -) -> None: - plugin = SurfaceWithGridCrossSection( - app, - shared_settings["HM_SETTINGS"], - gridfile=( - testdata_folder - / "01_drogon_ahm" - / "realization-0" - / "iter-0" - / "share" - / "results" - / "grids" - / "geogrid.roff" - ), - gridparameterfiles=[ - testdata_folder - / "01_drogon_ahm" - / "realization-0" - / "iter-0" - / "share" - / "results" - / "grids" - / "geogrid--phit.roff" - ], - surfacefiles=[ - testdata_folder - / "01_drogon_ahm" - / "realization-0" - / "iter-0" - / "share" - / "results" - / "maps" - / "topvolon--ds_extract_geogrid.gri" - ], - surfacenames=["Top Volon"], - ) - app.layout = plugin.layout - dash_duo.start_server(app) - assert not dash_duo.get_logs() diff --git a/tests/integration_tests/plugin_tests/test_surface_with_seismic_crossection.py b/tests/integration_tests/plugin_tests/test_surface_with_seismic_crossection.py deleted file mode 100644 index befd67368..000000000 --- a/tests/integration_tests/plugin_tests/test_surface_with_seismic_crossection.py +++ /dev/null @@ -1,35 +0,0 @@ -# pylint: disable=no-name-in-module -from webviz_config.plugins import SurfaceWithSeismicCrossSection - - -def test_surface_with_seismic_crosssection( - dash_duo, app, shared_settings, testdata_folder -) -> None: - plugin = SurfaceWithSeismicCrossSection( - app, - shared_settings["HM_SETTINGS"], - segyfiles=[ - testdata_folder - / "01_drogon_ahm" - / "realization-0" - / "iter-0" - / "share" - / "results" - / "seismic" - / "seismic--amplitude_depth--20180701_20180101.segy" - ], - surfacefiles=[ - testdata_folder - / "01_drogon_ahm" - / "realization-0" - / "iter-0" - / "share" - / "results" - / "maps" - / "topvolon--ds_extract_geogrid.gri" - ], - surfacenames=["Top Volon"], - ) - app.layout = plugin.layout - dash_duo.start_server(app) - assert not dash_duo.get_logs() diff --git a/tests/integration_tests/plugin_tests/test_tornado_plotter_fmu.py b/tests/integration_tests/plugin_tests/test_tornado_plotter_fmu.py deleted file mode 100644 index d7c0ff84b..000000000 --- a/tests/integration_tests/plugin_tests/test_tornado_plotter_fmu.py +++ /dev/null @@ -1,14 +0,0 @@ -# pylint: disable=no-name-in-module -from webviz_config.plugins import TornadoPlotterFMU - - -def test_tornado_plotter_fmu(dash_duo, app, shared_settings) -> None: - plugin = TornadoPlotterFMU( - shared_settings["SENS_SETTINGS"], - ensemble=shared_settings["SENS_ENSEMBLES"][0], - csvfile="share/results/volumes/geogrid--vol.csv", - multi_value_selectors=["REGION", "ZONE"], - ) - app.layout = plugin.layout - dash_duo.start_server(app) - assert not dash_duo.get_logs() diff --git 
a/tests/integration_tests/plugin_tests/test_vfp_analysis.py b/tests/integration_tests/plugin_tests/test_vfp_analysis.py deleted file mode 100644 index b1f2377a8..000000000 --- a/tests/integration_tests/plugin_tests/test_vfp_analysis.py +++ /dev/null @@ -1,13 +0,0 @@ -# pylint: disable=no-name-in-module -from webviz_config.plugins import VfpAnalysis -from webviz_config.testing import WebvizComposite - - -def test_vfp_analysis(_webviz_duo: WebvizComposite, shared_settings: dict) -> None: - plugin = VfpAnalysis( - shared_settings["HM_SETTINGS"], vfp_file_pattern="tests/data/vfp.arrow" - ) - - _webviz_duo.start_server(plugin) - - assert not _webviz_duo.get_logs() diff --git a/tests/integration_tests/plugin_tests/test_volumetric_analysis.py b/tests/integration_tests/plugin_tests/test_volumetric_analysis.py deleted file mode 100644 index 6846bc6d6..000000000 --- a/tests/integration_tests/plugin_tests/test_volumetric_analysis.py +++ /dev/null @@ -1,38 +0,0 @@ -import warnings - -# pylint: disable=no-name-in-module -from webviz_config.plugins import VolumetricAnalysis - - -def test_volumetrics_no_sens(dash_duo, app, shared_settings) -> None: - plugin = VolumetricAnalysis( - shared_settings["HM_SETTINGS"], - ensembles=shared_settings["HM_ENSEMBLES"], - volfiles={"geogrid": "geogrid--vol.csv", "simgrid": "simgrid--vol.csv"}, - ) - app.layout = plugin.layout - dash_duo.start_server(app) - logs = [] - for log in dash_duo.get_logs() or []: - if "dash_renderer" in log.get("message"): - warnings.warn(log.get("message")) - else: - logs.append(log) - assert not logs - - -def test_volumetrics_sens(dash_duo, app, shared_settings) -> None: - plugin = VolumetricAnalysis( - shared_settings["SENS_SETTINGS"], - ensembles=shared_settings["SENS_ENSEMBLES"], - volfiles={"geogrid": "geogrid--vol.csv", "simgrid": "simgrid--vol.csv"}, - ) - app.layout = plugin.layout - dash_duo.start_server(app) - logs = [] - for log in dash_duo.get_logs() or []: - if "dash_renderer" in log.get("message"): - warnings.warn(log.get("message")) - else: - logs.append(log) - assert not logs diff --git a/tests/integration_tests/plugin_tests/test_well_log_viewer.py b/tests/integration_tests/plugin_tests/test_well_log_viewer.py deleted file mode 100644 index 74c510770..000000000 --- a/tests/integration_tests/plugin_tests/test_well_log_viewer.py +++ /dev/null @@ -1,16 +0,0 @@ -# pylint: disable=no-name-in-module -from webviz_config.plugins import WellLogViewer - - -def test_well_log_viewer(dash_duo, app, testdata_folder) -> None: - wellfolder = testdata_folder / "observed_data" / "wells/" - plugin = WellLogViewer( - app, - wellfolder=wellfolder, - wellsuffix=".rmswell", - mdlog="MDepth", - logtemplates=[f"{testdata_folder}/webviz_examples/all_logs_template.yml"], - ) - app.layout = plugin.layout - dash_duo.start_server(app) - assert not dash_duo.get_logs() diff --git a/webviz_subsurface/_providers/ensemble_grid_provider/_xtgeo_to_vtk_explicit_structured_grid.py b/webviz_subsurface/_providers/ensemble_grid_provider/_xtgeo_to_vtk_explicit_structured_grid.py index b723de6d3..1e9144580 100644 --- a/webviz_subsurface/_providers/ensemble_grid_provider/_xtgeo_to_vtk_explicit_structured_grid.py +++ b/webviz_subsurface/_providers/ensemble_grid_provider/_xtgeo_to_vtk_explicit_structured_grid.py @@ -75,7 +75,7 @@ def _create_vtk_esgrid_from_verts_and_conn( vtk_cell_array.SetData(8, conn_idarr) vtk_esgrid = vtkExplicitStructuredGrid() - vtk_esgrid.SetDimensions(point_dims.tolist()) + vtk_esgrid.SetDimensions(point_dims.tolist()) # type: ignore 
 vtk_esgrid.SetPoints(vtk_points)
 vtk_esgrid.SetCells(vtk_cell_array)
diff --git a/webviz_subsurface/_providers/ensemble_grid_provider/grid_viz_service.py b/webviz_subsurface/_providers/ensemble_grid_provider/grid_viz_service.py
index 13b750649..a7cda5172 100644
--- a/webviz_subsurface/_providers/ensemble_grid_provider/grid_viz_service.py
+++ b/webviz_subsurface/_providers/ensemble_grid_provider/grid_viz_service.py
@@ -365,15 +365,15 @@ def cut_along_polyline(
     plane = vtkPlane()
     plane.SetOrigin([x_0, y_0, 0])
-    plane.SetNormal(right_vec.tolist())
+    plane.SetNormal(right_vec.tolist())  # type: ignore

     plane_0 = vtkPlane()
     plane_0.SetOrigin([x_0, y_0, 0])
-    plane_0.SetNormal(fwd_vec.tolist())
+    plane_0.SetNormal(fwd_vec.tolist())  # type: ignore

     plane_1 = vtkPlane()
     plane_1.SetOrigin([x_1, y_1, 0])
-    plane_1.SetNormal((-fwd_vec).tolist())
+    plane_1.SetNormal((-fwd_vec).tolist())  # type: ignore

     cutter_alg.SetPlane(plane)
     cutter_alg.Update()
diff --git a/webviz_subsurface/_providers/ensemble_summary_provider/_provider_impl_arrow_lazy.py b/webviz_subsurface/_providers/ensemble_summary_provider/_provider_impl_arrow_lazy.py
index 377a671f3..2f4f74fce 100644
--- a/webviz_subsurface/_providers/ensemble_summary_provider/_provider_impl_arrow_lazy.py
+++ b/webviz_subsurface/_providers/ensemble_summary_provider/_provider_impl_arrow_lazy.py
@@ -313,7 +313,7 @@ def dates(
             f"find_unique={et_find_unique_ms}ms)"
         )

-        return intersected_dates.astype(datetime.datetime).tolist()
+        return intersected_dates.astype(datetime.datetime).tolist()  # type: ignore

    def get_vectors_df(
        self,
@@ -377,7 +377,7 @@ def get_vectors_for_date_df(
         table = table.filter(real_mask)
         et_filter_ms = timer.lap_ms()

-        np_lookup_date = np.datetime64(date, "ms")
+        np_lookup_date = np.datetime64(date).astype("M8[ms]")
         table = sample_segmented_multi_real_table_at_date(table, np_lookup_date)
         et_resample_ms = timer.lap_ms()
diff --git a/webviz_subsurface/_providers/ensemble_summary_provider/_provider_impl_arrow_presampled.py b/webviz_subsurface/_providers/ensemble_summary_provider/_provider_impl_arrow_presampled.py
index 1caf0211d..0177b673c 100644
--- a/webviz_subsurface/_providers/ensemble_summary_provider/_provider_impl_arrow_presampled.py
+++ b/webviz_subsurface/_providers/ensemble_summary_provider/_provider_impl_arrow_presampled.py
@@ -374,7 +374,7 @@ def dates(
             f"find_unique={et_find_unique_ms}ms)"
         )

-        return intersected_dates.astype(datetime.datetime).tolist()
+        return intersected_dates.astype(datetime.datetime).tolist()  # type: ignore

    def get_vectors_df(
        self,
diff --git a/webviz_subsurface/_providers/ensemble_summary_provider/_resampling.py b/webviz_subsurface/_providers/ensemble_summary_provider/_resampling.py
index d06cbdf7f..364fa6a07 100644
--- a/webviz_subsurface/_providers/ensemble_summary_provider/_resampling.py
+++ b/webviz_subsurface/_providers/ensemble_summary_provider/_resampling.py
@@ -17,7 +17,7 @@ def _truncate_day_to_monday(datetime_day: np.datetime64) -> np.datetime64:


 def _quarter_start_month(datetime_day: np.datetime64) -> np.datetime64:
     # A bit hackish, utilizes the fact that datetime64 is relative to epoch
     # 1970-01-01 which is the first day in Q1.
-    datetime_month = np.datetime64(datetime_day, "M")
+    datetime_month = datetime_day.astype("M8[M]")
     return datetime_month - (datetime_month.astype(int) % 3)
@@ -30,44 +30,52 @@ def generate_normalized_sample_dates(
     """
     if freq == Frequency.DAILY:
-        start = np.datetime64(min_date, "D")
-        stop = np.datetime64(max_date, "D")
+        start = min_date.astype("M8[D]")
+        stop = max_date.astype("M8[D]")
         if stop < max_date:
-            stop += 1
-        sampledates = np.arange(start, stop + 1)
+            stop += np.timedelta64(1, "D")
+        sampledates = np.arange(start, stop + np.timedelta64(1, "D"))
+
     elif freq == Frequency.WEEKLY:
-        start = _truncate_day_to_monday(np.datetime64(min_date, "D"))
-        stop = _truncate_day_to_monday(np.datetime64(max_date, "D"))
+        start = _truncate_day_to_monday(min_date.astype("M8[D]"))
+        stop = _truncate_day_to_monday(max_date.astype("M8[D]"))
         if start > min_date:
-            start -= 7
+            start -= np.timedelta64(7, "D")
         if stop < max_date:
-            stop += 7
-        sampledates = np.arange(start, stop + 1, 7)
+            stop += np.timedelta64(7, "D")
+        sampledates = np.arange(
+            start, stop + np.timedelta64(1, "D"), np.timedelta64(7, "D")
+        )
+
     elif freq == Frequency.MONTHLY:
-        start = np.datetime64(min_date, "M")
-        stop = np.datetime64(max_date, "M")
+        start = min_date.astype("M8[M]")
+        stop = max_date.astype("M8[M]")
         if stop < max_date:
-            stop += 1
-        sampledates = np.arange(start, stop + 1)
+            stop += np.timedelta64(1, "M")
+        sampledates = np.arange(start, stop + np.timedelta64(1, "M"))
+
     elif freq == Frequency.QUARTERLY:
         start = _quarter_start_month(min_date)
         stop = _quarter_start_month(max_date)
         if stop < max_date:
-            stop += 3
-        sampledates = np.arange(start, stop + 1, 3)
+            stop += np.timedelta64(3, "M")
+        sampledates = np.arange(
+            start, stop + np.timedelta64(1, "M"), np.timedelta64(3, "M")
+        )
+
     elif freq == Frequency.YEARLY:
-        start = np.datetime64(min_date, "Y")
-        stop = np.datetime64(max_date, "Y")
+        start = min_date.astype("M8[Y]")
+        stop = max_date.astype("M8[Y]")
         if stop < max_date:
-            stop += 1
-        sampledates = np.arange(start, stop + 1)
+            stop += np.timedelta64(1, "Y")
+        sampledates = np.arange(start, stop + np.timedelta64(1, "Y"))
+
     else:
         raise NotImplementedError(
             f"Currently not supporting resampling to frequency {freq}."
) - sampledates = sampledates.astype("datetime64[ms]") - + sampledates = sampledates.astype("M8[ms]") return sampledates diff --git a/webviz_subsurface/_providers/ensemble_summary_provider/dev_resampling_perf_testing.py b/webviz_subsurface/_providers/ensemble_summary_provider/dev_resampling_perf_testing.py deleted file mode 100644 index cdb8b5b8c..000000000 --- a/webviz_subsurface/_providers/ensemble_summary_provider/dev_resampling_perf_testing.py +++ /dev/null @@ -1,112 +0,0 @@ -import logging -import time - -import numpy as np -import pyarrow as pa - -from webviz_subsurface._providers.ensemble_summary_provider._resampling import ( - sample_segmented_multi_real_table_at_date, -) - - -def _create_table( - num_reals: int, start_date: np.datetime64, end_date: np.datetime64, num_columns: int -) -> pa.Table: - date_arr_np = np.empty(0, np.datetime64) - real_arr_np = np.empty(0, np.int32) - - for real in range(0, num_reals): - dates_for_this_real = np.arange(start_date, end_date + 1) - dates_for_this_real = dates_for_this_real.astype("datetime64[ms]") - real_arr_np = np.concatenate( - (real_arr_np, np.full(len(dates_for_this_real), real)) - ) - date_arr_np = np.concatenate((date_arr_np, dates_for_this_real)) - - print( - f"real_arr_np (num unique={len(np.unique(real_arr_np))} len={len(real_arr_np)}):" - ) - print(real_arr_np) - print( - f"date_arr_np (num unique={len(np.unique(date_arr_np))} len={len(date_arr_np)}):" - ) - print(date_arr_np) - - field_list = [] - columndata_list = [] - field_list.append(pa.field("DATE", pa.timestamp("ms"))) - field_list.append(pa.field("REAL", pa.int64())) - columndata_list.append(pa.array(date_arr_np)) - columndata_list.append(pa.array(real_arr_np)) - - num_rows = len(real_arr_np) - - for colnum in range(0, num_columns): - if (colnum % 2) == 0: - metadata = {b"is_rate": b'{"is_rate": False}'} - else: - metadata = {b"is_rate": b'{"is_rate": True}'} - - field_list.append(pa.field(f"c_{colnum}", pa.float32(), metadata=metadata)) - - valarr = np.linspace(colnum, colnum + num_rows, num_rows) - columndata_list.append(pa.array(valarr)) - - schema = pa.schema(field_list) - return pa.table(columndata_list, schema=schema) - - -def main() -> None: - print() - print("## Running resampling performance tests") - print("## =================================================") - - logging.basicConfig( - level=logging.WARNING, - format="%(asctime)s %(levelname)-3s [%(name)s]: %(message)s", - ) - logging.getLogger("webviz_subsurface").setLevel(level=logging.INFO) - logging.getLogger("webviz_subsurface").setLevel(level=logging.DEBUG) - - # table = _create_table( - # num_reals=3, - # start_date=np.datetime64("2020-12-30"), - # end_date=np.datetime64("2021-01-05"), - # num_columns=4, - # ) - - table = _create_table( - num_reals=100, - start_date=np.datetime64("2000-01-01", "M"), - end_date=np.datetime64("2099-12-31", "M"), - num_columns=10000, - ) - - print("## table shape (rows,columns):", table.shape) - # print(table.to_pandas()) - - start_tim = time.perf_counter() - - res = sample_segmented_multi_real_table_at_date( - table, np.datetime64("2098-01-03", "ms") - ) - - # res = sample_segmented_multi_real_table_at_date( - # table, np.datetime64("2098-01-01", "ms") - # ) - - elapsed_time_ms = int(1000 * (time.perf_counter() - start_tim)) - - # print(res) - # print(res.to_pandas()) - - print("## res shape:", res.shape) - - print(f"## sample at date took: {elapsed_time_ms}ms") - - -# Running: -# python -m webviz_subsurface._providers.ensemble_summary_provider.dev_resampling_perf_testing 
-# -------------------------------------------------------------------------
-if __name__ == "__main__":
-    main()
diff --git a/webviz_subsurface/_providers/ensemble_surface_provider/_surface_to_float32_array.py b/webviz_subsurface/_providers/ensemble_surface_provider/_surface_to_float32_array.py
index eb90b8b49..9c0631faf 100644
--- a/webviz_subsurface/_providers/ensemble_surface_provider/_surface_to_float32_array.py
+++ b/webviz_subsurface/_providers/ensemble_surface_provider/_surface_to_float32_array.py
@@ -6,7 +6,7 @@ def surface_to_float32_array(surface: xtgeo.RegularSurface) -> io.BytesIO:
     values = surface.values.astype(np.float32)
-    values.fill_value = np.NaN
+    values.fill_value = np.nan
     values = np.ma.filled(values)

     # Rotate 90 deg left.
diff --git a/webviz_subsurface/plugins/_bhp_qc/views/_view_functions.py b/webviz_subsurface/plugins/_bhp_qc/views/_view_functions.py
index b29a6c1fc..214d29c6b 100644
--- a/webviz_subsurface/plugins/_bhp_qc/views/_view_functions.py
+++ b/webviz_subsurface/plugins/_bhp_qc/views/_view_functions.py
@@ -20,7 +20,7 @@ def filter_df(df: pd.DataFrame, ensemble: str, wells: List[str]) -> pd.DataFrame
     in statistics.
     """
     columns = ["ENSEMBLE"] + [f"WBHP:{well}" for well in wells]
-    return df.loc[df["ENSEMBLE"] == ensemble][columns].replace(0, np.NaN)
+    return df.loc[df["ENSEMBLE"] == ensemble][columns].replace(0, np.nan)


 def calc_statistics(df: pd.DataFrame) -> pd.DataFrame:
diff --git a/webviz_subsurface/plugins/_co2_leakage/_utilities/plume_extent.py b/webviz_subsurface/plugins/_co2_leakage/_utilities/plume_extent.py
index 7fa00d276..fb9e8cadd 100644
--- a/webviz_subsurface/plugins/_co2_leakage/_utilities/plume_extent.py
+++ b/webviz_subsurface/plugins/_co2_leakage/_utilities/plume_extent.py
@@ -105,4 +105,4 @@ def _find_contours(
 def _simplify(poly: np.ndarray, simplify_dist: float) -> List[List[float]]:
     simplified = shapely.geometry.LineString(poly).simplify(simplify_dist)
-    return np.array(simplified.coords).tolist()
+    return np.array(simplified.coords).tolist()  # type: ignore
diff --git a/webviz_subsurface/plugins/_co2_leakage/views/mainview/mainview.py b/webviz_subsurface/plugins/_co2_leakage/views/mainview/mainview.py
index 8f1b53977..c730dd9a3 100644
--- a/webviz_subsurface/plugins/_co2_leakage/views/mainview/mainview.py
+++ b/webviz_subsurface/plugins/_co2_leakage/views/mainview/mainview.py
@@ -6,7 +6,7 @@
 from dash.development.base_component import Component
 from webviz_config.utils import StrEnum
 from webviz_config.webviz_plugin_subclasses import ViewABC, ViewElementABC
-from webviz_subsurface_components import DashSubsurfaceViewer
+from webviz_subsurface_components import SubsurfaceViewer


 class MainView(ViewABC):
@@ -48,7 +48,7 @@ def inner_layout(self) -> Component:
             children=[
                 html.Div(
                     [
-                        DashSubsurfaceViewer(
+                        SubsurfaceViewer(
                            id=self.register_component_unique_id(
                                self.Ids.DECKGL_MAP
                            ),
diff --git a/webviz_subsurface/plugins/_grid_viewer_fmu/views/view_3d/view_elements/_vtk_view_3d_element.py b/webviz_subsurface/plugins/_grid_viewer_fmu/views/view_3d/view_elements/_vtk_view_3d_element.py
index 4fecd762b..435fe3473 100644
--- a/webviz_subsurface/plugins/_grid_viewer_fmu/views/view_3d/view_elements/_vtk_view_3d_element.py
+++ b/webviz_subsurface/plugins/_grid_viewer_fmu/views/view_3d/view_elements/_vtk_view_3d_element.py
@@ -2,7 +2,7 @@
 from dash.development.base_component import Component
 from webviz_config.utils import StrEnum
 from webviz_config.webviz_plugin_subclasses import ViewElementABC
-from webviz_subsurface_components import DashSubsurfaceViewer
+from webviz_subsurface_components import SubsurfaceViewer


 class VTKView3D(ViewElementABC):
@@ -45,7 +45,7 @@ def inner_layout(self) -> Component:
         html.Div(
             style={"position": "absolute", "width": "100%", "height": "90%"},
             children=[
-                DashSubsurfaceViewer(
+                SubsurfaceViewer(
                    id=self.register_component_unique_id(VTKView3D.Ids.VIEW),
                    layers=[
                        {
diff --git a/webviz_subsurface/plugins/_history_match.py b/webviz_subsurface/plugins/_history_match.py
index 43b5b2c34..8c1f9d5c2 100644
--- a/webviz_subsurface/plugins/_history_match.py
+++ b/webviz_subsurface/plugins/_history_match.py
@@ -155,7 +155,7 @@ def _get_sorted_edges(number_observation_groups: int) -> Dict[str, list]:
             np.random.chisquare(df=1, size=number_observation_groups)
         )

-        sorted_values = np.flip(sorted_values, 0)
+        sorted_values = np.flip(sorted_values, 0)  # type: ignore

         p10 = np.percentile(sorted_values, 90, axis=1)
         p90 = np.percentile(sorted_values, 10, axis=1)
diff --git a/webviz_subsurface/plugins/_map_viewer_fmu/layout.py b/webviz_subsurface/plugins/_map_viewer_fmu/layout.py
index 1203324bf..ca90c6450 100644
--- a/webviz_subsurface/plugins/_map_viewer_fmu/layout.py
+++ b/webviz_subsurface/plugins/_map_viewer_fmu/layout.py
@@ -3,7 +3,7 @@
 import webviz_core_components as wcc
 from dash import dcc, html
-from webviz_subsurface_components import DashSubsurfaceViewer  # type: ignore
+from webviz_subsurface_components import SubsurfaceViewer  # type: ignore

 from webviz_subsurface._utils.enum_shim import StrEnum
@@ -271,7 +271,7 @@ def __init__(
     ) -> None:
         super().__init__(
             children=html.Div(
-                DashSubsurfaceViewer(
+                SubsurfaceViewer(
                    id={"id": get_uuid(LayoutElements.DECKGLMAP), "tab": tab},
                    layers=update_map_layers(1, render_surfaces_as_images),
                    colorTables=color_tables,
diff --git a/webviz_subsurface/plugins/_running_time_analysis_fmu.py b/webviz_subsurface/plugins/_running_time_analysis_fmu.py
index 644a225b5..06457a79b 100644
--- a/webviz_subsurface/plugins/_running_time_analysis_fmu.py
+++ b/webviz_subsurface/plugins/_running_time_analysis_fmu.py
@@ -557,9 +557,9 @@ def ensemble_post_processing() -> list:
         if len(set(range(min(reals), max(reals) + 1))) > len(set(reals)):
             missing_df = ens_dfs[0].copy()
             missing_df["STATUS"] = "Realization not started"
-            missing_df["RUNTIME"] = np.NaN
-            missing_df["JOB_SCALED_RUNTIME"] = np.NaN
-            missing_df["ENS_SCALED_RUNTIME"] = np.NaN
+            missing_df["RUNTIME"] = np.nan
+            missing_df["JOB_SCALED_RUNTIME"] = np.nan
+            missing_df["ENS_SCALED_RUNTIME"] = np.nan
             for missing_real in set(range(min(reals), max(reals) + 1)).difference(
                 set(reals)
             ):
diff --git a/webviz_subsurface/plugins/_well_log_viewer/well_log_viewer.py b/webviz_subsurface/plugins/_well_log_viewer/well_log_viewer.py
index 619702d14..c45117959 100644
--- a/webviz_subsurface/plugins/_well_log_viewer/well_log_viewer.py
+++ b/webviz_subsurface/plugins/_well_log_viewer/well_log_viewer.py
@@ -171,7 +171,7 @@ def layout(self) -> html.Div:
                         self.initial_well_name
                     )
                 ),
-                colorTables=self.colortables,
+                colorMapFunctions=self.colortables,
                 axisMnemos={"MD": ["MD"], "TVD": ["TVD"]},
             ),
         )
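
Note (illustration, not part of the patch): the `_resampling.py` and summary-provider hunks above replace `np.datetime64(value, unit)` constructor casts and bare-integer date arithmetic with explicit `.astype("M8[unit]")` casts and `np.timedelta64` offsets, apparently to stay compatible with newer NumPy and stricter type checking. A minimal, self-contained sketch of that pattern, mirroring the monthly branch of `generate_normalized_sample_dates` with made-up dates and assuming only NumPy:

```python
import numpy as np

# Hypothetical input range; in the plugin these come from the summary data.
min_date = np.datetime64("2020-01-15", "ms")
max_date = np.datetime64("2020-07-03", "ms")

# Truncate to month resolution with an explicit astype cast instead of
# np.datetime64(value, "M"), and step with np.timedelta64 instead of bare ints.
start = min_date.astype("M8[M]")
stop = max_date.astype("M8[M]")
if stop < max_date:  # make sure the last sample covers max_date
    stop += np.timedelta64(1, "M")

# Inclusive range of month starts, then normalize back to millisecond resolution.
sampledates = np.arange(start, stop + np.timedelta64(1, "M")).astype("M8[ms]")
print(sampledates)  # 2020-01-01 ... 2020-08-01, one sample per month
```

The same idea drives the provider hunks: `np.datetime64(date).astype("M8[ms]")` coerces a Python `datetime` to a millisecond-resolution `datetime64` without passing the unit to the constructor.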