From b62837b66c65002c161ae3a17be53ecf57ce7473 Mon Sep 17 00:00:00 2001 From: Charles Turner Date: Fri, 21 Feb 2025 11:12:47 +0800 Subject: [PATCH] Migrate alongslope-velocities to intake --- Recipes/Along-slope-velocities.ipynb | 5214 ++++++++++++++++++++++++-- 1 file changed, 4993 insertions(+), 221 deletions(-) diff --git a/Recipes/Along-slope-velocities.ipynb b/Recipes/Along-slope-velocities.ipynb index bf701001..74df7d88 100644 --- a/Recipes/Along-slope-velocities.ipynb +++ b/Recipes/Along-slope-velocities.ipynb @@ -23,13 +23,13 @@ "outputs": [], "source": [ "from pathlib import Path\n", - "import cosima_cookbook as cc\n", "from dask.distributed import Client\n", "import numpy as np\n", "import xarray as xr\n", "\n", "import xgcm\n", "import cf_xarray\n", + "import intake\n", "\n", "# For plotting\n", "import cartopy.crs as ccrs\n", @@ -74,7 +74,7 @@ "
\n", "
\n", "

Client

\n", - "

Client-4c6b5d42-dda9-11ef-b88e-00000189fe80

\n", + "

Client-4383b9ce-f001-11ef-8c66-000007d2fe80

\n", " \n", "\n", " \n", @@ -109,22 +109,22 @@ " \n", "
\n", "

LocalCluster

\n", - "

0fc81417

\n", + "

12668b1f

\n", "
\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", @@ -146,14 +146,14 @@ "
\n", "
\n", "

Scheduler

\n", - "

Scheduler-60cbd07e-7d4f-4912-887a-85f679c5b1e2

\n", + "

Scheduler-487634c1-51a3-418d-93d5-85d813a93368

\n", "
\n", " Dashboard: /proxy/8787/status\n", " \n", - " Workers: 8\n", + " Workers: 7\n", "
\n", - " Total threads: 48\n", + " Total threads: 7\n", " \n", - " Total memory: 188.56 GiB\n", + " Total memory: 32.00 GiB\n", "
\n", " \n", " \n", " \n", " \n", " \n", @@ -161,7 +161,7 @@ " Dashboard:/proxy/8787/status\n", " \n", " \n", " \n", " \n", @@ -169,7 +169,7 @@ " Started: Just now\n", " \n", " \n", " \n", "
\n", - " Comm: tcp://127.0.0.1:45875\n", + " Comm: tcp://127.0.0.1:33165\n", " \n", - " Workers: 8\n", + " Workers: 7\n", "
\n", - " Total threads: 48\n", + " Total threads: 7\n", "
\n", - " Total memory: 188.56 GiB\n", + " Total memory: 32.00 GiB\n", "
\n", @@ -192,29 +192,29 @@ " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "\n", @@ -237,29 +237,29 @@ "
\n", - " Comm: tcp://127.0.0.1:40735\n", + " Comm: tcp://127.0.0.1:43695\n", " \n", - " Total threads: 6\n", + " Total threads: 1\n", "
\n", - " Dashboard: /proxy/42553/status\n", + " Dashboard: /proxy/34195/status\n", " \n", - " Memory: 23.57 GiB\n", + " Memory: 4.57 GiB\n", "
\n", - " Nanny: tcp://127.0.0.1:39029\n", + " Nanny: tcp://127.0.0.1:42257\n", "
\n", - " Local directory: /jobfs/133084514.gadi-pbs/dask-scratch-space/worker-3s4y4i8m\n", + " Local directory: /jobfs/135697543.gadi-pbs/dask-scratch-space/worker-m5k6q9ki\n", "
\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "\n", @@ -282,29 +282,29 @@ "
\n", - " Comm: tcp://127.0.0.1:37017\n", + " Comm: tcp://127.0.0.1:41769\n", " \n", - " Total threads: 6\n", + " Total threads: 1\n", "
\n", - " Dashboard: /proxy/43205/status\n", + " Dashboard: /proxy/42331/status\n", " \n", - " Memory: 23.57 GiB\n", + " Memory: 4.57 GiB\n", "
\n", - " Nanny: tcp://127.0.0.1:36313\n", + " Nanny: tcp://127.0.0.1:44455\n", "
\n", - " Local directory: /jobfs/133084514.gadi-pbs/dask-scratch-space/worker-twn3vhma\n", + " Local directory: /jobfs/135697543.gadi-pbs/dask-scratch-space/worker-8jf7kjey\n", "
\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "\n", @@ -327,29 +327,29 @@ "
\n", - " Comm: tcp://127.0.0.1:34721\n", + " Comm: tcp://127.0.0.1:33701\n", " \n", - " Total threads: 6\n", + " Total threads: 1\n", "
\n", - " Dashboard: /proxy/37827/status\n", + " Dashboard: /proxy/38505/status\n", " \n", - " Memory: 23.57 GiB\n", + " Memory: 4.57 GiB\n", "
\n", - " Nanny: tcp://127.0.0.1:39777\n", + " Nanny: tcp://127.0.0.1:33581\n", "
\n", - " Local directory: /jobfs/133084514.gadi-pbs/dask-scratch-space/worker-1vvih5w0\n", + " Local directory: /jobfs/135697543.gadi-pbs/dask-scratch-space/worker-al6hevgm\n", "
\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "\n", @@ -372,29 +372,29 @@ "
\n", - " Comm: tcp://127.0.0.1:40071\n", + " Comm: tcp://127.0.0.1:46349\n", " \n", - " Total threads: 6\n", + " Total threads: 1\n", "
\n", - " Dashboard: /proxy/38727/status\n", + " Dashboard: /proxy/42051/status\n", " \n", - " Memory: 23.57 GiB\n", + " Memory: 4.57 GiB\n", "
\n", - " Nanny: tcp://127.0.0.1:37211\n", + " Nanny: tcp://127.0.0.1:44285\n", "
\n", - " Local directory: /jobfs/133084514.gadi-pbs/dask-scratch-space/worker-9ow6ahuw\n", + " Local directory: /jobfs/135697543.gadi-pbs/dask-scratch-space/worker-3jgw5juj\n", "
\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "\n", @@ -417,29 +417,29 @@ "
\n", - " Comm: tcp://127.0.0.1:39403\n", + " Comm: tcp://127.0.0.1:35349\n", " \n", - " Total threads: 6\n", + " Total threads: 1\n", "
\n", - " Dashboard: /proxy/35805/status\n", + " Dashboard: /proxy/34031/status\n", " \n", - " Memory: 23.57 GiB\n", + " Memory: 4.57 GiB\n", "
\n", - " Nanny: tcp://127.0.0.1:46039\n", + " Nanny: tcp://127.0.0.1:43835\n", "
\n", - " Local directory: /jobfs/133084514.gadi-pbs/dask-scratch-space/worker-el4ypv5m\n", + " Local directory: /jobfs/135697543.gadi-pbs/dask-scratch-space/worker-a2m81dcm\n", "
\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "\n", @@ -462,74 +462,29 @@ "
\n", - " Comm: tcp://127.0.0.1:38073\n", + " Comm: tcp://127.0.0.1:46191\n", " \n", - " Total threads: 6\n", + " Total threads: 1\n", "
\n", - " Dashboard: /proxy/46661/status\n", + " Dashboard: /proxy/33841/status\n", " \n", - " Memory: 23.57 GiB\n", + " Memory: 4.57 GiB\n", "
\n", - " Nanny: tcp://127.0.0.1:33041\n", + " Nanny: tcp://127.0.0.1:44437\n", "
\n", - " Local directory: /jobfs/133084514.gadi-pbs/dask-scratch-space/worker-6gkj93b8\n", + " Local directory: /jobfs/135697543.gadi-pbs/dask-scratch-space/worker-jzr9ebfn\n", "
\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", - " \n", - "\n", - " \n", - "\n", - " \n", - "\n", - "
\n", - " Comm: tcp://127.0.0.1:43189\n", + " Comm: tcp://127.0.0.1:34713\n", " \n", - " Total threads: 6\n", + " Total threads: 1\n", "
\n", - " Dashboard: /proxy/42655/status\n", + " Dashboard: /proxy/32935/status\n", " \n", - " Memory: 23.57 GiB\n", + " Memory: 4.57 GiB\n", "
\n", - " Nanny: tcp://127.0.0.1:33499\n", + " Nanny: tcp://127.0.0.1:34837\n", "
\n", - " Local directory: /jobfs/133084514.gadi-pbs/dask-scratch-space/worker-2t64pdwa\n", - "
\n", - " \n", - "
\n", - " \n", - " \n", - "
\n", - "
\n", - "
\n", - "
\n", - " \n", - "

Worker: 7

\n", - "
\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", " \n", "\n", @@ -556,7 +511,7 @@ "" ], "text/plain": [ - "" + "" ] }, "execution_count": 3, @@ -580,24 +535,121 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Nominate a database from which to load the data and define an experiment" + "Load the ACCESS-NRI Intake Catalog & get the datastore for our experiment." ] }, { "cell_type": "code", "execution_count": 4, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "

01deg_jra55v13_ryf9091 catalog with 34 dataset(s) from 11947 asset(s):

\n", + "\n", + "
\n", - " Comm: tcp://127.0.0.1:40457\n", - " \n", - " Total threads: 6\n", - "
\n", - " Dashboard: /proxy/34907/status\n", - " \n", - " Memory: 23.57 GiB\n", - "
\n", - " Nanny: tcp://127.0.0.1:33145\n", - "
\n", - " Local directory: /jobfs/133084514.gadi-pbs/dask-scratch-space/worker-uses5491\n", + " Local directory: /jobfs/135697543.gadi-pbs/dask-scratch-space/worker-m5ojdehu\n", "
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
unique
filename3469
file_id33
path11947
filename_timestamp3372
frequency5
start_date3361
end_date3360
variable205
variable_long_name197
variable_standard_name36
variable_cell_methods3
variable_units52
realm2
derived_variable0
\n", + "
" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], "source": [ - "session = cc.database.create_session()\n", - "experiment = '01deg_jra55v13_ryf9091'" + "catalog = intake.cat.access_nri\n", + "experiment = '01deg_jra55v13_ryf9091'\n", + "\n", + "# We call our datastore esm_ds\n", + "esm_ds = catalog[experiment]\n", + "esm_ds" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Limit to Southern Ocean and single RYF year" + "Limit to Southern Ocean and single RYF year by setting our search parameters" ] }, { @@ -609,7 +661,12 @@ "lat_slice = slice(-80, -59)\n", "\n", "start_time = '2086-01-01'\n", - "end_time = '2086-12-31'" + "end_time = '2086-12-31'\n", + "\n", + "# Since we are only interested in 2086, we can use a wildcard to specify our start date:\n", + "# See https://access-nri-intake-catalog.readthedocs.io/en/latest/usage/quickstart.html#data-discovery for more info\n", + "dates_2086 = '2086.*'\n", + "variable_name = 'hu'" ] }, { @@ -623,197 +680,4886 @@ "cell_type": "code", "execution_count": 6, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/g/data/xp65/public/apps/med_conda/envs/analysis3-25.02/lib/python3.11/site-packages/intake_esm/source.py:280: ConcatenationWarning: Attempting to concatenate datasets without valid dimension coordinates: retaining only first dataset. Request valid dimension coordinate to silence this warning.\n", + " warnings.warn(\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
<xarray.Dataset> Size: 7MB\n",
+       "Dimensions:   (yu_ocean: 484, xu_ocean: 3600)\n",
+       "Coordinates:\n",
+       "  * xu_ocean  (xu_ocean) float64 29kB -279.9 -279.8 -279.7 ... 79.8 79.9 80.0\n",
+       "  * yu_ocean  (yu_ocean) float64 4kB -79.99 -79.95 -79.9 ... -59.06 -59.01\n",
+       "Data variables:\n",
+       "    hu        (yu_ocean, xu_ocean) float32 7MB dask.array<chunksize=(484, 900), meta=np.ndarray>\n",
+       "Attributes: (12/19)\n",
+       "    filename:                                 ocean_grid.nc\n",
+       "    title:                                    ACCESS-OM2-01\n",
+       "    grid_type:                                mosaic\n",
+       "    grid_tile:                                1\n",
+       "    intake_esm_vars:                          ['hu']\n",
+       "    intake_esm_attrs:filename:                ocean_grid.nc\n",
+       "    ...                                       ...\n",
+       "    intake_esm_attrs:variable_standard_name:  ,,,,,,,,,sea_floor_depth_below_...\n",
+       "    intake_esm_attrs:variable_cell_methods:   ,,,,,time: point,time: point,ti...\n",
+       "    intake_esm_attrs:variable_units:          degrees_E,degrees_N,days since ...\n",
+       "    intake_esm_attrs:realm:                   ocean\n",
+       "    intake_esm_attrs:_data_format_:           netcdf\n",
+       "    intake_esm_dataset_key:                   ocean_grid.fx
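Editorial aside (not part of the original notebook): before calling `.to_dask()`, it can help to preview what the wildcard `start_date` search actually matched. The sketch below reuses the `esm_ds`, `variable_name` and `dates_2086` names defined in the cells above and the datastore's underlying pandas table `.df`, which the notebook also uses further down.

```python
# Preview the assets matched by the wildcard search before loading anything.
# Assumes `esm_ds`, `variable_name` and `dates_2086` from the cells above.
matches = esm_ds.search(variable=variable_name, start_date=dates_2086)
print(len(matches.df), "matching assets")
print(matches.df[["file_id", "frequency", "start_date"]].drop_duplicates().head())
```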
" + ], + "text/plain": [ + " Size: 7MB\n", + "Dimensions: (yu_ocean: 484, xu_ocean: 3600)\n", + "Coordinates:\n", + " * xu_ocean (xu_ocean) float64 29kB -279.9 -279.8 -279.7 ... 79.8 79.9 80.0\n", + " * yu_ocean (yu_ocean) float64 4kB -79.99 -79.95 -79.9 ... -59.06 -59.01\n", + "Data variables:\n", + " hu (yu_ocean, xu_ocean) float32 7MB dask.array\n", + "Attributes: (12/19)\n", + " filename: ocean_grid.nc\n", + " title: ACCESS-OM2-01\n", + " grid_type: mosaic\n", + " grid_tile: 1\n", + " intake_esm_vars: ['hu']\n", + " intake_esm_attrs:filename: ocean_grid.nc\n", + " ... ...\n", + " intake_esm_attrs:variable_standard_name: ,,,,,,,,,sea_floor_depth_below_...\n", + " intake_esm_attrs:variable_cell_methods: ,,,,,time: point,time: point,ti...\n", + " intake_esm_attrs:variable_units: degrees_E,degrees_N,days since ...\n", + " intake_esm_attrs:realm: ocean\n", + " intake_esm_attrs:_data_format_: netcdf\n", + " intake_esm_dataset_key: ocean_grid.fx" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ - "hu = cc.querying.getvar(experiment, 'hu', session, n=1).drop(['geolat_c', 'geolon_c']).sel(yu_ocean=lat_slice).load()" + "hu_ds = esm_ds.search(variable=variable_name,start_date=dates_2086).to_dask()\n", + "\n", + "# Now we select our latitude slice\n", + "hu_ds = hu_ds.sel(yu_ocean=lat_slice).drop_vars(['geolat_c','geolon_c'])\n", + "\n", + "hu = hu_ds['hu']\n", + "hu_ds" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Load velocity data, limit to upper 500m and take the mean in time" + "Now we want to load velocity data, limit to upper 500m and take the mean in time" ] }, { "cell_type": "code", "execution_count": 7, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "

01deg_jra55v13_ryf9091 catalog with 8 dataset(s) from 2070 asset(s):

\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
unique
filename29
file_id7
path2070
filename_timestamp12
frequency4
start_date1404
end_date1404
variable49
variable_long_name43
variable_standard_name11
variable_cell_methods2
variable_units20
realm1
derived_variable0
\n", + "
" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], "source": [ - "u = cc.querying.getvar(experiment, 'u', session, ncfile=\"ocean.nc\",\n", - " start_time=start_time, end_time=end_time,\n", - " chunks={}).sel(yu_ocean=lat_slice).sel(st_ocean=slice(0, 500)).mean('time')\n", + "depth_slice = slice(0,500) # Save our depth slice\n", "\n", - "v = cc.querying.getvar(experiment, 'v', session, ncfile=\"ocean.nc\",\n", - " start_time=start_time, end_time=end_time,\n", - " chunks={}).sel(yu_ocean=lat_slice).sel(st_ocean=slice(0, 500)).mean('time')" + "esm_ds.search(variable=['u','v'])" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Load model grid information directly from a grid data file" + "We have a few frequencies - so lets have a look at what they are, and use that to limit ourselves to a single dataset" ] }, { "cell_type": "code", "execution_count": 8, "metadata": {}, - "outputs": [], - "source": [ - "path_to_folder = Path('/g/data/ik11/outputs/access-om2-01/01deg_jra55v13_ryf9091/output000/ocean/')\n", - "grid = xr.open_mfdataset(path_to_folder / 'ocean_grid.nc', combine='by_coords').drop(['geolon_t', 'geolat_t', 'geolon_c', 'geolat_c'])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Along-slope velocity\n", - "\n", - "We calculate the along-slope velocity component by projecting the velocity field to the tangent vector, $u_{along} = \\boldsymbol{u \\cdot \\hat{t}}$, and the cross-slope component by projecting to the normal vector, $v_{cross} = \\boldsymbol{u \\cdot \\hat{n}}$. The schematic below defines the unit normal normal and tangent vectors for a given bathymetric contour, $\\boldsymbol{n}$ and $\\boldsymbol{t}$ respectively. " - ] - }, - { - "cell_type": "markdown", - "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "array(['3mon', '1mon', '1day', '3hr'], dtype=object)" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ - "![Sketch of topographic gradient](images/topographic_gradient_sketch.png)" + "esm_ds.search(variable=['u','v']).df.frequency.unique()" ] }, { - "cell_type": "markdown", + "cell_type": "code", + "execution_count": 9, "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "

01deg_jra55v13_ryf9091 catalog with 1 dataset(s) from 920 asset(s):

\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
unique
filename1
file_id1
path920
filename_timestamp0
frequency1
start_date920
end_date920
variable46
variable_long_name42
variable_standard_name11
variable_cell_methods2
variable_units20
realm1
derived_variable0
\n", + "
" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], "source": [ - "Accordingly, the code below calculates the along-slope velocity component as\n", - "\n", - "$$ u_{along} = (u,v) \\boldsymbol{\\cdot} \\left(\\frac{h_y}{|\\nabla h|} , -\\frac{h_x}{|\\nabla h|}\\right) = \n", - "u \\frac{h_y}{|\\nabla h|} - v \\frac{h_x}{|\\nabla h|}, $$ \n", - "\n", - "and similarly the cross-slope velocity component as\n", - "\n", - "$$ v_{cross} = (u,v) \\boldsymbol{\\cdot} \\left(\\frac{h_x}{|\\nabla h|} , \\frac{h_y}{|\\nabla h|}\\right) = \n", - "u \\frac{h_x}{|\\nabla h|} + v \\frac{h_y}{|\\nabla h|}.$$ \n" + "# Try only the 1 month frequency data:\n", + "esm_ds.search(variable=['u','v'],frequency='1mon')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "We need the derivatives of the bathymetry which we compute using the `xgcm` functionality." + "Perfect! That gives us a single dataset, so let's load it, limit ourselves to the top 500m like we wanted, and take the mean in time." ] }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 10, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/g/data/xp65/public/apps/med_conda/envs/analysis3-25.02/lib/python3.11/site-packages/intake_esm/source.py:82: FutureWarning: In a future version of xarray decode_timedelta will default to False rather than None. To silence this warning, set decode_timedelta to True, False, or a 'CFTimedeltaCoder' instance.\n", + " ds = xr.open_dataset(url, **xarray_open_kwargs)\n", + "/g/data/xp65/public/apps/med_conda/envs/analysis3-25.02/lib/python3.11/site-packages/intake_esm/source.py:82: FutureWarning: In a future version of xarray decode_timedelta will default to False rather than None. To silence this warning, set decode_timedelta to True, False, or a 'CFTimedeltaCoder' instance.\n", + " ds = xr.open_dataset(url, **xarray_open_kwargs)\n", + "/g/data/xp65/public/apps/med_conda/envs/analysis3-25.02/lib/python3.11/site-packages/intake_esm/source.py:82: FutureWarning: In a future version of xarray decode_timedelta will default to False rather than None. To silence this warning, set decode_timedelta to True, False, or a 'CFTimedeltaCoder' instance.\n", + " ds = xr.open_dataset(url, **xarray_open_kwargs)\n", + "/g/data/xp65/public/apps/med_conda/envs/analysis3-25.02/lib/python3.11/site-packages/intake_esm/source.py:82: FutureWarning: In a future version of xarray decode_timedelta will default to False rather than None. To silence this warning, set decode_timedelta to True, False, or a 'CFTimedeltaCoder' instance.\n", + " ds = xr.open_dataset(url, **xarray_open_kwargs)\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
<xarray.Dataset> Size: 6GB\n",
+       "Dimensions:   (st_ocean: 75, yu_ocean: 2700, xu_ocean: 3600)\n",
+       "Coordinates:\n",
+       "  * st_ocean  (st_ocean) float64 600B 0.5413 1.681 2.94 ... 5.511e+03 5.709e+03\n",
+       "  * xu_ocean  (xu_ocean) float64 29kB -279.9 -279.8 -279.7 ... 79.8 79.9 80.0\n",
+       "  * yu_ocean  (yu_ocean) float64 22kB -81.09 -81.05 -81.0 ... 89.92 89.96 90.0\n",
+       "Data variables:\n",
+       "    u         (st_ocean, yu_ocean, xu_ocean) float32 3GB dask.array<chunksize=(7, 300, 400), meta=np.ndarray>\n",
+       "    v         (st_ocean, yu_ocean, xu_ocean) float32 3GB dask.array<chunksize=(7, 300, 400), meta=np.ndarray>\n",
+       "Attributes: (12/16)\n",
+       "    filename:                                 ocean.nc\n",
+       "    title:                                    ACCESS-OM2-01\n",
+       "    grid_type:                                mosaic\n",
+       "    grid_tile:                                1\n",
+       "    intake_esm_vars:                          ['u', 'v']\n",
+       "    intake_esm_attrs:filename:                ocean.nc\n",
+       "    ...                                       ...\n",
+       "    intake_esm_attrs:variable_standard_name:  ,,,,,,,,,,,,,,,,,,sea_water_con...\n",
+       "    intake_esm_attrs:variable_cell_methods:   ,,,,,,,,,,,,,,,,,,time: mean,ti...\n",
+       "    intake_esm_attrs:variable_units:          degrees_E,degrees_N,meters,mete...\n",
+       "    intake_esm_attrs:realm:                   ocean\n",
+       "    intake_esm_attrs:_data_format_:           netcdf\n",
+       "    intake_esm_dataset_key:                   ocean.1mon
" + ], + "text/plain": [ + " Size: 6GB\n", + "Dimensions: (st_ocean: 75, yu_ocean: 2700, xu_ocean: 3600)\n", + "Coordinates:\n", + " * st_ocean (st_ocean) float64 600B 0.5413 1.681 2.94 ... 5.511e+03 5.709e+03\n", + " * xu_ocean (xu_ocean) float64 29kB -279.9 -279.8 -279.7 ... 79.8 79.9 80.0\n", + " * yu_ocean (yu_ocean) float64 22kB -81.09 -81.05 -81.0 ... 89.92 89.96 90.0\n", + "Data variables:\n", + " u (st_ocean, yu_ocean, xu_ocean) float32 3GB dask.array\n", + " v (st_ocean, yu_ocean, xu_ocean) float32 3GB dask.array\n", + "Attributes: (12/16)\n", + " filename: ocean.nc\n", + " title: ACCESS-OM2-01\n", + " grid_type: mosaic\n", + " grid_tile: 1\n", + " intake_esm_vars: ['u', 'v']\n", + " intake_esm_attrs:filename: ocean.nc\n", + " ... ...\n", + " intake_esm_attrs:variable_standard_name: ,,,,,,,,,,,,,,,,,,sea_water_con...\n", + " intake_esm_attrs:variable_cell_methods: ,,,,,,,,,,,,,,,,,,time: mean,ti...\n", + " intake_esm_attrs:variable_units: degrees_E,degrees_N,meters,mete...\n", + " intake_esm_attrs:realm: ocean\n", + " intake_esm_attrs:_data_format_: netcdf\n", + " intake_esm_dataset_key: ocean.1mon" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Load the dataset\n", + "ds = esm_ds.search(variable=['u','v'],frequency='1mon',start_date=dates_2086).to_dask() \n", + "\n", + "# Select the slice we wanted\n", + "ds.sel(yu_ocean = lat_slice).sel(st_ocean=depth_slice)\n", + "\n", + "# Take the mean over time\n", + "uv_ds = ds.mean(dim='time')\n", + "\n", + "uv_ds" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Load model grid information - we can search the catalog for this as of version 1.1.1:\n", + "Note you must be using the conda/analysis3-25.02 or later environment: \n", + "see also https://github.com/ACCESS-NRI/intake-training/blob/main/2025_02/building-intake-esm-datastore-slides-202502.pdf" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
<xarray.Dataset> Size: 2GB\n",
+       "Dimensions:     (time: 4, yt_ocean: 2700, xt_ocean: 3600, yu_ocean: 2700,\n",
+       "                 xu_ocean: 3600)\n",
+       "Coordinates:\n",
+       "  * xt_ocean    (xt_ocean) float64 29kB -279.9 -279.8 -279.7 ... 79.85 79.95\n",
+       "  * yt_ocean    (yt_ocean) float64 22kB -81.11 -81.07 -81.02 ... 89.94 89.98\n",
+       "  * time        (time) object 32B 2086-01-01 00:00:00 ... 2086-10-01 00:00:00\n",
+       "  * xu_ocean    (xu_ocean) float64 29kB -279.9 -279.8 -279.7 ... 79.8 79.9 80.0\n",
+       "  * yu_ocean    (yu_ocean) float64 22kB -81.09 -81.05 -81.0 ... 89.92 89.96 90.0\n",
+       "Data variables:\n",
+       "    ht          (time, yt_ocean, xt_ocean) float32 156MB dask.array<chunksize=(1, 675, 900), meta=np.ndarray>\n",
+       "    hu          (time, yu_ocean, xu_ocean) float32 156MB dask.array<chunksize=(1, 675, 900), meta=np.ndarray>\n",
+       "    dxt         (time, yt_ocean, xt_ocean) float32 156MB dask.array<chunksize=(1, 675, 900), meta=np.ndarray>\n",
+       "    dyt         (time, yt_ocean, xt_ocean) float32 156MB dask.array<chunksize=(1, 675, 900), meta=np.ndarray>\n",
+       "    dxu         (time, yu_ocean, xu_ocean) float32 156MB dask.array<chunksize=(1, 675, 900), meta=np.ndarray>\n",
+       "    dyu         (time, yu_ocean, xu_ocean) float32 156MB dask.array<chunksize=(1, 675, 900), meta=np.ndarray>\n",
+       "    area_t      (time, yt_ocean, xt_ocean) float32 156MB dask.array<chunksize=(1, 675, 900), meta=np.ndarray>\n",
+       "    area_u      (time, yu_ocean, xu_ocean) float32 156MB dask.array<chunksize=(1, 675, 900), meta=np.ndarray>\n",
+       "    kmt         (time, yt_ocean, xt_ocean) float32 156MB dask.array<chunksize=(1, 675, 900), meta=np.ndarray>\n",
+       "    kmu         (time, yu_ocean, xu_ocean) float32 156MB dask.array<chunksize=(1, 675, 900), meta=np.ndarray>\n",
+       "    drag_coeff  (time, yu_ocean, xu_ocean) float32 156MB dask.array<chunksize=(1, 675, 900), meta=np.ndarray>\n",
+       "Attributes: (12/16)\n",
+       "    filename:                                 ocean_grid.nc\n",
+       "    title:                                    ACCESS-OM2-01\n",
+       "    grid_type:                                mosaic\n",
+       "    grid_tile:                                1\n",
+       "    intake_esm_vars:                          ['xt_ocean', 'yt_ocean', 'time'...\n",
+       "    intake_esm_attrs:filename:                ocean_grid.nc\n",
+       "    ...                                       ...\n",
+       "    intake_esm_attrs:variable_standard_name:  ,,,,,,,,,sea_floor_depth_below_...\n",
+       "    intake_esm_attrs:variable_cell_methods:   ,,,,,time: point,time: point,ti...\n",
+       "    intake_esm_attrs:variable_units:          degrees_E,degrees_N,days since ...\n",
+       "    intake_esm_attrs:realm:                   ocean\n",
+       "    intake_esm_attrs:_data_format_:           netcdf\n",
+       "    intake_esm_dataset_key:                   ocean_grid.fx
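As the repr above shows, the grid variables come back with a `time` dimension of length 4 even though they are static fields. If that dimension is not wanted downstream, one option (an assumption on my part, not something the original notebook does) is to keep a single time level before merging:

```python
# Hypothetical simplification: the grid fields are time-invariant, so keep one
# time level and drop the scalar time coordinate before further use.
grid_static = grid.isel(time=0, drop=True)
```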
" + ], + "text/plain": [ + " Size: 2GB\n", + "Dimensions: (time: 4, yt_ocean: 2700, xt_ocean: 3600, yu_ocean: 2700,\n", + " xu_ocean: 3600)\n", + "Coordinates:\n", + " * xt_ocean (xt_ocean) float64 29kB -279.9 -279.8 -279.7 ... 79.85 79.95\n", + " * yt_ocean (yt_ocean) float64 22kB -81.11 -81.07 -81.02 ... 89.94 89.98\n", + " * time (time) object 32B 2086-01-01 00:00:00 ... 2086-10-01 00:00:00\n", + " * xu_ocean (xu_ocean) float64 29kB -279.9 -279.8 -279.7 ... 79.8 79.9 80.0\n", + " * yu_ocean (yu_ocean) float64 22kB -81.09 -81.05 -81.0 ... 89.92 89.96 90.0\n", + "Data variables:\n", + " ht (time, yt_ocean, xt_ocean) float32 156MB dask.array\n", + " hu (time, yu_ocean, xu_ocean) float32 156MB dask.array\n", + " dxt (time, yt_ocean, xt_ocean) float32 156MB dask.array\n", + " dyt (time, yt_ocean, xt_ocean) float32 156MB dask.array\n", + " dxu (time, yu_ocean, xu_ocean) float32 156MB dask.array\n", + " dyu (time, yu_ocean, xu_ocean) float32 156MB dask.array\n", + " area_t (time, yt_ocean, xt_ocean) float32 156MB dask.array\n", + " area_u (time, yu_ocean, xu_ocean) float32 156MB dask.array\n", + " kmt (time, yt_ocean, xt_ocean) float32 156MB dask.array\n", + " kmu (time, yu_ocean, xu_ocean) float32 156MB dask.array\n", + " drag_coeff (time, yu_ocean, xu_ocean) float32 156MB dask.array\n", + "Attributes: (12/16)\n", + " filename: ocean_grid.nc\n", + " title: ACCESS-OM2-01\n", + " grid_type: mosaic\n", + " grid_tile: 1\n", + " intake_esm_vars: ['xt_ocean', 'yt_ocean', 'time'...\n", + " intake_esm_attrs:filename: ocean_grid.nc\n", + " ... ...\n", + " intake_esm_attrs:variable_standard_name: ,,,,,,,,,sea_floor_depth_below_...\n", + " intake_esm_attrs:variable_cell_methods: ,,,,,time: point,time: point,ti...\n", + " intake_esm_attrs:variable_units: degrees_E,degrees_N,days since ...\n", + " intake_esm_attrs:realm: ocean\n", + " intake_esm_attrs:_data_format_: netcdf\n", + " intake_esm_dataset_key: ocean_grid.fx" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# There are ocean grid variables for each year of output - so to save looking at them all, we can just specify the year\n", + "# Note: even though we only need one file, xarray still needs to open them all to check the metadata - this can take a while\n", + "grid = esm_ds.search(file_id='ocean_grid',start_date=dates_2086).to_dask()\n", + "\n", + "# Again, we don't need the geolon variables\n", + "grid = grid.drop_vars(['geolon_t', 'geolat_t', 'geolon_c', 'geolat_c'])\n", + "grid" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Along-slope velocity\n", + "\n", + "We calculate the along-slope velocity component by projecting the velocity field to the tangent vector, $u_{along} = \\boldsymbol{u \\cdot \\hat{t}}$, and the cross-slope component by projecting to the normal vector, $v_{cross} = \\boldsymbol{u \\cdot \\hat{n}}$. The schematic below defines the unit normal normal and tangent vectors for a given bathymetric contour, $\\boldsymbol{n}$ and $\\boldsymbol{t}$ respectively. 
" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "![Sketch of topographic gradient](images/topographic_gradient_sketch.png)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Accordingly, the code below calculates the along-slope velocity component as\n", + "\n", + "$$ u_{along} = (u,v) \\boldsymbol{\\cdot} \\left(\\frac{h_y}{|\\nabla h|} , -\\frac{h_x}{|\\nabla h|}\\right) = \n", + "u \\frac{h_y}{|\\nabla h|} - v \\frac{h_x}{|\\nabla h|}, $$ \n", + "\n", + "and similarly the cross-slope velocity component as\n", + "\n", + "$$ v_{cross} = (u,v) \\boldsymbol{\\cdot} \\left(\\frac{h_x}{|\\nabla h|} , \\frac{h_y}{|\\nabla h|}\\right) = \n", + "u \\frac{h_x}{|\\nabla h|} + v \\frac{h_y}{|\\nabla h|}.$$ \n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We need the derivatives of the bathymetry which we compute using the `xgcm` functionality." + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/g/data/xp65/public/apps/med_conda/envs/analysis3-25.02/lib/python3.11/site-packages/distributed/client.py:3370: UserWarning: Sending large graph of size 185.51 MiB.\n", + "This may cause some slowdown.\n", + "Consider loading the data with Dask directly\n", + " or using futures or delayed objects to embed the data into the graph without repetition.\n", + "See also https://docs.dask.org/en/stable/best-practices.html#load-data-with-dask for more information.\n", + " warnings.warn(\n", + "/g/data/xp65/public/apps/med_conda/envs/analysis3-25.02/lib/python3.11/site-packages/xgcm/grid_ufunc.py:832: FutureWarning: The return type of `Dataset.dims` will be changed to return a set of dimension names in future, in order to be more consistent with `DataArray.dims`. To access a mapping from dimension names to lengths, please use `Dataset.sizes`.\n", + " out_dim: grid._ds.dims[out_dim] for arg in out_core_dims for out_dim in arg\n", + "/g/data/xp65/public/apps/med_conda/envs/analysis3-25.02/lib/python3.11/site-packages/xgcm/grid_ufunc.py:832: FutureWarning: The return type of `Dataset.dims` will be changed to return a set of dimension names in future, in order to be more consistent with `DataArray.dims`. To access a mapping from dimension names to lengths, please use `Dataset.sizes`.\n", + " out_dim: grid._ds.dims[out_dim] for arg in out_core_dims for out_dim in arg\n", + "/g/data/xp65/public/apps/med_conda/envs/analysis3-25.02/lib/python3.11/site-packages/xgcm/grid_ufunc.py:832: FutureWarning: The return type of `Dataset.dims` will be changed to return a set of dimension names in future, in order to be more consistent with `DataArray.dims`. To access a mapping from dimension names to lengths, please use `Dataset.sizes`.\n", + " out_dim: grid._ds.dims[out_dim] for arg in out_core_dims for out_dim in arg\n", + "/g/data/xp65/public/apps/med_conda/envs/analysis3-25.02/lib/python3.11/site-packages/xgcm/grid_ufunc.py:832: FutureWarning: The return type of `Dataset.dims` will be changed to return a set of dimension names in future, in order to be more consistent with `DataArray.dims`. 
To access a mapping from dimension names to lengths, please use `Dataset.sizes`.\n", + " out_dim: grid._ds.dims[out_dim] for arg in out_core_dims for out_dim in arg\n", + "/g/data/xp65/public/apps/med_conda/envs/analysis3-25.02/lib/python3.11/site-packages/xgcm/grid_ufunc.py:832: FutureWarning: The return type of `Dataset.dims` will be changed to return a set of dimension names in future, in order to be more consistent with `DataArray.dims`. To access a mapping from dimension names to lengths, please use `Dataset.sizes`.\n", + " out_dim: grid._ds.dims[out_dim] for arg in out_core_dims for out_dim in arg\n", + "/g/data/xp65/public/apps/med_conda/envs/analysis3-25.02/lib/python3.11/site-packages/xgcm/grid_ufunc.py:832: FutureWarning: The return type of `Dataset.dims` will be changed to return a set of dimension names in future, in order to be more consistent with `DataArray.dims`. To access a mapping from dimension names to lengths, please use `Dataset.sizes`.\n", + " out_dim: grid._ds.dims[out_dim] for arg in out_core_dims for out_dim in arg\n" + ] + } + ], + "source": [ + "# Give information on the grid: location of u (momentum) and t (tracer) points on B-grid \n", + "\n", + "# We need to merge the two datasets. They're on the same coordinates, so this should be straightforward.\n", + "ds = xr.merge([hu_ds, grid])\n", + "ds.coords['xt_ocean'].attrs.update(axis='X')\n", + "ds.coords['xu_ocean'].attrs.update(axis='X', c_grid_axis_shift=0.5)\n", + "ds.coords['yt_ocean'].attrs.update(axis='Y')\n", + "ds.coords['yu_ocean'].attrs.update(axis='Y', c_grid_axis_shift=0.5)\n", + "\n", + "grid = xgcm.Grid(ds, periodic=['X'])\n", + "\n", + "# Take topographic gradient (simple gradient over one grid cell) and move back to u-grid\n", + "dhu_dx = grid.interp( grid.diff(ds.hu, 'X') / grid.interp(ds.dxu, 'X'), 'X')\n", + "\n", + "# In meridional direction, we need to specify what happens at the boundary\n", + "dhu_dy = grid.interp( grid.diff(ds.hu, 'Y', boundary='extend') / grid.interp(ds.dyt, 'X'), 'Y', boundary='extend')\n", + "\n", + "# Select latitude slice\n", + "dhu_dx = dhu_dx.sel(yu_ocean=lat_slice)\n", + "dhu_dy = dhu_dy.sel(yu_ocean=lat_slice)\n", + "\n", + "# Magnitude of the topographic slope (to normalise the topographic gradient)\n", + "topographic_slope_magnitude = np.sqrt(dhu_dx**2 + dhu_dy**2)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Calculate along-slope velocity component" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
<xarray.DataArray (st_ocean: 75, yu_ocean: 484, xu_ocean: 3600)> Size: 523MB\n",
+       "dask.array<mean_agg-aggregate, shape=(75, 484, 3600), dtype=float32, chunksize=(7, 300, 400), chunktype=numpy.ndarray>\n",
+       "Coordinates:\n",
+       "  * st_ocean  (st_ocean) float64 600B 0.5413 1.681 2.94 ... 5.511e+03 5.709e+03\n",
+       "  * xu_ocean  (xu_ocean) float64 29kB -279.9 -279.8 -279.7 ... 79.8 79.9 80.0\n",
+       "  * yu_ocean  (yu_ocean) float64 4kB -79.99 -79.95 -79.9 ... -59.06 -59.01\n",
+       "Attributes:\n",
+       "    long_name:      i-current\n",
+       "    units:          m/sec\n",
+       "    valid_range:    [-10.  10.]\n",
+       "    cell_methods:   time: mean\n",
+       "    time_avg_info:  average_T1,average_T2,average_DT\n",
+       "    standard_name:  sea_water_x_velocity
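The cell above evaluates only the along-slope projection. For completeness, the cross-slope component given by the $v_{cross}$ formula earlier can be formed from the same fields; this is a sketch, and the name `crossslope_velocity` is illustrative rather than from the original notebook:

```python
# Cross-slope component v_cross = (u*h_x + v*h_y) / |grad h|, mirroring the
# along-slope calculation above and reusing the same gradient fields.
crossslope_velocity = (u * dhu_dx + v * dhu_dy) / topographic_slope_magnitude
# As with the along-slope component, average out the time dimension inherited
# from the grid fields.
crossslope_velocity = crossslope_velocity.mean(dim="time")
```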
" + ], + "text/plain": [ + " Size: 523MB\n", + "dask.array\n", + "Coordinates:\n", + " * st_ocean (st_ocean) float64 600B 0.5413 1.681 2.94 ... 5.511e+03 5.709e+03\n", + " * xu_ocean (xu_ocean) float64 29kB -279.9 -279.8 -279.7 ... 79.8 79.9 80.0\n", + " * yu_ocean (yu_ocean) float64 4kB -79.99 -79.95 -79.9 ... -59.06 -59.01\n", + "Attributes:\n", + " long_name: i-current\n", + " units: m/sec\n", + " valid_range: [-10. 10.]\n", + " cell_methods: time: mean\n", + " time_avg_info: average_T1,average_T2,average_DT\n", + " standard_name: sea_water_x_velocity" + ] + }, + "execution_count": 13, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ - "# Give information on the grid: location of u (momentum) and t (tracer) points on B-grid \n", - "ds = xr.merge([hu, grid])\n", - "ds.coords['xt_ocean'].attrs.update(axis='X')\n", - "ds.coords['xu_ocean'].attrs.update(axis='X', c_grid_axis_shift=0.5)\n", - "ds.coords['yt_ocean'].attrs.update(axis='Y')\n", - "ds.coords['yu_ocean'].attrs.update(axis='Y', c_grid_axis_shift=0.5)\n", - "\n", - "grid = xgcm.Grid(ds, periodic=['X'])\n", - "\n", - "# Take topographic gradient (simple gradient over one grid cell) and move back to u-grid\n", - "dhu_dx = grid.interp( grid.diff(ds.hu, 'X') / grid.interp(ds.dxu, 'X'), 'X')\n", - "\n", - "# In meridional direction, we need to specify what happens at the boundary\n", - "dhu_dy = grid.interp( grid.diff(ds.hu, 'Y', boundary='extend') / grid.interp(ds.dyt, 'X'), 'Y', boundary='extend')\n", + "# Get the u and v data arrays out of our dataset\n", + "u, v = uv_ds['u'],uv_ds['v']\n", "\n", - "# Select latitude slice\n", - "dhu_dx = dhu_dx.sel(yu_ocean=lat_slice)\n", - "dhu_dy = dhu_dy.sel(yu_ocean=lat_slice)\n", + "# Along-slope velocity\n", + "alongslope_velocity = u * dhu_dy / topographic_slope_magnitude - v * dhu_dx / topographic_slope_magnitude\n", "\n", - "# Magnitude of the topographic slope (to normalise the topographic gradient)\n", - "topographic_slope_magnitude = np.sqrt(dhu_dx**2 + dhu_dy**2)" + "alongslope_velocity = alongslope_velocity.mean(dim='time') # Remember to average over time again - otheriwse we wind up with an extraneous extra dim in the plot\n", + "alongslope_velocity" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Calculate along-slope velocity component" + "### Vertical averaging (we only need this to plot the velocity on a map)" ] }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 14, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ - "/g/data/hh5/public/apps/miniconda3/envs/analysis3-24.01/lib/python3.10/site-packages/dask/core.py:127: RuntimeWarning: invalid value encountered in divide\n", - " return func(*(_execute_task(a, cache) for a in args))\n", - "/g/data/hh5/public/apps/miniconda3/envs/analysis3-24.01/lib/python3.10/site-packages/dask/core.py:127: RuntimeWarning: invalid value encountered in divide\n", - " return func(*(_execute_task(a, cache) for a in args))\n", - "/g/data/hh5/public/apps/miniconda3/envs/analysis3-24.01/lib/python3.10/site-packages/dask/core.py:127: RuntimeWarning: invalid value encountered in divide\n", - " return func(*(_execute_task(a, cache) for a in args))\n", - "/g/data/hh5/public/apps/miniconda3/envs/analysis3-24.01/lib/python3.10/site-packages/dask/core.py:127: RuntimeWarning: invalid value encountered in divide\n", - " return func(*(_execute_task(a, cache) for a in args))\n", - 
"/g/data/hh5/public/apps/miniconda3/envs/analysis3-24.01/lib/python3.10/site-packages/dask/core.py:127: RuntimeWarning: invalid value encountered in divide\n", - " return func(*(_execute_task(a, cache) for a in args))\n", - "/g/data/hh5/public/apps/miniconda3/envs/analysis3-24.01/lib/python3.10/site-packages/dask/core.py:127: RuntimeWarning: invalid value encountered in divide\n", - " return func(*(_execute_task(a, cache) for a in args))\n", - "/g/data/hh5/public/apps/miniconda3/envs/analysis3-24.01/lib/python3.10/site-packages/dask/core.py:127: RuntimeWarning: invalid value encountered in divide\n", - " return func(*(_execute_task(a, cache) for a in args))\n" + "/g/data/xp65/public/apps/med_conda/envs/analysis3-25.02/lib/python3.11/site-packages/intake_esm/source.py:82: FutureWarning: In a future version of xarray decode_timedelta will default to False rather than None. To silence this warning, set decode_timedelta to True, False, or a 'CFTimedeltaCoder' instance.\n", + " ds = xr.open_dataset(url, **xarray_open_kwargs)\n", + "/g/data/xp65/public/apps/med_conda/envs/analysis3-25.02/lib/python3.11/site-packages/intake_esm/source.py:82: FutureWarning: In a future version of xarray decode_timedelta will default to False rather than None. To silence this warning, set decode_timedelta to True, False, or a 'CFTimedeltaCoder' instance.\n", + " ds = xr.open_dataset(url, **xarray_open_kwargs)\n", + "/g/data/xp65/public/apps/med_conda/envs/analysis3-25.02/lib/python3.11/site-packages/intake_esm/source.py:82: FutureWarning: In a future version of xarray decode_timedelta will default to False rather than None. To silence this warning, set decode_timedelta to True, False, or a 'CFTimedeltaCoder' instance.\n", + " ds = xr.open_dataset(url, **xarray_open_kwargs)\n", + "/g/data/xp65/public/apps/med_conda/envs/analysis3-25.02/lib/python3.11/site-packages/intake_esm/source.py:82: FutureWarning: In a future version of xarray decode_timedelta will default to False rather than None. To silence this warning, set decode_timedelta to True, False, or a 'CFTimedeltaCoder' instance.\n", + " ds = xr.open_dataset(url, **xarray_open_kwargs)\n", + "/g/data/xp65/public/apps/med_conda/envs/analysis3-25.02/lib/python3.11/site-packages/intake_esm/source.py:82: FutureWarning: In a future version of xarray decode_timedelta will default to False rather than None. To silence this warning, set decode_timedelta to True, False, or a 'CFTimedeltaCoder' instance.\n", + " ds = xr.open_dataset(url, **xarray_open_kwargs)\n", + "/g/data/xp65/public/apps/med_conda/envs/analysis3-25.02/lib/python3.11/site-packages/intake_esm/source.py:82: FutureWarning: In a future version of xarray decode_timedelta will default to False rather than None. To silence this warning, set decode_timedelta to True, False, or a 'CFTimedeltaCoder' instance.\n", + " ds = xr.open_dataset(url, **xarray_open_kwargs)\n", + "/g/data/xp65/public/apps/med_conda/envs/analysis3-25.02/lib/python3.11/site-packages/intake_esm/source.py:82: FutureWarning: In a future version of xarray decode_timedelta will default to False rather than None. To silence this warning, set decode_timedelta to True, False, or a 'CFTimedeltaCoder' instance.\n", + " ds = xr.open_dataset(url, **xarray_open_kwargs)\n", + "/g/data/xp65/public/apps/med_conda/envs/analysis3-25.02/lib/python3.11/site-packages/intake_esm/source.py:82: FutureWarning: In a future version of xarray decode_timedelta will default to False rather than None. 
To silence this warning, set decode_timedelta to True, False, or a 'CFTimedeltaCoder' instance.\n", + " ds = xr.open_dataset(url, **xarray_open_kwargs)\n" ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
<xarray.DataArray 'hu' (yu_ocean: 484, xu_ocean: 3600)> Size: 7MB\n",
+       "dask.array<getitem, shape=(484, 3600), dtype=float32, chunksize=(484, 900), chunktype=numpy.ndarray>\n",
+       "Coordinates:\n",
+       "  * xu_ocean  (xu_ocean) float64 29kB -279.9 -279.8 -279.7 ... 79.8 79.9 80.0\n",
+       "  * yu_ocean  (yu_ocean) float64 4kB -79.99 -79.95 -79.9 ... -59.06 -59.01\n",
+       "Attributes:\n",
+       "    long_name:     ocean depth on u-cells\n",
+       "    units:         m\n",
+       "    valid_range:   [-1.e+09  1.e+09]\n",
+       "    cell_methods:  time: point
" + ], + "text/plain": [ + " Size: 7MB\n", + "dask.array\n", + "Coordinates:\n", + " * xu_ocean (xu_ocean) float64 29kB -279.9 -279.8 -279.7 ... 79.8 79.9 80.0\n", + " * yu_ocean (yu_ocean) float64 4kB -79.99 -79.95 -79.9 ... -59.06 -59.01\n", + "Attributes:\n", + " long_name: ocean depth on u-cells\n", + " units: m\n", + " valid_range: [-1.e+09 1.e+09]\n", + " cell_methods: time: point" + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" } ], - "source": [ - "# Along-slope velocity\n", - "alongslope_velocity = u * dhu_dy / topographic_slope_magnitude - v * dhu_dx / topographic_slope_magnitude\n", - "\n", - "# Load the data\n", - "alongslope_velocity = alongslope_velocity.load()\n", - "# warnings might come up in points where we divide by NaN/0,\n", - "# i.e., when there is no topographic gradient and warning can be ignored" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Vertical averaging (we only need this to plot the velocity on a map)" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [], "source": [ "# Import edges of st_ocean and add lat/lon dimensions:\n", - "st_edges_ocean = cc.querying.getvar(experiment, 'st_edges_ocean', session,\n", - " start_time=start_time, end_time=end_time, n=1)\n", - "st_edges_array = st_edges_ocean.expand_dims({'yu_ocean': u.yu_ocean, 'xu_ocean': u.xu_ocean}, axis=[1, 2])" + "st_edges_ocean = esm_ds.search(variable='st_edges_ocean',start_date=dates_2086).to_dask()\n", + "st_edges_array = st_edges_ocean.expand_dims({'yu_ocean': u.yu_ocean, 'xu_ocean': u.xu_ocean}, axis=[1, 2])\n", + "st_edges_array = st_edges_array.sel(yu_ocean = lat_slice)['st_edges_ocean']\n", + "\n", + "st_ocean = esm_ds.search(variable='st_ocean',start_date=dates_2086,file_id='ocean')\n", + "st_ocean = st_ocean.to_dask()['st_ocean']\n", + "\n", + "hu = hu_ds['hu']\n", + "hu" ] }, { "cell_type": "code", - "execution_count": 12, + "execution_count": 15, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/g/data/xp65/public/apps/med_conda/envs/analysis3-25.02/lib/python3.11/site-packages/intake_esm/source.py:82: FutureWarning: In a future version of xarray decode_timedelta will default to False rather than None. To silence this warning, set decode_timedelta to True, False, or a 'CFTimedeltaCoder' instance.\n", + " ds = xr.open_dataset(url, **xarray_open_kwargs)\n", + "/g/data/xp65/public/apps/med_conda/envs/analysis3-25.02/lib/python3.11/site-packages/intake_esm/source.py:82: FutureWarning: In a future version of xarray decode_timedelta will default to False rather than None. To silence this warning, set decode_timedelta to True, False, or a 'CFTimedeltaCoder' instance.\n", + " ds = xr.open_dataset(url, **xarray_open_kwargs)\n", + "/g/data/xp65/public/apps/med_conda/envs/analysis3-25.02/lib/python3.11/site-packages/intake_esm/source.py:82: FutureWarning: In a future version of xarray decode_timedelta will default to False rather than None. To silence this warning, set decode_timedelta to True, False, or a 'CFTimedeltaCoder' instance.\n", + " ds = xr.open_dataset(url, **xarray_open_kwargs)\n", + "/g/data/xp65/public/apps/med_conda/envs/analysis3-25.02/lib/python3.11/site-packages/intake_esm/source.py:82: FutureWarning: In a future version of xarray decode_timedelta will default to False rather than None. 
To silence this warning, set decode_timedelta to True, False, or a 'CFTimedeltaCoder' instance.\n", + " ds = xr.open_dataset(url, **xarray_open_kwargs)\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
<xarray.DataArray (yu_ocean: 484, xu_ocean: 3600)> Size: 14MB\n",
+       "dask.array<truediv, shape=(484, 3600), dtype=float64, chunksize=(300, 400), chunktype=numpy.ndarray>\n",
+       "Coordinates:\n",
+       "  * xu_ocean  (xu_ocean) float64 29kB -279.9 -279.8 -279.7 ... 79.8 79.9 80.0\n",
+       "  * yu_ocean  (yu_ocean) float64 4kB -79.99 -79.95 -79.9 ... -59.06 -59.01\n",
+       "Attributes:\n",
+       "    long_name:      i-current\n",
+       "    units:          m/sec\n",
+       "    valid_range:    [-10.  10.]\n",
+       "    cell_methods:   time: mean\n",
+       "    time_avg_info:  average_T1,average_T2,average_DT\n",
+       "    standard_name:  sea_water_x_velocity
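The 2-D array above is the depth-averaged along-slope velocity produced by the partial-cell thickness weighting in this cell (the cell's source follows the repr). As a self-contained illustration of that idea with synthetic numbers (all names and values below are made up for the example only), xarray's `weighted` accessor gives the same kind of thickness-weighted vertical mean:

```python
import numpy as np
import xarray as xr

# Thickness-weighted vertical mean on a tiny synthetic column (illustrative only).
velocity = xr.DataArray(
    [[0.1, 0.2], [0.2, 0.3], [np.nan, 0.4]],
    dims=("st_ocean", "xu_ocean"),
    coords={"st_ocean": [5.0, 15.0, 30.0]},
)
thickness = xr.DataArray(
    [[10.0, 10.0], [10.0, 10.0], [5.0, 20.0]],
    dims=("st_ocean", "xu_ocean"),
    coords={"st_ocean": [5.0, 15.0, 30.0]},
)

# NaNs in the data are skipped; their weights are excluded from the normalisation.
depth_avg = velocity.weighted(thickness).mean(dim="st_ocean")
print(depth_avg.values)  # -> [0.15  0.325]
```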
" + ], + "text/plain": [ + " Size: 14MB\n", + "dask.array\n", + "Coordinates:\n", + " * xu_ocean (xu_ocean) float64 29kB -279.9 -279.8 -279.7 ... 79.8 79.9 80.0\n", + " * yu_ocean (yu_ocean) float64 4kB -79.99 -79.95 -79.9 ... -59.06 -59.01\n", + "Attributes:\n", + " long_name: i-current\n", + " units: m/sec\n", + " valid_range: [-10. 10.]\n", + " cell_methods: time: mean\n", + " time_avg_info: average_T1,average_T2,average_DT\n", + " standard_name: sea_water_x_velocity" + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "# Adjust edges at bottom for partial thickness:\n", "st_edges_with_partial = st_edges_array.where(st_edges_array" ] @@ -904,7 +5676,7 @@ " transform=ccrs.PlateCarree())\n", "\n", "# Along slope barotropic velocity\n", - "sc = barotropic_alongslope_velocity.plot(ax=ax, cmap=cm.cm.curl,\n", + "sc = barotropic_alongslope_velocity.plot.pcolormesh(ax=ax, cmap=cm.cm.curl,\n", " transform=ccrs.PlateCarree(),\n", " vmin=-0.3, vmax=0.3,\n", " cbar_kwargs={'orientation': 'vertical',\n", @@ -918,7 +5690,7 @@ }, { "cell_type": "code", - "execution_count": 16, + "execution_count": 19, "metadata": {}, "outputs": [], "source": [ @@ -928,9 +5700,9 @@ ], "metadata": { "kernelspec": { - "display_name": "Python [conda env:analysis3-24.01]", + "display_name": "Python 3 (ipykernel)", "language": "python", - "name": "conda-env-analysis3-24.01-py" + "name": "python3" }, "language_info": { "codemirror_mode": { @@ -942,7 +5714,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.13" + "version": "3.11.11" }, "other_supplementary_files": [ "images/topographic_gradient_sketch.png"