From e3fe38dbf86a7d15c1163b66ea41ededfced9f87 Mon Sep 17 00:00:00 2001
From: Valeriu Predoi
Date: Thu, 29 Aug 2019 11:52:43 +0100
Subject: [PATCH 1/3] transferred Lee area changes to new branch and PR

---
 esmvalcore/preprocessor/_area.py | 56 +++++++++++++++++++++-----------
 1 file changed, 37 insertions(+), 19 deletions(-)

diff --git a/esmvalcore/preprocessor/_area.py b/esmvalcore/preprocessor/_area.py
index 9b5cf74bbe..b6a8512b80 100644
--- a/esmvalcore/preprocessor/_area.py
+++ b/esmvalcore/preprocessor/_area.py
@@ -8,6 +8,7 @@
 
 import iris
 from dask import array as da
+import numpy as np
 
 logger = logging.getLogger(__name__)
 
@@ -56,20 +57,24 @@ def extract_region(cube, start_longitude, end_longitude, start_latitude,
     start_latitude = float(start_latitude)
     end_latitude = float(end_latitude)
 
+    # Regular grid
     if cube.coord('latitude').ndim == 1:
         region_subset = cube.intersection(
             longitude=(start_longitude, end_longitude),
             latitude=(start_latitude, end_latitude))
         region_subset = region_subset.intersection(longitude=(0., 360.))
         return region_subset
-    # irregular grids
+
+    # Irregular grids - not lazy.
     lats = cube.coord('latitude').points
     lons = cube.coord('longitude').points
-    select_lats = start_latitude < lats < end_latitude
-    select_lons = start_longitude < lons < end_longitude
-    selection = select_lats & select_lons
-    data = da.ma.masked_where(~selection, cube.core_data())
-    return cube.copy(data)
+    mask = np.ma.array(cube.data).mask
+    mask += np.ma.masked_where(lats < start_latitude, lats).mask
+    mask += np.ma.masked_where(lats > end_latitude, lats).mask
+    mask += np.ma.masked_where(lons < start_longitude, lons).mask
+    mask += np.ma.masked_where(lons > end_longitude, lons).mask
+    cube.data = da.ma.masked_array(data=cube.data, mask=mask)
+    return cube
 
 
 def get_iris_analysis_operation(operator):
@@ -156,19 +161,32 @@ def tile_grid_areas(cube, fx_files):
                 continue
             logger.info('Attempting to load %s from file: %s', key, fx_file)
             fx_cube = iris.load_cube(fx_file)
 
-            grid_areas = fx_cube.core_data()
-            if cube.ndim == 4 and grid_areas.ndim == 2:
-                grid_areas = da.tile(grid_areas,
-                                     [cube.shape[0], cube.shape[1], 1, 1])
-            elif cube.ndim == 4 and grid_areas.ndim == 3:
-                grid_areas = da.tile(grid_areas, [cube.shape[0], 1, 1, 1])
-            elif cube.ndim == 3 and grid_areas.ndim == 2:
-                grid_areas = da.tile(grid_areas, [cube.shape[0], 1, 1])
-            else:
-                raise ValueError('Grid and dataset number of dimensions not '
-                                 'recognised: {} and {}.'
-                                 ''.format(cube.ndim, grid_areas.ndim))
+    else:
+        return None
+
+    if grid_areas.shape != cube.shape[-2:]:
+        raise ValueError('Fx area {} and dataset {} shapes do not match.'
+                         ''.format(grid_areas.shape, cube.shape))
+
+    if cube.ndim == grid_areas.ndim:
+        return grid_areas
+
+    # Use dash.array.stack to tile areacello.
+    elif cube.ndim == 4 and grid_areas.ndim == 2:
+        for shape in [1, 0]:
+            grida = [grid_areas for itr in range(cube.shape[shape])]
+            grid_areas = da.stack(grida, axis=0)
+    elif cube.ndim == 4 and grid_areas.ndim == 3:
+        grida = [grid_areas for itr in range(cube.shape[0])]
+        grid_areas = da.stack(grida, axis=0)
+    elif cube.ndim == 3 and grid_areas.ndim == 2:
+        grida = [grid_areas for itr in range(cube.shape[0])]
+        grid_areas = da.stack(grida, axis=0)
+    else:
+        raise ValueError('Grid and dataset number of dimensions not '
+                         'recognised: {} and {}.'
+                         ''.format(cube.ndim, grid_areas.ndim))
     return grid_areas
 
 
@@ -246,7 +264,7 @@ def area_statistics(cube, operator, fx_files=None):
     if operator == 'mean':
         return cube.collapsed(coord_names,
                               operation,
-                              weights=grid_areas)
+                              weights=np.array(grid_areas))
 
     # Many IRIS analysis functions do not accept weights arguments.
     return cube.collapsed(coord_names, operation)

From effd567153bc295ca2fb309148f253f90293ac26 Mon Sep 17 00:00:00 2001
From: Valeriu Predoi
Date: Thu, 29 Aug 2019 11:53:09 +0100
Subject: [PATCH 2/3] transferred Lee volume changes to new branch and PR

---
 esmvalcore/preprocessor/_volume.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/esmvalcore/preprocessor/_volume.py b/esmvalcore/preprocessor/_volume.py
index 4859b73639..73f43933bb 100644
--- a/esmvalcore/preprocessor/_volume.py
+++ b/esmvalcore/preprocessor/_volume.py
@@ -260,12 +260,13 @@ def volume_statistics(
                     layer_vol = np.ma.masked_where(
                         cube[time_itr, z_itr].data.mask,
                         grid_volume[time_itr, z_itr]).sum()
-
                 except AttributeError:
                     # ####
                     # No mask in the cube data.
                     layer_vol = grid_volume.sum()
                 depth_volume.append(layer_vol)
+
+            column = np.ma.masked_invalid(column)
             # ####
             # Calculate weighted mean over the water volumn
             result.append(np.average(column, weights=depth_volume))

From d068af0aec1502636938031e928c240dad4a3b05 Mon Sep 17 00:00:00 2001
From: Lee de Mora
Date: Wed, 16 Oct 2019 09:33:46 +0100
Subject: [PATCH 3/3] stashing changes

---
 esmvalcore/preprocessor/_area.py | 45 ++------------------------------
 1 file changed, 2 insertions(+), 43 deletions(-)

diff --git a/esmvalcore/preprocessor/_area.py b/esmvalcore/preprocessor/_area.py
index 9bfb0939bb..369448f5e4 100644
--- a/esmvalcore/preprocessor/_area.py
+++ b/esmvalcore/preprocessor/_area.py
@@ -48,18 +48,12 @@ def extract_region(cube, start_longitude, end_longitude, start_latitude,
     iris.cube.Cube
         smaller cube.
     """
-    # Converts Negative longitudes to 0 -> 360. standard
-    start_longitude = float(start_longitude)
-    end_longitude = float(end_longitude)
-    start_latitude = float(start_latitude)
-    end_latitude = float(end_latitude)
-
-    # Regular grid
     if abs(start_latitude) > 90.:
         raise ValueError(f"Invalid start_latitude: {start_latitude}")
     if abs(end_latitude) > 90.:
         raise ValueError(f"Invalid end_latitude: {end_latitude}")
 
+    # Regular grid
     if cube.coord('latitude').ndim == 1:
         # Iris check if any point of the cell is inside the region
         # To check only the center, ignore_bounds must be set to
@@ -72,33 +66,6 @@ def extract_region(cube, start_longitude, end_longitude, start_latitude,
         region_subset = region_subset.intersection(longitude=(0., 360.))
         return region_subset
 
-    # Irregular grids - not lazy.
-    lats = cube.coord('latitude').points
-    lons = cube.coord('longitude').points
-    mask = np.ma.array(cube.data).mask
-    mask += np.ma.masked_where(lats < start_latitude, lats).mask
-    mask += np.ma.masked_where(lats > end_latitude, lats).mask
-    mask += np.ma.masked_where(lons < start_longitude, lons).mask
-    mask += np.ma.masked_where(lons > end_longitude, lons).mask
-    cube.data = da.ma.masked_array(data=cube.data, mask=mask)
-    return cube
-
-
-def get_iris_analysis_operation(operator):
-    """
-    Determine the iris analysis operator from a string.
-
-    Map string to functional operator.
-
-    Parameters
-    ----------
-    operator: str
-        A named operator.
-
-    Returns
-    -------
-    function: A function from iris.analysis
-=======
     # Irregular grids
     lats = cube.coord('latitude').points
     lons = cube.coord('longitude').points
@@ -117,7 +84,6 @@ def get_iris_analysis_operation(operator):
         select_lats = (lats >= start_latitude) & (lats <= end_latitude)
     else:
         select_lats = (lats >= start_latitude) | (lats <= end_latitude)
->>>>>>> remotes/origin/development
 
     selection = select_lats & select_lons
     selection = da.broadcast_to(selection, cube.shape)
@@ -191,7 +157,7 @@ def tile_grid_areas(cube, fx_files):
         return grid_areas
 
     # Use dash.array.stack to tile areacello.
-    elif cube.ndim == 4 and grid_areas.ndim == 2:
+    if cube.ndim == 4 and grid_areas.ndim == 2:
         for shape in [1, 0]:
             grida = [grid_areas for itr in range(cube.shape[shape])]
             grid_areas = da.stack(grida, axis=0)
     elif cube.ndim == 4 and grid_areas.ndim == 3:
         grida = [grid_areas for itr in range(cube.shape[0])]
         grid_areas = da.stack(grida, axis=0)
     elif cube.ndim == 3 and grid_areas.ndim == 2:
         grida = [grid_areas for itr in range(cube.shape[0])]
         grid_areas = da.stack(grida, axis=0)
     else:
         raise ValueError('Grid and dataset number of dimensions not '
                          'recognised: {} and {}.'
                          ''.format(cube.ndim, grid_areas.ndim))
     return grid_areas
 
@@ -282,15 +248,8 @@ def area_statistics(cube, operator, fx_files=None):
 
     # TODO: implement weighted stdev, median, s var when available in iris.
     # See iris issue: https://github.com/SciTools/iris/issues/3208
-<<<<<<< HEAD
-    if operator == 'mean':
-        return cube.collapsed(coord_names,
-                              operation,
-                              weights=np.array(grid_areas))
-=======
     if operator_accept_weights(operator):
         return cube.collapsed(coord_names, operation, weights=grid_areas)
->>>>>>> remotes/origin/development
 
     # Many IRIS analysis functions do not accept weights arguments.
     return cube.collapsed(coord_names, operation)