From 907d49e3ddb2288b5cf2d83c0e5c8b78b122139f Mon Sep 17 00:00:00 2001 From: Bill Little Date: Fri, 7 Jan 2022 14:10:51 +0000 Subject: [PATCH 01/69] whatsnew highlight clarification (#4482) --- docs/src/whatsnew/latest.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 2787e0044f4..bb33423e1be 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -16,8 +16,8 @@ This document explains the changes made to Iris for this release The highlights for this minor release of Iris include: - * We've added support for `UGRID`_ meshes which can now be loaded and attached - to a cube. + * We've added experimental support for `UGRID`_ meshes which can now be loaded + and attached to a cube. And finally, get in touch with us on `GitHub`_ if you have any issues or feature requests for improving Iris. Enjoy! From a12d0b15bab3377b23a148e891270b13a0419c38 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Fri, 7 Jan 2022 21:39:34 +0000 Subject: [PATCH 02/69] Whatsnew entry for #4470. (#4485) --- docs/src/whatsnew/latest.rst | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index bb33423e1be..d8a5f820956 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -140,6 +140,11 @@ This document explains the changes made to Iris for this release so that a cube with lazy data awaiting a unit conversion can be pickled. (:issue:`4354 `, :pull:`4377`) +#. `@pp-mo`_ fixed a bug in netcdf loading, whereby *any* rotated latlon coordinate + was mistakenly interpreted as a latitude, usually resulting in two 'latitude's + instead of one latitude and one longitude. 
+ (:issue:`4460 `, :pull:`4470`) + 💣 Incompatible Changes ======================= From 20e0caeca1918d27a1115624dd966fb9f4079066 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Mon, 10 Jan 2022 09:54:04 +0000 Subject: [PATCH 03/69] Standard names table update. (#4483) * Standard names update. * --amend * Added whatsnew. * Update docs/src/whatsnew/latest.rst Co-authored-by: Bill Little * Update docs/src/whatsnew/latest.rst Co-authored-by: Bill Little Co-authored-by: Bill Little --- docs/src/developers_guide/release.rst | 9 + docs/src/whatsnew/latest.rst | 3 + etc/cf-standard-name-table.xml | 2033 ++++++++++++++----------- tools/generate_std_names.py | 4 +- 4 files changed, 1158 insertions(+), 891 deletions(-) diff --git a/docs/src/developers_guide/release.rst b/docs/src/developers_guide/release.rst index bcf075e4ae7..09b884302b8 100644 --- a/docs/src/developers_guide/release.rst +++ b/docs/src/developers_guide/release.rst @@ -36,6 +36,14 @@ Ensure that any behaviour which has been deprecated for the correct number of previous releases is now finally changed. More detail, including the correct number of releases, is in :ref:`iris_development_deprecations`. +Standard Names +~~~~~~~~~~~~~~ + +Update the file ``etc/cf-standard-name-table.xml`` to the latest CF standard names, +from the `latest CF standard names`_. +( This is used during build to automatically generate the sourcefile +``lib/iris/std_names.py``. ) + Release Branch -------------- @@ -268,3 +276,4 @@ Post Release Steps .. _rc_iris: https://anaconda.org/conda-forge/iris/labels .. _Generating Distribution Archives: https://packaging.python.org/tutorials/packaging-projects/#generating-distribution-archives .. _Packaging Your Project: https://packaging.python.org/guides/distributing-packages-using-setuptools/#packaging-your-project +.. 
_latest CF standard names: http://cfconventions.org/standard-names.html \ No newline at end of file diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index d8a5f820956..89711e36d1a 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -90,6 +90,9 @@ This document explains the changes made to Iris for this release benchmark scripts. Also added a regridding benchmark that uses this data (:pull:`4402`) +#. `@pp-mo`_ updated to the latest CF Standard Names Table ``v78`` (21 Sept 2021). + (:issue:`4479`, :pull:`4483`) + 🐛 Bugs Fixed ============= diff --git a/etc/cf-standard-name-table.xml b/etc/cf-standard-name-table.xml index 5a19f8d5b1b..bd761681927 100644 --- a/etc/cf-standard-name-table.xml +++ b/etc/cf-standard-name-table.xml @@ -1,7 +1,7 @@ - 77 - 2021-01-19T13:38:50Z + 78 + 2021-09-21T11:55:06Z Centre for Environmental Data Analysis support@ceda.ac.uk @@ -489,6 +489,13 @@ + + m2 s-2 + + + One-half the scalar product of the air velocity and vorticity vectors, where vorticity refers to the standard name atmosphere_upward_absolute_vorticity. Helicity is proportional to the strength of the flow, the amount of vertical wind shear, and the amount of turning in the flow. + + m2 s-1 35 @@ -2467,7 +2474,7 @@ 1 - The "beam_consistency_indicator" is the degree to which the magnitudes of a collection (ensemble) of acoustic signals from multiple underwater acoustic transceivers relate to each other. It is used as a data quality assessment parameter in ADCP (acoustic doppler current profiler) instruments and is frequently referred to as "correlation magnitude". Convention is that the larger the value, the higher the signal to noise ratio and therefore the better the quality of the current vector measurements; the maximum value of the indicator is 128. + The "beam_consistency_indicator" is the degree to which the received acoustic pulse is correlated with the transmitted pulse. 
It is used as a data quality assessment parameter in ADCP (acoustic doppler current profiler) instruments and is frequently referred to as "correlation magnitude". Convention is that the larger the value, the higher the signal to noise ratio and therefore the better the quality of the current vector measurements; the maximum value of the indicator is 128. @@ -2491,11 +2498,11 @@ The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. Altitude is the (geometric) height above the geoid, which is the reference geopotential surface. The geoid is similar to mean sea level. "Bedrock" is the solid Earth surface beneath land ice, ocean water or soil. The zero of bedrock altitude change is arbitrary. Isostatic adjustment is the vertical movement of the lithosphere due to changing surface ice and water loads. - + - "Biological taxon" is a name or other label identifying an organism or a group of organisms as belonging to a unit of classification in a hierarchical taxonomy. The quantity with standard name biological_taxon_identifier is the machine-readable identifier for the taxon registration in either WoRMS (the AphiaID) or ITIS (the taxonomic serial number or TSN), including namespace. The namespace strings are 'aphia:' or 'tsn:'. For example, Calanus finmarchicus is encoded as either 'aphia:104464' or 'tsn:85272'. For the marine domain WoRMS has more complete coverage and so aphia Ids are preferred. See Section 6.1.2 of the CF convention (version 1.8 or later) for information about biological taxon auxiliary coordinate variables. + "Biological taxon" is a name or other label identifying an organism or a group of organisms as belonging to a unit of classification in a hierarchical taxonomy. 
The quantity with standard name biological_taxon_lsid is the machine-readable identifier based on a taxon registration system using the syntax convention specified for the Life Science Identifier (LSID) - urn:lsid:<Authority>:<Namespace>:<ObjectID>[:<Version>]. This includes the reference classification in the element and these are restricted by the LSID governance. It is strongly recommended in CF that the authority chosen is World Register of Marine Species (WoRMS) for oceanographic data and Integrated Taxonomic Information System (ITIS) for freshwater and terrestrial data. See Section 6.1.2 of the CF convention (version 1.8 or later) for information about biological taxon auxiliary coordinate variables. This identifier is a narrower equivalent to the scientificNameID field in the Darwin Core Standard. @@ -2687,6 +2694,13 @@ "Amount" means mass per unit area. Zero change in land ice amount is an arbitrary level. "Land ice" means glaciers, ice-caps and ice-sheets resting on bedrock and also includes ice-shelves. + + kg + + + Zero change in land ice mass is an arbitrary level. "Land ice" means glaciers, ice-caps and ice-sheets resting on bedrock and also includes ice-shelves. The horizontal domain over which the quantity is calculated is described by the associated coordinate variables and coordinate bounds or by a coordinate variable or scalar coordinate variable with the standard name of "region" supplied according to section 6.1.1 of the CF conventions. + + kg m-2 @@ -2922,7 +2936,7 @@ m-3 - "Colony forming unit" means an estimate of the viable bacterial or fungal numbers determined by counting colonies grown from a sample. "Number concentration" means the number of particles or other specified objects per unit volume. "Biological taxon" is a name or other label identifying an organism or a group of organisms as belonging to a unit of classification in a hierarchical taxonomy. 
There must be an auxiliary coordinate variable with standard name biological_taxon_name to identify the taxon in human readable format and optionally an auxiliary coordinate variable with standard name biological_taxon_identifier to provide a machine-readable identifier. See Section 6.1.2 of the CF convention (version 1.8 or later) for information about biological taxon auxiliary coordinate variables. + "Colony forming unit" means an estimate of the viable bacterial or fungal numbers determined by counting colonies grown from a sample. "Number concentration" means the number of particles or other specified objects per unit volume. "Biological taxon" is a name or other label identifying an organism or a group of organisms as belonging to a unit of classification in a hierarchical taxonomy. There must be an auxiliary coordinate variable with standard name biological_taxon_name to identify the taxon in human readable format and optionally an auxiliary coordinate variable with standard name biological_taxon_lsid to provide a machine-readable identifier. See Section 6.1.2 of the CF convention (version 1.8 or later) for information about biological taxon auxiliary coordinate variables. @@ -3079,6 +3093,13 @@ Depth is the vertical distance below the surface. + + m + + + The phrase depth_at_base_of_unfrozen_ground is the instantaneous depth of the downward penetration of thaw from the ground surface at a given time. Permafrost is soil or rock that has remained at a temperature at or below zero degrees Celsius throughout the seasonal cycle for two or more consecutive years. The maximum measurable depth_at_base_of_unfrozen_ground value as recorded at the end of a thawing season corresponds to the permafrost_active_layer_thickness. + + m @@ -3142,6 +3163,13 @@ "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. 
"Ambient_aerosol" means that the aerosol is measured or modelled at the ambient state of pressure, temperature and relative humidity that exists in its immediate environment. "Ambient aerosol particles" are aerosol particles that have taken up ambient water through hygroscopic growth. The extent of hygroscopic growth depends on the relative humidity and the composition of the particles. To specify the relative humidity and temperature at which the quantity described by the standard name applies, provide scalar coordinate variables with standard names of "relative_humidity" and "air_temperature". + + K + + + Sea surface temperature is usually abbreviated as "SST". It is the temperature of sea water near the surface (including the part under sea-ice, if any), not the skin or interface temperature, whose standard names are sea_surface_skin_temperature and surface_temperature, respectively. For the temperature of sea water at a particular depth or layer, a data variable of "sea_water_temperature" with a vertical coordinate axis should be used. Air temperature is the bulk temperature of the air, not the surface (skin) temperature. + + Pa @@ -3723,6 +3751,13 @@ A velocity is a vector quantity. "Eastward" indicates a vector component which is positive when directed eastward (negative westward). Flood water is water that covers land which is normally not covered by water. + + m s-1 + + + A velocity is a vector quantity. "Eastward" indicates a vector component which is positive when directed eastward (negative westward). Friction velocity is a reference wind velocity derived from the relationship between air density and downward stress and is usually applied at a level close to the surface where stress is assumed to independent of height and approximately proportional to the square of mean velocity. + + m s-1 @@ -4577,6 +4612,13 @@ "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. 
It is evaluated as the area of interest divided by the grid cell area. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. "Layer" means any layer with upper and lower boundaries that have constant values in some vertical coordinate. There must be a vertical coordinate variable indicating the extent of the layer(s). If the layers are model layers, the vertical coordinate can be "model_level_number", but it is recommended to specify a physical coordinate (in a scalar or auxiliary coordinate variable) as well. Standard names also exist for high, medium and low cloud types. Standard names referring only to "cloud_area_fraction" should be used for quantities for the whole atmosphere column. Cloud area fraction is also called "cloud amount" and "cloud cover". + + 1 + + + ice_volume_in_frozen_ground_in_excess_of_pore_volume_in_unfrozen_ground_expressed_as_fraction_of_frozen_ground_volume represents the fractional amount of "excess ice" in frozen ground. Excess ice is the volume of ice in the ground which exceeds the total pore volume that the ground would have under natural unfrozen conditions. Due to the presence of ground ice, the total water content of a frozen soil may exceed that corresponding to its normally consolidated state when unfrozen. As a result, upon thawing, a soil containing excess ice will settle under its own weight until it attains its consolidated state. Reference: van Everdingen, R. O. editor 1998: Multi-language glossary of permafrost and related ground ice terms. International Permafrost Association. + + m3 s-1 @@ -4588,7 +4630,7 @@ m s-1 - Sea water velocity is a vector quantity that is the speed at which water travels in a specified direction. The "indicative error" is an estimate of the quality of a sea water velocity profile measured using an ADCP (acoustic doppler current profiler). It is determined by differencing duplicate error velocity measurements made using different pairs of beams. 
The parameter is frequently referred to as the "error velocity". + Sea water velocity is a vector quantity that is the speed at which water travels in a specified direction. The "indicative error" is an estimate of the quality of a sea water velocity profile measured using an ADCP (acoustic doppler current profiler). It is determined by the difference between the vertical velocity calculated from two 3-beam solutions. The parameter is frequently referred to as the "error velocity". @@ -7671,6 +7713,13 @@ "Content" indicates a quantity per unit area. + + J Kg-1 + + + The lightning_potential_index measures the potential for charge generation and separation that leads to lightning flashes in convective thunderstorms. It is derived from the model simulated grid-scale updraft velocity and the mass mixing-ratios of liquid water, cloud ice, snow, and graupel. + + J @@ -8081,21 +8130,21 @@ kg m-3 - "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "expressed_as" is used in the construction "A_expressed_as_B", where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. Mass concentration of biota expressed as carbon is also referred to as "carbon biomass". "Biological taxon" is a name or other label identifying an organism or a group of organisms as belonging to a unit of classification in a hierarchical taxonomy. 
There must be an auxiliary coordinate variable with standard name biological_taxon_name to identify the taxon in human readable format and optionally an auxiliary coordinate variable with standard name biological_taxon_identifier to provide a machine-readable identifier. See Section 6.1.2 of the CF convention (version 1.8 or later) for information about biological taxon auxiliary coordinate variables. + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "expressed_as" is used in the construction "A_expressed_as_B", where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. Mass concentration of biota expressed as carbon is also referred to as "carbon biomass". "Biological taxon" is a name or other label identifying an organism or a group of organisms as belonging to a unit of classification in a hierarchical taxonomy. There must be an auxiliary coordinate variable with standard name biological_taxon_name to identify the taxon in human readable format and optionally an auxiliary coordinate variable with standard name biological_taxon_lsid to provide a machine-readable identifier. See Section 6.1.2 of the CF convention (version 1.8 or later) for information about biological taxon auxiliary coordinate variables. kg m-3 - "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". 
The phrase "expressed_as" is used in the construction "A_expressed_as_B", where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. "Biological taxon" is a name or other label identifying an organism or a group of organisms as belonging to a unit of classification in a hierarchical taxonomy. There must be an auxiliary coordinate variable with standard name biological_taxon_name to identify the taxon in human readable format and optionally an auxiliary coordinate variable with standard name biological_taxon_identifier to provide a machine-readable identifier. See Section 6.1.2 of the CF convention (version 1.8 or later) for information about biological taxon auxiliary coordinate variables. Chlorophylls are the green pigments found in most plants, algae and cyanobacteria; their presence is essential for photosynthesis to take place. There are several different forms of chlorophyll that occur naturally. All contain a chlorin ring (chemical formula C20H16N4) which gives the green pigment and a side chain whose structure varies. The naturally occurring forms of chlorophyll contain between 35 and 55 carbon atoms. + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "expressed_as" is used in the construction "A_expressed_as_B", where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. 
"Biological taxon" is a name or other label identifying an organism or a group of organisms as belonging to a unit of classification in a hierarchical taxonomy. There must be an auxiliary coordinate variable with standard name biological_taxon_name to identify the taxon in human readable format and optionally an auxiliary coordinate variable with standard name biological_taxon_lsid to provide a machine-readable identifier. See Section 6.1.2 of the CF convention (version 1.8 or later) for information about biological taxon auxiliary coordinate variables. Chlorophylls are the green pigments found in most plants, algae and cyanobacteria; their presence is essential for photosynthesis to take place. There are several different forms of chlorophyll that occur naturally. All contain a chlorin ring (chemical formula C20H16N4) which gives the green pigment and a side chain whose structure varies. The naturally occurring forms of chlorophyll contain between 35 and 55 carbon atoms. kg m-3 - "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "expressed_as" is used in the construction "A_expressed_as_B", where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. Mass concentration of biota expressed as nitrogen is also referred to as "nitrogen biomass". "Biological taxon" is a name or other label identifying an organism or a group of organisms as belonging to a unit of classification in a hierarchical taxonomy. 
There must be an auxiliary coordinate variable with standard name biological_taxon_name to identify the taxon in human readable format and optionally an auxiliary coordinate variable with standard name biological_taxon_identifier to provide a machine-readable identifier. See Section 6.1.2 of the CF convention (version 1.8 or later) for information about biological taxon auxiliary coordinate variables. + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "expressed_as" is used in the construction "A_expressed_as_B", where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. Mass concentration of biota expressed as nitrogen is also referred to as "nitrogen biomass". "Biological taxon" is a name or other label identifying an organism or a group of organisms as belonging to a unit of classification in a hierarchical taxonomy. There must be an auxiliary coordinate variable with standard name biological_taxon_name to identify the taxon in human readable format and optionally an auxiliary coordinate variable with standard name biological_taxon_lsid to provide a machine-readable identifier. See Section 6.1.2 of the CF convention (version 1.8 or later) for information about biological taxon auxiliary coordinate variables. @@ -9449,6 +9498,13 @@ Mass fraction is used in the construction mass_fraction_of_X_in_Y, where X is a material constituent of Y. It means the ratio of the mass of X to the mass of Y (including X). A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. 
Chlorophylls are the green pigments found in most plants, algae and cyanobacteria; their presence is essential for photosynthesis to take place. There are several different forms of chlorophyll that occur naturally; all contain a chlorin ring which gives the green pigment and a side chain whose structure varies. Chlorophyll-a is the most commonly occurring form of natural chlorophyll. + + 1 + + + "Mass fraction" is used in the construction "mass_fraction_of_X_in_Y", where X is a material constituent of Y. It is evaluated as the mass of X divided by the mass of Y (including X). It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. Grain-size class distribution is based on the Udden-Wentworth scale. + + 1 @@ -9610,6 +9666,13 @@ Mass fraction is used in the construction mass_fraction_of_X_in_Y, where X is a material constituent of Y. It means the ratio of the mass of X to the mass of Y (including X). Graupel consists of heavily rimed snow particles, often called snow pellets; often indistinguishable from very small soft hail except when the size convention that hail must have a diameter greater than 5 mm is adopted. Reference: American Meteorological Society Glossary http://glossary.ametsoc.org/wiki/Graupel. There are also separate standard names for hail. Standard names for "graupel_and_hail" should be used to describe data produced by models that do not distinguish between hail and graupel. + + 1 + + + "Mass fraction" is used in the construction "mass_fraction_of_X_in_Y'', where X is a material constituent of Y. It is evaluated as the mass of X divided by the mass of Y (including X). It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. Grain-size class distribution is based on the Udden-Wentworth scale. + + 1 @@ -9918,6 +9981,13 @@ "Mass fraction" is used in the construction "mass_fraction_of_X_in_Y", where X is a material constituent of Y. 
It means the ratio of the mass of X to the mass of Y (including X). A chemical or biological species denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "expressed_as" is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. "Noy" describes a family of chemical species. The family usually includes atomic nitrogen (N), nitrogen monoxide (NO), nitrogen dioxide (NO2), dinitrogen pentoxide (N2O5), nitric acid (HNO3), peroxynitric acid (HNO4), bromine nitrate (BrONO2) , chlorine nitrate (ClONO2) and organic nitrates (most notably peroxyacetyl nitrate, sometimes referred to as PAN, (CH3COO2NO2)). The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. + + 1 + + + "Mass fraction" is used in the construction "mass_fraction_of_X_in_Y", where X is a material constituent of Y. It is evaluated as the mass of X divided by the mass of Y (including X). It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. + + 1 @@ -10191,6 +10261,13 @@ The quantity with standard name mass_fraction_of_rainfall_falling_onto_surface_snow is the mass of rainfall falling onto snow as a fraction of the mass of rainfall falling within the area of interest. Surface snow refers to the snow on the solid ground or on surface ice cover, but excludes, for example, falling snowflakes and snow on plants. The surface called "surface" means the lower boundary of the atmosphere. 
Unless indicated in the cell_methods attribute, a quantity is assumed to apply to the whole area of each horizontal grid box. + + 1 + + + "Mass fraction" is used in the construction "mass_fraction_of_X_in_Y'', where X is a material constituent of Y. It is evaluated as the mass of X divided by the mass of Y (including X). It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. Grain-size class distribution is based on the Udden-Wentworth scale. + + 1 @@ -10219,6 +10296,13 @@ "Mass fraction" is used in the construction "mass_fraction_of_X_in_Y", where X is a material constituent of Y. It means the ratio of the mass of X to the mass of Y (including X). A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". Shallow convective cloud is nonprecipitating cumulus cloud with a cloud top below 3000m above the surface produced by the convection schemes in an atmosphere model. Some atmosphere models differentiate between shallow and deep convection. "Cloud liquid water" refers to the liquid phase of cloud water. A diameter of 0.2 mm has been suggested as an upper limit to the size of drops that shall be regarded as cloud drops; larger drops fall rapidly enough so that only very strong updrafts can sustain them. Any such division is somewhat arbitrary, and active cumulus clouds sometimes contain cloud drops much larger than this. Reference: AMS Glossary http://glossary.ametsoc.org/wiki/Cloud_drop. + + 1 + + + "Mass fraction" is used in the construction "mass_fraction_of_X_in_Y'', where X is a material constituent of Y. It is evaluated as the mass of X divided by the mass of Y (including X). It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. Grain-size class distribution is based on the Udden-Wentworth scale. 
+ + 1 @@ -10755,14 +10839,14 @@ mol m-3 - "Mole concentration" means number of moles per unit volume, also called "molarity", and is used in the construction "mole_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "expressed_as" is used in the construction "A_expressed_as_B", where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. "Biological taxon" is a name or other label identifying an organism or a group of organisms as belonging to a unit of classification in a hierarchical taxonomy. There must be an auxiliary coordinate variable with standard name biological_taxon_name to identify the taxon in human readable format and optionally an auxiliary coordinate variable with standard name biological_taxon_identifier to provide a machine-readable identifier. See Section 6.1.2 of the CF convention (version 1.8 or later) for information about biological taxon auxiliary coordinate variables. + "Mole concentration" means number of moles per unit volume, also called "molarity", and is used in the construction "mole_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "expressed_as" is used in the construction "A_expressed_as_B", where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. 
"Biological taxon" is a name or other label identifying an organism or a group of organisms as belonging to a unit of classification in a hierarchical taxonomy. There must be an auxiliary coordinate variable with standard name biological_taxon_name to identify the taxon in human readable format and optionally an auxiliary coordinate variable with standard name biological_taxon_lsid to provide a machine-readable identifier. See Section 6.1.2 of the CF convention (version 1.8 or later) for information about biological taxon auxiliary coordinate variables. mol m-3 - "Mole concentration" means number of moles per unit volume, also called "molarity", and is used in the construction "mole_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "expressed_as" is used in the construction "A_expressed_as_B", where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. "Biological taxon" is a name or other label identifying an organism or a group of organisms as belonging to a unit of classification in a hierarchical taxonomy. There must be an auxiliary coordinate variable with standard name biological_taxon_name to identify the taxon in human readable format and optionally an auxiliary coordinate variable with standard name biological_taxon_identifier to provide a machine-readable identifier. See Section 6.1.2 of the CF convention (version 1.8 or later) for information about biological taxon auxiliary coordinate variables. + "Mole concentration" means number of moles per unit volume, also called "molarity", and is used in the construction "mole_concentration_of_X_in_Y", where X is a material constituent of Y. 
A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "expressed_as" is used in the construction "A_expressed_as_B", where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. "Biological taxon" is a name or other label identifying an organism or a group of organisms as belonging to a unit of classification in a hierarchical taxonomy. There must be an auxiliary coordinate variable with standard name biological_taxon_name to identify the taxon in human readable format and optionally an auxiliary coordinate variable with standard name biological_taxon_lsid to provide a machine-readable identifier. See Section 6.1.2 of the CF convention (version 1.8 or later) for information about biological taxon auxiliary coordinate variables. @@ -12648,6 +12732,13 @@ The construction "moles_of_X_per_unit_mass_in_Y" is also called "molality" of X in Y, where X is a material constituent of Y. A chemical or biological species denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The chemical formula of CFC11 is CFCl3. The IUPAC name for CFC11 is trichloro(fluoro)methane. + + mol kg-1 + + + The construction "moles_of_X_per_unit_mass_in_Y" is also called "molality" of X in Y, where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". "Dissolved inorganic carbon" describes a family of chemical species in solution, including carbon dioxide, carbonic acid and the carbonate and bicarbonate anions. "Dissolved inorganic carbon" is the term used in standard names for all species belonging to the family that are represented within a given model. 
Friction velocity is a reference wind velocity derived from the relationship between air density and downward stress and is usually applied at a level close to the surface where stress is assumed to be independent of height and approximately proportional to the square of mean velocity.
There must be an auxiliary coordinate variable with standard name biological_taxon_name to identify the taxon in human readable format and optionally an auxiliary coordinate variable with standard name biological_taxon_lsid to provide a machine-readable identifier. See Section 6.1.2 of the CF convention (version 1.8 or later) for information about biological taxon auxiliary coordinate variables. @@ -13436,21 +13534,21 @@ 1 - Air temperature is the bulk temperature of the air, not the surface (skin) temperature. A variable whose standard name has the form number_of_days_with_X_below|above_threshold is a count of the number of days on which the condition X_below|above_threshold is satisfied. It must have a coordinate variable or scalar coordinate variable with the a standard name of X to supply the threshold(s). It must have a climatological time variable, and a cell_methods entry for within days which describes the processing of quantity X before the threshold is applied. A number_of_days is an extensive quantity in time, and the cell_methods entry for over days should be "sum". + Air temperature is the bulk temperature of the air, not the surface (skin) temperature. A variable whose standard name has the form number_of_days_with_X_below|above_threshold is a count of the number of days on which the condition X_below|above_threshold is satisfied. It must have a coordinate variable or scalar coordinate variable with the standard name of X to supply the threshold(s). It must have a climatological time variable, and a cell_methods entry for within days which describes the processing of quantity X before the threshold is applied. A number_of_days is an extensive quantity in time, and the cell_methods entry for over days should be "sum". 1 - Air temperature is the bulk temperature of the air, not the surface (skin) temperature. 
A variable whose standard name has the form number_of_days_with_X_below|above_threshold is a count of the number of days on which the condition X_below|above_threshold is satisfied. It must have a coordinate variable or scalar coordinate variable with the a standard name of X to supply the threshold(s). It must have a climatological time variable, and a cell_methods entry for within days which describes the processing of quantity X before the threshold is applied. A number_of_days is an extensive quantity in time, and the cell_methods entry for over days should be "sum". + Air temperature is the bulk temperature of the air, not the surface (skin) temperature. A variable whose standard name has the form number_of_days_with_X_below|above_threshold is a count of the number of days on which the condition X_below|above_threshold is satisfied. It must have a coordinate variable or scalar coordinate variable with the standard name of X to supply the threshold(s). It must have a climatological time variable, and a cell_methods entry for within days which describes the processing of quantity X before the threshold is applied. A number_of_days is an extensive quantity in time, and the cell_methods entry for over days should be "sum". 1 - The construction lwe_thickness_of_X_amount or _content means the vertical extent of a layer of liquid water having the same mass per unit area. "Precipitation" in the earth's atmosphere means precipitation of water in all phases. The abbreviation "lwe" means liquid water equivalent. A variable whose standard name has the form number_of_days_with_X_below|above_threshold is a count of the number of days on which the condition X_below|above_threshold is satisfied. It must have a coordinate variable or scalar coordinate variable with the a standard name of X to supply the threshold(s). 
It must have a climatological time variable, and a cell_methods entry for within days which describes the processing of quantity X before the threshold is applied. A number_of_days is an extensive quantity in time, and the cell_methods entry for over days should be "sum". + The construction lwe_thickness_of_X_amount or _content means the vertical extent of a layer of liquid water having the same mass per unit area. "Precipitation" in the earth's atmosphere means precipitation of water in all phases. The abbreviation "lwe" means liquid water equivalent. A variable whose standard name has the form number_of_days_with_X_below|above_threshold is a count of the number of days on which the condition X_below|above_threshold is satisfied. It must have a coordinate variable or scalar coordinate variable with the standard name of X to supply the threshold(s). It must have a climatological time variable, and a cell_methods entry for within days which describes the processing of quantity X before the threshold is applied. A number_of_days is an extensive quantity in time, and the cell_methods entry for over days should be "sum". @@ -13464,7 +13562,7 @@ 1 - Wind is defined as a two-dimensional (horizontal) air velocity vector, with no vertical component. (Vertical motion in the atmosphere has the standard name upward_air_velocity.) The wind speed is the magnitude of the wind velocity. A variable whose standard name has the form number_of_days_with_X_below|above_threshold is a count of the number of days on which the condition X_below|above_threshold is satisfied. It must have a coordinate variable or scalar coordinate variable with the a standard name of X to supply the threshold(s). It must have a climatological time variable, and a cell_methods entry for within days which describes the processing of quantity X before the threshold is applied. A number_of_days is an extensive quantity in time, and the cell_methods entry for over days should be "sum". 
+ Wind is defined as a two-dimensional (horizontal) air velocity vector, with no vertical component. (Vertical motion in the atmosphere has the standard name upward_air_velocity.) The wind speed is the magnitude of the wind velocity. A variable whose standard name has the form number_of_days_with_X_below|above_threshold is a count of the number of days on which the condition X_below|above_threshold is satisfied. It must have a coordinate variable or scalar coordinate variable with the standard name of X to supply the threshold(s). It must have a climatological time variable, and a cell_methods entry for within days which describes the processing of quantity X before the threshold is applied. A number_of_days is an extensive quantity in time, and the cell_methods entry for over days should be "sum". @@ -17359,6 +17457,13 @@ The "reaction rate" is the rate at which the reactants of a chemical reaction form the products. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. The rate of "hydroxyl radical destruction due to reaction with nmvoc" is the nmvoc reactivity with regard to reactions with OH. It is the weighted sum of the reactivity of all individual nmvoc species with OH. The chemical formula for the hydroxyl radical is OH. In chemistry, a "radical" is a highly reactive, and therefore short lived, species. The abbreviation "nmvoc" means non methane volatile organic compounds; "nmvoc" is the term used in standard names to describe the group of chemical species having this classification that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. 
+ + 1 + + + The phrase "ratio_of_X_to_Y" means X/Y. "X_volume" means the volume occupied by X within the grid cell. Pore volume is the volume of the porosity of the ground under natural, unfrozen conditions. This is often known as "ice saturation index". + + K s-1 @@ -17391,7 +17496,7 @@ 1 - Realization is used to label a dimension that can be thought of asa statistical sample, e.g., labelling members of a model ensemble. + Realization is used to label a dimension that can be thought of as a statistical sample, e.g., labelling members of a model ensemble. @@ -17597,6 +17702,13 @@ The sea_floor_depth_below_sea_surface is the vertical distance between the sea surface and the seabed as measured at a given point in space including the variance caused by tides and possibly waves. + + m + + + The average size of grains (also known as particles) in a sediment sample. + + 1 @@ -17681,6 +17793,13 @@ The term sea_ice_extent means the total area of all grid cells in which the sea ice area fraction equals or exceeds a threshold, often chosen to be 15 per cent. The threshold must be specified by supplying a coordinate variable or scalar coordinate variable with the standard name of sea_ice_area_fraction. The horizontal domain over which sea ice extent is calculated is described by the associated coordinate variables and coordinate bounds or by a coordinate variable or scalar coordinate variable with the standard name of "region" supplied according to section 6.1.1 of the CF conventions. "Sea ice" means all ice floating in the sea which has formed from freezing sea water, rather than by other processes such as calving of land ice to form icebergs. + + m + + + "Sea ice" means all ice floating in the sea which has formed from freezing sea water, rather than by other processes such as calving of land ice to form icebergs. An ice floe is a flat expanse of sea ice, generally taken to be less than 10 km across. 
"Eastward" indicates a vector component which is positive when directed eastward (negative westward).
A downward northward stress is a downward flux of northward momentum, which accelerates the lower medium northward and the upper medium southward. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. The phrase "dissipation_of_sea_surface_waves" means the stress associated with sea surface waves dissipation processes such as whitecapping. + + K @@ -18367,6 +18500,13 @@ Wave slope describes an aspect of sea surface wave geometry related to sea surface roughness. Mean square slope describes a derivation over multiple waves within a sea-state, for example calculated from moments of the wave directional spectrum. The phrase "y_slope" indicates that slope values are derived from vector components along the grid y-axis. + + m-1 + + + The wave directional spectrum can be written as a five dimensional function S(t,x,y,k,theta) where t is time, x and y are horizontal coordinates (such as longitude and latitude), k is wavenumber and theta is direction. S has the standard name sea_surface_wave_directional_variance_spectral_density. S can be integrated over direction to give S1= integral(S dtheta) and this quantity has the standard name sea_surface_wave_variance_spectral_density. Wavenumber is the number of oscillations of a wave per unit distance. Wavenumber moments, M(n) of S1 can then be calculated as follows: M(n) = integral(S1 k^n dk), where k^n is k to the power of n. The mean wavenumber, k(1), is calculated as the ratio M(1)/M(0). + + s @@ -18451,6 +18591,27 @@ Sea surface wave variance spectral density is the variance of wave amplitude within a range of wave frequency. + + Pa + + + "Sea surface wave radiation stress" describes the excess momentum flux caused by sea surface waves. Radiation stresses behave as a second-order tensor. "xx" indicates the component of the tensor along the grid x_ axis. 
+ + + + Pa + + + "Sea surface wave radiation stress" describes the excess momentum flux caused by sea surface waves. Radiation stresses behave as a second-order tensor. "xy" indicates the lateral contributions to x_ and y_ components of the tensor. + + + + Pa + + + "Sea surface wave radiation stress" describes the excess momentum flux caused by sea surface waves. Radiation stresses behave as a second-order tensor. "yy" indicates the component of the tensor along the grid y_ axis. + + degree @@ -19088,6 +19249,13 @@ "Content" indicates a quantity per unit area. The "soil content" of a quantity refers to the vertical integral from the surface down to the bottom of the soil model. For the content between specified levels in the soil, standard names including content_of_soil_layer are used. Soil carbon is returned to the atmosphere as the organic matter decays. The decay process takes varying amounts of time depending on the composition of the organic matter, the temperature and the availability of moisture. A carbon "soil pool" means the carbon contained in organic matter which has a characteristic period over which it decays and releases carbon into the atmosphere. "Slow soil pool" refers to the decay of organic matter in soil with a characteristic period of more than a hundred years under reference climate conditions of a temperature of 20 degrees Celsius and no water limitations. + + 1 + + + "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. Snow "viewable from above" refers to the snow on objects or the ground as viewed from above, which excludes, for example, falling snow flakes and snow obscured by a canopy, vegetative cover, or other features resting on the surface. 
Solar zenith angle is the angle between the line of sight to the sun and the local vertical.
Potential energy is the sum of the gravitational potential energy relative to the geoid and the centripetal potential energy. (The geopotential is the specific potential energy.) + + J kg-1 K-1 + + + Thermal capacity, or heat capacity, is the amount of heat energy required to increase the temperature of 1 kg of material by 1 K. It is a property of the material. + + J kg-1 K-1 @@ -19470,28 +19645,28 @@ day - Air temperature is the bulk temperature of the air, not the surface (skin) temperature. A spell is the number of consecutive days on which the condition X_below|above_threshold is satisfied. A variable whose standard name has the form spell_length_of_days_with_X_below|above_threshold must have a coordinate variable or scalar coordinate variable with the a standard name of X to supply the threshold(s). It must have a climatological time variable, and a cell_method entry for within days which describes the processing of quantity X before the threshold is applied. A spell_length_of_days is an intensive quantity in time, and the cell_methods entry for over days can be any of the methods listed in Appendix E appropriate for intensive quantities e.g. "maximum", "minimum" or "mean". + Air temperature is the bulk temperature of the air, not the surface (skin) temperature. A spell is the number of consecutive days on which the condition X_below|above_threshold is satisfied. A variable whose standard name has the form spell_length_of_days_with_X_below|above_threshold must have a coordinate variable or scalar coordinate variable with the standard name of X to supply the threshold(s). It must have a climatological time variable, and a cell_method entry for within days which describes the processing of quantity X before the threshold is applied. A spell_length_of_days is an intensive quantity in time, and the cell_methods entry for over days can be any of the methods listed in Appendix E appropriate for intensive quantities e.g. "maximum", "minimum" or "mean". 
day - Air temperature is the bulk temperature of the air, not the surface (skin) temperature. A spell is the number of consecutive days on which the condition X_below|above_threshold is satisfied. A variable whose standard name has the form spell_length_of_days_with_X_below|above_threshold must have a coordinate variable or scalar coordinate variable with the a standard name of X to supply the threshold(s). It must have a climatological time variable, and a cell_method entry for within days which describes the processing of quantity X before the threshold is applied. A spell_length_of_days is an intensive quantity in time, and the cell_methods entry for over days can be any of the methods listed in Appendix E appropriate for intensive quantities e.g. "maximum", "minimum" or "mean". + Air temperature is the bulk temperature of the air, not the surface (skin) temperature. A spell is the number of consecutive days on which the condition X_below|above_threshold is satisfied. A variable whose standard name has the form spell_length_of_days_with_X_below|above_threshold must have a coordinate variable or scalar coordinate variable with the standard name of X to supply the threshold(s). It must have a climatological time variable, and a cell_method entry for within days which describes the processing of quantity X before the threshold is applied. A spell_length_of_days is an intensive quantity in time, and the cell_methods entry for over days can be any of the methods listed in Appendix E appropriate for intensive quantities e.g. "maximum", "minimum" or "mean". day - "Amount" means mass per unit area. "Precipitation" in the earth's atmosphere means precipitation of water in all phases. The construction lwe_thickness_of_X_amount or _content means the vertical extent of a layer of liquid water having the same mass per unit area. The abbreviation "lwe" means liquid water equivalent. 
A spell is the number of consecutive days on which the condition X_below|above_threshold is satisfied. A variable whose standard name has the form spell_length_of_days_with_X_below|above_threshold must have a coordinate variable or scalar coordinate variable with the a standard name of X to supply the threshold(s). It must have a climatological time variable, and a cell_method entry for within days which describes the processing of quantity X before the threshold is applied. A spell_length_of_days is an intensive quantity in time, and the cell_methods entry for over days can be any of the methods listed in Appendix E appropriate for intensive quantities e.g. "maximum", "minimum" or "mean". + "Amount" means mass per unit area. "Precipitation" in the earth's atmosphere means precipitation of water in all phases. The construction lwe_thickness_of_X_amount or _content means the vertical extent of a layer of liquid water having the same mass per unit area. The abbreviation "lwe" means liquid water equivalent. A spell is the number of consecutive days on which the condition X_below|above_threshold is satisfied. A variable whose standard name has the form spell_length_of_days_with_X_below|above_threshold must have a coordinate variable or scalar coordinate variable with the standard name of X to supply the threshold(s). It must have a climatological time variable, and a cell_method entry for within days which describes the processing of quantity X before the threshold is applied. A spell_length_of_days is an intensive quantity in time, and the cell_methods entry for over days can be any of the methods listed in Appendix E appropriate for intensive quantities e.g. "maximum", "minimum" or "mean". day - "Amount" means mass per unit area. "Precipitation" in the earth's atmosphere means precipitation of water in all phases.The construction lwe_thickness_of_X_amount or _content means the vertical extent of a layer of liquid water having the same mass per unit area. 
"Precipitation" in the earth's atmosphere means precipitation of water in all phases. The construction lwe_thickness_of_X_amount or _content means the vertical extent of a layer of liquid water having the same mass per unit area.
The steric change in sea surface height is the change in height that a water column of standard temperature zero degrees Celsius and practical salinity S=35.0 would undergo when its temperature and salinity are changed to the observed values. The sum of the quantities with standard names thermosteric_change_in_sea_surface_height and halosteric_change_in_sea_surface_height is the total steric change in the water column height, which has the standard name of steric_change_in_sea_surface_height. The sum of the quantities with standard names sea_water_mass_per_unit_area_expressed_as_thickness and steric_change_in_sea_surface_height is the total thickness of the sea water column. + + m s-1 + + + Storm motion speed is defined as a two dimensional velocity vector, with no vertical component. (Vertical motion in the atmosphere has the standard name upward_air_velocity.) It is defined as the average speed of a supercell, and the direction the storm will move from. It is not dependent on the orientation of the ground-relative winds. Storm motion speed generally follows the methodology outlined in Bunkers et al. (2000). + + 1 @@ -19928,6 +20110,20 @@ The surface called "surface" means the lower boundary of the atmosphere. "Surface stress" means the shear stress (force per unit area) exerted by the wind at the surface. A downward stress is a downward flux of momentum. Over large bodies of water, wind stress can drive near-surface currents. "Downward" indicates a vector component which is positive when directed downward (negative upward). "Eastward" indicates a vector component which is positive when directed eastward (negative westward). "Downward eastward" indicates the ZX component of a tensor. A downward eastward stress is a downward flux of eastward momentum, which accelerates the lower medium eastward and the upper medium westward. 
The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Boundary layer mixing" means turbulent motions that transport heat, water, momentum and chemical constituents within the atmospheric boundary layer and affect exchanges between the surface and the atmosphere. The atmospheric boundary layer is typically characterised by a well-mixed sub-cloud layer of order 500 metres, and by a more extended conditionally unstable layer with boundary-layer clouds up to 2 km. (Reference: IPCC Third Assessment Report, Working Group 1: The Scientific Basis, 7.2.2.3, https://archive.ipcc.ch/ipccreports/tar/wg1/273.htm). + + Pa + + + The surface called "surface" means the lower boundary of the atmosphere. "Surface stress" means the shear stress (force per unit area) exerted by the wind at the surface. A downward stress is a downward flux of momentum. Over large bodies of water, wind stress can drive near-surface currents. "Downward" indicates a vector component which is positive when directed downward (negative upward). "Eastward" indicates a vector component which is positive when directed eastward (negative westward). "Downward eastward" indicates the ZX component of a tensor. A downward eastward stress is a downward flux of eastward momentum, which accelerates the lower medium eastward and the upper medium westward. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Viscosity" means the stress associated with viscous effects at the sea surface and is equivalent to the turbulent stress just outside the viscous sublayer. + + + + Pa + + + The surface called "surface" means the lower boundary of the atmosphere. 
"Surface stress" means the shear stress (force per unit area) exerted by the wind at the surface. A downward stress is a downward flux of momentum. Over large bodies of water, wind stress can drive near-surface currents. "Downward" indicates a vector component which is positive when directed downward (negative upward). "Eastward" indicates a vector component which is positive when directed eastward (negative westward). "Downward eastward" indicates the ZX component of a tensor. A downward eastward stress is a downward flux of eastward momentum, which accelerates the lower medium eastward and the upper medium westward. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Sea surface waves" means the stress associated with form drag over sea surface waves. + + W m-2 @@ -20068,6 +20264,20 @@ The surface called "surface" means the lower boundary of the atmosphere. "Surface stress" means the shear stress (force per unit area) exerted by the wind at the surface. A downward stress is a downward flux of momentum. Over large bodies of water, wind stress can drive near-surface currents. "Downward" indicates a vector component which is positive when directed downward (negative upward). "Northward" indicates a vector component which is positive when directed northward (negative southward). "Downward northward" indicates the ZY component of a tensor. A downward northward stress is a downward flux of northward momentum, which accelerates the lower medium northward and the upper medium southward. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. 
"Boundary layer mixing" means turbulent motions that transport heat, water, momentum and chemical constituents within the atmospheric boundary layer and affect exchanges between the surface and the atmosphere. The atmospheric boundary layer is typically characterised by a well-mixed sub-cloud layer of order 500 metres, and by a more extended conditionally unstable layer with boundary-layer clouds up to 2 km. (Reference: IPCC Third Assessment Report, Working Group 1: The Scientific Basis, 7.2.2.3, https://archive.ipcc.ch/ipccreports/tar/wg1/273.htm). + + Pa + + + The surface called "surface" means the lower boundary of the atmosphere. "Surface stress" means the shear stress (force per unit area) exerted by the wind at the surface. A downward stress is a downward flux of momentum. Over large bodies of water, wind stress can drive near-surface currents. "Downward" indicates a vector component which is positive when directed downward (negative upward). "Northward" indicates a vector component which is positive when directed northward (negative southward). "Downward northward" indicates the ZY component of a tensor. A downward northward stress is a downward flux of northward momentum, which accelerates the lower medium northward and the upper medium southward. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Viscosity" means the stress associated with viscous effects at the sea surface and is equivalent to the turbulent stress just outside the viscous sublayer. + + + + Pa + + + The surface called "surface" means the lower boundary of the atmosphere. "Surface stress" means the shear stress (force per unit area) exerted by the wind at the surface. A downward stress is a downward flux of momentum. Over large bodies of water, wind stress can drive near-surface currents. 
"Downward" indicates a vector component which is positive when directed downward (negative upward). "Northward" indicates a vector component which is positive when directed northward (negative southward). "Downward northward" indicates the ZY component of a tensor. A downward northward stress is a downward flux of northward momentum, which accelerates the lower medium northward and the upper medium southward. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Sea surface waves" means the stress associated with form drag over sea surface waves. + + W m-2 @@ -23323,6 +23533,13 @@ The surface called "surface" means the lower boundary of the atmosphere. "anomaly" means difference from climatology. The surface temperature is the (skin) temperature at the interface, not the bulk temperature of the medium above or below. + + Pa + + + The surface called "surface" means the lower boundary of the atmosphere. "Surface stress" means the shear stress (force per unit area) exerted at the surface. An upward stress is an upward flux of momentum into the atmosphere. "Upward" indicates a vector component which is positive when directed upward (negative downward). "Eastward" indicates a vector component which is positive when directed eastward (negative westward). "Upward eastward" indicates the ZX component of a tensor. An upward eastward stress is an upward flux of eastward momentum, which accelerates the upper medium eastward and the lower medium westward. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Sea surface waves" means the stress associated with oscillatory motions of a wavy sea surface. 
+ + W m-2 @@ -23596,6 +23813,13 @@ The surface called "surface" means the lower boundary of the atmosphere. "Upward" indicates a vector component which is positive when directed upward (negative downward). In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. The chemical formula for dimethyl sulfide is (CH3)2S. Dimethyl sulfide is sometimes referred to as DMS. + + Pa + + + The surface called "surface" means the lower boundary of the atmosphere. "Surface stress" means the shear stress (force per unit area) exerted at the surface. An upward stress is an upward flux of momentum into the atmosphere. "Upward" indicates a vector component which is positive when directed upward (negative downward). "Northward" indicates a vector component which is positive when directed northward (negative southward). "Upward northward" indicates the ZY component of a tensor. An upward northward stress is an upward flux of northward momentum, which accelerates the upper medium northward and the lower medium southward. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Sea surface waves" means the stress associated with oscillatory motions of a wavy sea surface. + + W m-2 122 E146 @@ -23799,6 +24023,13 @@ The quantity with standard name temperature_flux_due_to_runoff_expressed_as_heat_flux_into_sea_water is the heat carried by the transfer of water into the liquid ocean by the process of runoff. This quantity additionally includes melt water from sea ice and icebergs. It is calculated relative to the heat that would be transported by runoff water entering the sea at zero degrees Celsius. 
It is calculated as the product QrunoffCpTrunoff, where Q runoff is the mass flux of liquid runoff entering the sea water (kg m-2 s-1), Cp is the specific heat capacity of water, and Trunoff is the temperature in degrees Celsius of the runoff water. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. Runoff is the liquid water which drains from land. If not specified, "runoff" refers to the sum of surface runoff and subsurface drainage. + + K + + + The temperature at any given depth (or in a layer) below the surface of the ground, excluding surficial snow and ice (but not permafrost or soil). For temperatures in surface lying snow and ice, the more specific standard names temperature_in_surface_snow and land_ice_temperature should be used. For temperatures measured or modelled specifically for the soil layer (the near-surface layer where plants sink their roots) the standard name soil_temperature should be used. + + K E238 @@ -28041,6 +28272,13 @@ The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. "Layer" means any layer with upper and lower boundaries that have constant values in some vertical coordinate. There must be a vertical coordinate variable indicating the extent of the layer(s). If the layers are model layers, the vertical coordinate can be model_level_number, but it is recommended to specify a physical coordinate (in a scalar or auxiliary coordinate variable) as well. 
+ + kg s-1 + + + The phrase "tendency_of_X" means derivative of X with respect to time. "Land ice" means glaciers, ice-caps and ice-sheets resting on bedrock and also includes ice-shelves. The horizontal domain over which the quantity is calculated is described by the associated coordinate variables and coordinate bounds or by a coordinate variable or scalar coordinate variable with the standard name of "region" supplied according to section 6.1.1 of the CF conventions. + + kg s-1 @@ -29623,6 +29861,13 @@ The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. Speed is the magnitude of velocity. Wind is defined as a two-dimensional (horizontal) air velocity vector, with no vertical component. (Vertical motion in the atmosphere has the standard name upward_air_velocity.) The wind speed is the magnitude of the wind velocity. + + W m-1 K-1 + + + Thermal conductivity is the constant k in the formula q = -k grad T where q is the heat transfer per unit time per unit area of a surface normal to the direction of transfer and grad T is the temperature gradient. Thermal conductivity is a property of the material. 
+ + J m-2 @@ -31227,14 +31472,110 @@ - - integral_wrt_time_of_surface_downward_northward_stress + + biological_taxon_lsid + + + + temperature_in_ground + + + + surface_snow_density + + + + soot_content_of_surface_snow + + + + liquid_water_content_of_surface_snow + + + + surface_snow_thickness + + + + thermal_energy_content_of_surface_snow + + + + temperature_in_surface_snow integral_wrt_time_of_surface_downward_eastward_stress + + integral_wrt_time_of_surface_downward_northward_stress + + + + tendency_of_atmosphere_mass_content_of_water_vapor_due_to_sublimation_of_surface_snow_and_ice + + + + atmosphere_upward_absolute_vorticity + + + + atmosphere_upward_relative_vorticity + + + + area_type + + + + area_type + + + + iron_growth_limitation_of_diazotrophic_phytoplankton + + + + growth_limitation_of_diazotrophic_phytoplankton_due_to_solar_irradiance + + + + tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_net_primary_production_by_diazotrophic_phytoplankton + + + + mole_concentration_of_diazotrophic_phytoplankton_expressed_as_carbon_in_sea_water + + + + mass_fraction_of_liquid_precipitation_in_air + + + + mass_fraction_of_liquid_precipitation_in_air + + + + mass_concentration_of_diazotrophic_phytoplankton_expressed_as_chlorophyll_in_sea_water + + + + air_pseudo_equivalent_potential_temperature + + + + tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_melting_to_cloud_liquid_water + + + + tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_heterogeneous_nucleation_from_cloud_liquid_water + + + + tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_riming_from_cloud_liquid_water + + nitrogen_growth_limitation_of_diazotrophic_phytoplankton @@ -31263,42 +31604,6 @@ effective_radius_of_cloud_liquid_water_particles_at_liquid_water_cloud_top - - mass_content_of_cloud_liquid_water_in_atmosphere_layer - - - - air_equivalent_potential_temperature - - - - 
number_concentration_of_stratiform_cloud_liquid_water_particles_at_stratiform_liquid_water_cloud_top - - - - number_concentration_of_convective_cloud_liquid_water_particles_at_convective_liquid_water_cloud_top - - - - wave_frequency - - - - upward_eastward_momentum_flux_in_air_due_to_nonorographic_eastward_gravity_waves - - - - tendency_of_troposphere_moles_of_carbon_monoxide - - - - tendency_of_atmosphere_moles_of_sulfate_dry_aerosol_particles - - - - tendency_of_atmosphere_mass_content_of_nitrate_dry_aerosol_particles_due_to_dry_deposition - - northward_heat_flux_in_air_due_to_eddy_advection @@ -31355,72 +31660,56 @@ atmosphere_mass_content_of_cloud_liquid_water - - mass_concentration_of_coarse_mode_ambient_aerosol_particles_in_air - - - - sea_water_velocity_to_direction - - - - sea_water_velocity_to_direction - - - - gross_primary_productivity_of_biomass_expressed_as_carbon - - - - eastward_water_vapor_flux_in_air + + mass_fraction_of_sulfate_dry_aerosol_particles_in_air - - sea_water_velocity_from_direction + + mass_fraction_of_nitric_acid_trihydrate_ambient_aerosol_particles_in_air - - thickness_of_stratiform_snowfall_amount + + mass_fraction_of_ammonium_dry_aerosol_particles_in_air - - optical_thickness_of_atmosphere_layer_due_to_ambient_aerosol_particles + + tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_shallow_convection - - optical_thickness_of_atmosphere_layer_due_to_ambient_aerosol_particles + + tendency_of_mass_content_of_water_vapor_in_atmosphere_layer - - lwe_thickness_of_stratiform_snowfall_amount + + mass_content_of_cloud_ice_in_atmosphere_layer - - equivalent_thickness_at_stp_of_atmosphere_ozone_content + + mass_concentration_of_secondary_particulate_organic_matter_dry_aerosol_particles_in_air - - atmosphere_optical_thickness_due_to_water_in_ambient_aerosol_particles + + mass_concentration_of_mercury_dry_aerosol_particles_in_air - - atmosphere_optical_thickness_due_to_dust_dry_aerosol_particles + + 
mass_concentration_of_coarse_mode_ambient_aerosol_particles_in_air - - atmosphere_optical_thickness_due_to_dust_ambient_aerosol_particles + + sea_water_velocity_to_direction - - atmosphere_optical_thickness_due_to_ambient_aerosol_particles + + sea_water_velocity_to_direction - - atmosphere_optical_thickness_due_to_ambient_aerosol_particles + + gross_primary_productivity_of_biomass_expressed_as_carbon - - atmosphere_net_upward_convective_mass_flux + + eastward_water_vapor_flux_in_air @@ -31435,94 +31724,6 @@ tendency_of_atmosphere_mass_content_of_water_vapor_due_to_advection - - thermal_energy_content_of_surface_snow - - - - liquid_water_content_of_surface_snow - - - - temperature_in_surface_snow - - - - tendency_of_atmosphere_mass_content_of_water_vapor_due_to_sublimation_of_surface_snow_and_ice - - - - surface_snow_thickness - - - - surface_snow_density - - - - soot_content_of_surface_snow - - - - atmosphere_upward_absolute_vorticity - - - - atmosphere_upward_relative_vorticity - - - - area_type - - - - area_type - - - - iron_growth_limitation_of_diazotrophic_phytoplankton - - - - growth_limitation_of_diazotrophic_phytoplankton_due_to_solar_irradiance - - - - tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_net_primary_production_by_diazotrophic_phytoplankton - - - - mole_concentration_of_diazotrophic_phytoplankton_expressed_as_carbon_in_sea_water - - - - mass_fraction_of_liquid_precipitation_in_air - - - - mass_fraction_of_liquid_precipitation_in_air - - - - mass_concentration_of_diazotrophic_phytoplankton_expressed_as_chlorophyll_in_sea_water - - - - air_pseudo_equivalent_potential_temperature - - - - tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_melting_to_cloud_liquid_water - - - - tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_heterogeneous_nucleation_from_cloud_liquid_water - - - - 
tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_riming_from_cloud_liquid_water - - tendency_of_atmosphere_mass_content_of_water_vapor @@ -31611,256 +31812,68 @@ atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_waste_treatment_and_disposal - - - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_savanna_and_grassland_fires - - - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_maritime_transport - - - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_land_transport - - - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_forest_fires - - - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_agricultural_waste_burning - - - - tendency_of_atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles_due_to_wet_deposition - - - - moles_of_cfc11_per_unit_mass_in_sea_water - - - - atmosphere_moles_of_cfc11 - - - - tendency_of_atmosphere_moles_of_cfc113 - - - - atmosphere_moles_of_cfc113 - - - - tendency_of_atmosphere_moles_of_cfc114 - - - - atmosphere_moles_of_cfc114 - - - - tendency_of_atmosphere_moles_of_cfc115 - - - - atmosphere_moles_of_cfc115 - - - - tendency_of_atmosphere_moles_of_cfc12 - - - - atmosphere_moles_of_cfc12 - - - - tendency_of_atmosphere_moles_of_halon1202 - - - - atmosphere_moles_of_halon1202 - - - - tendency_of_atmosphere_moles_of_halon1211 - - - - atmosphere_moles_of_halon1211 - - - - tendency_of_atmosphere_moles_of_halon1301 - - - - 
atmosphere_moles_of_halon1301 - - - - tendency_of_atmosphere_moles_of_halon2402 - - - - atmosphere_moles_of_halon2402 - - - - tendency_of_atmosphere_moles_of_hcc140a - - - - effective_radius_of_convective_cloud_rain_particles - - - - tendency_of_troposphere_moles_of_hcc140a - - - - tendency_of_middle_atmosphere_moles_of_hcc140a - - - - tendency_of_troposphere_moles_of_hcfc22 - - - - tendency_of_atmosphere_moles_of_hcfc22 - - - - atmosphere_moles_of_hcfc22 - - - - tendency_of_atmosphere_number_content_of_aerosol_particles_due_to_turbulent_deposition - - - - lagrangian_tendency_of_atmosphere_sigma_coordinate - - - - lagrangian_tendency_of_atmosphere_sigma_coordinate - - - - diameter_of_ambient_aerosol_particles - - - - effective_radius_of_stratiform_cloud_ice_particles - - - - effective_radius_of_convective_cloud_ice_particles - - - - effective_radius_of_stratiform_cloud_graupel_particles - - - - effective_radius_of_stratiform_cloud_rain_particles - - - - effective_radius_of_convective_cloud_snow_particles - - - - mass_fraction_of_sulfate_dry_aerosol_particles_in_air - - - - mass_fraction_of_nitric_acid_trihydrate_ambient_aerosol_particles_in_air - - - - mass_fraction_of_ammonium_dry_aerosol_particles_in_air - - - - tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_shallow_convection - - - - tendency_of_mass_content_of_water_vapor_in_atmosphere_layer - - - - mass_content_of_cloud_ice_in_atmosphere_layer - - - - mass_concentration_of_secondary_particulate_organic_matter_dry_aerosol_particles_in_air + + mass_content_of_cloud_liquid_water_in_atmosphere_layer - - mass_concentration_of_mercury_dry_aerosol_particles_in_air + + air_equivalent_potential_temperature - - product_of_eastward_wind_and_lagrangian_tendency_of_air_pressure + + number_concentration_of_stratiform_cloud_liquid_water_particles_at_stratiform_liquid_water_cloud_top - - carbon_mass_flux_into_litter_and_soil_due_to_anthropogenic_land_use_or_land_cover_change + + 
number_concentration_of_convective_cloud_liquid_water_particles_at_convective_liquid_water_cloud_top - - stratiform_cloud_area_fraction + + wave_frequency - - mass_fraction_of_mercury_dry_aerosol_particles_in_air + + upward_eastward_momentum_flux_in_air_due_to_nonorographic_eastward_gravity_waves - - atmosphere_moles_of_hcc140a + + tendency_of_troposphere_moles_of_carbon_monoxide - - floating_ice_shelf_area_fraction + + tendency_of_atmosphere_moles_of_sulfate_dry_aerosol_particles - - atmosphere_moles_of_carbon_tetrachloride + + tendency_of_atmosphere_mass_content_of_nitrate_dry_aerosol_particles_due_to_dry_deposition - - net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_miscellaneous_phytoplankton + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_waste_treatment_and_disposal - - mole_fraction_of_inorganic_bromine_in_air + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_savanna_and_grassland_fires - - water_vapor_saturation_deficit_in_air + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_maritime_transport - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_agricultural_waste_burning + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_land_transport - - tendency_of_atmosphere_moles_of_carbon_tetrachloride + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_forest_fires - - tendency_of_atmosphere_moles_of_carbon_monoxide + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_agricultural_waste_burning - - 
tendency_of_atmosphere_mass_content_of_nitrogen_compounds_expressed_as_nitrogen_due_to_wet_deposition + + tendency_of_atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles_due_to_wet_deposition @@ -32015,104 +32028,152 @@ tendency_of_atmosphere_moles_of_cfc11 - - mole_concentration_of_phytoplankton_expressed_as_nitrogen_in_sea_water + + moles_of_cfc11_per_unit_mass_in_sea_water - - net_primary_mole_productivity_of_biomass_expressed_as_carbon_due_to_nitrate_utilization + + atmosphere_moles_of_cfc11 - - net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_picophytoplankton + + tendency_of_atmosphere_moles_of_hcc140a - - net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_phytoplankton + + effective_radius_of_convective_cloud_rain_particles - - net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_diatoms + + tendency_of_troposphere_moles_of_hcc140a - - net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_calcareous_phytoplankton + + tendency_of_middle_atmosphere_moles_of_hcc140a - - mole_concentration_of_diatoms_expressed_as_nitrogen_in_sea_water + + tendency_of_troposphere_moles_of_hcfc22 - - tendency_of_mole_concentration_of_dissolved_inorganic_phosphorus_in_sea_water_due_to_biological_processes + + tendency_of_atmosphere_moles_of_hcfc22 - - tendency_of_mole_concentration_of_dissolved_inorganic_silicon_in_sea_water_due_to_biological_processes + + atmosphere_moles_of_hcfc22 - - tendency_of_atmosphere_mole_concentration_of_carbon_monoxide_due_to_chemical_destruction + + tendency_of_atmosphere_number_content_of_aerosol_particles_due_to_turbulent_deposition - - volume_extinction_coefficient_in_air_due_to_ambient_aerosol_particles + + lagrangian_tendency_of_atmosphere_sigma_coordinate - - platform_name + + lagrangian_tendency_of_atmosphere_sigma_coordinate - - platform_id + + diameter_of_ambient_aerosol_particles - - platform_pitch + + effective_radius_of_stratiform_cloud_ice_particles - - 
tendency_of_specific_humidity_due_to_stratiform_precipitation + + effective_radius_of_convective_cloud_ice_particles - - tendency_of_air_temperature_due_to_stratiform_precipitation + + effective_radius_of_stratiform_cloud_graupel_particles - - water_evaporation_amount_from_canopy + + effective_radius_of_stratiform_cloud_rain_particles - - tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_turbulent_deposition + + effective_radius_of_convective_cloud_snow_particles - - tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_gravitational_settling + + product_of_eastward_wind_and_lagrangian_tendency_of_air_pressure - - tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_emission + + carbon_mass_flux_into_litter_and_soil_due_to_anthropogenic_land_use_or_land_cover_change - - atmosphere_mass_content_of_cloud_ice + + stratiform_cloud_area_fraction - - stratiform_precipitation_amount + + sea_water_velocity_from_direction - - tendency_of_atmosphere_moles_of_nitrous_oxide + + thickness_of_stratiform_snowfall_amount - - tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_dry_deposition + + optical_thickness_of_atmosphere_layer_due_to_ambient_aerosol_particles - - atmosphere_mass_content_of_convective_cloud_condensed_water + + optical_thickness_of_atmosphere_layer_due_to_ambient_aerosol_particles + + + + lwe_thickness_of_stratiform_snowfall_amount + + + + equivalent_thickness_at_stp_of_atmosphere_ozone_content + + + + atmosphere_optical_thickness_due_to_water_in_ambient_aerosol_particles + + + + atmosphere_optical_thickness_due_to_dust_dry_aerosol_particles + + + + atmosphere_optical_thickness_due_to_dust_ambient_aerosol_particles + + + + atmosphere_optical_thickness_due_to_ambient_aerosol_particles + + + + atmosphere_optical_thickness_due_to_ambient_aerosol_particles + + + + atmosphere_net_upward_convective_mass_flux + + + + mass_fraction_of_mercury_dry_aerosol_particles_in_air + + + + 
atmosphere_moles_of_hcc140a + + + + floating_ice_shelf_area_fraction + + + + atmosphere_moles_of_carbon_tetrachloride @@ -32127,12 +32188,144 @@ mole_fraction_of_noy_expressed_as_nitrogen_in_air - - tendency_of_atmosphere_moles_of_methane + + net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_miscellaneous_phytoplankton - - rate_of_hydroxyl_radical_destruction_due_to_reaction_with_nmvoc + + mole_fraction_of_inorganic_bromine_in_air + + + + water_vapor_saturation_deficit_in_air + + + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_agricultural_waste_burning + + + + tendency_of_atmosphere_moles_of_carbon_tetrachloride + + + + tendency_of_atmosphere_moles_of_carbon_monoxide + + + + tendency_of_atmosphere_moles_of_cfc113 + + + + atmosphere_moles_of_cfc113 + + + + tendency_of_atmosphere_moles_of_cfc114 + + + + atmosphere_moles_of_cfc114 + + + + tendency_of_atmosphere_moles_of_cfc115 + + + + atmosphere_moles_of_cfc115 + + + + tendency_of_atmosphere_moles_of_cfc12 + + + + atmosphere_moles_of_cfc12 + + + + tendency_of_atmosphere_moles_of_halon1202 + + + + atmosphere_moles_of_halon1202 + + + + tendency_of_atmosphere_moles_of_halon1211 + + + + atmosphere_moles_of_halon1211 + + + + tendency_of_atmosphere_moles_of_halon1301 + + + + atmosphere_moles_of_halon1301 + + + + tendency_of_atmosphere_moles_of_halon2402 + + + + atmosphere_moles_of_halon2402 + + + + tendency_of_atmosphere_mass_content_of_nitrogen_compounds_expressed_as_nitrogen_due_to_wet_deposition + + + + mole_concentration_of_phytoplankton_expressed_as_nitrogen_in_sea_water + + + + net_primary_mole_productivity_of_biomass_expressed_as_carbon_due_to_nitrate_utilization + + + + net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_picophytoplankton + + + + net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_phytoplankton + + + + net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_diatoms + + + + 
net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_calcareous_phytoplankton + + + + mole_concentration_of_diatoms_expressed_as_nitrogen_in_sea_water + + + + tendency_of_mole_concentration_of_dissolved_inorganic_phosphorus_in_sea_water_due_to_biological_processes + + + + tendency_of_mole_concentration_of_dissolved_inorganic_silicon_in_sea_water_due_to_biological_processes + + + + tendency_of_atmosphere_mole_concentration_of_carbon_monoxide_due_to_chemical_destruction + + + + volume_extinction_coefficient_in_air_due_to_ambient_aerosol_particles + + + + atmosphere_mass_content_of_convective_cloud_condensed_water @@ -32207,6 +32400,78 @@ lwe_thickness_of_stratiform_precipitation_amount + + tendency_of_atmosphere_moles_of_methane + + + + rate_of_hydroxyl_radical_destruction_due_to_reaction_with_nmvoc + + + + magnitude_of_sea_ice_displacement + + + + surface_downwelling_radiative_flux_per_unit_wavelength_in_sea_water + + + + surface_downwelling_radiative_flux_per_unit_wavelength_in_air + + + + surface_downwelling_shortwave_flux_in_air_assuming_clear_sky_and_no_aerosol + + + + surface_downwelling_photon_spherical_irradiance_per_unit_wavelength_in_sea_water + + + + surface_downwelling_photon_flux_per_unit_wavelength_in_sea_water + + + + surface_downwelling_longwave_flux_in_air + + + + integral_wrt_time_of_surface_downwelling_shortwave_flux_in_air + + + + integral_wrt_time_of_surface_downwelling_longwave_flux_in_air + + + + downwelling_spherical_irradiance_per_unit_wavelength_in_sea_water + + + + downwelling_radiative_flux_per_unit_wavelength_in_sea_water + + + + downwelling_radiative_flux_per_unit_wavelength_in_air + + + + downwelling_shortwave_flux_in_air_assuming_clear_sky_and_no_aerosol + + + + downwelling_photon_spherical_irradiance_per_unit_wavelength_in_sea_water + + + + downwelling_radiance_per_unit_wavelength_in_air + + + + downwelling_photon_radiance_per_unit_wavelength_in_sea_water + + surface_upwelling_shortwave_flux_in_air_assuming_clear_sky @@ 
-32263,26 +32528,6 @@ surface_upwelling_radiance_per_unit_wavelength_in_air_reflected_by_sea_water - - surface_water_evaporation_flux - - - - water_evapotranspiration_flux - - - - water_volume_transport_into_sea_water_from_rivers - - - - stratiform_graupel_flux - - - - toa_outgoing_shortwave_flux_assuming_clear_sky_and_no_aerosol - - wood_debris_mass_content_of_carbon @@ -32311,18 +32556,6 @@ volume_scattering_coefficient_of_radiative_flux_in_air_due_to_ambient_aerosol_particles - - platform_yaw - - - - platform_roll - - - - water_vapor_partial_pressure_in_air - - volume_scattering_coefficient_of_radiative_flux_in_air_due_to_dried_aerosol_particles @@ -32339,68 +32572,68 @@ integral_wrt_height_of_product_of_eastward_wind_and_specific_humidity - - magnitude_of_sea_ice_displacement + + platform_yaw - - surface_downwelling_radiative_flux_per_unit_wavelength_in_sea_water + + platform_roll - - surface_downwelling_radiative_flux_per_unit_wavelength_in_air + + water_vapor_partial_pressure_in_air - - surface_downwelling_shortwave_flux_in_air_assuming_clear_sky_and_no_aerosol + + platform_name - - surface_downwelling_photon_spherical_irradiance_per_unit_wavelength_in_sea_water + + platform_id - - surface_downwelling_photon_flux_per_unit_wavelength_in_sea_water + + platform_pitch - - surface_downwelling_longwave_flux_in_air + + tendency_of_specific_humidity_due_to_stratiform_precipitation - - integral_wrt_time_of_surface_downwelling_shortwave_flux_in_air + + tendency_of_air_temperature_due_to_stratiform_precipitation - - integral_wrt_time_of_surface_downwelling_longwave_flux_in_air + + water_evaporation_amount_from_canopy - - downwelling_spherical_irradiance_per_unit_wavelength_in_sea_water + + tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_turbulent_deposition - - downwelling_radiative_flux_per_unit_wavelength_in_sea_water + + tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_gravitational_settling - - 
downwelling_radiative_flux_per_unit_wavelength_in_air + + tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_emission - - downwelling_shortwave_flux_in_air_assuming_clear_sky_and_no_aerosol + + atmosphere_mass_content_of_cloud_ice - - downwelling_photon_spherical_irradiance_per_unit_wavelength_in_sea_water + + stratiform_precipitation_amount - - downwelling_radiance_per_unit_wavelength_in_air + + tendency_of_atmosphere_moles_of_nitrous_oxide - - downwelling_photon_radiance_per_unit_wavelength_in_sea_water + + tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_dry_deposition @@ -32507,6 +32740,26 @@ lwe_stratiform_precipitation_rate + + surface_water_evaporation_flux + + + + water_evapotranspiration_flux + + + + water_volume_transport_into_sea_water_from_rivers + + + + stratiform_graupel_flux + + + + toa_outgoing_shortwave_flux_assuming_clear_sky_and_no_aerosol + + ocean_y_overturning_mass_streamfunction_due_to_parameterized_eddy_advection @@ -32535,6 +32788,94 @@ tendency_of_sea_water_salinity_due_to_parameterized_eddy_advection + + integral_wrt_time_of_surface_net_downward_shortwave_flux + + + + tendency_of_ocean_eddy_kinetic_energy_content_due_to_parameterized_eddy_advection + + + + sea_water_y_velocity_due_to_parameterized_mesoscale_eddies + + + + ocean_tracer_biharmonic_diffusivity_due_to_parameterized_mesoscale_eddy_advection + + + + eastward_sea_water_velocity_due_to_parameterized_mesoscale_eddies + + + + northward_sea_water_velocity_due_to_parameterized_mesoscale_eddies + + + + ocean_heat_y_transport_due_to_parameterized_eddy_advection + + + + ocean_meridional_overturning_mass_streamfunction_due_to_parameterized_eddy_advection + + + + ocean_mass_y_transport_due_to_advection_and_parameterized_eddy_advection + + + + ocean_mass_x_transport_due_to_advection_and_parameterized_eddy_advection + + + + ocean_heat_x_transport_due_to_parameterized_eddy_advection + + + + 
northward_ocean_freshwater_transport_due_to_parameterized_eddy_advection + + + + northward_ocean_salt_transport_due_to_parameterized_eddy_advection + + + + integral_wrt_time_of_toa_outgoing_longwave_flux + + + + integral_wrt_time_of_toa_net_downward_shortwave_flux + + + + integral_wrt_time_of_surface_net_downward_longwave_flux + + + + integral_wrt_time_of_surface_downward_sensible_heat_flux + + + + integral_wrt_time_of_surface_downward_latent_heat_flux + + + + integral_wrt_time_of_air_temperature_excess + + + + integral_wrt_time_of_air_temperature_deficit + + + + tendency_of_atmosphere_mass_content_of_ammonium_dry_aerosol_particles_due_to_wet_deposition + + + + tendency_of_atmosphere_mass_content_of_ammonium_dry_aerosol_particles_due_to_dry_deposition + + atmosphere_absorption_optical_thickness_due_to_sulfate_ambient_aerosol_particles @@ -32711,392 +33052,392 @@ surface_geostrophic_sea_water_x_velocity_assuming_mean_sea_level_for_geoid - - tendency_of_atmosphere_mass_content_of_pm2p5_sea_salt_dry_aerosol_particles_due_to_emission + + air_pressure_at_mean_sea_level - - tendency_of_atmosphere_mass_content_of_pm2p5_sea_salt_dry_aerosol_particles_due_to_dry_deposition + + sea_floor_depth_below_mean_sea_level - - tendency_of_sea_surface_height_above_mean_sea_level + + ocean_mixed_layer_thickness_defined_by_vertical_tracer_diffusivity_deficit - - mass_fraction_of_pm10_ambient_aerosol_particles_in_air + + sea_surface_wind_wave_mean_period - - mass_fraction_of_pm10_ambient_aerosol_particles_in_air + + sea_surface_wave_mean_period - - mass_concentration_of_pm10_ambient_aerosol_particles_in_air + + sea_surface_swell_wave_mean_period - - atmosphere_optical_thickness_due_to_pm10_ambient_aerosol_particles + + sea_surface_wind_wave_to_direction - - surface_geostrophic_eastward_sea_water_velocity + + sea_surface_swell_wave_to_direction - - mass_fraction_of_pm2p5_ambient_aerosol_particles_in_air + + mass_content_of_water_in_soil_layer - - 
mass_fraction_of_pm2p5_ambient_aerosol_particles_in_air + + mass_content_of_water_in_soil - - mass_concentration_of_pm2p5_ambient_aerosol_particles_in_air + + sea_surface_wind_wave_significant_height - - atmosphere_optical_thickness_due_to_pm2p5_ambient_aerosol_particles + + sea_surface_swell_wave_significant_height - - mass_fraction_of_pm1_ambient_aerosol_particles_in_air + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_turbulent_deposition - - mass_fraction_of_pm1_ambient_aerosol_particles_in_air + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_turbulent_deposition - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_wet_deposition + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_due_to_emission - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_wet_deposition + + atmosphere_optical_thickness_due_to_particulate_organic_matter_ambient_aerosol_particles - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_gravitational_settling + + mass_concentration_of_nitric_acid_trihydrate_ambient_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_gravitational_settling + + atmosphere_mass_content_of_water_in_ambient_aerosol_particles - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_turbulent_deposition + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_residential_and_commercial_combustion - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_turbulent_deposition + + tendency_of_atmosphere_mass_content_of_mercury_dry_aerosol_particles_due_to_wet_deposition - - mass_concentration_of_pm1_ambient_aerosol_particles_in_air + + 
tendency_of_atmosphere_mass_content_of_mercury_dry_aerosol_particles_due_to_dry_deposition - - atmosphere_optical_thickness_due_to_pm1_ambient_aerosol_particles + + mass_fraction_of_nitrate_dry_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_dry_deposition + + mass_concentration_of_sulfate_dry_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_dry_deposition + + mass_fraction_of_water_in_ambient_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_pm2p5_sea_salt_dry_aerosol_particles_due_to_wet_deposition + + mass_fraction_of_secondary_particulate_organic_matter_dry_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_pm10_sea_salt_dry_aerosol_particles_due_to_wet_deposition + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_industrial_processes_and_combustion - - tendency_of_atmosphere_mass_content_of_pm10_sea_salt_dry_aerosol_particles_due_to_dry_deposition + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_energy_production_and_distribution - - mass_fraction_of_sea_salt_dry_aerosol_particles_in_air + + mass_concentration_of_sulfate_ambient_aerosol_particles_in_air - - mass_fraction_of_sea_salt_dry_aerosol_particles_in_air + + mass_concentration_of_sulfate_ambient_aerosol_particles_in_air - - mass_concentration_of_sea_salt_dry_aerosol_particles_in_air + + mass_concentration_of_dust_dry_aerosol_particles_in_air - - mass_concentration_of_sea_salt_dry_aerosol_particles_in_air + + tendency_of_atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles_due_to_emission - - atmosphere_optical_thickness_due_to_sea_salt_ambient_aerosol_particles + + 
tendency_of_atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles_due_to_dry_deposition - - atmosphere_optical_thickness_due_to_sea_salt_ambient_aerosol_particles + + mass_fraction_of_primary_particulate_organic_matter_dry_aerosol_particles_in_air - - atmosphere_mass_content_of_sea_salt_dry_aerosol_particles + + mass_fraction_of_particulate_organic_matter_dry_aerosol_particles_in_air - - atmosphere_mass_content_of_sea_salt_dry_aerosol_particles + + number_concentration_of_coarse_mode_ambient_aerosol_particles_in_air - - air_pressure_at_mean_sea_level + + sea_surface_wave_significant_height - - sea_floor_depth_below_mean_sea_level + + tendency_of_atmosphere_moles_of_nitric_acid_trihydrate_ambient_aerosol_particles - - ocean_mixed_layer_thickness_defined_by_vertical_tracer_diffusivity_deficit + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_due_to_dry_deposition - - sea_surface_wind_wave_mean_period + + tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_wet_deposition - - sea_surface_wave_mean_period + + number_concentration_of_nucleation_mode_ambient_aerosol_particles_in_air - - sea_surface_swell_wave_mean_period + + number_concentration_of_ambient_aerosol_particles_in_air - - sea_surface_wind_wave_to_direction + + mole_fraction_of_nitric_acid_trihydrate_ambient_aerosol_particles_in_air - - sea_surface_swell_wave_to_direction + + mass_fraction_of_dust_dry_aerosol_particles_in_air - - mass_content_of_water_in_soil_layer + + mass_concentration_of_water_in_ambient_aerosol_particles_in_air - - mass_content_of_water_in_soil + + mass_concentration_of_nitrate_dry_aerosol_particles_in_air - - sea_surface_wind_wave_significant_height + + mass_concentration_of_particulate_organic_matter_dry_aerosol_particles_in_air - - sea_surface_swell_wave_significant_height + + mass_concentration_of_ammonium_dry_aerosol_particles_in_air - - integral_wrt_time_of_surface_net_downward_shortwave_flux + + 
atmosphere_mass_content_of_sulfate_ambient_aerosol_particles - - tendency_of_ocean_eddy_kinetic_energy_content_due_to_parameterized_eddy_advection + + atmosphere_mass_content_of_sulfate_ambient_aerosol_particles - - sea_water_y_velocity_due_to_parameterized_mesoscale_eddies + + atmosphere_mass_content_of_dust_dry_aerosol_particles - - ocean_tracer_biharmonic_diffusivity_due_to_parameterized_mesoscale_eddy_advection + + atmosphere_absorption_optical_thickness_due_to_ambient_aerosol_particles - - eastward_sea_water_velocity_due_to_parameterized_mesoscale_eddies + + atmosphere_mass_content_of_sulfate_dry_aerosol_particles - - northward_sea_water_velocity_due_to_parameterized_mesoscale_eddies + + tendency_of_atmosphere_mass_content_of_water_vapor_due_to_turbulence - - ocean_heat_y_transport_due_to_parameterized_eddy_advection + + surface_upward_mole_flux_of_carbon_dioxide - - ocean_meridional_overturning_mass_streamfunction_due_to_parameterized_eddy_advection + + surface_downward_mole_flux_of_carbon_dioxide - - ocean_mass_y_transport_due_to_advection_and_parameterized_eddy_advection + + atmosphere_mass_content_of_cloud_condensed_water - - ocean_mass_x_transport_due_to_advection_and_parameterized_eddy_advection + + northward_water_vapor_flux_in_air - - ocean_heat_x_transport_due_to_parameterized_eddy_advection + + lwe_stratiform_snowfall_rate - - northward_ocean_freshwater_transport_due_to_parameterized_eddy_advection + + stratiform_snowfall_amount - - northward_ocean_salt_transport_due_to_parameterized_eddy_advection + + stratiform_rainfall_rate - - integral_wrt_time_of_toa_outgoing_longwave_flux + + stratiform_rainfall_flux - - integral_wrt_time_of_toa_net_downward_shortwave_flux + + stratiform_rainfall_amount - - integral_wrt_time_of_surface_net_downward_longwave_flux + + tendency_of_atmosphere_mass_content_of_pm2p5_sea_salt_dry_aerosol_particles_due_to_emission - - integral_wrt_time_of_surface_downward_sensible_heat_flux + + 
tendency_of_atmosphere_mass_content_of_pm2p5_sea_salt_dry_aerosol_particles_due_to_dry_deposition - - integral_wrt_time_of_surface_downward_latent_heat_flux + + tendency_of_sea_surface_height_above_mean_sea_level - - integral_wrt_time_of_air_temperature_excess + + mass_fraction_of_pm10_ambient_aerosol_particles_in_air - - integral_wrt_time_of_air_temperature_deficit + + mass_fraction_of_pm10_ambient_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_ammonium_dry_aerosol_particles_due_to_wet_deposition + + mass_concentration_of_pm10_ambient_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_ammonium_dry_aerosol_particles_due_to_dry_deposition + + atmosphere_optical_thickness_due_to_pm10_ambient_aerosol_particles - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_turbulent_deposition + + surface_geostrophic_eastward_sea_water_velocity - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_turbulent_deposition + + mass_fraction_of_pm2p5_ambient_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_due_to_emission + + mass_fraction_of_pm2p5_ambient_aerosol_particles_in_air - - atmosphere_optical_thickness_due_to_particulate_organic_matter_ambient_aerosol_particles + + mass_concentration_of_pm2p5_ambient_aerosol_particles_in_air - - mass_concentration_of_nitric_acid_trihydrate_ambient_aerosol_particles_in_air + + atmosphere_optical_thickness_due_to_pm2p5_ambient_aerosol_particles - - atmosphere_mass_content_of_water_in_ambient_aerosol_particles + + mass_fraction_of_pm1_ambient_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_residential_and_commercial_combustion + + mass_fraction_of_pm1_ambient_aerosol_particles_in_air - - 
tendency_of_atmosphere_mass_content_of_mercury_dry_aerosol_particles_due_to_wet_deposition + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_wet_deposition - - tendency_of_atmosphere_mass_content_of_mercury_dry_aerosol_particles_due_to_dry_deposition + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_wet_deposition - - mass_fraction_of_nitrate_dry_aerosol_particles_in_air + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_gravitational_settling - - mass_concentration_of_sulfate_dry_aerosol_particles_in_air + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_gravitational_settling - - mass_fraction_of_water_in_ambient_aerosol_particles_in_air + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_turbulent_deposition - - mass_fraction_of_secondary_particulate_organic_matter_dry_aerosol_particles_in_air + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_turbulent_deposition - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_industrial_processes_and_combustion + + mass_concentration_of_pm1_ambient_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_energy_production_and_distribution + + atmosphere_optical_thickness_due_to_pm1_ambient_aerosol_particles - - mass_concentration_of_sulfate_ambient_aerosol_particles_in_air + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_dry_deposition - - mass_concentration_of_sulfate_ambient_aerosol_particles_in_air + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_dry_deposition - - mass_concentration_of_dust_dry_aerosol_particles_in_air + + 
tendency_of_atmosphere_mass_content_of_pm2p5_sea_salt_dry_aerosol_particles_due_to_wet_deposition - - tendency_of_atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles_due_to_emission + + tendency_of_atmosphere_mass_content_of_pm10_sea_salt_dry_aerosol_particles_due_to_wet_deposition - - tendency_of_atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles_due_to_dry_deposition + + tendency_of_atmosphere_mass_content_of_pm10_sea_salt_dry_aerosol_particles_due_to_dry_deposition - - mass_fraction_of_primary_particulate_organic_matter_dry_aerosol_particles_in_air + + mass_fraction_of_sea_salt_dry_aerosol_particles_in_air - - mass_fraction_of_particulate_organic_matter_dry_aerosol_particles_in_air + + mass_fraction_of_sea_salt_dry_aerosol_particles_in_air - - number_concentration_of_coarse_mode_ambient_aerosol_particles_in_air + + mass_concentration_of_sea_salt_dry_aerosol_particles_in_air - - mass_concentration_of_primary_particulate_organic_matter_dry_aerosol_particles_in_air + + mass_concentration_of_sea_salt_dry_aerosol_particles_in_air - - atmosphere_mass_content_of_ammonium_dry_aerosol_particles + + atmosphere_optical_thickness_due_to_sea_salt_ambient_aerosol_particles - - stratiform_rainfall_rate + + atmosphere_optical_thickness_due_to_sea_salt_ambient_aerosol_particles - - stratiform_rainfall_flux + + atmosphere_mass_content_of_sea_salt_dry_aerosol_particles - - stratiform_rainfall_amount + + atmosphere_mass_content_of_sea_salt_dry_aerosol_particles @@ -33139,22 +33480,6 @@ upward_eastward_momentum_flux_in_air_due_to_nonorographic_westward_gravity_waves - - mass_fraction_of_ozone_in_air - - - - mass_fraction_of_convective_cloud_condensed_water_in_air - - - - sea_surface_swell_wave_period - - - - surface_drag_coefficient_in_air - - specific_gravitational_potential_energy @@ -33175,6 +33500,14 @@ isotropic_longwave_radiance_in_air + + 
mass_concentration_of_primary_particulate_organic_matter_dry_aerosol_particles_in_air + + + + atmosphere_mass_content_of_ammonium_dry_aerosol_particles + + stratiform_snowfall_flux @@ -33183,108 +33516,120 @@ thickness_of_stratiform_rainfall_amount - - sea_surface_wave_significant_height + + sea_surface_wind_wave_period - - tendency_of_atmosphere_moles_of_nitric_acid_trihydrate_ambient_aerosol_particles + + omnidirectional_spherical_irradiance_per_unit_wavelength_in_sea_water - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_due_to_dry_deposition + + tendency_of_middle_atmosphere_moles_of_molecular_hydrogen - - tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_wet_deposition + + tendency_of_middle_atmosphere_moles_of_methyl_chloride - - number_concentration_of_nucleation_mode_ambient_aerosol_particles_in_air + + tendency_of_middle_atmosphere_moles_of_methane - - number_concentration_of_ambient_aerosol_particles_in_air + + sea_water_y_velocity - - mole_fraction_of_nitric_acid_trihydrate_ambient_aerosol_particles_in_air + + sea_water_x_velocity - - mass_fraction_of_dust_dry_aerosol_particles_in_air + + mole_fraction_of_hypochlorous_acid_in_air - - mass_concentration_of_water_in_ambient_aerosol_particles_in_air + + tendency_of_troposphere_moles_of_molecular_hydrogen - - mass_concentration_of_nitrate_dry_aerosol_particles_in_air + + tendency_of_troposphere_moles_of_methyl_chloride - - mass_concentration_of_particulate_organic_matter_dry_aerosol_particles_in_air + + mass_content_of_water_vapor_in_atmosphere_layer - - mass_concentration_of_ammonium_dry_aerosol_particles_in_air + + mass_content_of_water_in_atmosphere_layer - - atmosphere_mass_content_of_sulfate_ambient_aerosol_particles + + tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_turbulence - - atmosphere_mass_content_of_sulfate_ambient_aerosol_particles + + tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_deep_convection - - 
atmosphere_mass_content_of_dust_dry_aerosol_particles + + tendency_of_troposphere_moles_of_methyl_bromide - - atmosphere_absorption_optical_thickness_due_to_ambient_aerosol_particles + + tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_convection - - atmosphere_mass_content_of_sulfate_dry_aerosol_particles + + tendency_of_atmosphere_mass_content_of_water_vapor_due_to_shallow_convection - - tendency_of_atmosphere_mass_content_of_water_vapor_due_to_turbulence + + radiation_wavelength - - surface_upward_mole_flux_of_carbon_dioxide + + tendency_of_troposphere_moles_of_methane - - surface_downward_mole_flux_of_carbon_dioxide + + tendency_of_atmosphere_mass_content_of_water_due_to_advection - - atmosphere_mass_content_of_cloud_condensed_water + + mole_fraction_of_chlorine_monoxide_in_air - - northward_water_vapor_flux_in_air + + mole_fraction_of_chlorine_dioxide_in_air - - lwe_stratiform_snowfall_rate + + mass_fraction_of_ozone_in_air - - stratiform_snowfall_amount + + mass_fraction_of_convective_cloud_condensed_water_in_air - - sea_surface_wind_wave_period + + sea_surface_swell_wave_period - - omnidirectional_spherical_irradiance_per_unit_wavelength_in_sea_water + + surface_drag_coefficient_in_air + + + + mass_content_of_cloud_condensed_water_in_atmosphere_layer + + + + mole_concentration_of_organic_detritus_expressed_as_silicon_in_sea_water + + + + mole_concentration_of_organic_detritus_expressed_as_nitrogen_in_sea_water @@ -33347,98 +33692,6 @@ tendency_of_atmosphere_moles_of_methyl_chloride - - tendency_of_middle_atmosphere_moles_of_molecular_hydrogen - - - - tendency_of_middle_atmosphere_moles_of_methyl_chloride - - - - tendency_of_middle_atmosphere_moles_of_methane - - - - sea_water_y_velocity - - - - sea_water_x_velocity - - - - mole_fraction_of_hypochlorous_acid_in_air - - - - tendency_of_troposphere_moles_of_molecular_hydrogen - - - - tendency_of_troposphere_moles_of_methyl_chloride - - - - mass_content_of_water_vapor_in_atmosphere_layer - - - 
- mass_content_of_water_in_atmosphere_layer - - - - tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_turbulence - - - - tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_deep_convection - - - - tendency_of_troposphere_moles_of_methyl_bromide - - - - tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_convection - - - - tendency_of_atmosphere_mass_content_of_water_vapor_due_to_shallow_convection - - - - radiation_wavelength - - - - tendency_of_troposphere_moles_of_methane - - - - tendency_of_atmosphere_mass_content_of_water_due_to_advection - - - - mole_fraction_of_chlorine_monoxide_in_air - - - - mole_fraction_of_chlorine_dioxide_in_air - - - - mass_content_of_cloud_condensed_water_in_atmosphere_layer - - - - mole_concentration_of_organic_detritus_expressed_as_silicon_in_sea_water - - - - mole_concentration_of_organic_detritus_expressed_as_nitrogen_in_sea_water - - surface_drag_coefficient_for_momentum_in_air diff --git a/tools/generate_std_names.py b/tools/generate_std_names.py index 95dcce8171f..08bacbe1e07 100644 --- a/tools/generate_std_names.py +++ b/tools/generate_std_names.py @@ -13,7 +13,9 @@ By default, Iris will use the source XML file: etc/cf-standard-name-table.xml as obtained from: - http://cf-pcmdi.llnl.gov/documents/cf-standard-names + http://cfconventions.org/standard-names.html + E.G. http://cfconventions.org/Data/cf-standard-names/78/src/cf-standard-name-table.xml + - N.B. no fixed 'latest' url is provided. 
""" From 34ee90ae1dd24a06d612c527ad4cc7dbd5855327 Mon Sep 17 00:00:00 2001 From: Bill Little Date: Mon, 10 Jan 2022 12:53:09 +0000 Subject: [PATCH 04/69] nep29: drop py37 support (#4481) --- .cirrus.yml | 2 - .github/workflows/refresh-lockfiles.yml | 2 +- docs/src/whatsnew/latest.rst | 4 + noxfile.py | 2 +- pyproject.toml | 2 +- requirements/ci/nox.lock/py37-linux-64.lock | 228 -------------------- requirements/ci/py37.yml | 47 ---- setup.cfg | 2 +- 8 files changed, 8 insertions(+), 281 deletions(-) delete mode 100644 requirements/ci/nox.lock/py37-linux-64.lock delete mode 100644 requirements/ci/py37.yml diff --git a/.cirrus.yml b/.cirrus.yml index b3992de64a4..bdedbec43d2 100644 --- a/.cirrus.yml +++ b/.cirrus.yml @@ -141,8 +141,6 @@ task: only_if: ${SKIP_TEST_TASK} == "" << : *CREDITS_TEMPLATE matrix: - env: - PY_VER: 3.7 env: PY_VER: 3.8 name: "${CIRRUS_OS}: py${PY_VER} tests" diff --git a/.github/workflows/refresh-lockfiles.yml b/.github/workflows/refresh-lockfiles.yml index 643825b3668..3106d94a67d 100755 --- a/.github/workflows/refresh-lockfiles.yml +++ b/.github/workflows/refresh-lockfiles.yml @@ -71,7 +71,7 @@ jobs: strategy: matrix: - python: ['37', '38'] + python: ['38'] steps: - uses: actions/checkout@v2 diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 89711e36d1a..53d215a352d 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -18,6 +18,7 @@ This document explains the changes made to Iris for this release * We've added experimental support for `UGRID`_ meshes which can now be loaded and attached to a cube. + * We've also dropped support for ``Python 3.7``. And finally, get in touch with us on `GitHub`_ if you have any issues or feature requests for improving Iris. Enjoy! @@ -182,6 +183,9 @@ This document explains the changes made to Iris for this release #. `@bjlittle`_ introduced the ``numpy >=1.19`` minimum pin, in accordance with `NEP-29`_ deprecation policy. (:pull:`4386`) +#. 
`@bjlittle`_ dropped support for ``Python 3.7``, as per the `NEP-29`_ + backwards compatibility and deprecation policy schedule. (:pull:`4481`) + 📚 Documentation ================ diff --git a/noxfile.py b/noxfile.py index 497330de377..9d8706a04b8 100755 --- a/noxfile.py +++ b/noxfile.py @@ -16,7 +16,7 @@ nox.options.reuse_existing_virtualenvs = True #: Python versions we can run sessions under -_PY_VERSIONS_ALL = ["3.7", "3.8"] +_PY_VERSIONS_ALL = ["3.8"] _PY_VERSION_LATEST = _PY_VERSIONS_ALL[-1] #: One specific python version for docs builds diff --git a/pyproject.toml b/pyproject.toml index 8d01db2af79..26e6ae727a6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,7 +10,7 @@ build-backend = "setuptools.build_meta" [tool.black] line-length = 79 -target-version = ['py37', 'py38'] +target-version = ['py38'] include = '\.pyi?$' extend-exclude = ''' ( diff --git a/requirements/ci/nox.lock/py37-linux-64.lock b/requirements/ci/nox.lock/py37-linux-64.lock deleted file mode 100644 index 06bcca68f8b..00000000000 --- a/requirements/ci/nox.lock/py37-linux-64.lock +++ /dev/null @@ -1,228 +0,0 @@ -# Generated by conda-lock. 
-# platform: linux-64 -# input_hash: 2ded1a5e8a7c81e393e358171ff923c72d099e77a007d70daa2a15beb3a59545 -@EXPLICIT -https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 -https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2021.10.8-ha878542_0.tar.bz2#575611b8a84f45960e87722eeb51fa26 -https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 -https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 -https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb -https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5 -https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.36.1-hea4e1c9_2.tar.bz2#bd4f2e711b39af170e7ff15163fe87ee -https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-11.2.0-h5c6108e_11.tar.bz2#2dcb18a9a0fa31f4f29e5a9b3eade394 -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-11.2.0-he4da1e4_11.tar.bz2#0bf83958e788f1e75ba26154cb702afe -https://conda.anaconda.org/conda-forge/linux-64/mpi-1.0-mpich.tar.bz2#c1fcff3417b5a22bbc4cf6e8c23648cf -https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.27-ha770c72_3.tar.bz2#49210aaa9080888f9f9b460c70202bd3 -https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-11.2.0-h69a702a_11.tar.bz2#4ea2f9f83b617a7682e8aa05dcb37c6a -https://conda.anaconda.org/conda-forge/linux-64/libgomp-11.2.0-h1d223b6_11.tar.bz2#1d16527c76842bf9c41e9399d39d8097 -https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-1_gnu.tar.bz2#561e277319a41d4f24f5c05a9ef63c04 
-https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab -https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-11.2.0-h1d223b6_11.tar.bz2#e3495f4f93cfd6b68021cbe2b5844cd5 -https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.3-h516909a_0.tar.bz2#1378b88874f42ac31b2f8e4f6975cb7b -https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 -https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.18.1-h7f98852_0.tar.bz2#f26ef8098fab1f719c91eb760d63381a -https://conda.anaconda.org/conda-forge/linux-64/expat-2.4.2-h9c3ff4c_0.tar.bz2#0fb039650fa638f258fdc9e9ef125f52 -https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 -https://conda.anaconda.org/conda-forge/linux-64/geos-3.10.1-h9c3ff4c_1.tar.bz2#17a5f413039ce1e105fab5df9c668eb5 -https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.1-h36c2ea0_2.tar.bz2#626e68ae9cc5912d6adb79d318cf962d -https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h58526e2_1001.tar.bz2#8c54672728e8ec6aa6db90cf2806d220 -https://conda.anaconda.org/conda-forge/linux-64/icu-68.2-h9c3ff4c_0.tar.bz2#6618c9b191638993f2a818c6529e1b49 -https://conda.anaconda.org/conda-forge/linux-64/jbig-2.1-h7f98852_2003.tar.bz2#1aa0cee79792fa97b7ff4545110b60bf -https://conda.anaconda.org/conda-forge/linux-64/jpeg-9d-h36c2ea0_0.tar.bz2#ea02ce6037dbe81803ae6123e5ba1568 -https://conda.anaconda.org/conda-forge/linux-64/lerc-3.0-h9c3ff4c_0.tar.bz2#7fcefde484980d23f0ec24c11e314d2e -https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.0.9-h7f98852_6.tar.bz2#b0f44f63f7d771d7670747a1dd5d5ac1 -https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.8-h7f98852_0.tar.bz2#91d22aefa665265e8e31988b15145c8a -https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 
-https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 -https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.16-h516909a_0.tar.bz2#5c0f338a513a2943c659ae619fca9211 -https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d -https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.0-h7f98852_0.tar.bz2#39b1328babf85c7c3a61636d9cd50206 -https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2#6e8cc2173440d77708196c5b93771680 -https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.18-pthreads_h8fe5266_0.tar.bz2#41532e4448c0cce086d6570f95e4e12e -https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f -https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.6-h9c3ff4c_1008.tar.bz2#16e143a1ed4b4fd169536373957f6fee -https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.32.1-h7f98852_1000.tar.bz2#772d69f030955d9646d3d0eaf21d859d -https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.2.1-h7f98852_0.tar.bz2#90607c4c0247f04ec98b48997de71c1a -https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.11-h36c2ea0_1013.tar.bz2#dcddf696ff5dfcab567100d691678e18 -https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.3-h9c3ff4c_1.tar.bz2#fbe97e8fa6f275d7c76a09e795adc3e6 -https://conda.anaconda.org/conda-forge/linux-64/mpich-3.4.3-h846660c_100.tar.bz2#1bb747e2de717cb9a6501d72539d6556 -https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.2-h58526e2_4.tar.bz2#509f2a21c4a09214cd737a480dfd80c9 -https://conda.anaconda.org/conda-forge/linux-64/nspr-4.32-h9c3ff4c_1.tar.bz2#29ded371806431b0499aaee146abfc3e -https://conda.anaconda.org/conda-forge/linux-64/openssl-1.1.1l-h7f98852_0.tar.bz2#de7b38a1542dbe6f41653a8ae71adc53 -https://conda.anaconda.org/conda-forge/linux-64/pcre-8.45-h9c3ff4c_0.tar.bz2#c05d1820a6d34ff07aaaab7a9b7eddaa 
-https://conda.anaconda.org/conda-forge/linux-64/pixman-0.40.0-h36c2ea0_0.tar.bz2#660e72c82f2e75a6b3fe6a6e75c79f19 -https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 -https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2#4b230e8381279d76131116660f5a241a -https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.0.10-h7f98852_0.tar.bz2#d6b0b50b49eccfe0be0373be628be0f3 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.9-h7f98852_0.tar.bz2#bf6f803a544f26ebbdc3bfff272eb179 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.3-h7f98852_0.tar.bz2#be93aabceefa2fac576e971aef407908 -https://conda.anaconda.org/conda-forge/linux-64/xorg-renderproto-0.11.1-h7f98852_1002.tar.bz2#06feff3d2634e3097ce2fe681474b534 -https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h7f98852_1002.tar.bz2#1e15f6ad85a7d743a2ac68dae6c82b98 -https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2#b4a4381d54784606820704f7b5f05a15 -https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.0-h7f98852_3.tar.bz2#52402c791f35e414e704b7a113f99605 -https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.5-h516909a_1.tar.bz2#33f601066901f3e1a85af3522a8113f9 -https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h516909a_0.tar.bz2#03a530e925414902547cf48da7756db8 -https://conda.anaconda.org/conda-forge/linux-64/gettext-0.19.8.1-h73d1719_1008.tar.bz2#af49250eca8e139378f8ff0ae9e57251 -https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-12_linux64_openblas.tar.bz2#4f93ba28c628a2c27cf39c055e6b219c -https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h7f98852_6.tar.bz2#c7c03a2592cac92246a13a0732bd1573 -https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.0.9-h7f98852_6.tar.bz2#28bfe0a70154e6881da7bae97517c948 
-https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 -https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.10-h9b69904_4.tar.bz2#390026683aef81db27ff1b8570ca1336 -https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 -https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.13-h7f98852_1004.tar.bz2#b3653fdc58d03face9724f602218a904 -https://conda.anaconda.org/conda-forge/linux-64/readline-8.1-h46c0cb4_0.tar.bz2#5788de3c8d7a7d64ac56c784c4ef48e6 -https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.27.27-hc3e0081_3.tar.bz2#a47110f41fcbf88fcdf8549d7f69a6d8 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.3-hd9c2040_1000.tar.bz2#9e856f78d5c80d5a78f61e72d1d473a3 -https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.11-h36c2ea0_1013.tar.bz2#cf7190238072a41e9579e4476a6a60b8 -https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.1-ha95c52a_0.tar.bz2#4eec219a4bd69c11579601804cec5baf -https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.0.9-h7f98852_6.tar.bz2#9e94bf16f14c78a36561d5019f490d22 -https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h10796ff_3.tar.bz2#21a8d66dc17f065023b33145c42652fe -https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-12_linux64_openblas.tar.bz2#2e5082d4a9a18c21100e6ce5b6bcb4ec -https://conda.anaconda.org/conda-forge/linux-64/libglib-2.70.2-h174f98d_1.tar.bz2#d03a54631298fd1ab732ff65f6ed3a07 -https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-12_linux64_openblas.tar.bz2#9f401a6807a97e0c859d7522ae3d51ec -https://conda.anaconda.org/conda-forge/linux-64/libllvm11-11.1.0-hf817b99_2.tar.bz2#646fa2f7c60b69ee8f918668e9c2fd31 -https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.43.0-h812cca2_1.tar.bz2#d0a7846b7b3b8fb0d8b36904a53b8155 
-https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.37-h21135ba_2.tar.bz2#b6acf807307d033d4b7e758b4f44b036 -https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.10.0-ha56f1ee_2.tar.bz2#6ab4eaa11ff01801cffca0a27489dc04 -https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.3.0-h6f004c6_2.tar.bz2#34fda41ca84e67232888c9a885903055 -https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.9.12-h72842e0_0.tar.bz2#bd14fdf5b9ee5568056a40a6a2f41866 -https://conda.anaconda.org/conda-forge/linux-64/libzip-1.8.0-h4de3113_1.tar.bz2#175a746a43d42c053b91aa765fbc197d -https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.27-hfa10184_3.tar.bz2#7cd299934880b05703ee86a62325982f -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.37.0-h9cd32fc_0.tar.bz2#eb66fc098824d25518a79e83d12a81d6 -https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.11-h27826a3_1.tar.bz2#84e76fb280e735fec1efd2d21fd9cb27 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.7.2-h7f98852_0.tar.bz2#12a61e640b8894504326aadafccbb790 -https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.36.0-h3371d22_4.tar.bz2#661e1ed5d92552785d9f8c781ce68685 -https://conda.anaconda.org/conda-forge/linux-64/brotli-1.0.9-h7f98852_6.tar.bz2#612385c4a83edb0619fe911d9da317f4 -https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d -https://conda.anaconda.org/conda-forge/linux-64/freetype-2.10.4-h0708190_1.tar.bz2#4a06f2ac2e5bfae7b6b245171c3f07aa -https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.6-h04a7f16_0.tar.bz2#b24a1e18325a6e8f8b6b4a2ec5860ce2 -https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.18.5-h9f60fe5_2.tar.bz2#6221115a24700aa8598ae5aa1574902d -https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h64030ff_2.tar.bz2#112eb9b5b93f0c02e59aea4fd1967363 -https://conda.anaconda.org/conda-forge/linux-64/krb5-1.19.2-hcc1bbae_3.tar.bz2#e29650992ae593bc05fc93722483e5c3 
-https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.12-hddcbb42_0.tar.bz2#797117394a4aa588de6d741b06fad80f -https://conda.anaconda.org/conda-forge/linux-64/libclang-11.1.0-default_ha53f305_1.tar.bz2#b9b71585ca4fcb5d442c5a9df5dd7e98 -https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.2.1-h3452ae3_0.tar.bz2#6d4bf6265d998b6c975c26a6a24062a2 -https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.0.3-he3ba5ed_0.tar.bz2#f9dbabc7e01c459ed7a1d1d64b206e9b -https://conda.anaconda.org/conda-forge/linux-64/nss-3.73-hb5efdd6_0.tar.bz2#a5b91a14292ac34bac1f0506a3772fd5 -https://conda.anaconda.org/conda-forge/linux-64/python-3.7.12-hb7a2778_100_cpython.tar.bz2#2d94b3e6a9fdaf83f6955d008c8011a7 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h7f98852_1.tar.bz2#536cc5db4d0a3ba0630541aec064b5e4 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.10-h7f98852_1003.tar.bz2#f59c1242cc1dd93e72c2ee2b360979eb -https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.12-py_0.tar.bz2#2489a97287f90176ecdc3ca982b4b0a0 -https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2#5f095bc6454094e96f146491fd03633b -https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c -https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-2.0.9-pyhd8ed1ab_0.tar.bz2#a57a3f6f2b0a7400e340f850c405df19 -https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.0.0-pyhd8ed1ab_0.tar.bz2#3a8fc8b627d5fb6af827e126a10a86c6 -https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.4-pyh9f0ad1d_0.tar.bz2#c08b4c1326b880ed44f3ffb04803332f -https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2#a50559fad0affdbb33729a68669ca1cb -https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.4-pyhd8ed1ab_0.tar.bz2#7b50d840543d9cdae100e91582c33035 -https://conda.anaconda.org/conda-forge/noarch/filelock-3.4.2-pyhd8ed1ab_0.tar.bz2#d05900c9b0ef4c3d1cef2e8a5c49350e 
-https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.13.1-hba837de_1005.tar.bz2#fd3611672eb91bc9d24fd6fb970037eb -https://conda.anaconda.org/conda-forge/noarch/fsspec-2021.11.1-pyhd8ed1ab_0.tar.bz2#a510ec93fdb50775091d2afba98a8acb -https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.18.5-hf529b03_2.tar.bz2#3cf866063f2803944ddaee8b1d6da531 -https://conda.anaconda.org/conda-forge/noarch/idna-3.1-pyhd3deb0d_0.tar.bz2#9c9aea4b8391264477df484f798562d0 -https://conda.anaconda.org/conda-forge/noarch/imagesize-1.3.0-pyhd8ed1ab_0.tar.bz2#be807e7606fff9436e5e700f6bffb7c6 -https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 -https://conda.anaconda.org/conda-forge/linux-64/libcurl-7.80.0-h2574ce0_0.tar.bz2#5d0784b790350f7939bb5d3f2c32e700 -https://conda.anaconda.org/conda-forge/linux-64/libpq-13.5-hd57d9b9_1.tar.bz2#a0f425d61c7df890d6381ea352c3f1d7 -https://conda.anaconda.org/conda-forge/noarch/locket-0.2.0-py_2.tar.bz2#709e8671651c7ec3d1ad07800339ff1d -https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 -https://conda.anaconda.org/conda-forge/noarch/nose-1.3.7-py_1006.tar.bz2#382019d5f8e9362ef6f60a8d4e7bce8f -https://conda.anaconda.org/conda-forge/noarch/olefile-0.46-pyh9f0ad1d_1.tar.bz2#0b2e68acc8c78c8cc392b90983481f58 -https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff -https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.6-pyhd8ed1ab_0.tar.bz2#3087df8c636c5a00e694605c39ce4982 -https://conda.anaconda.org/conda-forge/noarch/pyshp-2.1.3-pyh44b312d_0.tar.bz2#2d1867b980785eb44b8122184d8b42a6 -https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.7-2_cp37m.tar.bz2#afff88bf9a7048da740c70aeb8cdbb82 -https://conda.anaconda.org/conda-forge/noarch/pytz-2021.3-pyhd8ed1ab_0.tar.bz2#7e4f811bff46a5a6a7e0094921389395 
-https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 -https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.2-py_0.tar.bz2#20b2eaeaeea4ef9a9a0d99770620fd09 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.2-py_0.tar.bz2#68e01cac9d38d0e717cd5c87bc3d2cc9 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.0-pyhd8ed1ab_0.tar.bz2#77dad82eb9c8c1525ff7953e0756d708 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-py_0.tar.bz2#67cd9d9c0382d37479b4d306c369a2d4 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.3-py_0.tar.bz2#d01180388e6d1838c3e1ad029590aa7a -https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 -https://conda.anaconda.org/conda-forge/noarch/toolz-0.11.2-pyhd8ed1ab_0.tar.bz2#f348d1590550371edfac5ed3c1d44f7e -https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.0.1-pyha770c72_0.tar.bz2#1fc03816925d3cb7fdab9ab234e7fea7 -https://conda.anaconda.org/conda-forge/noarch/wheel-0.37.1-pyhd8ed1ab_0.tar.bz2#1ca02aaf78d9c70d9a81a3bed5752022 -https://conda.anaconda.org/conda-forge/noarch/zipp-3.6.0-pyhd8ed1ab_0.tar.bz2#855e2c4622f5eb50a4f6f7167b9ba17a -https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py37h89c1867_1003.tar.bz2#490366305378c8690b65c4bce9b9f6a4 -https://conda.anaconda.org/conda-forge/noarch/babel-2.9.1-pyh44b312d_0.tar.bz2#74136ed39bfea0832d338df1e58d013e -https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-h6cf1ce9_1008.tar.bz2#a43fb47d15e116f8be4be7e6b17ab59f -https://conda.anaconda.org/conda-forge/linux-64/certifi-2021.10.8-py37h89c1867_1.tar.bz2#48e8442b6097c7d4a0e3494c74ff9eeb 
-https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.0-py37h036bc23_0.tar.bz2#05ab26c7685bcb7dd8bc8752c121f823 -https://conda.anaconda.org/conda-forge/linux-64/curl-7.80.0-h2574ce0_0.tar.bz2#4d8fd67e5ab7e00fde8ad085464f43b7 -https://conda.anaconda.org/conda-forge/linux-64/cython-0.29.26-py37hcd2ae1e_0.tar.bz2#ab81ddd8474c4cee87fe2f9ef163f44f -https://conda.anaconda.org/conda-forge/linux-64/docutils-0.17.1-py37h89c1867_1.tar.bz2#e0a3be74a594032b73f22762ba9941cc -https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.12.1-mpi_mpich_h9c45103_3.tar.bz2#4f1a733e563d27b98010b62888e149c9 -https://conda.anaconda.org/conda-forge/linux-64/importlib-metadata-4.10.0-py37h89c1867_0.tar.bz2#5187ab9fedd67074b301ba81ae01fd45 -https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.3.2-py37h2527ec5_1.tar.bz2#441ac4d93d0d57d21ea9dcac48cb5d0d -https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h6ad9fb6_0.tar.bz2#45142dc44fcd04934f9ad68ce205e54d -https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.0.1-py37h5e8e339_1.tar.bz2#6c7c14c95d4c435b66261639b64c7c51 -https://conda.anaconda.org/conda-forge/linux-64/mpi4py-3.1.3-py37h1e5cb63_0.tar.bz2#3d5ca9f081a7756df4f027776ff23b73 -https://conda.anaconda.org/conda-forge/linux-64/numpy-1.21.5-py37hf2998dd_0.tar.bz2#ae1049dd3d8d15fc02af2f417cff5494 -https://conda.anaconda.org/conda-forge/noarch/packaging-21.3-pyhd8ed1ab_0.tar.bz2#71f1ab2de48613876becddd496371c85 -https://conda.anaconda.org/conda-forge/noarch/partd-1.2.0-pyhd8ed1ab_0.tar.bz2#0c32f563d7f22e3a34c95cad8cc95651 -https://conda.anaconda.org/conda-forge/linux-64/pillow-6.2.2-py37h718be6c_0.tar.bz2#ecac4e308b87ff93d44ea5e56ab39084 -https://conda.anaconda.org/conda-forge/noarch/pockets-0.9.1-py_0.tar.bz2#1b52f0c42e8077e5a33e00fe72269364 -https://conda.anaconda.org/conda-forge/linux-64/proj-8.2.0-h277dcde_0.tar.bz2#7ba8c7a9bf1c2fedf4a6d6dc92839baf 
-https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-4.19.18-py37hcd2ae1e_8.tar.bz2#ae12b17bbd5733cb8884b42dcc5c59f0 -https://conda.anaconda.org/conda-forge/linux-64/pysocks-1.7.1-py37h89c1867_4.tar.bz2#44df88d27e2891f90e3f06dcfcca0927 -https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 -https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-2.0.2-py37h5e8e339_1.tar.bz2#c89489cddb9e53155e241e9aacd35e4b -https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py37h5e8e339_3.tar.bz2#7f167ecf4d4771ee33589e09479238e7 -https://conda.anaconda.org/conda-forge/linux-64/qt-5.12.9-hda022c4_4.tar.bz2#afebab1f5049d66baaaec67d9ce893f0 -https://conda.anaconda.org/conda-forge/linux-64/setuptools-60.2.0-py37h89c1867_0.tar.bz2#2ad2bbd333df969fb4ecadbabec85603 -https://conda.anaconda.org/conda-forge/linux-64/tornado-6.1-py37h5e8e339_2.tar.bz2#ec86ae00c96dea5f2d810957a8fabc26 -https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-14.0.0-py37h5e8e339_0.tar.bz2#9f4ac5fb219d7c63c3c3cd9c630b81a6 -https://conda.anaconda.org/conda-forge/linux-64/brotlipy-0.7.0-py37h5e8e339_1003.tar.bz2#4ad2e74470a3c08b0f6d59699f0d9a32 -https://conda.anaconda.org/conda-forge/linux-64/cftime-1.5.1.1-py37hb1e94ed_1.tar.bz2#1b5b81088bc7d7e0bef7de4ef4bd1221 -https://conda.anaconda.org/conda-forge/linux-64/cryptography-36.0.1-py37hf1a17b8_0.tar.bz2#7ad2c98aaab85d80017b3a6f79a2aa5d -https://conda.anaconda.org/conda-forge/noarch/dask-core-2021.12.0-pyhd8ed1ab_0.tar.bz2#e572bf40b1e8783fed2526ecb5f5209e -https://conda.anaconda.org/conda-forge/linux-64/editdistance-s-1.0.0-py37h2527ec5_2.tar.bz2#9aba6bcb02d12dbd2fead23b85720712 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.28.5-py37h5e8e339_0.tar.bz2#3761f28aaafe435080d26b00fbcd7af8 -https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-3.1.1-h83ec7ef_0.tar.bz2#ca8faaee04a83e3c4d6f708a35ac2ec3 
-https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-4.10.0-hd8ed1ab_0.tar.bz2#1de4b4503b2803c1b0fcba6bb91ab274 -https://conda.anaconda.org/conda-forge/noarch/jinja2-3.0.3-pyhd8ed1ab_0.tar.bz2#036d872c653780cb26e797e2e2f61b4c -https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.8.1-mpi_mpich_h319fa22_1.tar.bz2#7583fbaea3648f692c0c019254bc196c -https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py37hb1e94ed_1006.tar.bz2#e06cf91c2624284413641be2cb8c3198 -https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.6.0-pyhd8ed1ab_0.tar.bz2#0941325bf48969e2b3b19d0951740950 -https://conda.anaconda.org/conda-forge/linux-64/pandas-1.3.5-py37he8f5f7f_0.tar.bz2#6ebf1968b199a141a5cce6adaedb3651 -https://conda.anaconda.org/conda-forge/noarch/pip-21.3.1-pyhd8ed1ab_0.tar.bz2#e4fe2a9af78ff11f1aced7e62128c6a8 -https://conda.anaconda.org/conda-forge/noarch/pygments-2.11.1-pyhd8ed1ab_0.tar.bz2#6f857f10fe2960dce20d59d71a290d51 -https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.2.1-py37hb589d83_5.tar.bz2#ea78cbba7d43ad17ec043a9ebdee3bf5 -https://conda.anaconda.org/conda-forge/linux-64/pyqt-impl-5.12.3-py37hac37412_8.tar.bz2#148f2e971a67831ed0691f63cd826468 -https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.1.1-py37h6f94858_1004.tar.bz2#42b37830a63405589fef3d13db505e7d -https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.2.0-py37hb1e94ed_1.tar.bz2#3a94b25c520754b56cdfa7d865806524 -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.7.3-py37hf2a6cf1_0.tar.bz2#129c613e1d0f09d9fd0473a0da6161a9 -https://conda.anaconda.org/conda-forge/linux-64/shapely-1.8.0-py37h9b0f7a3_4.tar.bz2#568474687cd6be5f834cb682637ac0de -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-napoleon-0.7-py_0.tar.bz2#0bc25ff6f2e34af63ded59692df5f749 -https://conda.anaconda.org/conda-forge/linux-64/virtualenv-20.4.7-py37h89c1867_1.tar.bz2#cbe5a8c8ae88d1e73b4297a73d08408a 
-https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.0.1-py37hb1e94ed_2.tar.bz2#ba9daa43279450692efc63037867ed93 -https://conda.anaconda.org/conda-forge/noarch/identify-2.3.7-pyhd8ed1ab_0.tar.bz2#ae1a5e834fbca62ee88ab55fb276be63 -https://conda.anaconda.org/conda-forge/noarch/imagehash-4.2.1-pyhd8ed1ab_0.tar.bz2#01cc8698b6e1a124dc4f585516c27643 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.5.1-py37h1058ff1_0.tar.bz2#b431c18c1cf130f03d83498f2ef7047b -https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.5.3-mpi_mpich_h1364a43_6.tar.bz2#9caa0cf923af3d037897c6d7f8ea57c0 -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.5.8-nompi_py37hf784469_101.tar.bz2#5b05dc55e51be0696878e9a575c12f77 -https://conda.anaconda.org/conda-forge/linux-64/pango-1.48.10-h54213e6_2.tar.bz2#b7ed7c76c9360db1f91afba2e220007b -https://conda.anaconda.org/conda-forge/noarch/pyopenssl-21.0.0-pyhd8ed1ab_0.tar.bz2#8c49efecb7dca466e18b06015e8c88ce -https://conda.anaconda.org/conda-forge/linux-64/pyqtchart-5.12-py37he336c9b_8.tar.bz2#2fe25d82cb4e59191df561c40870ca6b -https://conda.anaconda.org/conda-forge/linux-64/pyqtwebengine-5.12.1-py37he336c9b_8.tar.bz2#0a67d477c0524897883ca0f86d6fb15c -https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.20.1-py37h9a08e6e_5.tar.bz2#e44dc116f747b0a7bceaf1533acc6b48 -https://conda.anaconda.org/conda-forge/linux-64/esmf-8.2.0-mpi_mpich_h4975321_100.tar.bz2#56f5c650937b1667ad0a557a0dff3bc4 -https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h539f30e_1.tar.bz2#606777b4da3664d5c9415f5f165349fd -https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.52.5-hc3c00ef_0.tar.bz2#43694e152ee85559ddf64b1acb8801dd -https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.0-pyhd8ed1ab_0.tar.bz2#9113b4e4fa2fa4a7f129c71a6f319475 -https://conda.anaconda.org/conda-forge/linux-64/pre-commit-2.16.0-py37h89c1867_0.tar.bz2#43b270fe44130353e540037ad27da097 
-https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.12.3-py37h89c1867_8.tar.bz2#8038f9765a907fcf6fdfa6a9db71e371 -https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.7-pyhd8ed1ab_0.tar.bz2#be75bab4820a56f77ba1a3fc9139c36a -https://conda.anaconda.org/conda-forge/linux-64/esmpy-8.2.0-mpi_mpich_py37h7352969_101.tar.bz2#64fd02e7a0cefe0b5c604fea03774c73 -https://conda.anaconda.org/conda-forge/linux-64/graphviz-2.50.0-h85b4f2f_1.tar.bz2#bc6418fd87ea67cf14417337ced3daa2 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.5.1-py37h89c1867_0.tar.bz2#b14435faa62d35cea49a9183d595f145 -https://conda.anaconda.org/conda-forge/noarch/requests-2.26.0-pyhd8ed1ab_1.tar.bz2#358581cc782802270d77c454c73a811a -https://conda.anaconda.org/conda-forge/noarch/sphinx-4.3.2-pyh6c4a22f_0.tar.bz2#e8ffaea0961c0d7a6767f2394042043d -https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.4.0-pyhd8ed1ab_0.tar.bz2#80fd2cc25ad45911b4e42d5b91593e2f -https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.10.1-pyhd8ed1ab_0.tar.bz2#4918585fe5e5341740f7e63c61743efb -https://conda.anaconda.org/conda-forge/noarch/sphinx-panels-0.6.0-pyhd8ed1ab_0.tar.bz2#6eec6480601f5d15babf9c3b3987f34a -https://conda.anaconda.org/conda-forge/noarch/sphinx_rtd_theme-1.0.0-pyhd8ed1ab_0.tar.bz2#9f633f2f2869184e31acfeae95b24345 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.5-pyhd8ed1ab_1.tar.bz2#63d2f874f990fdcab47c822b608d6ade diff --git a/requirements/ci/py37.yml b/requirements/ci/py37.yml deleted file mode 100644 index 094ae847027..00000000000 --- a/requirements/ci/py37.yml +++ /dev/null @@ -1,47 +0,0 @@ -name: iris-dev - -channels: - - conda-forge - -dependencies: - - python =3.7 - -# Setup dependencies. - - setuptools >=40.8.0 - -# Core dependencies. - - cartopy >=0.20 - - cf-units >=3 - - cftime >=1.5 - - dask-core >=2 - - matplotlib - - netcdf4 - - numpy >=1.19 - - python-xxhash - - scipy - -# Optional dependencies. 
- - esmpy >=7.0 - - graphviz - - iris-sample-data >=2.4.0 - - mo_pack - - nc-time-axis >=1.3 - - pandas - - pip - - python-stratify - -# Test dependencies. - - filelock - - imagehash >=4.0 - - nose - - pillow <7 - - pre-commit - - requests - -# Documentation dependencies. - - sphinx - - sphinxcontrib-napoleon - - sphinx-copybutton - - sphinx-gallery - - sphinx-panels - - sphinx_rtd_theme diff --git a/setup.cfg b/setup.cfg index 501614e9d04..1d3fb8b7c90 100644 --- a/setup.cfg +++ b/setup.cfg @@ -59,7 +59,7 @@ packages = find: package_dir = =lib python_requires = - >=3.7 + >=3.8 zip_safe = False [options.packages.find] From 55e6a21c7b3ca24eb6f670802dca440261e856ac Mon Sep 17 00:00:00 2001 From: Bill Little Date: Tue, 11 Jan 2022 10:46:45 +0000 Subject: [PATCH 05/69] address np printoptions pre/post v1.22 (#4486) --- lib/iris/util.py | 38 +++++++++++--------------------------- 1 file changed, 11 insertions(+), 27 deletions(-) diff --git a/lib/iris/util.py b/lib/iris/util.py index 9ab413a493b..a9aaf9f9305 100644 --- a/lib/iris/util.py +++ b/lib/iris/util.py @@ -10,7 +10,6 @@ from abc import ABCMeta, abstractmethod from collections.abc import Hashable, Iterable -from contextlib import contextmanager import copy import functools import inspect @@ -1054,18 +1053,20 @@ def format_array(arr): """ - summary_insert = "" summary_threshold = 85 + summary_insert = "..." if arr.size > summary_threshold else "" edge_items = 3 ffunc = str - formatArray = np.core.arrayprint._formatArray max_line_len = 50 - legacy = "1.13" - if arr.size > summary_threshold: - summary_insert = "..." - options = np.get_printoptions() - options["legacy"] = legacy - with _printopts_context(**options): + + # Format the array with version 1.13 legacy behaviour + with np.printoptions(legacy="1.13"): + # Use this (private) routine for more control. + formatArray = np.core.arrayprint._formatArray + # N.B. 
the 'legacy' arg had different forms in different numpy versions + # -- fetch the required form from the internal options dict + format_options_legacy = np.core.arrayprint._format_options["legacy"] + result = formatArray( arr, ffunc, @@ -1074,29 +1075,12 @@ def format_array(arr): separator=", ", edge_items=edge_items, summary_insert=summary_insert, - legacy=legacy, + legacy=format_options_legacy, ) return result -@contextmanager -def _printopts_context(**kwargs): - """ - Update the numpy printoptions for the life of this context manager. - - Note: this function can be removed with numpy>=1.15 thanks to - https://github.com/numpy/numpy/pull/10406 - - """ - original_opts = np.get_printoptions() - np.set_printoptions(**kwargs) - try: - yield - finally: - np.set_printoptions(**original_opts) - - def new_axis(src_cube, scalar_coord=None): """ Create a new axis as the leading dimension of the cube, promoting a scalar From 273919adc27c2a7032073432ac959ffeba338a6d Mon Sep 17 00:00:00 2001 From: Bill Little Date: Tue, 11 Jan 2022 19:35:46 +0000 Subject: [PATCH 06/69] docs: installing update (#4490) * docs: installing update * Update docs/src/installing.rst Co-authored-by: Ruth Comer <10599679+rcomer@users.noreply.github.com> Co-authored-by: Ruth Comer <10599679+rcomer@users.noreply.github.com> --- docs/src/installing.rst | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/docs/src/installing.rst b/docs/src/installing.rst index e358bb42c98..37a8942ab38 100644 --- a/docs/src/installing.rst +++ b/docs/src/installing.rst @@ -85,10 +85,12 @@ local copy of Iris:: conda env create --force --file=requirements/ci/iris.yml conda activate iris-dev -The ``--force`` option is used when creating the environment, this is optional -and will force the any existing ``iris-dev`` conda environment to be deleted -first if present. This is useful when rebuilding your environment due to a -change in requirements. +.. 
note:: + + The ``--force`` option, used when creating the environment, first removes + any previously existing ``iris-dev`` environment of the same name. This is + particularly useful when rebuilding your environment due to a change in + requirements. The ``requirements/ci/iris.yml`` file defines the Iris development conda environment *name* and all the relevant *top level* `conda-forge` package From f7f96189e58ae565b8fe5559ee28299ecb6431d5 Mon Sep 17 00:00:00 2001 From: lbdreyer Date: Wed, 12 Jan 2022 09:12:50 +0000 Subject: [PATCH 07/69] Clarify interpolate and regrid scheme use (#4196) --- lib/iris/cube.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/iris/cube.py b/lib/iris/cube.py index 3e1e98d12d2..d75b94ea09f 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -4234,7 +4234,7 @@ def interpolate(self, sample_points, scheme, collapse_scalar=True): dates or times may optionally be supplied as datetime.datetime or cftime.datetime instances. * scheme: - The type of interpolation to use to interpolate from this + An instance of the type of interpolation to use to interpolate from this :class:`~iris.cube.Cube` to the given sample points. The interpolation schemes currently available in Iris are: @@ -4307,7 +4307,7 @@ def regrid(self, grid, scheme): * grid: A :class:`~iris.cube.Cube` that defines the target grid. * scheme: - The type of regridding to use to regrid this cube onto the + An instance of the type of regridding to use to regrid this cube onto the target grid. 
The regridding schemes in Iris currently include: * :class:`iris.analysis.Linear`\*, From 9b64c942a0b7f9c6fcf4fa8974c9f9da8f54c7bb Mon Sep 17 00:00:00 2001 From: Will Benfold <69585101+wjbenfold@users.noreply.github.com> Date: Thu, 13 Jan 2022 11:34:35 +0000 Subject: [PATCH 08/69] Delete equalise_cubes.py and add whatsnew (#4496) * Delete equalise_cubes.py and add whatsnew * Add PR number to whatsnew * Move whatsnew entry from Internal to Deprecations * Formatting tweak in whatsnew --- docs/src/whatsnew/latest.rst | 6 ++++- lib/iris/experimental/equalise_cubes.py | 30 ------------------------- 2 files changed, 5 insertions(+), 31 deletions(-) delete mode 100644 lib/iris/experimental/equalise_cubes.py diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 53d215a352d..bd09ca56516 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -168,7 +168,11 @@ This document explains the changes made to Iris for this release 🔥 Deprecations =============== -#. N/A +#. `@wjbenfold`_ removed :mod:`iris.experimental.equalise_cubes`. In ``v3.0`` + the experimental ``equalise_attributes`` functionality was moved to the + :mod:`iris.util.equalise_attributes` function. Since then, calling the + :func:`iris.experimental.equalise_cubes.equalise_attributes` function raised + an exception. (:issue:`3528`, :pull:`4496`) 🔗 Dependencies diff --git a/lib/iris/experimental/equalise_cubes.py b/lib/iris/experimental/equalise_cubes.py deleted file mode 100644 index 8be71750673..00000000000 --- a/lib/iris/experimental/equalise_cubes.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Experimental cube-adjusting functions to assist merge operations. 
- -""" - - -def equalise_attributes(cubes): - """ - Delete cube attributes that are not identical over all cubes in a group. - - .. warning:: - - This function is now **disabled**. - - The functionality has been moved to - :func:`iris.util.equalise_attributes`. - - """ - old = "iris.experimental.equalise_cubes.equalise_attributes" - new = "iris.util.equalise_attributes" - emsg = ( - f'The function "{old}" has been moved.\n' - f'Please replace "{old}()" with "{new}()".' - ) - raise Exception(emsg) From 3790b44c0c7e74abcf2ed2755e38bbca4f2b23ad Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Thu, 13 Jan 2022 13:24:45 +0000 Subject: [PATCH 09/69] Remove all Metarelate dependencies. (#4484) * Remove all Metarelate dependencies. * Added whatsnew. --- docs/src/whatsnew/latest.rst | 5 + lib/iris/fileformats/_ff_cross_references.py | 6 +- lib/iris/fileformats/um_cf_map.py | 8 - tools/gen_helpers.py | 36 - tools/gen_stash_refs.py | 126 -- tools/gen_translations.py | 216 ---- tools/translator/__init__.py | 1116 ------------------ 7 files changed, 7 insertions(+), 1506 deletions(-) delete mode 100644 tools/gen_helpers.py delete mode 100644 tools/gen_stash_refs.py delete mode 100644 tools/gen_translations.py delete mode 100644 tools/translator/__init__.py diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index bd09ca56516..5f1a7314dc0 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -94,6 +94,10 @@ This document explains the changes made to Iris for this release #. `@pp-mo`_ updated to the latest CF Standard Names Table ``v78`` (21 Sept 2021). (:issue:`4479`, :pull:`4483`) +#. `@pp-mo`_ removed broken tooling for deriving Iris metadata translations + from `Metarelate`_. From now we intend to manage phenonemon translation + in Iris itself. (:pull:`4484`) + 🐛 Bugs Fixed ============= @@ -303,3 +307,4 @@ This document explains the changes made to Iris for this release .. 
_UGRID: http://ugrid-conventions.github.io/ugrid-conventions/ .. _sort-all: https://github.com/aio-libs/sort-all .. _faster documentation building: https://docs.readthedocs.io/en/stable/guides/conda.html#making-builds-faster-with-mamba +.. _Metarelate: http://www.metarelate.net/ \ No newline at end of file diff --git a/lib/iris/fileformats/_ff_cross_references.py b/lib/iris/fileformats/_ff_cross_references.py index 0c7af263242..ca41f5257fa 100644 --- a/lib/iris/fileformats/_ff_cross_references.py +++ b/lib/iris/fileformats/_ff_cross_references.py @@ -3,11 +3,9 @@ # This file is part of Iris and is released under the LGPL license. # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. -# -# DO NOT EDIT: AUTO-GENERATED """ -Auto-generated from iris/tools/gen_stash_refs.py -Relates grid code and field code to the stash code. +Table providing UM grid-code, field-code and pseudolevel-type for (some) +stash codes. Used in UM file i/o. """ diff --git a/lib/iris/fileformats/um_cf_map.py b/lib/iris/fileformats/um_cf_map.py index c2a0a5d09e2..f3d392fc5fd 100644 --- a/lib/iris/fileformats/um_cf_map.py +++ b/lib/iris/fileformats/um_cf_map.py @@ -3,14 +3,6 @@ # This file is part of Iris and is released under the LGPL license. # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. -# -# DO NOT EDIT: AUTO-GENERATED -# Created on 29 November 2019 14:11 from -# http://www.metarelate.net/metOcean -# at commit 448f2ef5e676edaaa27408b9f3ddbecbf05e3289 -# -# https://github.com/metarelate/metOcean/commit/448f2ef5e676edaaa27408b9f3ddbecbf05e3289 - """ Provides UM/CF phenomenon translations. diff --git a/tools/gen_helpers.py b/tools/gen_helpers.py deleted file mode 100644 index 825c78139e5..00000000000 --- a/tools/gen_helpers.py +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -from datetime import datetime -import os -import os.path - -HEADER = \ - '''# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -# -# DO NOT EDIT: AUTO-GENERATED''' - - -def absolute_path(path): - return os.path.abspath(os.path.join(os.path.dirname(__file__), path)) - - -def prep_module_file(module_path): - """ - prepare a module file, creating directory if needed and writing the - header into that file - - """ - module_path = absolute_path(module_path) - module_dir = os.path.dirname(module_path) - if not os.path.isdir(module_dir): - os.makedirs(module_dir) - with open(module_path, 'w') as module_file: - module_file.write(HEADER.format(datetime.utcnow().year)) diff --git a/tools/gen_stash_refs.py b/tools/gen_stash_refs.py deleted file mode 100644 index e614b52ab2f..00000000000 --- a/tools/gen_stash_refs.py +++ /dev/null @@ -1,126 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -import json -import urllib -import urllib2 - -from iris.fileformats.pp import STASH - -import gen_helpers - - -HEADER = ''' -""" -Auto-generated from iris/tools/gen_stash_refs.py -Relates grid code and field code to the stash code. - -""" -''' - -CODE_PREAMBLE = ("from collections import namedtuple\n\n\n" - "Stash = namedtuple('Stash', " - "'grid_code field_code pseudo_level_type')\n\n\n") - - -def _value_from_xref(xref, name): - """Return the value for the key name from xref. - - Will return 0 if the key does not look like an integer. 
- """ - - result = xref.get(name) - try: - int(result) - except (ValueError, TypeError): - result = 0 - return result - - -def write_cross_reference_module(module_path, xrefs): - gen_helpers.prep_module_file(module_path) - with open(module_path, 'a') as module_file: - module_file.write(HEADER) - module_file.write(CODE_PREAMBLE) - module_file.write('STASH_TRANS = {\n') - for xref in xrefs: - stash = xref.get('stash') - try: - STASH.from_msi(stash.replace('"', '')) - except ValueError: - msg = ('stash code is not of a recognised' - '"m??s??i???" form: {}'.format(stash)) - print(msg) - grid = xref.get('grid') - if grid is not None: - try: - int(grid) - except ValueError: - msg = ('grid code retrieved from STASH lookup' - 'is not an integer: {}'.format(grid)) - print(msg) - else: - grid = 0 - - lbfc = _value_from_xref(xref, 'lbfcn') - pseudT = _value_from_xref(xref, 'pseudT') - - module_file.write( - ' "{}": Stash({}, {}, {}),\n'.format(stash, - grid, - lbfc, - pseudT)) - module_file.write('}\n') - - -def stash_grid_retrieve(): - """return a dictionary of stash codes and rel;ated information from - the Met Office Reference Registry - """ - baseurl = 'http://reference.metoffice.gov.uk/system/query?query=' - query = '''prefix rdf: -prefix rdfs: -prefix skos: - -SELECT ?stash ?grid ?lbfcn ?pseudT -WHERE { - ?stashcode rdf:type ; - skos:notation ?stash ; - ?gridcode . -OPTIONAL { ?gridcode skos:notation ?grid .} -OPTIONAL {?stashcode ?lbfc . - ?lbfc skos:notation ?lbfcn .} -OPTIONAL {?stashcode ?pseudT_id . - ?pseudT_id skos:notation ?pseudT . 
} -} -order by ?stash''' - - encquery = urllib.quote_plus(query) - out_format = '&output=json' - url = baseurl + encquery + out_format - - response = urllib2.urlopen(url) - stash = json.loads(response.read()) - - ## heads will be of the form [u'stash', u'grid', u'lbfcn', u'pseudT'] - ## as defined in the query string - heads = stash['head']['vars'] - - stashcodes = [] - - for result in stash['results']['bindings']: - res = {} - for head in heads: - if head in result: - res[head] = result[head]['value'] - stashcodes.append(res) - return stashcodes - - -if __name__ == '__main__': - xrefs = stash_grid_retrieve() - outfile = '../lib/iris/fileformats/_ff_cross_references.py' - write_cross_reference_module(outfile, xrefs) diff --git a/tools/gen_translations.py b/tools/gen_translations.py deleted file mode 100644 index 5ac0dc02bac..00000000000 --- a/tools/gen_translations.py +++ /dev/null @@ -1,216 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Processing of metarelate metOcean content to provide Iris encodings of -metOcean mapping translations. - -""" - -from datetime import datetime -import os.path -import requests -import sys - -import metarelate -from metarelate.fuseki import FusekiServer - -from translator import (FORMAT_URIS, FieldcodeCFMappings, StashCFNameMappings, - StashCFHeightConstraintMappings, - CFFieldcodeMappings, - GRIB1LocalParamCFConstrainedMappings, - GRIB1LocalParamCFMappings, GRIB2ParamCFMappings, - CFConstrainedGRIB1LocalParamMappings, - CFGRIB2ParamMappings, CFGRIB1LocalParamMappings) - -HEADER = """# Copyright {name} contributors -# -# This file is part of {name} and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
-# -# DO NOT EDIT: AUTO-GENERATED -# Created on {datestamp} from -# http://www.metarelate.net/metOcean -# at commit {git_sha} -# https://github.com/metarelate/metOcean/commit/{git_sha} -{doc_string} - - -from collections import namedtuple - - -CFName = namedtuple('CFName', 'standard_name long_name units') -""" - -HEADER_GRIB = """ -DimensionCoordinate = namedtuple('DimensionCoordinate', - 'standard_name units points') - -G1LocalParam = namedtuple('G1LocalParam', 'edition t2version centre iParam') -G2Param = namedtuple('G2Param', 'edition discipline category number') -""" - -DOC_STRING_GRIB = r'''""" -Provides GRIB/CF phenomenon translations. - -"""''' - -DOC_STRING_UM = r'''""" -Provides UM/CF phenomenon translations. - -"""''' - -YEAR = datetime.utcnow().year - -def _retrieve_mappings(fuseki, source, target): - """ - Interrogate the metarelate triple store for all - phenomenon translation mappings from the source - scheme to the target scheme. - - Args: - * fuseki: - The :class:`metrelate.fuseki.FusekiServer` instance. - * source: - The source metarelate metadata type for the mapping. - * target: - The target metarelate metadata type for the mapping. - - Return: - The sequence of :class:`metarelate.Mapping` - instances. - - """ - suri = 'http://www.metarelate.net/sparql/metOcean' - msg = 'Retrieving {!r} to {!r} mappings ...' - print(msg.format(source, target)) - return fuseki.retrieve_mappings(source, target, service=suri) - - -def build_um_cf_map(fuseki, now, git_sha, base_dir): - """ - Encode the UM/CF phenomenon translation mappings - within the specified file. - - Args: - * fuseki: - The :class:`metarelate.fuseki.FusekiServer` instance. - * now: - Time stamp to write into the file - * git_sha: - The git SHA1 of the metarelate commit - * base_dir: - The root directory of the Iris source. - - """ - filename = os.path.join(base_dir, 'lib', 'iris', 'fileformats', - 'um_cf_map.py') - - # Create the base directory. 
- if not os.path.exists(os.path.dirname(filename)): - os.makedirs(os.path.dirname(filename)) - - # Create the file to contain UM/CF translations. - with open(filename, 'w') as fh: - fh.write(HEADER.format(year=YEAR, doc_string=DOC_STRING_UM, - datestamp=now, git_sha=git_sha, name='Iris')) - fh.write('\n') - - # Encode the relevant UM to CF translations. - maps = _retrieve_mappings(fuseki, FORMAT_URIS['umf'], - FORMAT_URIS['cff']) - # create the collections, then call lines on each one - # for thread safety during lines and encode - fccf = FieldcodeCFMappings(maps) - stcf = StashCFNameMappings(maps) - stcfhcon = StashCFHeightConstraintMappings(maps) - fh.writelines(fccf.lines(fuseki)) - fh.writelines(stcf.lines(fuseki)) - fh.writelines(stcfhcon.lines(fuseki)) - - # Encode the relevant CF to UM translations. - maps = _retrieve_mappings(fuseki, FORMAT_URIS['cff'], - FORMAT_URIS['umf']) - # create the collections, then call lines on each one - # for thread safety during lines and encode - cffc = CFFieldcodeMappings(maps) - fh.writelines(cffc.lines(fuseki)) - - -def build_grib_cf_map(fuseki, now, git_sha, base_dir): - """ - Encode the GRIB/CF phenomenon translation mappings - within the specified file. - - Args: - * fuseki: - The :class:`metarelate.fuseki.FusekiServer` instance. - * now: - Time stamp to write into the file - * git_sha: - The git SHA1 of the metarelate commit - * base_dir: - The root directory of the Iris source. - - """ - filename = os.path.join(base_dir, 'lib', 'iris', 'fileformats', - 'grib', '_grib_cf_map.py') - if not os.path.exists(os.path.dirname(filename)): - os.makedirs(os.path.dirname(filename)) - - # Create the file to contain GRIB/CF translations. - with open(filename, 'w') as fh: - fh.write(HEADER.format(year=YEAR, doc_string=DOC_STRING_GRIB, - datestamp=now, git_sha=git_sha, - name='iris-grib')) - fh.write(HEADER_GRIB) - fh.write('\n') - - # Encode the relevant GRIB to CF translations. 
- maps = _retrieve_mappings(fuseki, FORMAT_URIS['gribm'], - FORMAT_URIS['cff']) - # create the collections, then call lines on each one - # for thread safety during lines and encode - g1cfc = GRIB1LocalParamCFConstrainedMappings(maps) - g1c = GRIB1LocalParamCFMappings(maps) - g2c = GRIB2ParamCFMappings(maps) - fh.writelines(g1cfc.lines(fuseki)) - fh.writelines(g1c.lines(fuseki)) - fh.writelines(g2c.lines(fuseki)) - - # Encode the relevant CF to GRIB translations. - maps = _retrieve_mappings(fuseki, FORMAT_URIS['cff'], - FORMAT_URIS['gribm']) - # create the collections, then call lines on each one - # for thread safety during lines and encode - cfcg1 = CFConstrainedGRIB1LocalParamMappings(maps) - cg1 = CFGRIB1LocalParamMappings(maps) - cg2 = CFGRIB2ParamMappings(maps) - fh.writelines(cfcg1.lines(fuseki)) - fh.writelines(cg1.lines(fuseki)) - fh.writelines(cg2.lines(fuseki)) - - -def main(): - # Protect metarelate resource from 1.0 emergent bug - if not float(metarelate.__version__) >= 1.1: - raise ValueError("Please ensure that Metarelate Version is >= 1.1") - now = datetime.utcnow().strftime('%d %B %Y %H:%m') - git_sha = requests.get('http://www.metarelate.net/metOcean/latest_sha').text - gen_path = os.path.abspath(sys.modules['__main__'].__file__) - iris_path = os.path.dirname(os.path.dirname(gen_path)) - with FusekiServer() as fuseki: - build_um_cf_map(fuseki, now, git_sha, iris_path) - build_grib_cf_map(fuseki, now, git_sha, iris_path) - - if (git_sha != - requests.get('http://www.metarelate.net/metOcean/latest_sha').text): - raise ValueError('The metarelate translation store has altered during' - 'your retrieval, the results may not be stable.\n' - 'Please rerun your retrieval.') - -if __name__ == '__main__': - main() diff --git a/tools/translator/__init__.py b/tools/translator/__init__.py deleted file mode 100644 index a83fee4edd8..00000000000 --- a/tools/translator/__init__.py +++ /dev/null @@ -1,1116 +0,0 @@ -# Copyright Iris contributors -# -# This file is 
part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Provides the framework to support the encoding of metarelate mapping -translations. - -""" - -from abc import ABCMeta, abstractmethod -from collections import deque, namedtuple -import copy -from queue import Queue -import re -from threading import Thread -import warnings - -from metarelate.fuseki import FusekiServer, WorkerThread, MAXTHREADS -import metarelate - -# known format identifier URIs -FORMAT_URIS = {'cff': '', - 'gribm': '', - 'umf': ''} - -CFName = namedtuple('CFName', 'standard_name long_name units') -DimensionCoordinate = namedtuple('DimensionCoordinate', - 'standard_name units points') -G1LocalParam = namedtuple('G1LocalParam', 'edition t2version centre iParam') -G2Param = namedtuple('G2Param', 'edition discipline category number') - - -class MappingEncodeWorker(WorkerThread): - """Worker thread class for handling EncodableMap instances""" - def dowork(self, resource): - resource.encode(self.fuseki_process) - - -class EncodableMap: - """ - A metarelate mapping able to encode itself as a string for use in Iris, - as defined by a translator Mappings subclass - - """ - def __init__(self, mapping, sourcemsg, targetmsg, sourceid, targetid): - """ - Args: - * mapping: - A :class:`metarelate.Mapping` instance representing a translation. 
- * sourcemsg: - The code snippet message for the source of the translation for - formatting - * targetmsg: - The code snippet message for the target of the translation for - formatting - * sourceid: - A dictionary of required key:value pairs required by the sourcemsg - * targetid: - A dictionary of required key:value pairs required by the targetmsg - - """ - self.mapping = mapping - self.sourcemsg = sourcemsg - self.targetmsg = targetmsg - self.sourceid = sourceid - self.targetid = targetid - self.encoding = None - - def encode(self, fuseki_process): - """ - Return a string of the Python source code required to represent an - entry in a dictionary mapping source to target. - - Args: - * fuseki_process: - A :class:`metarelate.fuseki.FusekiServer` instance. - - """ - sids, tids = self.mapping.get_identifiers(fuseki_process) - self.sourceid.update(sids) - self.targetid.update(tids) - self.encoding = '{}: {}'.format(self.sourcemsg.format(**self.sourceid), - self.targetmsg.format(**self.targetid)) - - -class Mappings(metaclass=ABCMeta): - """ - Abstract base class to support the encoding of specific metarelate - mapping translations. - - """ - - def __init__(self, mappings): - """ - Filter the given sequence of mappings for those member - :class:`metarelate.Mapping` translations containing a source - :class:`metarelate.Component` with a matching - :attribute:`Mapping.source_scheme` and a target - :class:`metarelate.Component` with a matching - :attribute:`Mapping.target_scheme`. - - Also see :method:`Mapping.valid_mapping` for further matching - criterion for candidate metarelate mapping translations. - - Args: - * mappings: - Iterator of :class:`metarelate.Mapping` instances. - - """ - temp = [] - # Filter the mappings for the required type of translations. 
- for mapping in mappings: - source = mapping.source - target = mapping.target - sourcemsg, targetmsg = self.msg_strings() - sourceid, targetid = self.get_initial_id_nones() - if source.com_type == self.source_scheme and \ - target.com_type == self.target_scheme and \ - self.valid_mapping(mapping): - temp.append(EncodableMap(mapping, sourcemsg, targetmsg, - sourceid, targetid)) - self.mappings = temp - if len(self) == 0: - msg = '{!r} contains no mappings.' - warnings.warn(msg.format(self.__class__.__name__)) - - def _sort_lines(self, payload): - """ - Return the payload, unsorted. - - """ - return payload - - def lines(self, fuseki_process): - """ - Provides an iterator generating the encoded string representation - of each member of this metarelate mapping translation. - - Returns: - An iterator of string. - - """ - msg = '\tGenerating phenomenon translation {!r}.' - print(msg.format(self.mapping_name)) - lines = ['\n%s = {\n' % self.mapping_name] - # Retrieve encodings for the collection of mapping instances. - # Retrieval is threaded as it is heavily bound by resource resolution - # over http. - # Queue for metarelate mapping instances - mapenc_queue = Queue() - for mapping in self.mappings: - mapenc_queue.put(mapping) - # deque to contain the results of the jobs processed from the queue - mapencs = deque() - # run worker threads - for i in range(MAXTHREADS): - MappingEncodeWorker(mapenc_queue, mapencs, fuseki_process).start() - # block progress until the queue is empty - mapenc_queue.join() - # end of threaded retrieval process. 
- - # now sort the payload - payload = [mapenc.encoding for mapenc in mapencs] - payload.sort(key=self._key) - lines.extend(payload) - lines.append(' }\n') - return iter(lines) - - def __len__(self): - return len(self.mappings) - - def _key(self, line): - """Method to provide the sort key of the mappings order.""" - return line - - @property - @abstractmethod - def mapping_name(self): - """ - Abstract property that specifies the name of the dictionary - to contain the encoding of this metarelate mapping translation. - - """ - - @property - @abstractmethod - def source_scheme(self): - """ - Abstract property that specifies the name of the scheme for - the source :class:`metarelate.Component` defining this metarelate - mapping translation. - - """ - - @property - @abstractmethod - def target_scheme(self): - """ - Abstract property that specifies the name of the scheme for - the target :class:`metarelate.Component` defining this metarelate - mapping translation. - - """ - - @abstractmethod - def valid_mapping(self, mapping): - """ - Abstract method that determines whether the provided - :class:`metarelate.Mapping` is a translation from the required - source :class:`metarelate.Component` to the required target - :class:`metarelate.Component`. - - """ - - def get_initial_id_nones(self): - """ - Return the identifier items which may not exist, in the translation - database, and are needed for a msg_string. These must exist, even - even if not written from the database. - - Returns two dictionaries to use as the start point for - population from the database. - - """ - sourceid = {} - targetid = {} - return sourceid, targetid - - def is_cf(self, comp): - """ - Determines whether the provided component from a mapping - represents a simple CF component of the given kind. - - Args: - * component: - A :class:`metarelate.Component` or - :class:`metarelate.Component` instance. - - Returns: - Boolean. 
- - """ - kind = FORMAT_URIS['cff'] - result = False - result = hasattr(comp, 'com_type') and \ - comp.com_type == kind and \ - hasattr(comp, 'units') and \ - len(comp) in [1, 2] - return result - - def is_cf_constrained(self, comp): - """ - Determines whether the provided component from a mapping - represents a compound CF component for a phenomenon and - one, single valued dimension coordinate. - - Args: - * component: - A :class:`metarelate.Component` instance. - - Returns: - Boolean. - - """ - ftype = FORMAT_URIS['cff'] - result = False - cffield = hasattr(comp, 'com_type') and comp.com_type == ftype and \ - hasattr(comp, 'units') and (hasattr(comp, 'standard_name') or - hasattr(comp, 'long_name')) - dimcoord = hasattr(comp, 'dim_coord') and \ - isinstance(comp.dim_coord, metarelate.ComponentProperty) and \ - comp.dim_coord.component.com_type.notation == 'DimCoord' - result = cffield and dimcoord - return result - - def is_cf_height_constrained(self, comp): - item_sn = metarelate.Item((''), - 'standard_name') - item_h = metarelate.Item((''), - 'height') - snprop = metarelate.StatementProperty(item_sn, item_h) - item_u = metarelate.Item((''), - 'units') - uprop = metarelate.StatementProperty(item_u, - metarelate.Item('"m"', 'm')) - pts_pred = metarelate.Item((''), - 'points') - result = False - if self.is_cf_constrained(comp): - props = comp.dim_coord.component.properties - if len(props) == 3: - if snprop in props and uprop in props: - preds = [prop.predicate for prop in props] - if pts_pred in preds: - result = True - return result - - def is_fieldcode(self, component): - """ - Determines whether the provided concept from a mapping - represents a simple UM concept for a field-code. - - Args: - * concept: - A :class:`metarelate.Component` instance. - - Returns: - Boolean. 
- - """ - result = False - result = hasattr(component, 'lbfc') and len(component) == 1 - return result - - def is_grib1_local_param(self, component): - """ - Determines whether the provided component from a mapping - represents a simple GRIB edition 1 component for a local - parameter. - - Args: - * component: - A :class:`metarelate.Component` instance. - - Returns: - Boolean. - - """ - result = len(component) == 1 and hasattr(component, 'grib1_parameter') - return result - - def is_grib2_param(self, component): - """ - Determines whether the provided component from a mapping - represents a simple GRIB edition 2 component for a parameter. - - Args: - * component: - A :class:`metarelate.Component` instance. - - Returns: - Boolean. - - """ - - result = len(component) == 1 and hasattr(component, 'grib2_parameter') - return result - - def is_stash(self, component): - """ - Determines whether the provided concept for a mapping - represents a simple UM concept for a stash-code. - - Args: - * concept: - A :class:`metarelate.Component` instance. - - Returns: - Boolean. - - """ - result = False - result = hasattr(component, 'stash') and len(component) == 1 - return result - - -def _cfn(line): - """ - Helper function to parse dictionary lines using the CFName named tuple. - Matches to the line ' CFName({standard_name}, {long_name}, {units}:*) - giving access to these named parts - - """ - match = re.match('^ CFName\((.+), (.+), (.+)\):.+,', line) - if match is None: - raise ValueError('encoding not sortable') - standard_name, long_name, units = match.groups() - if standard_name == 'None': - standard_name = None - if long_name == 'None': - long_name = None - return [standard_name, long_name, units] - - -class CFFieldcodeMappings(Mappings): - """ - Represents a container for CF phenomenon to UM field-code metarelate - mapping translations. 
- - Encoding support is provided to generate the Python dictionary source - code representation of these mappings from CF standard name, long name, - and units to UM field-code. - - """ - def _key(self, line): - """Provides the sort key of the mappings order.""" - return _cfn(line) - - def msg_strings(self): - return (' CFName({standard_name!r}, {long_name!r}, ' - '{units!r})', - '{lbfc},\n') - - def get_initial_id_nones(self): - sourceid = {'standard_name': None, 'long_name': None} - targetid = {} - return sourceid, targetid - - @property - def mapping_name(self): - """ - Property that specifies the name of the dictionary to contain the - encoding of this metarelate mapping translation. - - """ - return 'CF_TO_LBFC' - - @property - def source_scheme(self): - """ - Property that specifies the name of the scheme for the source - :class:`metarelate.Component` defining this metarelate mapping - translation. - - """ - return FORMAT_URIS['cff'] - - @property - def target_scheme(self): - """ - Property that specifies the name of the scheme for the target - :class:`metarelate.Component` defining this metarelate mapping - translation. - - """ - return FORMAT_URIS['umf'] - - def valid_mapping(self, mapping): - """ - Determine whether the provided :class:`metarelate.Mapping` represents a - CF to UM field-code translation. - - Args: - * mapping: - A :class:`metarelate.Mapping` instance. - - Returns: - Boolean. - - """ - return self.is_cf(mapping.source) and self.is_fieldcode(mapping.target) - - -class FieldcodeCFMappings(Mappings): - """ - Represents a container for UM field-code to CF phenomenon metarelate - mapping translations. - - Encoding support is provided to generate the Python dictionary source - code representation of these mappings from UM field-code to - CF standard name, long name, and units. 
- - """ - def _key(self, line): - """Provides the sort key of the mappings order.""" - return int(line.split(':')[0].strip()) - - def msg_strings(self): - return (' {lbfc}', - 'CFName({standard_name!r}, {long_name!r}, {units!r}),\n') - - def get_initial_id_nones(self): - sourceid = {} - targetid = {'standard_name': None, 'long_name': None} - return sourceid, targetid - - @property - def mapping_name(self): - """ - Property that specifies the name of the dictionary to contain the - encoding of this metarelate mapping translation. - - """ - return 'LBFC_TO_CF' - - @property - def source_scheme(self): - """ - Property that specifies the name of the scheme for the source - :class:`metarelate.Component` defining this metarelate mapping - translation. - - """ - return FORMAT_URIS['umf'] - - @property - def target_scheme(self): - """ - Property that specifies the name of the scheme for the target - :class:`metarelate.Component` defining this metarelate mapping - translation. - - """ - return FORMAT_URIS['cff'] - - def valid_mapping(self, mapping): - """ - Determine whether the provided :class:`metarelate.Mapping` represents a - UM field-code to CF translation. - - Args: - * mapping: - A :class:`metarelate.Mapping` instance. - - Returns: - Boolean. - - """ - return self.is_fieldcode(mapping.source) and self.is_cf(mapping.target) - - -class StashCFNameMappings(Mappings): - """ - Represents a container for UM stash-code to CF phenomenon metarelate - mapping translations. - - Encoding support is provided to generate the Python dictionary source - code representation of these mappings from UM stash-code to CF - standard name, long name, and units. 
- - """ - def _key(self, line): - """Provides the sort key of the mappings order.""" - return line.split(':')[0].strip() - - def msg_strings(self): - return(' {stash!r}', - 'CFName({standard_name!r}, ' - '{long_name!r}, {units!r}),\n') - - def get_initial_id_nones(self): - sourceid = {} - targetid = {'standard_name': None, 'long_name': None} - return sourceid, targetid - - @property - def mapping_name(self): - """ - Property that specifies the name of the dictionary to contain the - encoding of this metarelate mapping translation. - - """ - return 'STASH_TO_CF' - - @property - def source_scheme(self): - """ - Property that specifies the name of the scheme for the source - :class:`metarelate.Component` defining this metarelate mapping - translation. - - """ - return FORMAT_URIS['umf'] - - @property - def target_scheme(self): - """ - Property that specifies the name of the scheme for the target - :class:`metarelate.Component` defining this metarelate mapping - translation. - - """ - return FORMAT_URIS['cff'] - - def valid_mapping(self, mapping): - """ - Determine whether the provided :class:`metarelate.Mapping` represents a - UM stash-code to CF translation. - - Args: - * mapping: - A :class:`metarelate.Mapping` instance. - - Returns: - Boolean. - - """ - return (self.is_stash(mapping.source) and - (self.is_cf(mapping.target) or - self.is_cf_constrained(mapping.target))) - - -class StashCFHeightConstraintMappings(Mappings): - """ - Represents a container for UM stash-code to CF phenomenon metarelate - mapping translations where a singular height constraint is defined by - the STASH code. - - Encoding support is provided to generate the Python dictionary source - code representation of these mappings from UM stash-code to CF - standard name, long name, and units. 
- - """ - def _key(self, line): - """Provides the sort key of the mappings order.""" - return line.split(':')[0].strip() - - def msg_strings(self): - return(' {stash!r}', - '{dim_coord[points]},\n') - - def get_initial_id_nones(self): - sourceid = {} - targetid = {} - return sourceid, targetid - - @property - def mapping_name(self): - """ - Property that specifies the name of the dictionary to contain the - encoding of this metarelate mapping translation. - - """ - return 'STASHCODE_IMPLIED_HEIGHTS' - - @property - def source_scheme(self): - """ - Property that specifies the name of the scheme for the source - :class:`metarelate.Component` defining this metarelate mapping - translation. - - """ - return FORMAT_URIS['umf'] - - @property - def target_scheme(self): - """ - Property that specifies the name of the scheme for the target - :class:`metarelate.Component` defining this metarelate mapping - translation. - - """ - return FORMAT_URIS['cff'] - - def valid_mapping(self, mapping): - """ - Determine whether the provided :class:`metarelate.Mapping` represents a - UM stash-code to CF translation. - - Args: - * mapping: - A :class:`metarelate.Mapping` instance. - - Returns: - Boolean. - - """ - return (self.is_stash(mapping.source) and - self.is_cf_height_constrained(mapping.target)) - - -class GRIB1LocalParamCFMappings(Mappings): - """ - Represents a container for GRIB (edition 1) local parameter to - CF phenomenon metarelate mapping translations. - - Encoding support is provided to generate the Python dictionary source - code representation of these mappings from GRIB1 edition, table II version, - centre and indicator of parameter to CF standard name, long name and units. 
- - """ - def _key(self, line): - """Provides the sort key of the mappings order.""" - matchstr = ('^ G1LocalParam\(([0-9]+), ([0-9]+), ' - '([0-9]+), ([0-9]+)\):.*') - match = re.match(matchstr, line) - if match is None: - raise ValueError('encoding not sortable') - return [int(i) for i in match.groups()] - - def msg_strings(self): - return (' G1LocalParam({editionNumber}, {table2version}, ' - '{centre}, {indicatorOfParameter})', - 'CFName({standard_name!r}, ' - '{long_name!r}, {units!r}),\n') - - def get_initial_id_nones(self): - sourceid = {} - targetid = {'standard_name': None, 'long_name': None} - return sourceid, targetid - - @property - def mapping_name(self): - """ - Property that specifies the name of the dictionary to contain the - encoding of this metarelate mapping translation. - - """ - return 'GRIB1_LOCAL_TO_CF' - - @property - def source_scheme(self): - """ - Property that specifies the name of the scheme for the source - :class:`metarelate.Component` defining this metarelate mapping - translation. - - """ - return FORMAT_URIS['gribm'] - - @property - def target_scheme(self): - """ - Property that specifies the name of the scheme for the target - :class:`metarelate.Component` defining this metarelate mapping - translation. - - """ - return FORMAT_URIS['cff'] - - def valid_mapping(self, mapping): - """ - Determine whether the provided :class:`metarelate.Mapping` represents a - GRIB1 local parameter to CF phenomenon translation. - - Args: - * mapping: - A :class:`metarelate.Mapping` instance. - - Returns: - Boolean. - - """ - return self.is_grib1_local_param(mapping.source) and \ - self.is_cf(mapping.target) - - -class CFGRIB1LocalParamMappings(Mappings): - """ - Represents a container for CF phenomenon to GRIB (edition 1) local - parameter metarelate mapping translations. 
- - Encoding support is provided to generate the Python dictionary source - code representation of these mappings from CF standard name, long name - and units to GRIB1 edition, table II version, centre and indicator of - parameter. - - """ - def _key(self, line): - """Provides the sort key of the mappings order.""" - return _cfn(line) - - def msg_strings(self): - return (' CFName({standard_name!r}, {long_name!r}, ' - '{units!r})', - 'G1LocalParam({editionNumber}, {table2version}, ' - '{centre}, {indicatorOfParameter}),\n') - - def get_initial_id_nones(self): - sourceid = {'standard_name': None, 'long_name': None} - targetid = {} - return sourceid, targetid - - @property - def mapping_name(self): - """ - Property that specifies the name of the dictionary to contain the - encoding of this metarelate mapping translation. - - """ - return 'CF_TO_GRIB1_LOCAL' - - @property - def source_scheme(self): - """ - Property that specifies the name of the scheme for the source - :class:`metarelate.Component` defining this metarelate mapping - translation. - - """ - return FORMAT_URIS['cff'] - - @property - def target_scheme(self): - """ - Property that specifies the name of the scheme for the target - :class:`metarelate.Component` defining this metarelate mapping - translation. - - """ - return FORMAT_URIS['gribm'] - - def valid_mapping(self, mapping): - """ - Determine whether the provided :class:`metarelate.Mapping` represents a - CF phenomenon to GRIB1 local parameter translation. - - Args: - * mapping: - A :class:`metarelate.Mapping` instance. - - Returns: - Boolean. - - """ - return self.is_cf(mapping.source) and \ - self.is_grib1_local_param(mapping.target) - - -class GRIB1LocalParamCFConstrainedMappings(Mappings): - """ - Represents a container for GRIB (edition 1) local parameter to - CF phenomenon and dimension coordinate constraint metarelate mapping - translations. 
- - Encoding support is provided to generate the Python dictionary source - code representation of these mappings from GRIB1 edition, table II version, - centre and indicator of parameter to CF phenomenon standard name, long name - and units, and CF dimension coordinate standard name, units and points. - - """ - def _key(self, line): - """Provides the sort key of the mappings order.""" - return line.split(':')[0].strip() - - def msg_strings(self): - return (' G1LocalParam({editionNumber}, {table2version}, ' - '{centre}, {indicatorOfParameter})', - '(CFName({standard_name!r}, ' - '{long_name!r}, {units!r}), ' - 'DimensionCoordinate({dim_coord[standard_name]!r}, ' - '{dim_coord[units]!r}, {dim_coord[points]})),\n') - - def get_initial_id_nones(self): - sourceid = {} - targetid = {'standard_name': None, 'long_name': None} - return sourceid, targetid - - @property - def mapping_name(self): - """ - Property that specifies the name of the dictionary to contain the - encoding of this metarelate mapping translation. - - """ - return 'GRIB1_LOCAL_TO_CF_CONSTRAINED' - - @property - def source_scheme(self): - """ - Property that specifies the name of the scheme for the source - :class:`metarelate.Component` defining this metarelate mapping - translation. - - """ - return FORMAT_URIS['gribm'] - - @property - def target_scheme(self): - """ - Property that specifies the name of the scheme for the target - :class:`metarelate.Component` defining this metarelate mapping - translation. - - """ - return FORMAT_URIS['cff'] - - def valid_mapping(self, mapping): - """ - Determine whether the provided :class:`metarelate.Mapping` represents a - GRIB1 local parameter to CF phenomenon and dimension coordinate - translation. - - Args: - * mapping: - A :class:`metarelate.Mapping` instance. - - Returns: - Boolean. 
- - """ - return self.is_grib1_local_param(mapping.source) and \ - self.is_cf_constrained(mapping.target) - - -class CFConstrainedGRIB1LocalParamMappings(Mappings): - """ - Represents a container for CF phenomenon and dimension coordinate - constraint to GRIB (edition 1) local parameter metarelate mapping - translations. - - Encoding support is provided to generate the Python dictionary source - code representation of these mappings from CF phenomenon standard name, - long name and units, and CF dimension coordinate standard name, units and - points to GRIB1 edition, table II version, centre and indicator of - parameter. - - """ - def _key(self, line): - """Provides the sort key of the mappings order.""" - return line.split(':')[0].strip() - - def msg_strings(self): - return (' (CFName({standard_name!r}, ' - '{long_name!r}, {units!r}), ' - 'DimensionCoordinate({dim_coord[standard_name]!r}, ' - '{dim_coord[units]!r}, {dim_coord[points]}))', - 'G1LocalParam({editionNumber}, {table2version}, ' - '{centre}, {indicatorOfParameter}),\n') - - def get_initial_id_nones(self): - sourceid = {'standard_name': None, 'long_name': None} - targetid = {} - return sourceid, targetid - - @property - def mapping_name(self): - """ - Property that specifies the name of the dictionary to contain the - encoding of this metarelate mapping translation. - - """ - return 'CF_CONSTRAINED_TO_GRIB1_LOCAL' - - @property - def source_scheme(self): - """ - Property that specifies the name of the scheme for the source - :class:`metarelate.Component` defining this metarelate mapping - translation. - - """ - return FORMAT_URIS['cff'] - - @property - def target_scheme(self): - """ - Property that specifies the name of the scheme for the target - :class:`metarelate.Component` defining this metarelate mapping - translation. 
- - """ - return FORMAT_URIS['gribm'] - - def valid_mapping(self, mapping): - """ - Determine whether the provided :class:`metarelate.Mapping` represents a - CF phenomenon and dimension coordinate to GRIB1 local parameter - translation. - - Args: - * mapping: - A :class:`metarelate.Mapping` instance. - - Returns: - Boolean. - - """ - return self.is_cf_constrained(mapping.source) and \ - self.is_grib1_local_param(mapping.target) - - -class GRIB2ParamCFMappings(Mappings): - """ - Represents a container for GRIB (edition 2) parameter to CF phenomenon - metarelate mapping translations. - - Encoding support is provided to generate the Python dictionary source - code representation of these mappings from GRIB2 edition, discipline, - parameter category and indicator of parameter to CF standard name, - long name and units. - - """ - def _key(self, line): - """Provides the sort key of the mappings order.""" - matchstr = ('^ G2Param\(([0-9]+), ([0-9]+), ([0-9]+), ' - '([0-9]+)\):.*') - match = re.match(matchstr, line) - if match is None: - raise ValueError('encoding not sortable') - return [int(i) for i in match.groups()] - - def msg_strings(self): - return (' G2Param({editionNumber}, {discipline}, ' - '{parameterCategory}, {parameterNumber})', - 'CFName({standard_name!r}, {long_name!r}, ' - '{units!r}),\n') - - def get_initial_id_nones(self): - sourceid = {} - targetid = {'standard_name': None, 'long_name': None} - return sourceid, targetid - - @property - def mapping_name(self): - """ - Property that specifies the name of the dictionary to contain the - encoding of this metarelate mapping translation. - - """ - return 'GRIB2_TO_CF' - - @property - def source_scheme(self): - """ - Property that specifies the name of the scheme for the source - :class:`metarelate.Component` defining this metarelate mapping - translation. 
- - """ - return FORMAT_URIS['gribm'] - - @property - def target_scheme(self): - """ - Property that specifies the name of the scheme for the target - :class:`metarelate.Component` defining this metarelate mapping - translation. - - """ - return FORMAT_URIS['cff'] - - def valid_mapping(self, mapping): - """ - Determine whether the provided :class:`metarelate.Mapping` represents a - GRIB2 parameter to CF phenomenon translation. - - Args: - * mapping: - A :class:`metarelate.Mapping` instance. - - Returns: - Boolean. - - """ - return self.is_grib2_param(mapping.source) and \ - self.is_cf(mapping.target) - - -class CFGRIB2ParamMappings(Mappings): - """ - Represents a container for CF phenomenon to GRIB (edition 2) parameter - metarelate mapping translations. - - Encoding support is provided to generate the Python dictionary source - code representation of these mappings from CF standard name, long name - and units to GRIB2 edition, discipline, parameter category and indicator - of parameter. - - """ - def _key(self, line): - """Provides the sort key of the mappings order.""" - return _cfn(line) - - def msg_strings(self): - return (' CFName({standard_name!r}, {long_name!r}, ' - '{units!r})', - 'G2Param({editionNumber}, {discipline}, ' - '{parameterCategory}, {parameterNumber}),\n') - - def get_initial_id_nones(self): - sourceid = {'standard_name': None, 'long_name': None} - targetid = {} - return sourceid, targetid - - @property - def mapping_name(self): - """ - Property that specifies the name of the dictionary to contain the - encoding of this metarelate mapping translation. - - """ - return 'CF_TO_GRIB2' - - @property - def source_scheme(self): - """ - Property that specifies the name of the scheme for the source - :class:`metarelate.Component` defining this metarelate mapping - translation. 
- - """ - return FORMAT_URIS['cff'] - - @property - def target_scheme(self): - """ - Property that specifies the name of the scheme for the target - :class:`metarelate.Component` defining this metarelate mapping - translation. - - """ - return FORMAT_URIS['gribm'] - - def valid_mapping(self, mapping): - """ - Determine whether the provided :class:`metarelate.Mapping` represents a - CF phenomenon to GRIB2 parameter translation. - - Args: - * mapping: - A :class:`metarelate.Mapping` instance. - - Returns: - Boolean. - - """ - return self.is_cf(mapping.source) and \ - self.is_grib2_param(mapping.target) From 8d6e65971ae78589f0939a949d3e0e2942e56b68 Mon Sep 17 00:00:00 2001 From: Ruth Comer <10599679+rcomer@users.noreply.github.com> Date: Wed, 19 Jan 2022 09:44:22 +0000 Subject: [PATCH 10/69] turn off doc type hints (#4510) --- docs/src/conf.py | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/src/conf.py b/docs/src/conf.py index 3362a9a0232..5a436f86cbe 100644 --- a/docs/src/conf.py +++ b/docs/src/conf.py @@ -209,6 +209,7 @@ def _dotv(version): # api generation configuration autodoc_member_order = "groupwise" autodoc_default_flags = ["show-inheritance"] +autodoc_typehints = "none" autosummary_generate = True autosummary_imported_members = True autopackage_name = ["iris"] From 30f751a954b889c0f870d8948b95dd7c9d1e6b41 Mon Sep 17 00:00:00 2001 From: Bill Little Date: Wed, 19 Jan 2022 12:41:59 +0000 Subject: [PATCH 11/69] docs: fix whatnew entries (#4511) --- docs/src/whatsnew/latest.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 5f1a7314dc0..fec37a63ab9 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -146,12 +146,12 @@ This document explains the changes made to Iris for this release #. `@wjbenfold`_ changed how a delayed unit conversion is performed on a cube so that a cube with lazy data awaiting a unit conversion can be pickled. 
- (:issue:`4354 `, :pull:`4377`) + (:issue:`4354`, :pull:`4377`) #. `@pp-mo`_ fixed a bug in netcdf loading, whereby *any* rotated latlon coordinate was mistakenly interpreted as a latitude, usually resulting in two 'latitude's instead of one latitude and one longitude. - (:issue:`4460 `, :pull:`4470`) + (:issue:`4460`, :pull:`4470`) 💣 Incompatible Changes @@ -307,4 +307,4 @@ This document explains the changes made to Iris for this release .. _UGRID: http://ugrid-conventions.github.io/ugrid-conventions/ .. _sort-all: https://github.com/aio-libs/sort-all .. _faster documentation building: https://docs.readthedocs.io/en/stable/guides/conda.html#making-builds-faster-with-mamba -.. _Metarelate: http://www.metarelate.net/ \ No newline at end of file +.. _Metarelate: http://www.metarelate.net/ From e3df0a0c0aea83122813f8bf13926d5f057631e7 Mon Sep 17 00:00:00 2001 From: Bill Little Date: Thu, 20 Jan 2022 12:37:09 +0000 Subject: [PATCH 12/69] manual update to py38-linux-64.lock (#4480) (#4491) * manual update to py38-linux-64.lock from #4480 * test robustness and disable numpy avx-512 (svml) * push numpy disable cpu down to nox --- .cirrus.yml | 4 -- lib/iris/tests/integration/test_trajectory.py | 16 +++++-- .../trajectory/tri_polar_latitude_slice.cml | 45 ++++++++++++++++--- .../cartography/test_rotate_grid_vectors.py | 6 +-- noxfile.py | 9 ++++ requirements/ci/nox.lock/py38-linux-64.lock | 22 ++++----- 6 files changed, 75 insertions(+), 27 deletions(-) diff --git a/.cirrus.yml b/.cirrus.yml index bdedbec43d2..92b8d788e6b 100644 --- a/.cirrus.yml +++ b/.cirrus.yml @@ -60,7 +60,6 @@ linux_task_template: &LINUX_TASK_TEMPLATE - echo "$(date +%Y).$(expr $(date +%U) / ${CACHE_PERIOD}):${CONDA_CACHE_BUILD}" - uname -r populate_script: - - export CONDA_OVERRIDE_LINUX="$(uname -r | cut -d'+' -f1)" - bash miniconda.sh -b -p ${HOME}/miniconda - conda config --set always_yes yes --set changeps1 no - conda config --set show_channel_urls True @@ -151,7 +150,6 @@ task: << : 
*IRIS_TEST_DATA_TEMPLATE << : *LINUX_TASK_TEMPLATE tests_script: - - export CONDA_OVERRIDE_LINUX="$(uname -r | cut -d'+' -f1)" - echo "[Resources]" > ${SITE_CFG} - echo "test_data_dir = ${IRIS_TEST_DATA_DIR}/test_data" >> ${SITE_CFG} - echo "doc_dir = ${CIRRUS_WORKING_DIR}/docs" >> ${SITE_CFG} @@ -172,7 +170,6 @@ task: << : *IRIS_TEST_DATA_TEMPLATE << : *LINUX_TASK_TEMPLATE tests_script: - - export CONDA_OVERRIDE_LINUX="$(uname -r | cut -d'+' -f1)" - echo "[Resources]" > ${SITE_CFG} - echo "test_data_dir = ${IRIS_TEST_DATA_DIR}/test_data" >> ${SITE_CFG} - echo "doc_dir = ${CIRRUS_WORKING_DIR}/docs" >> ${SITE_CFG} @@ -195,7 +192,6 @@ task: name: "${CIRRUS_OS}: py${PY_VER} link check" << : *LINUX_TASK_TEMPLATE tests_script: - - export CONDA_OVERRIDE_LINUX="$(uname -r | cut -d'+' -f1)" - mkdir -p ${MPL_RC_DIR} - echo "backend : agg" > ${MPL_RC_FILE} - echo "image.cmap : viridis" >> ${MPL_RC_FILE} diff --git a/lib/iris/tests/integration/test_trajectory.py b/lib/iris/tests/integration/test_trajectory.py index a7d6c89994a..a8e3acaa410 100644 --- a/lib/iris/tests/integration/test_trajectory.py +++ b/lib/iris/tests/integration/test_trajectory.py @@ -216,8 +216,10 @@ def setUp(self): cube.coord("depth").bounds = b32 self.cube = cube # define a latitude trajectory (put coords in a different order - # to the cube, just to be awkward) - latitudes = list(range(-90, 90, 2)) + # to the cube, just to be awkward) although avoid south pole + # singularity as a sample point and the issue of snapping to + # multi-equidistant closest points from within orca antarctic hole + latitudes = list(range(-80, 90, 2)) longitudes = [-90] * len(latitudes) self.sample_points = [ ("longitude", longitudes), @@ -226,7 +228,9 @@ def setUp(self): def test_tri_polar(self): # extract - sampled_cube = traj_interpolate(self.cube, self.sample_points) + sampled_cube = traj_interpolate( + self.cube, self.sample_points, method="nearest" + ) self.assertCML( sampled_cube, ("trajectory", 
"tri_polar_latitude_slice.cml") ) @@ -329,8 +333,12 @@ def test_hybrid_height(self): # Put a lazy array into the cube so we can test deferred loading. cube.data = as_lazy_data(cube.data) + # Use opionated grid-latitudes to avoid the issue of platform + # specific behaviour within SciPy cKDTree choosing a different + # equi-distant nearest neighbour point when there are multiple + # valid candidates. traj = ( - ("grid_latitude", [20.5, 21.5, 22.5, 23.5]), + ("grid_latitude", [20.4, 21.6, 22.6, 23.6]), ("grid_longitude", [31, 32, 33, 34]), ) xsec = traj_interpolate(cube, traj, method="nearest") diff --git a/lib/iris/tests/results/trajectory/tri_polar_latitude_slice.cml b/lib/iris/tests/results/trajectory/tri_polar_latitude_slice.cml index 701ca7beb7a..750d5974937 100644 --- a/lib/iris/tests/results/trajectory/tri_polar_latitude_slice.cml +++ b/lib/iris/tests/results/trajectory/tri_polar_latitude_slice.cml @@ -76,8 +76,25 @@ - + @@ -86,8 +103,26 @@ - + @@ -109,6 +144,6 @@ - + diff --git a/lib/iris/tests/unit/analysis/cartography/test_rotate_grid_vectors.py b/lib/iris/tests/unit/analysis/cartography/test_rotate_grid_vectors.py index e9294f27dc6..f5c882a9839 100644 --- a/lib/iris/tests/unit/analysis/cartography/test_rotate_grid_vectors.py +++ b/lib/iris/tests/unit/analysis/cartography/test_rotate_grid_vectors.py @@ -33,7 +33,7 @@ def _check_angles_calculation( u_cube.rename("dx") u_cube.data[...] = 0 v_cube = u_cube.copy() - v_cube.name("dy") + v_cube.rename("dy") # Define 6 different vectors, repeated in each data row. in_vu = np.array([(0, 1), (2, -1), (-1, -1), (-3, 1), (2, 0), (0, 0)]) @@ -71,7 +71,7 @@ def _check_angles_calculation( ang_diffs = out_angs - expect_angs # Fix for null vectors, and +/-360 differences. ang_diffs[np.abs(out_mags) < 0.001] = 0.0 - ang_diffs = ang_diffs % 360.0 + ang_diffs[np.isclose(np.abs(ang_diffs), 360.0)] = 0.0 # Check that any differences are very small. 
self.assertArrayAllClose(ang_diffs, 0.0) @@ -97,7 +97,7 @@ def test_angles_from_grid(self): u_cube.rename("dx") u_cube.data[...] = 1.0 v_cube = u_cube.copy() - v_cube.name("dy") + v_cube.rename("dy") v_cube.data[...] = 0.0 # Setup a fake angles result from the inner call to 'gridcell_angles'. diff --git a/noxfile.py b/noxfile.py index 9d8706a04b8..8b23948677a 100755 --- a/noxfile.py +++ b/noxfile.py @@ -28,6 +28,13 @@ #: Default cartopy cache directory. CARTOPY_CACHE_DIR = os.environ.get("HOME") / Path(".local/share/cartopy") +# https://github.com/numpy/numpy/pull/19478 +# https://github.com/matplotlib/matplotlib/pull/22099 +#: Common session environment variables. +ENV = dict( + NPY_DISABLE_CPU_FEATURES="AVX512F,AVX512CD,AVX512VL,AVX512BW,AVX512DQ,AVX512_SKX" +) + def session_lockfile(session: nox.sessions.Session) -> Path: """Return the path of the session lockfile.""" @@ -210,6 +217,7 @@ def tests(session: nox.sessions.Session): """ prepare_venv(session) session.install("--no-deps", "--editable", ".") + session.env.update(ENV) session.run( "python", "-m", @@ -232,6 +240,7 @@ def doctest(session: nox.sessions.Session): """ prepare_venv(session) session.install("--no-deps", "--editable", ".") + session.env.update(ENV) session.cd("docs") session.run( "make", diff --git a/requirements/ci/nox.lock/py38-linux-64.lock b/requirements/ci/nox.lock/py38-linux-64.lock index 97fdfc68b26..6128e30e692 100644 --- a/requirements/ci/nox.lock/py38-linux-64.lock +++ b/requirements/ci/nox.lock/py38-linux-64.lock @@ -62,7 +62,7 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h7f98852_10 https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2#b4a4381d54784606820704f7b5f05a15 https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.0-h7f98852_3.tar.bz2#52402c791f35e414e704b7a113f99605 https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.5-h516909a_1.tar.bz2#33f601066901f3e1a85af3522a8113f9 
-https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h516909a_0.tar.bz2#03a530e925414902547cf48da7756db8 +https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae https://conda.anaconda.org/conda-forge/linux-64/gettext-0.19.8.1-h73d1719_1008.tar.bz2#af49250eca8e139378f8ff0ae9e57251 https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-12_linux64_openblas.tar.bz2#4f93ba28c628a2c27cf39c055e6b219c https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h7f98852_6.tar.bz2#c7c03a2592cac92246a13a0732bd1573 @@ -110,19 +110,19 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.10-h7f98852_ https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.12-py_0.tar.bz2#2489a97287f90176ecdc3ca982b4b0a0 https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2#5f095bc6454094e96f146491fd03633b https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c -https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-2.0.9-pyhd8ed1ab_0.tar.bz2#a57a3f6f2b0a7400e340f850c405df19 +https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-2.0.10-pyhd8ed1ab_0.tar.bz2#ea77236c8031cfa821720b21b4cb0ceb https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.0.0-pyhd8ed1ab_0.tar.bz2#3a8fc8b627d5fb6af827e126a10a86c6 https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.4-pyh9f0ad1d_0.tar.bz2#c08b4c1326b880ed44f3ffb04803332f https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2#a50559fad0affdbb33729a68669ca1cb https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.4-pyhd8ed1ab_0.tar.bz2#7b50d840543d9cdae100e91582c33035 -https://conda.anaconda.org/conda-forge/noarch/filelock-3.4.2-pyhd8ed1ab_0.tar.bz2#d05900c9b0ef4c3d1cef2e8a5c49350e +https://conda.anaconda.org/conda-forge/noarch/filelock-3.4.2-pyhd8ed1ab_1.tar.bz2#d3f5797d3f9625c64860c93fc4359e64 
https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.13.1-hba837de_1005.tar.bz2#fd3611672eb91bc9d24fd6fb970037eb https://conda.anaconda.org/conda-forge/noarch/fsspec-2021.11.1-pyhd8ed1ab_0.tar.bz2#a510ec93fdb50775091d2afba98a8acb https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.18.5-hf529b03_2.tar.bz2#3cf866063f2803944ddaee8b1d6da531 https://conda.anaconda.org/conda-forge/noarch/idna-3.1-pyhd3deb0d_0.tar.bz2#9c9aea4b8391264477df484f798562d0 https://conda.anaconda.org/conda-forge/noarch/imagesize-1.3.0-pyhd8ed1ab_0.tar.bz2#be807e7606fff9436e5e700f6bffb7c6 https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 -https://conda.anaconda.org/conda-forge/linux-64/libcurl-7.80.0-h2574ce0_0.tar.bz2#5d0784b790350f7939bb5d3f2c32e700 +https://conda.anaconda.org/conda-forge/linux-64/libcurl-7.81.0-h2574ce0_0.tar.bz2#1f8655741d0269ca6756f131522da1e8 https://conda.anaconda.org/conda-forge/linux-64/libpq-13.5-hd57d9b9_1.tar.bz2#a0f425d61c7df890d6381ea352c3f1d7 https://conda.anaconda.org/conda-forge/noarch/locket-0.2.0-py_2.tar.bz2#709e8671651c7ec3d1ad07800339ff1d https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 @@ -148,14 +148,14 @@ https://conda.anaconda.org/conda-forge/noarch/babel-2.9.1-pyh44b312d_0.tar.bz2#7 https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-h6cf1ce9_1008.tar.bz2#a43fb47d15e116f8be4be7e6b17ab59f https://conda.anaconda.org/conda-forge/linux-64/certifi-2021.10.8-py38h578d9bd_1.tar.bz2#52a6cee65a5d10ed1c3f0af24fb48dd3 https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.0-py38h3931269_0.tar.bz2#9c491a90ae11d08ca97326a0ed876f3a -https://conda.anaconda.org/conda-forge/linux-64/curl-7.80.0-h2574ce0_0.tar.bz2#4d8fd67e5ab7e00fde8ad085464f43b7 +https://conda.anaconda.org/conda-forge/linux-64/curl-7.81.0-h2574ce0_0.tar.bz2#3a95d393b490f82aa406f1892fad84d9 
https://conda.anaconda.org/conda-forge/linux-64/docutils-0.16-py38h578d9bd_3.tar.bz2#a7866449fb9e5e4008a02df276549d34 https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.12.1-mpi_mpich_h9c45103_3.tar.bz2#4f1a733e563d27b98010b62888e149c9 https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.3.2-py38h1fd1430_1.tar.bz2#085365abfe53d5d13bb68b1dda0b439e https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h6ad9fb6_0.tar.bz2#45142dc44fcd04934f9ad68ce205e54d https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.0.1-py38h497a2fe_1.tar.bz2#1ef7b5f4826ca48a15e2cd98a5c3436d https://conda.anaconda.org/conda-forge/linux-64/mpi4py-3.1.3-py38he865349_0.tar.bz2#b1b3d6847a68251a1465206ab466b475 -https://conda.anaconda.org/conda-forge/linux-64/numpy-1.21.5-py38h87f13fb_0.tar.bz2#07fef7a6a3c56e0410d047c0aa62416e +https://conda.anaconda.org/conda-forge/linux-64/numpy-1.22.0-py38h6ae9a64_0.tar.bz2#0731ced21afb9adab62eb7aaf7abaf1e https://conda.anaconda.org/conda-forge/noarch/packaging-21.3-pyhd8ed1ab_0.tar.bz2#71f1ab2de48613876becddd496371c85 https://conda.anaconda.org/conda-forge/noarch/partd-1.2.0-pyhd8ed1ab_0.tar.bz2#0c32f563d7f22e3a34c95cad8cc95651 https://conda.anaconda.org/conda-forge/linux-64/pillow-6.2.1-py38hd70f55b_1.tar.bz2#80d719bee2b77a106b199150c0829107 @@ -167,7 +167,7 @@ https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0 https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-2.0.2-py38h497a2fe_1.tar.bz2#977d03222271270ea8fe35388bf13752 https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py38h497a2fe_3.tar.bz2#131de7d638aa59fb8afbce59f1a8aa98 https://conda.anaconda.org/conda-forge/linux-64/qt-5.12.9-hda022c4_4.tar.bz2#afebab1f5049d66baaaec67d9ce893f0 -https://conda.anaconda.org/conda-forge/linux-64/setuptools-60.2.0-py38h578d9bd_0.tar.bz2#cbaabcbc6fb460f1a515188e6d966fa2 +https://conda.anaconda.org/conda-forge/linux-64/setuptools-60.3.1-py38h578d9bd_0.tar.bz2#adb15768b02db40b8f7234bfe0a6e7fc 
https://conda.anaconda.org/conda-forge/linux-64/tornado-6.1-py38h497a2fe_2.tar.bz2#63b3b55c98b4239134e0be080f448944 https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-14.0.0-py38h497a2fe_0.tar.bz2#8da7787169411910df2a62dc8ef533e0 https://conda.anaconda.org/conda-forge/linux-64/virtualenv-20.4.7-py38h578d9bd_1.tar.bz2#37717ce393db8536ae2b613839af4274 @@ -184,7 +184,7 @@ https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py38h6c62de6_1006. https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.6.0-pyhd8ed1ab_0.tar.bz2#0941325bf48969e2b3b19d0951740950 https://conda.anaconda.org/conda-forge/linux-64/pandas-1.3.5-py38h43a58ef_0.tar.bz2#171cc96da3b1a0ebd4bf2b5586b7cda3 https://conda.anaconda.org/conda-forge/noarch/pip-21.3.1-pyhd8ed1ab_0.tar.bz2#e4fe2a9af78ff11f1aced7e62128c6a8 -https://conda.anaconda.org/conda-forge/noarch/pygments-2.11.1-pyhd8ed1ab_0.tar.bz2#6f857f10fe2960dce20d59d71a290d51 +https://conda.anaconda.org/conda-forge/noarch/pygments-2.11.2-pyhd8ed1ab_0.tar.bz2#caef60540e2239e27bf62569a5015e3b https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.3.0-py38hdd21e9b_0.tar.bz2#ceb8ec641cd5faa40b568f8ca008b6dc https://conda.anaconda.org/conda-forge/linux-64/pyqt-impl-5.12.3-py38h0ffb2e6_8.tar.bz2#acfc7625a212c27f7decdca86fdb2aba https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.2.post0-py38h6c62de6_1.tar.bz2#a350e3f4ca899e95122f66806e048858 @@ -205,15 +205,15 @@ https://conda.anaconda.org/conda-forge/linux-64/pyqtwebengine-5.12.1-py38h7400c1 https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.20.1-py38h2f98cf7_5.tar.bz2#8f989133575134016a0def90ae965e85 https://conda.anaconda.org/conda-forge/linux-64/esmf-8.2.0-mpi_mpich_h4975321_100.tar.bz2#56f5c650937b1667ad0a557a0dff3bc4 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h539f30e_1.tar.bz2#606777b4da3664d5c9415f5f165349fd -https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.52.5-hc3c00ef_0.tar.bz2#43694e152ee85559ddf64b1acb8801dd 
+https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.52.5-hc3c00ef_1.tar.bz2#9cd526f006d048eb912e09c5982393ea https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.0-pyhd8ed1ab_0.tar.bz2#9113b4e4fa2fa4a7f129c71a6f319475 https://conda.anaconda.org/conda-forge/linux-64/pre-commit-2.16.0-py38h578d9bd_0.tar.bz2#61e1e83f0eccef5e449db03c340ab6c2 https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.12.3-py38h578d9bd_8.tar.bz2#88368a5889f31dff922a2d57bbfc3f5b -https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.7-pyhd8ed1ab_0.tar.bz2#be75bab4820a56f77ba1a3fc9139c36a +https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.8-pyhd8ed1ab_1.tar.bz2#53f1387c68c21cecb386e2cde51b3f7c https://conda.anaconda.org/conda-forge/linux-64/esmpy-8.2.0-mpi_mpich_py38h9147699_101.tar.bz2#5a9de1dec507b6614150a77d1aabf257 https://conda.anaconda.org/conda-forge/linux-64/graphviz-2.50.0-h85b4f2f_1.tar.bz2#bc6418fd87ea67cf14417337ced3daa2 https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.5.1-py38h578d9bd_0.tar.bz2#0d78be9cf1c400ba8e3077cf060492f1 -https://conda.anaconda.org/conda-forge/noarch/requests-2.26.0-pyhd8ed1ab_1.tar.bz2#358581cc782802270d77c454c73a811a +https://conda.anaconda.org/conda-forge/noarch/requests-2.27.1-pyhd8ed1ab_0.tar.bz2#7c1c427246b057b8fa97200ecdb2ed62 https://conda.anaconda.org/conda-forge/noarch/sphinx-4.3.2-pyh6c4a22f_0.tar.bz2#e8ffaea0961c0d7a6767f2394042043d https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.4.0-pyhd8ed1ab_0.tar.bz2#80fd2cc25ad45911b4e42d5b91593e2f https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.10.1-pyhd8ed1ab_0.tar.bz2#4918585fe5e5341740f7e63c61743efb From 4ff55fa176fc67f2f819b3f2395eddb9d3706264 Mon Sep 17 00:00:00 2001 From: Will Benfold <69585101+wjbenfold@users.noreply.github.com> Date: Thu, 20 Jan 2022 13:14:15 +0000 Subject: [PATCH 13/69] GeogCS.as_cartopy_projection handling non-Earth planets (#4497) * GeogCS passes itself as a globe to ccrs.PlateCarree * Test that fails 
before change * Bug or feature? * Update as_projection test to use .srs * Fix failing test * Bug fix in test * Capitalise Earth to bump tests * Fix test_project to map between equal-sized planets * Fix test --- docs/src/whatsnew/latest.rst | 3 +++ lib/iris/coord_systems.py | 5 +++- lib/iris/tests/test_coordsystem.py | 23 +++++++++++++++++++ lib/iris/tests/test_mapping.py | 5 +++- .../unit/analysis/cartography/test_project.py | 2 +- 5 files changed, 35 insertions(+), 3 deletions(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index fec37a63ab9..2ada97334ec 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -153,6 +153,9 @@ This document explains the changes made to Iris for this release instead of one latitude and one longitude. (:issue:`4460`, :pull:`4470`) +#. `@wjbenfold`_ stopped :meth:`iris.coord_systems.GeogCS.as_cartopy_projection` + from assuming the globe to be the Earth (:issue:`4408`, :pull:`4497`) + 💣 Incompatible Changes ======================= diff --git a/lib/iris/coord_systems.py b/lib/iris/coord_systems.py index 300f49014a5..2f875bb1591 100644 --- a/lib/iris/coord_systems.py +++ b/lib/iris/coord_systems.py @@ -297,7 +297,10 @@ def as_cartopy_crs(self): return ccrs.Geodetic(self.as_cartopy_globe()) def as_cartopy_projection(self): - return ccrs.PlateCarree() + return ccrs.PlateCarree( + central_longitude=self.longitude_of_prime_meridian, + globe=self.as_cartopy_globe(), + ) def as_cartopy_globe(self): # Explicitly set `ellipse` to None as a workaround for diff --git a/lib/iris/tests/test_coordsystem.py b/lib/iris/tests/test_coordsystem.py index 2c0569f0d66..42291259694 100644 --- a/lib/iris/tests/test_coordsystem.py +++ b/lib/iris/tests/test_coordsystem.py @@ -188,6 +188,29 @@ def test_as_cartopy_globe(self): self.assertEqual(res, expected) +class Test_GeogCS_as_cartopy_projection(tests.IrisTest): + def test_as_cartopy_projection(self): + geogcs_args = { + "semi_major_axis": 6543210, + 
"semi_minor_axis": 6500000, + "longitude_of_prime_meridian": 30, + } + cs = GeogCS(**geogcs_args) + res = cs.as_cartopy_projection() + + globe = ccrs.Globe( + semimajor_axis=geogcs_args["semi_major_axis"], + semiminor_axis=geogcs_args["semi_minor_axis"], + ellipse=None, + ) + expected = ccrs.PlateCarree( + globe=globe, + central_longitude=geogcs_args["longitude_of_prime_meridian"], + ) + + self.assertEqual(res, expected) + + class Test_GeogCS_as_cartopy_crs(tests.IrisTest): def test_as_cartopy_crs(self): cs = GeogCS(6543210, 6500000) diff --git a/lib/iris/tests/test_mapping.py b/lib/iris/tests/test_mapping.py index 06bedd497b5..a71385b5bcd 100644 --- a/lib/iris/tests/test_mapping.py +++ b/lib/iris/tests/test_mapping.py @@ -211,7 +211,10 @@ def test_grid(self): def test_default_projection_and_extent(self): self.assertEqual( - iplt.default_projection(self.cube), ccrs.PlateCarree() + iplt.default_projection(self.cube), + ccrs.PlateCarree( + globe=self.cube.coord_system("CoordSystem").as_cartopy_globe() + ), ) np_testing.assert_array_almost_equal( iplt.default_projection_extent(self.cube), diff --git a/lib/iris/tests/unit/analysis/cartography/test_project.py b/lib/iris/tests/unit/analysis/cartography/test_project.py index 4dfa1a4a2ec..8649cc55ea0 100644 --- a/lib/iris/tests/unit/analysis/cartography/test_project.py +++ b/lib/iris/tests/unit/analysis/cartography/test_project.py @@ -52,7 +52,7 @@ def setUp(self): 1, ) - self.tcs = iris.coord_systems.GeogCS(6000000) + self.tcs = iris.coord_systems.GeogCS(6371229) def test_is_iris_coord_system(self): res, _ = project(self.cube, self.tcs) From 81bcc518fe91b7d3d92b14d70b7ec634ab35ed88 Mon Sep 17 00:00:00 2001 From: Will Benfold <69585101+wjbenfold@users.noreply.github.com> Date: Mon, 24 Jan 2022 10:21:02 +0000 Subject: [PATCH 14/69] Clarify that you can't append with the netcdf saver (#4516) --- docs/src/userguide/saving_iris_cubes.rst | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git 
a/docs/src/userguide/saving_iris_cubes.rst b/docs/src/userguide/saving_iris_cubes.rst index c801a1fbea8..2ffc8c47d35 100644 --- a/docs/src/userguide/saving_iris_cubes.rst +++ b/docs/src/userguide/saving_iris_cubes.rst @@ -51,6 +51,7 @@ The :py:func:`iris.save` function passes all other keywords through to the saver >>> # Save a cube list to a PP file, appending to the contents of the file >>> # if it already exists >>> iris.save(cubes, "myfile.pp", append=True) + >>> # Save a cube to netCDF, defaults to NETCDF4 file format >>> iris.save(cubes[0], "myfile.nc") >>> # Save a cube list to netCDF, using the NETCDF3_CLASSIC storage option @@ -73,6 +74,12 @@ See for more details on supported arguments for the individual savers. +.. note:: + + The existence of a keyword argument for one saver does not guarantee the + same works for all savers. For example, it isn't possible to pass an + ``append`` keyword argument to the netCDF saver (see :ref:`netcdf_save`). + Customising the Save Process ---------------------------- @@ -102,6 +109,7 @@ Similarly a PP field may need to be written out with a specific value for LBEXP. yield field iris.fileformats.pp.save_fields(tweaked_fields(cubes[0]), '/tmp/app.pp') +.. 
_netcdf_save: NetCDF ^^^^^^ From 79d33b69cf612dc93dee73e1e221e9e25eddc8e4 Mon Sep 17 00:00:00 2001 From: Will Benfold <69585101+wjbenfold@users.noreply.github.com> Date: Mon, 24 Jan 2022 17:06:25 +0000 Subject: [PATCH 15/69] Deprecate iris.util.approx_equal (#4514) * Deprecate iris.util.approx_equal * Update whatsnew with PR number --- docs/src/whatsnew/latest.rst | 4 ++++ lib/iris/util.py | 18 ++++++++++++++++-- 2 files changed, 20 insertions(+), 2 deletions(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 2ada97334ec..970ab979e8b 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -181,6 +181,10 @@ This document explains the changes made to Iris for this release :func:`iris.experimental.equalise_cubes.equalise_attributes` function raised an exception. (:issue:`3528`, :pull:`4496`) +#. `@wjbenfold`_ deprecated :func:`iris.util.approx_equal` in preference for + :func:`math.isclose`. The :func:`~iris.util.approx_equal` function will be + removed in a future release of Iris. (:pull:`4514`) + 🔗 Dependencies =============== diff --git a/lib/iris/util.py b/lib/iris/util.py index a9aaf9f9305..d08c29d4f8a 100644 --- a/lib/iris/util.py +++ b/lib/iris/util.py @@ -395,10 +395,24 @@ def normalise_array(array): def approx_equal(a, b, max_absolute_error=1e-10, max_relative_error=1e-10): """ - Returns whether two numbers are almost equal, allowing for the - finite precision of floating point numbers. + Returns whether two numbers are almost equal, allowing for the finite + precision of floating point numbers. + + .. deprecated:: 3.2.0 + + Instead use :func:`math.isclose`. For example, rather than calling + ``approx_equal(a, b, max_abs, max_rel)`` replace with ``math.isclose(a, + b, max_rel, max_abs)``. Note that :func:`~math.isclose` will return True + if the actual error equals the maximum, whereas :func:`util.approx_equal` + will return False. 
""" + wmsg = ( + "iris.util.approx_equal has been deprecated and will be removed, " + "please use math.isclose instead." + ) + warn_deprecated(wmsg) + # Deal with numbers close to zero if abs(a - b) < max_absolute_error: return True From ad9e87d6bb2208a2c63024b3221470b157f5b935 Mon Sep 17 00:00:00 2001 From: Will Benfold <69585101+wjbenfold@users.noreply.github.com> Date: Tue, 25 Jan 2022 11:44:29 +0000 Subject: [PATCH 16/69] Deprecate experimental.raster (#4525) --- docs/src/whatsnew/latest.rst | 4 ++++ lib/iris/experimental/raster.py | 9 +++++++++ 2 files changed, 13 insertions(+) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 970ab979e8b..d0364f5edfb 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -185,6 +185,10 @@ This document explains the changes made to Iris for this release :func:`math.isclose`. The :func:`~iris.util.approx_equal` function will be removed in a future release of Iris. (:pull:`4514`) +#. `@wjbenfold`_ deprecated :mod:`iris.experimental.raster` as it is not + believed to still be in use. The deprecation warnings invite users to contact + the Iris Developers if this isn't the case. (:pull:`4525`) + 🔗 Dependencies =============== diff --git a/lib/iris/experimental/raster.py b/lib/iris/experimental/raster.py index 83fd761973d..f16dcbef5a4 100644 --- a/lib/iris/experimental/raster.py +++ b/lib/iris/experimental/raster.py @@ -20,8 +20,17 @@ from osgeo import gdal, osr import iris +from iris._deprecation import warn_deprecated import iris.coord_systems +wmsg = ( + "iris.experimental.raster has been deprecated and will be removed in a " + "future release. If you make use of this functionality, please contact " + "the Iris Developers to discuss how to retain it (which may involve " + "reversing the deprecation)." 
+) +warn_deprecated(wmsg) + _GDAL_DATATYPES = { "i2": gdal.GDT_Int16, "i4": gdal.GDT_Int32, From 280740e0bbcd1499a065fbea018d6c486ca67544 Mon Sep 17 00:00:00 2001 From: Simon Peatman Date: Tue, 25 Jan 2022 12:17:04 +0000 Subject: [PATCH 17/69] Support pathlib.Path objects for file names (#3411) (#3917) * Support pathlib.Path objects for file names (#3411) * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * Suggested revisions (#3917) for pathlib.Path support (#3411) * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * Remove test paths unhandled by pathlib.Path * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * Remove pathlib test for PP files as not yet implemented * Don't test PP saver with Path (not yet implemented) * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * Code review for adoption of pathlib.Path (#3917) * Add unit tests for save and _generate_cubes with pathlib * Fix whatsnew link * Docstring fixes * Add integration tests for load_raw * Docstring bug fix * Update lib/iris/tests/test_io_init.py Use walrus Co-authored-by: Bill Little Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Will Benfold Co-authored-by: Bill Little Co-authored-by: Will Benfold <69585101+wjbenfold@users.noreply.github.com> Co-authored-by: Bill Little --- docs/src/whatsnew/latest.rst | 7 +++ lib/iris/__init__.py | 15 ++++--- lib/iris/io/__init__.py | 16 +++++-- lib/iris/tests/test_file_save.py | 18 ++++++++ lib/iris/tests/test_io_init.py | 26 +++++++++++ lib/iris/tests/test_load.py | 37 +++++++++++++++ .../tests/unit/io/test__generate_cubes.py | 37 +++++++++++++++ lib/iris/tests/unit/io/test_save.py | 45 +++++++++++++++++++ 8 files changed, 190 insertions(+), 11 deletions(-) create mode 100755 
lib/iris/tests/unit/io/test__generate_cubes.py create mode 100755 lib/iris/tests/unit/io/test_save.py diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index d0364f5edfb..53e7bb15836 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -94,6 +94,12 @@ This document explains the changes made to Iris for this release #. `@pp-mo`_ updated to the latest CF Standard Names Table ``v78`` (21 Sept 2021). (:issue:`4479`, :pull:`4483`) +#. `@SimonPeatman`_ added support for filenames in the form of a :class:`~pathlib.PurePath` + in :func:`~iris.load`, :func:`~iris.load_cube`, :func:`~iris.load_cubes`, + :func:`~iris.load_raw` and :func:`~iris.save` (:issue:`3411`, :pull:`3917`). + Support for :class:`~pathlib.PurePath` is yet to be implemented across the rest + of Iris (:issue:`4523`). + #. `@pp-mo`_ removed broken tooling for deriving Iris metadata translations from `Metarelate`_. From now we intend to manage phenonemon translation in Iris itself. (:pull:`4484`) @@ -308,6 +314,7 @@ This document explains the changes made to Iris for this release .. _@bsherratt: https://github.com/bsherratt .. _@larsbarring: https://github.com/larsbarring .. _@pdearnshaw: https://github.com/pdearnshaw +.. _@SimonPeatman: https://github.com/SimonPeatman .. _@tinyendian: https://github.com/tinyendian .. comment diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py index c29772aaac7..26f03c05663 100644 --- a/lib/iris/__init__.py +++ b/lib/iris/__init__.py @@ -36,8 +36,8 @@ All the load functions share very similar arguments: * uris: - Either a single filename/URI expressed as a string, or an - iterable of filenames/URIs. + Either a single filename/URI expressed as a string or + :class:`pathlib.PurePath`, or an iterable of filenames/URIs. Filenames can contain `~` or `~user` abbreviations, and/or Unix shell-style wildcards (e.g. `*` and `?`). 
See the @@ -89,6 +89,7 @@ def callback(cube, field, filename): import glob import itertools import os.path +import pathlib import threading import iris._constraints @@ -241,7 +242,7 @@ def context(self, **kwargs): def _generate_cubes(uris, callback, constraints): """Returns a generator of cubes given the URIs and a callback.""" - if isinstance(uris, str): + if isinstance(uris, (str, pathlib.PurePath)): uris = [uris] # Group collections of uris by their iris handler @@ -285,7 +286,7 @@ def load(uris, constraints=None, callback=None): Args: * uris: - One or more filenames/URIs. + One or more filenames/URIs, as a string or :class:`pathlib.PurePath`. Kwargs: @@ -313,7 +314,7 @@ def load_cube(uris, constraint=None, callback=None): Args: * uris: - One or more filenames/URIs. + One or more filenames/URIs, as a string or :class:`pathlib.PurePath`. Kwargs: @@ -352,7 +353,7 @@ def load_cubes(uris, constraints=None, callback=None): Args: * uris: - One or more filenames/URIs. + One or more filenames/URIs, as a string or :class:`pathlib.PurePath`. Kwargs: @@ -397,7 +398,7 @@ def load_raw(uris, constraints=None, callback=None): Args: * uris: - One or more filenames/URIs. + One or more filenames/URIs, as a string or :class:`pathlib.PurePath`. Kwargs: diff --git a/lib/iris/io/__init__.py b/lib/iris/io/__init__.py index 64501afd1eb..034fa4baabe 100644 --- a/lib/iris/io/__init__.py +++ b/lib/iris/io/__init__.py @@ -12,6 +12,7 @@ from collections import OrderedDict import glob import os.path +import pathlib import re import iris.exceptions @@ -85,8 +86,9 @@ def decode_uri(uri, default="file"): r""" Decodes a single URI into scheme and scheme-specific parts. - In addition to well-formed URIs, it also supports bare file paths. - Both Windows and UNIX style paths are accepted. + In addition to well-formed URIs, it also supports bare file paths as strings + or :class:`pathlib.PurePath`. Both Windows and UNIX style paths are + accepted. .. 
testsetup:: @@ -113,6 +115,8 @@ def decode_uri(uri, default="file"): ('file', 'dataZoo/...') """ + if isinstance(uri, pathlib.PurePath): + uri = str(uri) # make sure scheme has at least 2 letters to avoid windows drives # put - last in the brackets so it refers to the character, not a range # reference on valid schemes: http://tools.ietf.org/html/std66#section-3.1 @@ -312,7 +316,8 @@ def find_saver(filespec): Args: - * filespec - A string such as "my_file.pp" or "PP". + * filespec + A string such as "my_file.pp" or "PP". Returns: A save function or None. @@ -359,7 +364,8 @@ def save(source, target, saver=None, **kwargs): * target: A filename (or writeable, depending on file format). When given a filename or file, Iris can determine the - file format. + file format. Filename can be given as a string or + :class:`pathlib.PurePath`. Kwargs: @@ -414,6 +420,8 @@ def save(source, target, saver=None, **kwargs): from iris.cube import Cube, CubeList # Determine format from filename + if isinstance(target, pathlib.PurePath): + target = str(target) if isinstance(target, str) and saver is None: saver = find_saver(target) elif hasattr(target, "name") and saver is None: diff --git a/lib/iris/tests/test_file_save.py b/lib/iris/tests/test_file_save.py index 2b5619c6568..3b751cfcbed 100644 --- a/lib/iris/tests/test_file_save.py +++ b/lib/iris/tests/test_file_save.py @@ -13,6 +13,7 @@ from io import StringIO import os +import pathlib import unittest import iris @@ -97,6 +98,23 @@ def test_filename(self): CHKSUM_ERR.format(self.ext), ) + def test_filename_path_object(self): + # Save using iris.save and pp.save, passing filename for + # iris.save as pathlib.Path + save_by_filename( + self.temp_filename1, + pathlib.Path(self.temp_filename2), + self.cube1, + pp.save, + ) + + # Compare files + self.assertEqual( + self.file_checksum(self.temp_filename2), + self.file_checksum(self.temp_filename1), + CHKSUM_ERR.format(self.ext), + ) + def test_filehandle(self): # Save using iris.save and 
pp.save save_by_filehandle( diff --git a/lib/iris/tests/test_io_init.py b/lib/iris/tests/test_io_init.py index e88eaabaed0..c8b88754b53 100644 --- a/lib/iris/tests/test_io_init.py +++ b/lib/iris/tests/test_io_init.py @@ -12,6 +12,7 @@ import iris.tests as tests # isort:skip from io import BytesIO +import pathlib import iris.fileformats as iff import iris.io @@ -44,6 +45,31 @@ def test_decode_uri(self): for uri, pair in tests.items(): self.assertEqual(pair, iris.io.decode_uri(uri)) + def test_decode_uri_path_object(self): + tests = { + (uri := "/data/local/someDir/PP/COLPEX/COLPEX_16a_pj001.pp"): ( + "file", + uri, + ), + (uri := r"C:\data\local\someDir\PP\COLPEX\COLPEX_16a_pj001.pp"): ( + "file", + uri, + ), + ( + uri := "file:///data/local/someDir/PP/COLPEX/COLPEX_16a_pj001.pp" + ): (uri[:4], uri[7:]), + (uri := "http://www.somehost.com:8080/resource/thing.grib"): ( + uri[:4], + uri[6:], + ), + (uri := "/data/local/someDir/2013-11-25T13:49:17.632797"): ( + "file", + uri, + ), + } + for uri, expected in tests.items(): + self.assertEqual(expected, iris.io.decode_uri(pathlib.Path(uri))) + class TestFileFormatPicker(tests.IrisTest): def test_known_formats(self): diff --git a/lib/iris/tests/test_load.py b/lib/iris/tests/test_load.py index 0674768a549..86ff2f1eceb 100644 --- a/lib/iris/tests/test_load.py +++ b/lib/iris/tests/test_load.py @@ -11,6 +11,8 @@ # import iris tests first so that some things can be initialised before importing anything else import iris.tests as tests # isort:skip +import pathlib + import iris import iris.io @@ -22,6 +24,13 @@ def test_normal(self): cubes = iris.load(paths) self.assertEqual(len(cubes), 1) + def test_path_object(self): + paths = ( + pathlib.Path(tests.get_data_path(["PP", "aPPglob1", "global.pp"])), + ) + cubes = iris.load(paths) + self.assertEqual(len(cubes), 1) + def test_nonexist(self): paths = ( tests.get_data_path(["PP", "aPPglob1", "global.pp"]), @@ -71,6 +80,12 @@ def test_normal(self): paths = (tests.get_data_path(["PP", 
"aPPglob1", "global.pp"]),) _ = iris.load_cube(paths) + def test_path_object(self): + paths = ( + pathlib.Path(tests.get_data_path(["PP", "aPPglob1", "global.pp"])), + ) + _ = iris.load_cube(paths) + def test_not_enough(self): paths = (tests.get_data_path(["PP", "aPPglob1", "global.pp"]),) with self.assertRaises(iris.exceptions.ConstraintMismatchError): @@ -92,6 +107,13 @@ def test_normal(self): cubes = iris.load_cubes(paths) self.assertEqual(len(cubes), 1) + def test_path_object(self): + paths = ( + pathlib.Path(tests.get_data_path(["PP", "aPPglob1", "global.pp"])), + ) + cubes = iris.load_cubes(paths) + self.assertEqual(len(cubes), 1) + def test_not_enough(self): paths = (tests.get_data_path(["PP", "aPPglob1", "global.pp"]),) with self.assertRaises(iris.exceptions.ConstraintMismatchError): @@ -111,6 +133,21 @@ def test_too_many(self): iris.load_cube(paths) +@tests.skip_data +class TestLoadRaw(tests.IrisTest): + def test_normal(self): + paths = (tests.get_data_path(["PP", "aPPglob1", "global.pp"]),) + cubes = iris.load_raw(paths) + self.assertEqual(len(cubes), 1) + + def test_path_object(self): + paths = ( + pathlib.Path(tests.get_data_path(["PP", "aPPglob1", "global.pp"])), + ) + cubes = iris.load_raw(paths) + self.assertEqual(len(cubes), 1) + + class TestOpenDAP(tests.IrisTest): def test_load(self): # Check that calling iris.load_* with a http URI triggers a call to diff --git a/lib/iris/tests/unit/io/test__generate_cubes.py b/lib/iris/tests/unit/io/test__generate_cubes.py new file mode 100755 index 00000000000..3a896a111c4 --- /dev/null +++ b/lib/iris/tests/unit/io/test__generate_cubes.py @@ -0,0 +1,37 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +"""Unit tests for the `iris.io._generate_cubes` function.""" + +# Import iris.tests first so that some things can be initialised before +# importing anything else. 
+import iris.tests as tests # isort:skip + +from pathlib import Path + +import iris + + +class TestGenerateCubes(tests.IrisTest): + def test_pathlib_paths(self): + test_variants = [ + ("string", "string"), + (["string"], "string"), + (Path("string"), Path("string")), + ] + + decode_uri_mock = self.patch( + "iris.iris.io.decode_uri", return_value=("file", None) + ) + self.patch("iris.iris.io.load_files") + + for gc_arg, du_arg in test_variants: + decode_uri_mock.reset_mock() + list(iris._generate_cubes(gc_arg, None, None)) + decode_uri_mock.assert_called_with(du_arg) + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/unit/io/test_save.py b/lib/iris/tests/unit/io/test_save.py new file mode 100755 index 00000000000..b92e26f2d12 --- /dev/null +++ b/lib/iris/tests/unit/io/test_save.py @@ -0,0 +1,45 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +"""Unit tests for the `iris.io.save` function.""" + +# Import iris.tests first so that some things can be initialised before +# importing anything else. 
+import iris.tests as tests # isort:skip + +from pathlib import Path +from unittest import mock + +import iris +from iris.cube import Cube + + +class TestSave(tests.IrisTest): + def test_pathlib_save(self): + file_mock = mock.Mock() + # Have to configure after creation because "name" is special + file_mock.configure_mock(name="string") + + find_saver_mock = self.patch( + "iris.io.find_saver", return_value=(lambda *args, **kwargs: None) + ) + + test_variants = [ + ("string", "string"), + (Path("string/string"), "string/string"), + (file_mock, "string"), + ] + + for target, fs_val in test_variants: + try: + iris.save(Cube([]), target) + except ValueError: + print("ValueError") + pass + find_saver_mock.assert_called_with(fs_val) + + +if __name__ == "__main__": + tests.main() From 851c12d12946e68b292b81b9c91d1d997444568d Mon Sep 17 00:00:00 2001 From: Will Benfold <69585101+wjbenfold@users.noreply.github.com> Date: Tue, 25 Jan 2022 14:14:00 +0000 Subject: [PATCH 18/69] Deprecate abf and dot (#4515) --- docs/src/whatsnew/latest.rst | 5 +++++ lib/iris/fileformats/__init__.py | 13 ++++++++++--- lib/iris/fileformats/abf.py | 9 +++++++++ lib/iris/fileformats/dot.py | 9 +++++++++ 4 files changed, 33 insertions(+), 3 deletions(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 53e7bb15836..f4d6d6ab422 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -195,6 +195,11 @@ This document explains the changes made to Iris for this release believed to still be in use. The deprecation warnings invite users to contact the Iris Developers if this isn't the case. (:pull:`4525`) +#. `@wjbenfold`_ deprecated :mod:`iris.fileformats.abf` and + :mod:`iris.fileformats.dot` as they are not believed to still be in use. The + deprecation warnings invite users to contact the Iris Developers if this + isn't the case. 
(:pull:`4515`) + 🔗 Dependencies =============== diff --git a/lib/iris/fileformats/__init__.py b/lib/iris/fileformats/__init__.py index f2b0cfc0955..96a848deb04 100644 --- a/lib/iris/fileformats/__init__.py +++ b/lib/iris/fileformats/__init__.py @@ -17,7 +17,7 @@ UriProtocol, ) -from . import abf, name, netcdf, nimrod, pp, um +from . import name, netcdf, nimrod, pp, um __all__ = ["FORMAT_AGENT"] @@ -224,16 +224,23 @@ def _load_grib(*args, **kwargs): # # ABF/ABL +# TODO: now deprecated, remove later # +def load_cubes_abf_abl(*args, **kwargs): + from . import abf + + return abf.load_cubes(*args, **kwargs) + + FORMAT_AGENT.add_spec( FormatSpecification( - "ABF", FileExtension(), ".abf", abf.load_cubes, priority=3 + "ABF", FileExtension(), ".abf", load_cubes_abf_abl, priority=3 ) ) FORMAT_AGENT.add_spec( FormatSpecification( - "ABL", FileExtension(), ".abl", abf.load_cubes, priority=3 + "ABL", FileExtension(), ".abl", load_cubes_abf_abl, priority=3 ) ) diff --git a/lib/iris/fileformats/abf.py b/lib/iris/fileformats/abf.py index 678d9b04cf5..5c70c5acf27 100644 --- a/lib/iris/fileformats/abf.py +++ b/lib/iris/fileformats/abf.py @@ -23,11 +23,20 @@ import numpy.ma as ma import iris +from iris._deprecation import warn_deprecated from iris.coord_systems import GeogCS from iris.coords import AuxCoord, DimCoord import iris.fileformats import iris.io.format_picker +wmsg = ( + "iris.fileformats.abf has been deprecated and will be removed in a " + "future release. If you make use of this functionality, please contact " + "the Iris Developers to discuss how to retain it (which may involve " + "reversing the deprecation)." 
+) +warn_deprecated(wmsg) + X_SIZE = 4320 Y_SIZE = 2160 diff --git a/lib/iris/fileformats/dot.py b/lib/iris/fileformats/dot.py index cc857c7f6ba..2fb628bebfd 100644 --- a/lib/iris/fileformats/dot.py +++ b/lib/iris/fileformats/dot.py @@ -12,8 +12,17 @@ import subprocess import iris +from iris._deprecation import warn_deprecated import iris.util +wmsg = ( + "iris.fileformats.dot has been deprecated and will be removed in a " + "future release. If you make use of this functionality, please contact " + "the Iris Developers to discuss how to retain it (which may involve " + "reversing the deprecation)." +) +warn_deprecated(wmsg) + _GRAPH_INDENT = " " * 4 _SUBGRAPH_INDENT = " " * 8 From 54d26d86a54a54a2f63d70ce36c3d6b3a294b966 Mon Sep 17 00:00:00 2001 From: Will Benfold <69585101+wjbenfold@users.noreply.github.com> Date: Tue, 25 Jan 2022 15:18:47 +0000 Subject: [PATCH 19/69] Removed iris.util.as_compatible_shape and now unused imports (#4513) * Removed iris.util.as_compatible_shape and now unused imports * Removed tests * Update whatsnew with PR num * Remove unused cml * Restore import --- docs/src/whatsnew/latest.rst | 6 + .../util/as_compatible_shape_collapsed.cml | 144 ------------------ lib/iris/tests/test_util.py | 98 ------------ lib/iris/util.py | 128 ---------------- 4 files changed, 6 insertions(+), 370 deletions(-) delete mode 100644 lib/iris/tests/results/util/as_compatible_shape_collapsed.cml diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index f4d6d6ab422..861731d2449 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -200,6 +200,12 @@ This document explains the changes made to Iris for this release deprecation warnings invite users to contact the Iris Developers if this isn't the case. (:pull:`4515`) +#. `@wjbenfold`_ removed the :func:`iris.util.as_compatible_shape` function, + which was deprecated in ``v3.0``. Instead use + :class:`iris.common.resolve.Resolve`. 
For example, rather than calling + ``as_compatible_shape(src_cube, target_cube)`` replace with + ``Resolve(src_cube, target_cube)(target_cube.core_data())``. (:pull:`4513`) + 🔗 Dependencies =============== diff --git a/lib/iris/tests/results/util/as_compatible_shape_collapsed.cml b/lib/iris/tests/results/util/as_compatible_shape_collapsed.cml deleted file mode 100644 index 07eeb531574..00000000000 --- a/lib/iris/tests/results/util/as_compatible_shape_collapsed.cml +++ /dev/null @@ -1,144 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/test_util.py b/lib/iris/tests/test_util.py index cf921ae2104..ec7f8d10238 100644 --- a/lib/iris/tests/test_util.py +++ b/lib/iris/tests/test_util.py @@ -276,103 +276,5 @@ def test_output_file(self): self.assertFilesEqual(filename, "incompatible_cubes.str.txt") -@tests.skip_data -class TestAsCompatibleShape(tests.IrisTest): - def test_slice(self): - cube = tests.stock.realistic_4d() - sliced = cube[1, :, 2, :-2] - expected = cube[1:2, :, 2:3, :-2] - res = iris.util.as_compatible_shape(sliced, cube) - self.assertEqual(res, expected) - - def test_transpose(self): - cube = tests.stock.realistic_4d() - transposed = cube.copy() - transposed.transpose() - expected = cube - res = iris.util.as_compatible_shape(transposed, cube) - self.assertEqual(res, expected) - - def test_slice_and_transpose(self): - cube = tests.stock.realistic_4d() - sliced_and_transposed = cube[1, :, 2, :-2] - sliced_and_transposed.transpose() - expected = cube[1:2, :, 2:3, :-2] - res = iris.util.as_compatible_shape(sliced_and_transposed, cube) - self.assertEqual(res, expected) - - def test_collapsed(self): - cube = tests.stock.realistic_4d() - collapsed = cube.collapsed("model_level_number", iris.analysis.MEAN) - expected_shape = list(cube.shape) - expected_shape[1] = 1 - expected_data = collapsed.data.reshape(expected_shape) - res = 
iris.util.as_compatible_shape(collapsed, cube) - self.assertCML( - res, ("util", "as_compatible_shape_collapsed.cml"), checksum=False - ) - self.assertMaskedArrayEqual(expected_data, res.data) - - def test_reduce_dimensionality(self): - # Test that as_compatible_shape() can demote - # length one dimensions to scalars. - cube = tests.stock.realistic_4d() - src = cube[:, 2:3] - expected = reduced = cube[:, 2] - res = iris.util.as_compatible_shape(src, reduced) - self.assertEqual(res, expected) - - def test_anonymous_dims(self): - cube = tests.stock.realistic_4d() - # Move all coords from dim_coords to aux_coords. - for coord in cube.dim_coords: - dim = cube.coord_dims(coord) - cube.remove_coord(coord) - cube.add_aux_coord(coord, dim) - - sliced = cube[1, :, 2, :-2] - expected = cube[1:2, :, 2:3, :-2] - res = iris.util.as_compatible_shape(sliced, cube) - self.assertEqual(res, expected) - - def test_scalar_auxcoord(self): - def dim_to_aux(cube, coord_name): - """Convert coordinate on cube from DimCoord to AuxCoord.""" - coord = cube.coord(coord_name) - coord = iris.coords.AuxCoord.from_coord(coord) - cube.replace_coord(coord) - - cube = tests.stock.realistic_4d() - src = cube[:, :, 3] - dim_to_aux(src, "grid_latitude") - expected = cube[:, :, 3:4] - dim_to_aux(expected, "grid_latitude") - res = iris.util.as_compatible_shape(src, cube) - self.assertEqual(res, expected) - - def test_2d_auxcoord_transpose(self): - dim_coord1 = iris.coords.DimCoord(range(3), long_name="first_dim") - dim_coord2 = iris.coords.DimCoord(range(4), long_name="second_dim") - aux_coord_2d = iris.coords.AuxCoord( - np.arange(12).reshape(3, 4), long_name="spanning" - ) - aux_coord_2d_T = iris.coords.AuxCoord( - np.arange(12).reshape(3, 4).T, long_name="spanning" - ) - src = iris.cube.Cube( - np.ones((3, 4)), - dim_coords_and_dims=[(dim_coord1, 0), (dim_coord2, 1)], - aux_coords_and_dims=[(aux_coord_2d, (0, 1))], - ) - target = iris.cube.Cube( - np.ones((4, 3)), - dim_coords_and_dims=[(dim_coord1, 
1), (dim_coord2, 0)], - aux_coords_and_dims=[(aux_coord_2d_T, (0, 1))], - ) - - res = iris.util.as_compatible_shape(src, target) - self.assertEqual(res[0], target[0]) - - if __name__ == "__main__": tests.main() diff --git a/lib/iris/util.py b/lib/iris/util.py index d08c29d4f8a..53cd78724e2 100644 --- a/lib/iris/util.py +++ b/lib/iris/util.py @@ -10,7 +10,6 @@ from abc import ABCMeta, abstractmethod from collections.abc import Hashable, Iterable -import copy import functools import inspect import os @@ -1167,133 +1166,6 @@ def new_axis(src_cube, scalar_coord=None): return new_cube -def as_compatible_shape(src_cube, target_cube): - """ - Return a cube with added length one dimensions to match the dimensionality - and dimension ordering of `target_cube`. - - This function can be used to add the dimensions that have been collapsed, - aggregated or sliced out, promoting scalar coordinates to length one - dimension coordinates where necessary. It operates by matching coordinate - metadata to infer the dimensions that need modifying, so the provided - cubes must have coordinates with the same metadata - (see :class:`iris.common.CoordMetadata`). - - .. note:: This function will load and copy the data payload of `src_cube`. - - .. deprecated:: 3.0.0 - - Instead use :class:`~iris.common.resolve.Resolve`. For example, rather - than calling ``as_compatible_shape(src_cube, target_cube)`` replace - with ``Resolve(src_cube, target_cube)(target_cube.core_data())``. - - Args: - - * src_cube: - An instance of :class:`iris.cube.Cube` with missing dimensions. - - * target_cube: - An instance of :class:`iris.cube.Cube` with the desired dimensionality. - - Returns: - A instance of :class:`iris.cube.Cube` with the same dimensionality as - `target_cube` but with the data and coordinates from `src_cube` - suitably reshaped to fit. 
- - """ - from iris.cube import Cube - - wmsg = ( - "iris.util.as_compatible_shape has been deprecated and will be " - "removed, please use iris.common.resolve.Resolve instead." - ) - warn_deprecated(wmsg) - - dim_mapping = {} - for coord in target_cube.aux_coords + target_cube.dim_coords: - dims = target_cube.coord_dims(coord) - try: - collapsed_dims = src_cube.coord_dims(coord) - except iris.exceptions.CoordinateNotFoundError: - continue - if collapsed_dims: - if len(collapsed_dims) == len(dims): - for dim_from, dim_to in zip(dims, collapsed_dims): - dim_mapping[dim_from] = dim_to - elif dims: - for dim_from in dims: - dim_mapping[dim_from] = None - - if len(dim_mapping) != target_cube.ndim: - raise ValueError( - "Insufficient or conflicting coordinate " - "metadata. Cannot infer dimension mapping " - "to restore cube dimensions." - ) - - new_shape = [1] * target_cube.ndim - for dim_from, dim_to in dim_mapping.items(): - if dim_to is not None: - new_shape[dim_from] = src_cube.shape[dim_to] - - new_data = src_cube.data.copy() - - # Transpose the data (if necessary) to prevent assignment of - # new_shape doing anything except adding length one dims. - order = [v for k, v in sorted(dim_mapping.items()) if v is not None] - if order != sorted(order): - new_order = [order.index(i) for i in range(len(order))] - new_data = np.transpose(new_data, new_order).copy() - - new_cube = Cube(new_data.reshape(new_shape)) - new_cube.metadata = copy.deepcopy(src_cube.metadata) - - # Record a mapping from old coordinate IDs to new coordinates, - # for subsequent use in creating updated aux_factories. 
- coord_mapping = {} - - reverse_mapping = {v: k for k, v in dim_mapping.items() if v is not None} - - def add_coord(coord): - """Closure used to add a suitably reshaped coord to new_cube.""" - all_dims = target_cube.coord_dims(coord) - src_dims = [ - dim - for dim in src_cube.coord_dims(coord) - if src_cube.shape[dim] > 1 - ] - mapped_dims = [reverse_mapping[dim] for dim in src_dims] - length1_dims = [dim for dim in all_dims if new_cube.shape[dim] == 1] - dims = length1_dims + mapped_dims - shape = [new_cube.shape[dim] for dim in dims] - if not shape: - shape = [1] - points = coord.points.reshape(shape) - bounds = None - if coord.has_bounds(): - bounds = coord.bounds.reshape(shape + [coord.nbounds]) - new_coord = coord.copy(points=points, bounds=bounds) - # If originally in dim_coords, add to dim_coords, otherwise add to - # aux_coords. - if target_cube.coords(coord, dim_coords=True): - try: - new_cube.add_dim_coord(new_coord, dims) - except ValueError: - # Catch cases where the coord is an AuxCoord and therefore - # cannot be added to dim_coords. - new_cube.add_aux_coord(new_coord, dims) - else: - new_cube.add_aux_coord(new_coord, dims) - coord_mapping[id(coord)] = new_coord - - for coord in src_cube.aux_coords + src_cube.dim_coords: - add_coord(coord) - for factory in src_cube.aux_factories: - new_cube.add_aux_factory(factory.updated(coord_mapping)) - - return new_cube - - def squeeze(cube): """ Removes any dimension of length one. If it has an associated DimCoord or From fddbed45556c0c36200bd9e54a16a5638f4535e4 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Tue, 25 Jan 2022 17:01:52 +0000 Subject: [PATCH 20/69] Coord print (#4499) * Initial working. * Temporary test exercising. * Fix docstring, simplify. * Small mods. * Additional changes, with old+intermediate versions commented. * Tidy, removing old commented forms. * Repr always has shape, except scalar cases. * Don't print calendar in oneline summary. * Initial working dim-meta printout tests. 
* Printout features and all tests complete : existing tests *not* yet fixed. * Fix existing connectivity print tests. * Fix existing MeshCoord printout tests. * Fix various str+repr changes in tests/test_coord_api. * Fix various str+repr changes in tests/unit/coords/test_Coord. * Fix existing printout tests for Ancils and CellMeasures. * Added new str and repr for Mesh. * Doctest fixes. * Tidy up unused methods, api and docstrings * Add specific tests for 'summary' method. * Tiny fixes. * Use UGRID term 'optional' connectivities instead of 'extra'. * Revise kwargs and clarify their relationship to numpy printoptions. * More small cosmetic changes, plus minimal bounds in repr. * Fix coord_api repr tests. * Added whatsnew + fixed docstring formatting. * Add summary section-location info for MeshCoord. Fix and test for bounds repr when lazy. * Clarify text-output code. * Remove obsolete comment. --- docs/src/userguide/loading_iris_cubes.rst | 69 +- docs/src/whatsnew/latest.rst | 10 + lib/iris/coords.py | 404 +++++-- lib/iris/cube.py | 21 +- lib/iris/experimental/ugrid/mesh.py | 342 +++--- .../coord_api/str_repr/aux_nontime_repr.txt | 12 +- .../coord_api/str_repr/aux_nontime_str.txt | 25 +- .../coord_api/str_repr/aux_time_repr.txt | 3 +- .../coord_api/str_repr/aux_time_str.txt | 7 +- .../coord_api/str_repr/dim_nontime_repr.txt | 12 +- .../coord_api/str_repr/dim_nontime_str.txt | 25 +- .../coord_api/str_repr/dim_time_repr.txt | 3 +- .../coord_api/str_repr/dim_time_str.txt | 9 +- lib/iris/tests/test_coord_api.py | 144 ++- .../unit/coords/test_AncillaryVariable.py | 48 +- .../tests/unit/coords/test_CellMeasure.py | 32 +- lib/iris/tests/unit/coords/test_Coord.py | 60 +- .../unit/coords/test__DimensionalMetadata.py | 1049 ++++++++++++++++- .../ugrid/mesh/test_Connectivity.py | 30 +- .../unit/experimental/ugrid/mesh/test_Mesh.py | 242 +++- .../experimental/ugrid/mesh/test_MeshCoord.py | 111 +- 21 files changed, 2138 insertions(+), 520 deletions(-) diff --git 
a/docs/src/userguide/loading_iris_cubes.rst b/docs/src/userguide/loading_iris_cubes.rst index ec459dbbdf3..ae2f807fe9c 100644 --- a/docs/src/userguide/loading_iris_cubes.rst +++ b/docs/src/userguide/loading_iris_cubes.rst @@ -304,13 +304,21 @@ for ease of calendar-based testing. >>> cube_all = iris.load_cube(filename, 'air_potential_temperature') >>> print('All times :\n' + str(cube_all.coord('time'))) All times : - DimCoord([2009-11-19 10:00:00, 2009-11-19 11:00:00, 2009-11-19 12:00:00], standard_name='time', calendar='gregorian') + DimCoord : time / (hours since 1970-01-01 00:00:00, gregorian calendar) + points: [2009-11-19 10:00:00, 2009-11-19 11:00:00, 2009-11-19 12:00:00] + shape: (3,) + dtype: float64 + standard_name: 'time' >>> # Define a function which accepts a datetime as its argument (this is simplified in later examples). >>> hour_11 = iris.Constraint(time=lambda cell: cell.point.hour == 11) >>> cube_11 = cube_all.extract(hour_11) >>> print('Selected times :\n' + str(cube_11.coord('time'))) Selected times : - DimCoord([2009-11-19 11:00:00], standard_name='time', calendar='gregorian') + DimCoord : time / (hours since 1970-01-01 00:00:00, gregorian calendar) + points: [2009-11-19 11:00:00] + shape: (1,) + dtype: float64 + standard_name: 'time' Secondly, the :class:`iris.time` module provides flexible time comparison facilities. An :class:`iris.time.PartialDateTime` object can be compared to @@ -335,7 +343,11 @@ The previous constraint example can now be written as: >>> print(iris.load_cube( ... iris.sample_data_path('uk_hires.pp'), ... 'air_potential_temperature' & the_11th_hour).coord('time')) - DimCoord([2009-11-19 11:00:00], standard_name='time', calendar='gregorian') + DimCoord : time / (hours since 1970-01-01 00:00:00, gregorian calendar) + points: [2009-11-19 11:00:00] + shape: (1,) + dtype: float64 + standard_name: 'time' It is common that a cube will need to be constrained between two given dates. 
In the following example we construct a time sequence representing the first @@ -355,10 +367,13 @@ day of every week for many years: :options: +NORMALIZE_WHITESPACE, +ELLIPSIS >>> print(long_ts.coord('time')) - DimCoord([2007-04-09 00:00:00, 2007-04-16 00:00:00, 2007-04-23 00:00:00, - ... - 2010-02-01 00:00:00, 2010-02-08 00:00:00, 2010-02-15 00:00:00], - standard_name='time', calendar='gregorian') + DimCoord : time / (days since 2007-04-09, gregorian calendar) + points: [ + 2007-04-09 00:00:00, 2007-04-16 00:00:00, ..., + 2010-02-08 00:00:00, 2010-02-15 00:00:00] + shape: (150,) + dtype: int64 + standard_name: 'time' Given two dates in datetime format, we can select all points between them. @@ -371,9 +386,13 @@ Given two dates in datetime format, we can select all points between them. ... time=lambda cell: d1 <= cell.point < d2) >>> within_st_swithuns_07 = long_ts.extract(st_swithuns_daterange_07) >>> print(within_st_swithuns_07.coord('time')) - DimCoord([2007-07-16 00:00:00, 2007-07-23 00:00:00, 2007-07-30 00:00:00, - 2007-08-06 00:00:00, 2007-08-13 00:00:00, 2007-08-20 00:00:00], - standard_name='time', calendar='gregorian') + DimCoord : time / (days since 2007-04-09, gregorian calendar) + points: [ + 2007-07-16 00:00:00, 2007-07-23 00:00:00, 2007-07-30 00:00:00, + 2007-08-06 00:00:00, 2007-08-13 00:00:00, 2007-08-20 00:00:00] + shape: (6,) + dtype: int64 + standard_name: 'time' Alternatively, we may rewrite this using :class:`iris.time.PartialDateTime` objects. @@ -387,9 +406,13 @@ objects. ... 
time=lambda cell: pdt1 <= cell.point < pdt2) >>> within_st_swithuns_07 = long_ts.extract(st_swithuns_daterange_07) >>> print(within_st_swithuns_07.coord('time')) - DimCoord([2007-07-16 00:00:00, 2007-07-23 00:00:00, 2007-07-30 00:00:00, - 2007-08-06 00:00:00, 2007-08-13 00:00:00, 2007-08-20 00:00:00], - standard_name='time', calendar='gregorian') + DimCoord : time / (days since 2007-04-09, gregorian calendar) + points: [ + 2007-07-16 00:00:00, 2007-07-23 00:00:00, 2007-07-30 00:00:00, + 2007-08-06 00:00:00, 2007-08-13 00:00:00, 2007-08-20 00:00:00] + shape: (6,) + dtype: int64 + standard_name: 'time' A more complex example might require selecting points over an annually repeating date range. We can select points within a certain part of the year, in this case @@ -402,13 +425,19 @@ PartialDateTime this becomes simple: ... time=lambda cell: PartialDateTime(month=7, day=15) <= cell < PartialDateTime(month=8, day=25)) >>> within_st_swithuns = long_ts.extract(st_swithuns_daterange) ... - >>> print(within_st_swithuns.coord('time')) - DimCoord([2007-07-16 00:00:00, 2007-07-23 00:00:00, 2007-07-30 00:00:00, - 2007-08-06 00:00:00, 2007-08-13 00:00:00, 2007-08-20 00:00:00, - 2008-07-21 00:00:00, 2008-07-28 00:00:00, 2008-08-04 00:00:00, - 2008-08-11 00:00:00, 2008-08-18 00:00:00, 2009-07-20 00:00:00, - 2009-07-27 00:00:00, 2009-08-03 00:00:00, 2009-08-10 00:00:00, - 2009-08-17 00:00:00, 2009-08-24 00:00:00], standard_name='time', calendar='gregorian') + >>> # Note: using summary(max_values) to show more of the points + >>> print(within_st_swithuns.coord('time').summary(max_values=100)) + DimCoord : time / (days since 2007-04-09, gregorian calendar) + points: [ + 2007-07-16 00:00:00, 2007-07-23 00:00:00, 2007-07-30 00:00:00, + 2007-08-06 00:00:00, 2007-08-13 00:00:00, 2007-08-20 00:00:00, + 2008-07-21 00:00:00, 2008-07-28 00:00:00, 2008-08-04 00:00:00, + 2008-08-11 00:00:00, 2008-08-18 00:00:00, 2009-07-20 00:00:00, + 2009-07-27 00:00:00, 2009-08-03 00:00:00, 2009-08-10 
00:00:00, + 2009-08-17 00:00:00, 2009-08-24 00:00:00] + shape: (17,) + dtype: int64 + standard_name: 'time' Notice how the dates printed are between the range specified in the ``st_swithuns_daterange`` and that they span multiple years. diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 861731d2449..35628c4355a 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -104,6 +104,16 @@ This document explains the changes made to Iris for this release from `Metarelate`_. From now we intend to manage phenonemon translation in Iris itself. (:pull:`4484`) +#. `@pp-mo`_ improved printout of various cube data component objects : + :class:`~iris.coords.Coord`, :class:`~iris.coords.CellMeasure`, + :class:`~iris.coords.AncillaryVariable`, + :class:`~iris.experimental.ugrid.mesh.MeshCoord` and + :class:`~iris.experimental.ugrid.mesh.Mesh`. + These now all provide a more controllable ``summary()`` method, and + more convenient and readable ``str()`` and ``repr()`` output in the style of + the :class:`iris.cube.Cube`. + They also no longer realise lazy data. (:pull:`4499`). 
+ 🐛 Bugs Fixed ============= diff --git a/lib/iris/coords.py b/lib/iris/coords.py index db193d00462..6e9bd6c8407 100644 --- a/lib/iris/coords.py +++ b/lib/iris/coords.py @@ -10,7 +10,7 @@ from abc import ABCMeta, abstractmethod from collections import namedtuple -from collections.abc import Iterator +from collections.abc import Container, Iterator import copy from itertools import chain, zip_longest import operator @@ -57,6 +57,10 @@ class _DimensionalMetadata(CFVariableMixin, metaclass=ABCMeta): _MODE_RDIV: "/", } + # Used by printout methods : __str__ and __repr__ + # Overridden in subclasses : Coord->'points', Connectivity->'indices' + _values_array_name = "data" + @abstractmethod def __init__( self, @@ -268,78 +272,317 @@ def _has_lazy_values(self): """ return self._values_dm.has_lazy_data() - def _repr_other_metadata(self): - fmt = "" - if self.long_name: - fmt = ", long_name={self.long_name!r}" - if self.var_name: - fmt += ", var_name={self.var_name!r}" - if len(self.attributes) > 0: - fmt += ", attributes={self.attributes}" - result = fmt.format(self=self) - return result + def summary( + self, + shorten=False, + max_values=None, + edgeitems=2, + linewidth=None, + precision=None, + convert_dates=True, + _section_indices=None, + ): + r""" + Make a printable text summary. + + Parameters + ---------- + shorten : bool, default = False + If True, produce an abbreviated one-line summary. + If False, produce a multi-line summary, with embedded newlines. + max_values : int or None, default = None + If more than this many data values, print truncated data arrays + instead of full contents. + If 0, print only the shape. + The default is 5 if :attr:`shorten`\ =True, or 15 otherwise. + This overrides ``numpy.get_printoptions['threshold']``\ . + linewidth : int or None, default = None + Character-width controlling line splitting of array outputs. + If unset, defaults to ``numpy.get_printoptions['linewidth']``\ . + edgeitems : int = 2 + Controls truncated array output. 
+ Overrides ``numpy.getprintoptions['edgeitems']``\ . + precision : int or None, default = None + Controls number decimal formatting. + When :attr:`shorten`\ =True this is defaults to 3, in which case it + overrides ``numpy.get_printoptions()['precision']``\ . + convert_dates : bool, default = True + If the units has a calendar, then print array values as date + strings instead of the actual numbers. + + Returns + ------- + result : str + Output text, with embedded newlines when :attr:`shorten`\ =False. - def _str_dates(self, dates_as_numbers): - date_obj_array = self.units.num2date(dates_as_numbers) - kwargs = {"separator": ", ", "prefix": " "} - return np.core.arrayprint.array2string( - date_obj_array, formatter={"all": str}, **kwargs - ) - def __str__(self): - # Note: this method includes bounds handling code, but it only runs - # within Coord type instances, as only these allow bounds to be set. - if self.units.is_time_reference(): - fmt = ( - "{cls}({values}{bounds}" - ", standard_name={self.standard_name!r}" - ", calendar={self.units.calendar!r}{other_metadata})" + .. note:: + Arrays are formatted using :meth:`numpy.array2string`. Some aspects + of the array formatting are controllable in the usual way, via + :meth:`numpy.printoptions`, but others are overridden as detailed + above. + Control of those aspects is still available, but only via the call + arguments. + + """ + # NOTE: the *private* key "_section_indices" can be set to a dict, to + # return details of which (line, character) each particular section of + # the output text begins at. + # Currently only used by MeshCoord.summary(), which needs this info to + # modify the result string, for idiosyncratic reasons. + + def array_summary(data, n_max, n_edge, linewidth, precision): + # Return a text summary of an array. + # Take account of strings, dates and masked data. + result = "" + formatter = None + if convert_dates and self.units.is_time_reference(): + # Account for dates, if enabled. + # N.B. 
a time unit with a long time interval ("months" + # or "years") cannot be converted to a date using + # `num2date`, so gracefully fall back to printing + # values as numbers. + if not self.units.is_long_time_interval(): + # Otherwise ... replace all with strings. + if ma.is_masked(data): + mask = data.mask + else: + mask = None + data = np.array(self.units.num2date(data)) + data = data.astype(str) + # Masked datapoints do not survive num2date. + if mask is not None: + data = np.ma.masked_array(data, mask) + + if ma.is_masked(data): + # Masks are not handled by np.array2string, whereas + # MaskedArray.__str__ is using a private method to convert to + # objects. + # Our preferred solution is to convert to strings *and* fill + # with '--'. This is not ideal because numbers will not align + # with a common numeric format, but there is no *public* logic + # in numpy to arrange that, so let's not overcomplicate. + # It happens that array2string *also* does not use a common + # format (width) for strings, but we fix that below... + data = data.astype(str).filled("--") + + if data.dtype.kind == "U": + # Strings : N.B. includes all missing data + # find the longest. + length = max(len(str(x)) for x in data.flatten()) + # Pre-apply a common formatting width. + formatter = {"all": lambda x: str(x).ljust(length)} + + result = np.array2string( + data, + separator=", ", + edgeitems=n_edge, + threshold=n_max, + max_line_width=linewidth, + formatter=formatter, + precision=precision, + ) + + return result + + units_str = str(self.units) + if self.units.calendar and not shorten: + units_str += f", {self.units.calendar} calendar" + title_str = f"{self.name()} / ({units_str})" + cls_str = type(self).__name__ + shape_str = str(self.shape) + + # Implement conditional defaults for control args. 
+ if max_values is None: + max_values = 5 if shorten else 15 + precision = 3 if shorten else None + n_indent = 4 + indent = " " * n_indent + newline_indent = "\n" + indent + if linewidth is not None: + given_array_width = linewidth + else: + given_array_width = np.get_printoptions()["linewidth"] + using_array_width = given_array_width - n_indent * 2 + # Make a printout of the main data array (or maybe not, if lazy). + if self._has_lazy_values(): + data_str = "" + elif max_values == 0: + data_str = "[...]" + else: + data_str = array_summary( + self._values, + n_max=max_values, + n_edge=edgeitems, + linewidth=using_array_width, + precision=precision, ) - if self.units.is_long_time_interval(): - # A time unit with a long time interval ("months" or "years") - # cannot be converted to a date using `num2date` so gracefully - # fall back to printing points as numbers, not datetimes. - values = self._values + + # The output under construction, divided into lines for convenience. + output_lines = [""] + + def add_output(text, section=None): + # Append output text and record locations of named 'sections' + if section and _section_indices is not None: + # defined a named 'section', recording the current line number + # and character position as its start position + i_line = len(output_lines) - 1 + i_char = len(output_lines[-1]) + _section_indices[section] = (i_line, i_char) + # Split the text-to-add into lines + lines = text.split("\n") + # Add initial text (before first '\n') to the current line + output_lines[-1] += lines[0] + # Add subsequent lines as additional output lines + for line in lines[1:]: + output_lines.append(line) # Add new lines + + if shorten: + add_output(f"<{cls_str}: ") + add_output(f"{title_str} ", section="title") + + if data_str != "": + # Flatten to a single line, reducing repeated spaces. 
+ def flatten_array_str(array_str): + array_str = array_str.replace("\n", " ") + array_str = array_str.replace("\t", " ") + while " " in array_str: + array_str = array_str.replace(" ", " ") + return array_str + + data_str = flatten_array_str(data_str) + # Adjust maximum-width to allow for the title width in the + # repr form. + current_line_len = len(output_lines[-1]) + using_array_width = given_array_width - current_line_len + # Work out whether to include a summary of the data values + if len(data_str) > using_array_width: + # Make one more attempt, printing just the *first* point, + # as this is useful for dates. + data_str = data_str = array_summary( + self._values[:1], + n_max=max_values, + n_edge=edgeitems, + linewidth=using_array_width, + precision=precision, + ) + data_str = flatten_array_str(data_str) + data_str = data_str[:-1] + ", ...]" + if len(data_str) > using_array_width: + # Data summary is still too long : replace with array + # "placeholder" representation. + data_str = "[...]" + + if self.has_bounds(): + data_str += "+bounds" + + if self.shape != (1,): + # Anything non-scalar : show shape as well. + data_str += f" shape{shape_str}" + + # single-line output in 'shorten' mode + add_output(f"{data_str}>", section="data") + + else: + # Long (multi-line) output format. + add_output(f"{cls_str} : ") + add_output(f"{title_str}", section="title") + + def reindent_data_string(text, n_indent): + lines = [line for line in text.split("\n")] + indent = " " * (n_indent - 1) # allow 1 for the initial '[' + # Indent all but the *first* line. 
+ line_1, rest_lines = lines[0], lines[1:] + rest_lines = ["\n" + indent + line for line in rest_lines] + result = line_1 + "".join(rest_lines) + return result + + data_array_str = reindent_data_string(data_str, 2 * n_indent) + + # NOTE: actual section name is variable here : data/points/indices + data_text = f"{self._values_array_name}: " + if "\n" in data_array_str: + # Put initial '[' here, and the rest on subsequent lines + data_text += "[" + newline_indent + indent + data_array_str[1:] else: - values = self._str_dates(self._values) - bounds = "" + # All on one line + data_text += data_array_str + + # N.B. indent section and record section start after that + add_output(newline_indent) + add_output(data_text, section="data") + if self.has_bounds(): - if self.units.is_long_time_interval(): - bounds_vals = self.bounds + # Add a bounds section : basically just like the 'data'. + if self._bounds_dm.has_lazy_data(): + bounds_array_str = "" + elif max_values == 0: + bounds_array_str = "[...]" else: - bounds_vals = self._str_dates(self.bounds) - bounds = ", bounds={vals}".format(vals=bounds_vals) - result = fmt.format( - self=self, - cls=type(self).__name__, - values=values, - bounds=bounds, - other_metadata=self._repr_other_metadata(), - ) - else: - result = repr(self) + bounds_array_str = array_summary( + self._bounds_dm.data, + n_max=max_values, + n_edge=edgeitems, + linewidth=using_array_width, + precision=precision, + ) + bounds_array_str = reindent_data_string( + bounds_array_str, 2 * n_indent + ) - return result + bounds_text = "bounds: " + if "\n" in bounds_array_str: + # Put initial '[' here, and the rest on subsequent lines + bounds_text += ( + "[" + newline_indent + indent + bounds_array_str[1:] + ) + else: + # All on one line + bounds_text += bounds_array_str + + # N.B. 
indent section and record section start after that + add_output(newline_indent) + add_output(bounds_text, section="bounds") + + if self.has_bounds(): + shape_str += f" bounds{self._bounds_dm.shape}" + + # Add shape section (always) + add_output(newline_indent) + add_output(f"shape: {shape_str}", section="shape") + + # Add dtype section (always) + add_output(newline_indent) + add_output(f"dtype: {self.dtype}", section="dtype") + + for name in self._metadata_manager._fields: + if name == "units": + # This was already included in the header line + continue + val = getattr(self, name, None) + if isinstance(val, Container): + # Don't print empty containers, like attributes={} + show = bool(val) + else: + # Don't print properties when not present, or set to None, + # or False. + # This works OK as long as we are happy to treat all + # boolean properties as 'off' when False : Which happens to + # work for all those defined so far. + show = val is not None and val is not False + if show: + # add a section for this property (metadata item) + # TODO: modify to do multi-line attribute output + add_output(newline_indent) + add_output(f"{name}: {val!r}", section=name) + + return "\n".join(output_lines) + + def __str__(self): + return self.summary() def __repr__(self): - # Note: this method includes bounds handling code, but it only runs - # within Coord type instances, as only these allow bounds to be set. 
- fmt = ( - "{cls}({self._values!r}{bounds}" - ", standard_name={self.standard_name!r}, units={self.units!r}" - "{other_metadata})" - ) - bounds = "" - # if coordinate, handle the bounds - if self.has_bounds(): - bounds = ", bounds=" + repr(self.bounds) - result = fmt.format( - self=self, - cls=type(self).__name__, - bounds=bounds, - other_metadata=self._repr_other_metadata(), - ) - return result + return self.summary(shorten=True) def __eq__(self, other): # Note: this method includes bounds handling code, but it only runs @@ -861,23 +1104,6 @@ def measure(self, measure): raise ValueError(emsg) self._metadata_manager.measure = measure - def __str__(self): - result = repr(self) - return result - - def __repr__(self): - fmt = ( - "{cls}({self.data!r}, " - "measure={self.measure!r}, standard_name={self.standard_name!r}, " - "units={self.units!r}{other_metadata})" - ) - result = fmt.format( - self=self, - cls=type(self).__name__, - other_metadata=self._repr_other_metadata(), - ) - return result - def cube_dims(self, cube): """ Return the cube dimensions of this CellMeasure. @@ -1303,6 +1529,8 @@ class Coord(_DimensionalMetadata): """ + _values_array_name = "points" + @abstractmethod def __init__( self, @@ -1603,14 +1831,6 @@ def has_lazy_bounds(self): result = self._bounds_dm.has_lazy_data() return result - def _repr_other_metadata(self): - result = super()._repr_other_metadata() - if self.coord_system: - result += ", coord_system={}".format(self.coord_system) - if self.climatological: - result += ", climatological={}".format(self.climatological) - return result - # Must supply __hash__ as Python 3 does not enable it if __eq__ is defined. # NOTE: Violates "objects which compare equal must have the same hash". 
# We ought to remove this, as equality of two coords can *change*, so they @@ -2512,12 +2732,6 @@ def collapsed(self, dims_to_collapse=None): coord.circular = False return coord - def _repr_other_metadata(self): - result = Coord._repr_other_metadata(self) - if self.circular: - result += ", circular=%r" % self.circular - return result - def _new_points_requirements(self, points): """ Confirm that a new set of coord points adheres to the requirements for diff --git a/lib/iris/cube.py b/lib/iris/cube.py index d75b94ea09f..2cd29682dd1 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -4263,8 +4263,11 @@ def interpolate(self, sample_points, scheme, collapse_scalar=True): air_potential_temperature / (K) \ (time: 3; model_level_number: 7; grid_latitude: 204; grid_longitude: 187) >>> print(cube.coord('time')) - DimCoord([2009-11-19 10:00:00, 2009-11-19 11:00:00, \ -2009-11-19 12:00:00], standard_name='time', calendar='gregorian') + DimCoord : time / (hours since 1970-01-01 00:00:00, gregorian calendar) + points: [2009-11-19 10:00:00, 2009-11-19 11:00:00, 2009-11-19 12:00:00] + shape: (3,) + dtype: float64 + standard_name: 'time' >>> print(cube.coord('time').points) [349618. 349619. 349620.] 
>>> samples = [('time', 349618.5)] @@ -4273,8 +4276,11 @@ def interpolate(self, sample_points, scheme, collapse_scalar=True): air_potential_temperature / (K) \ (model_level_number: 7; grid_latitude: 204; grid_longitude: 187) >>> print(result.coord('time')) - DimCoord([2009-11-19 10:30:00], standard_name='time', \ -calendar='gregorian') + DimCoord : time / (hours since 1970-01-01 00:00:00, gregorian calendar) + points: [2009-11-19 10:30:00] + shape: (1,) + dtype: float64 + standard_name: 'time' >>> print(result.coord('time').points) [349618.5] >>> # For datetime-like coordinates, we can also use @@ -4285,8 +4291,11 @@ def interpolate(self, sample_points, scheme, collapse_scalar=True): air_potential_temperature / (K) \ (model_level_number: 7; grid_latitude: 204; grid_longitude: 187) >>> print(result2.coord('time')) - DimCoord([2009-11-19 10:30:00], standard_name='time', \ -calendar='gregorian') + DimCoord : time / (hours since 1970-01-01 00:00:00, gregorian calendar) + points: [2009-11-19 10:30:00] + shape: (1,) + dtype: float64 + standard_name: 'time' >>> print(result2.coord('time').points) [349618.5] >>> print(result == result2) diff --git a/lib/iris/experimental/ugrid/mesh.py b/lib/iris/experimental/ugrid/mesh.py index 0f2bfd844cb..9e8074c83a5 100644 --- a/lib/iris/experimental/ugrid/mesh.py +++ b/lib/iris/experimental/ugrid/mesh.py @@ -12,9 +12,10 @@ """ from abc import ABC, abstractmethod from collections import namedtuple -import re +from collections.abc import Container from typing import Iterable +from cf_units import Unit from dask import array as da import numpy as np @@ -24,6 +25,7 @@ metadata_filter, metadata_manager_factory, ) +from ...common.metadata import BaseMetadata from ...config import get_logger from ...coords import AuxCoord, _DimensionalMetadata from ...exceptions import ConnectivityNotFoundError, CoordinateNotFoundError @@ -207,58 +209,6 @@ def validate_arg_vs_list(arg_name, arg, valid_list): attributes=attributes, ) - def __repr__(self): - 
def kwargs_filter(k, v): - result = False - if k != "cf_role": - if v is not None: - result = True - if ( - not isinstance(v, str) - and isinstance(v, Iterable) - and not v - ): - result = False - elif k == "units" and v == "unknown": - result = False - return result - - def array2repr(array): - if self.has_lazy_indices(): - result = repr(array) - else: - with np.printoptions( - threshold=NP_PRINTOPTIONS_THRESHOLD, - edgeitems=NP_PRINTOPTIONS_EDGEITEMS, - ): - result = re.sub("\n *", " ", repr(array)) - return result - - # positional arguments - args = ", ".join( - [ - f"{array2repr(self.core_indices())}", - f"cf_role={self.cf_role!r}", - ] - ) - - # optional arguments (metadata) - kwargs = ", ".join( - [ - f"{k}={v!r}" - for k, v in self.metadata._asdict().items() - if kwargs_filter(k, v) - ] - ) - - return f"{self.__class__.__name__}({', '.join([args, kwargs])})" - - def __str__(self): - args = ", ".join( - [f"cf_role={self.cf_role!r}", f"start_index={self.start_index!r}"] - ) - return f"{self.__class__.__name__}({args})" - @property def _values(self): # Overridden just to allow .setter override. @@ -975,77 +925,129 @@ def __ne__(self, other): result = not result return result - def __repr__(self): - def to_coord_and_axis(members): - def axis(member): - return member.split("_")[1] - - result = [ - f"({coord!s}, {axis(member)!r})" - for member, coord in members._asdict().items() - if coord is not None - ] - result = f"[{', '.join(result)}]" if result else None - return result + def summary(self, shorten=False): + """ + Return a string representation of the Mesh. - node_coords_and_axes = to_coord_and_axis(self.node_coords) - connectivities = [ - str(connectivity) - for connectivity in self.all_connectivities - if connectivity is not None - ] + Parameters + ---------- + shorten : bool, default = False + If True, produce a oneline string form of the form . + If False, produce a multi-line detailed print output. 
- if len(connectivities) == 1: - connectivities = connectivities[0] + Returns + ------- + result : str + + """ + if shorten: + result = self._summary_oneline() else: - connectivities = f"[{', '.join(connectivities)}]" + result = self._summary_multiline() + return result - # positional arguments - args = [ - f"topology_dimension={self.topology_dimension!r}", - f"node_coords_and_axes={node_coords_and_axes}", - f"connectivities={connectivities}", - ] + def __repr__(self): + return self.summary(shorten=True) - # optional argument - edge_coords_and_axes = to_coord_and_axis(self.edge_coords) - if edge_coords_and_axes: - args.append(f"edge_coords_and_axes={edge_coords_and_axes}") - - # optional argument - if self.topology_dimension > 1: - face_coords_and_axes = to_coord_and_axis(self.face_coords) - if face_coords_and_axes: - args.append(f"face_coords_and_axes={face_coords_and_axes}") - - def kwargs_filter(k, v): - result = False - if k != "topology_dimension": - if not ( - self.topology_dimension == 1 and k == "face_dimension" - ): - if v is not None: - result = True - if ( - not isinstance(v, str) - and isinstance(v, Iterable) - and not v - ): - result = False - elif k == "units" and v == "unknown": - result = False - return result - - # optional arguments (metadata) - args.extend( - [ - f"{k}={v!r}" - for k, v in self.metadata._asdict().items() - if kwargs_filter(k, v) - ] + def __str__(self): + return self.summary(shorten=False) + + def _summary_oneline(self): + # We use the repr output to produce short one-line identity summary, + # similar to the object.__str__ output "". + # This form also used in other str() constructions, like MeshCoord. + # By contrast, __str__ (below) produces a readable multi-line printout. + mesh_name = self.name() + if mesh_name in (None, "", "unknown"): + mesh_name = None + if mesh_name: + # Use a more human-readable form + mesh_string = f"" + else: + # Mimic the generic object.__str__ style. 
+ mesh_id = id(self) + mesh_string = f"" + + return mesh_string + + def _summary_multiline(self): + # Produce a readable multi-line summary of the Mesh content. + lines = [] + n_indent = 4 + indent_str = " " * n_indent + + def line(text, i_indent=0): + indent = indent_str * i_indent + lines.append(f"{indent}{text}") + + line(f"Mesh : '{self.name()}'") + line(f"topology_dimension: {self.topology_dimension}", 1) + for element in ("node", "edge", "face"): + if element == "node": + element_exists = True + else: + main_conn_name = f"{element}_node_connectivity" + main_conn = getattr(self, main_conn_name, None) + element_exists = main_conn is not None + if element_exists: + # Include a section for this element + line(element, 1) + # Print element dimension + dim_name = f"{element}_dimension" + dim = getattr(self, dim_name) + line(f"{dim_name}: '{dim}'", 2) + # Print defining connectivity (except node) + if element != "node": + main_conn_string = main_conn.summary( + shorten=True, linewidth=0 + ) + line(f"{main_conn_name}: {main_conn_string}", 2) + # Print coords + include_key = f"include_{element}s" + coords = self.coords(**{include_key: True}) + if coords: + line(f"{element} coordinates", 2) + for coord in coords: + coord_string = coord.summary(shorten=True, linewidth=0) + line(coord_string, 3) + + # Having dealt with essential info, now add any optional connectivities + # N.B. 
includes boundaries: as optional connectivity, not an "element" + optional_conn_names = ( + "boundary_connectivity", + "face_face_connectivity", + "face_edge_connectivity", + "edge_face_connectivity", ) + optional_conns = [ + getattr(self, name, None) for name in optional_conn_names + ] + optional_conns = { + name: conn + for conn, name in zip(optional_conns, optional_conn_names) + if conn is not None + } + if optional_conns: + line("optional connectivities", 1) + for name, conn in optional_conns.items(): + conn_string = conn.summary(shorten=True, linewidth=0) + line(f"{name}: {conn_string}", 2) + + # Output the detail properties, basically those from CFVariableMixin + for name in BaseMetadata._members: + val = getattr(self, name, None) + if val is not None: + if name == "units": + show = val.origin != Unit(None) + elif isinstance(val, Container): + show = bool(val) + else: + show = val is not None + if show: + line(f"{name}: {val!r}", 1) - return f"{self.__class__.__name__}({', '.join(args)})" + result = "\n".join(lines) + return result def __setstate__(self, state): metadata_manager, coord_manager, connectivity_manager = state @@ -2943,62 +2945,62 @@ def __eq__(self, other): def __hash__(self): return hash(id(self)) - def _string_summary(self, repr_style): - # Note: bypass the immediate parent here, which is Coord, because we - # have no interest in reporting coord_system or climatological, or in - # printing out our points/bounds. - # We also want to list our defining properties, i.e. mesh/location/axis - # *first*, before names/units etc, so different from other Coord types. - - # First construct a shortform text summary to identify the Mesh. - # IN 'str-mode', this attempts to use Mesh.name() if it is set, - # otherwise uses an object-id style (as also for 'repr-mode'). - # TODO: use a suitable method provided by Mesh, e.g. something like - # "Mesh.summary(shorten=True)", when it is available. 
- mesh_name = None - if not repr_style: - mesh_name = self.mesh.name() - if mesh_name in (None, "", "unknown"): - mesh_name = None - if mesh_name: - # Use a more human-readable form - mesh_string = f"Mesh({mesh_name!r})" + def summary(self, *args, **kwargs): + # We need to specialise _DimensionalMetadata.summary, so that we always + # print the mesh+location of a MeshCoord. + if len(args) > 0: + shorten = args[0] else: - # Mimic the generic object.__str__ style. - mesh_id = id(self.mesh) - mesh_string = f"" - result = ( - f"mesh={mesh_string}" - f", location={self.location!r}" - f", axis={self.axis!r}" - ) - # Add 'other' metadata that is drawn from the underlying node-coord. - # But put these *afterward*, unlike other similar classes. - for item in ( - "shape", - "standard_name", - "units", - "long_name", - "attributes", - ): - # NOTE: order of these matches Coord.summary, but omit var_name. - val = getattr(self, item, None) - if item == "attributes": - is_blank = len(val) == 0 # an empty dict is as good as none - else: - is_blank = val is None - if not is_blank: - result += f", {item}={val!r}" - - result = f"MeshCoord({result})" + shorten = kwargs.get("shorten", False) + + # Get the default-form result. + if shorten: + # NOTE: we simply aren't interested in the values for the repr, + # so fix linewidth to suppress them + kwargs["linewidth"] = 1 + + # Plug private key, to get back the section structure info + section_indices = {} + kwargs["_section_indices"] = section_indices + result = super().summary(*args, **kwargs) + + # Modify the generic 'default-form' result to produce what we want. 
+ if shorten: + # Single-line form : insert mesh+location before the array part + # Construct a text detailing the mesh + location + mesh_string = self.mesh.name() + if mesh_string == "unknown": + # If no name, replace with the one-line summary + mesh_string = self.mesh.summary(shorten=True) + extra_str = f"mesh({mesh_string}) location({self.location}) " + # find where in the line the data-array text begins + i_line, i_array = section_indices["data"] + assert i_line == 0 + # insert the extra text there + result = result[:i_array] + extra_str + result[i_array:] + # NOTE: this invalidates the original width calculation and may + # easily extend the result beyond the intended maximum linewidth. + # We do treat that as an advisory control over array printing, not + # an absolute contract, so just ignore the problem for now. + else: + # Multiline form + # find where the "location: ... " section is + i_location, i_namestart = section_indices["location"] + lines = result.split("\n") + location_line = lines[i_location] + # copy the indent spacing + indent = location_line[:i_namestart] + # use that to construct a suitable 'mesh' line + mesh_string = self.mesh.summary(shorten=True) + mesh_line = f"{indent}mesh: {mesh_string}" + # Move the 'location' line, putting it and the 'mesh' line right at + # the top, immediately after the header line. 
+ del lines[i_location] + lines[1:1] = [mesh_line, location_line] + # Re-join lines to give the result + result = "\n".join(lines) return result - def __str__(self): - return self._string_summary(repr_style=False) - - def __repr__(self): - return self._string_summary(repr_style=True) - def _construct_access_arrays(self): """ Build lazy points and bounds arrays, providing dynamic access via the diff --git a/lib/iris/tests/results/coord_api/str_repr/aux_nontime_repr.txt b/lib/iris/tests/results/coord_api/str_repr/aux_nontime_repr.txt index c1d62f28e45..3e7aeda3092 100644 --- a/lib/iris/tests/results/coord_api/str_repr/aux_nontime_repr.txt +++ b/lib/iris/tests/results/coord_api/str_repr/aux_nontime_repr.txt @@ -1,11 +1 @@ -DimCoord(array([-0.1278, -0.1269, -0.126 , -0.1251, -0.1242, -0.1233, -0.1224, - -0.1215, -0.1206, -0.1197], dtype=float32), bounds=array([[-0.12825, -0.12735], - [-0.12735, -0.12645], - [-0.12645, -0.12555], - [-0.12555, -0.12465], - [-0.12465, -0.12375], - [-0.12375, -0.12285], - [-0.12285, -0.12195], - [-0.12195, -0.12105], - [-0.12105, -0.12015], - [-0.12015, -0.11925]], dtype=float32), standard_name='grid_latitude', units=Unit('degrees'), coord_system=RotatedGeogCS(37.5, 177.5, ellipsoid=GeogCS(6371229.0))) \ No newline at end of file + \ No newline at end of file diff --git a/lib/iris/tests/results/coord_api/str_repr/aux_nontime_str.txt b/lib/iris/tests/results/coord_api/str_repr/aux_nontime_str.txt index c1d62f28e45..0361e88eef7 100644 --- a/lib/iris/tests/results/coord_api/str_repr/aux_nontime_str.txt +++ b/lib/iris/tests/results/coord_api/str_repr/aux_nontime_str.txt @@ -1,11 +1,14 @@ -DimCoord(array([-0.1278, -0.1269, -0.126 , -0.1251, -0.1242, -0.1233, -0.1224, - -0.1215, -0.1206, -0.1197], dtype=float32), bounds=array([[-0.12825, -0.12735], - [-0.12735, -0.12645], - [-0.12645, -0.12555], - [-0.12555, -0.12465], - [-0.12465, -0.12375], - [-0.12375, -0.12285], - [-0.12285, -0.12195], - [-0.12195, -0.12105], - [-0.12105, -0.12015], - 
[-0.12015, -0.11925]], dtype=float32), standard_name='grid_latitude', units=Unit('degrees'), coord_system=RotatedGeogCS(37.5, 177.5, ellipsoid=GeogCS(6371229.0))) \ No newline at end of file +DimCoord : level_height / (m) + points: [ + 5. , 21.666664, 45. , 75. , 111.66668 , + 155. , 205. , 261.6667 , 325. , 395. ] + bounds: [ + [ 0. , 13.333332], + [ 13.333332, 33.333332], + ..., + [293.3333 , 360. ], + [360. , 433.3332 ]] + shape: (10,) bounds(10, 2) + dtype: float32 + long_name: 'level_height' + attributes: {'positive': 'up'} \ No newline at end of file diff --git a/lib/iris/tests/results/coord_api/str_repr/aux_time_repr.txt b/lib/iris/tests/results/coord_api/str_repr/aux_time_repr.txt index 120546311fd..57d5882e88e 100644 --- a/lib/iris/tests/results/coord_api/str_repr/aux_time_repr.txt +++ b/lib/iris/tests/results/coord_api/str_repr/aux_time_repr.txt @@ -1,2 +1 @@ -DimCoord(array([347921.16666667, 347921.33333333, 347921.5 , 347921.66666666, - 347921.83333333, 347922. ]), standard_name='time', units=Unit('hours since 1970-01-01 00:00:00', calendar='gregorian')) \ No newline at end of file + \ No newline at end of file diff --git a/lib/iris/tests/results/coord_api/str_repr/aux_time_str.txt b/lib/iris/tests/results/coord_api/str_repr/aux_time_str.txt index 9d209402e6f..f9cd09223a0 100644 --- a/lib/iris/tests/results/coord_api/str_repr/aux_time_str.txt +++ b/lib/iris/tests/results/coord_api/str_repr/aux_time_str.txt @@ -1,2 +1,5 @@ -DimCoord([2009-09-09 17:10:00, 2009-09-09 17:20:00, 2009-09-09 17:30:00, - 2009-09-09 17:40:00, 2009-09-09 17:50:00, 2009-09-09 18:00:00], standard_name='time', calendar='gregorian') \ No newline at end of file +DimCoord : forecast_period / (hours) + points: [0.] 
+ shape: (1,) + dtype: float64 + standard_name: 'forecast_period' \ No newline at end of file diff --git a/lib/iris/tests/results/coord_api/str_repr/dim_nontime_repr.txt b/lib/iris/tests/results/coord_api/str_repr/dim_nontime_repr.txt index c1d62f28e45..cbf44b1dbbd 100644 --- a/lib/iris/tests/results/coord_api/str_repr/dim_nontime_repr.txt +++ b/lib/iris/tests/results/coord_api/str_repr/dim_nontime_repr.txt @@ -1,11 +1 @@ -DimCoord(array([-0.1278, -0.1269, -0.126 , -0.1251, -0.1242, -0.1233, -0.1224, - -0.1215, -0.1206, -0.1197], dtype=float32), bounds=array([[-0.12825, -0.12735], - [-0.12735, -0.12645], - [-0.12645, -0.12555], - [-0.12555, -0.12465], - [-0.12465, -0.12375], - [-0.12375, -0.12285], - [-0.12285, -0.12195], - [-0.12195, -0.12105], - [-0.12105, -0.12015], - [-0.12015, -0.11925]], dtype=float32), standard_name='grid_latitude', units=Unit('degrees'), coord_system=RotatedGeogCS(37.5, 177.5, ellipsoid=GeogCS(6371229.0))) \ No newline at end of file + \ No newline at end of file diff --git a/lib/iris/tests/results/coord_api/str_repr/dim_nontime_str.txt b/lib/iris/tests/results/coord_api/str_repr/dim_nontime_str.txt index c1d62f28e45..95c0a601596 100644 --- a/lib/iris/tests/results/coord_api/str_repr/dim_nontime_str.txt +++ b/lib/iris/tests/results/coord_api/str_repr/dim_nontime_str.txt @@ -1,11 +1,14 @@ -DimCoord(array([-0.1278, -0.1269, -0.126 , -0.1251, -0.1242, -0.1233, -0.1224, - -0.1215, -0.1206, -0.1197], dtype=float32), bounds=array([[-0.12825, -0.12735], - [-0.12735, -0.12645], - [-0.12645, -0.12555], - [-0.12555, -0.12465], - [-0.12465, -0.12375], - [-0.12375, -0.12285], - [-0.12285, -0.12195], - [-0.12195, -0.12105], - [-0.12105, -0.12015], - [-0.12015, -0.11925]], dtype=float32), standard_name='grid_latitude', units=Unit('degrees'), coord_system=RotatedGeogCS(37.5, 177.5, ellipsoid=GeogCS(6371229.0))) \ No newline at end of file +DimCoord : grid_latitude / (degrees) + points: [ + -0.1278, -0.1269, -0.126 , -0.1251, -0.1242, -0.1233, -0.1224, + 
-0.1215, -0.1206, -0.1197] + bounds: [ + [-0.12825, -0.12735], + [-0.12735, -0.12645], + ..., + [-0.12105, -0.12015], + [-0.12015, -0.11925]] + shape: (10,) bounds(10, 2) + dtype: float32 + standard_name: 'grid_latitude' + coord_system: RotatedGeogCS(37.5, 177.5, ellipsoid=GeogCS(6371229.0)) \ No newline at end of file diff --git a/lib/iris/tests/results/coord_api/str_repr/dim_time_repr.txt b/lib/iris/tests/results/coord_api/str_repr/dim_time_repr.txt index 120546311fd..ae1b86ae023 100644 --- a/lib/iris/tests/results/coord_api/str_repr/dim_time_repr.txt +++ b/lib/iris/tests/results/coord_api/str_repr/dim_time_repr.txt @@ -1,2 +1 @@ -DimCoord(array([347921.16666667, 347921.33333333, 347921.5 , 347921.66666666, - 347921.83333333, 347922. ]), standard_name='time', units=Unit('hours since 1970-01-01 00:00:00', calendar='gregorian')) \ No newline at end of file + \ No newline at end of file diff --git a/lib/iris/tests/results/coord_api/str_repr/dim_time_str.txt b/lib/iris/tests/results/coord_api/str_repr/dim_time_str.txt index 9d209402e6f..6b95b572156 100644 --- a/lib/iris/tests/results/coord_api/str_repr/dim_time_str.txt +++ b/lib/iris/tests/results/coord_api/str_repr/dim_time_str.txt @@ -1,2 +1,7 @@ -DimCoord([2009-09-09 17:10:00, 2009-09-09 17:20:00, 2009-09-09 17:30:00, - 2009-09-09 17:40:00, 2009-09-09 17:50:00, 2009-09-09 18:00:00], standard_name='time', calendar='gregorian') \ No newline at end of file +DimCoord : time / (hours since 1970-01-01 00:00:00, gregorian calendar) + points: [ + 2009-09-09 17:10:00, 2009-09-09 17:20:00, 2009-09-09 17:30:00, + 2009-09-09 17:40:00, 2009-09-09 17:50:00, 2009-09-09 18:00:00] + shape: (6,) + dtype: float64 + standard_name: 'time' \ No newline at end of file diff --git a/lib/iris/tests/test_coord_api.py b/lib/iris/tests/test_coord_api.py index 1b2ba563006..3445b089e8e 100644 --- a/lib/iris/tests/test_coord_api.py +++ b/lib/iris/tests/test_coord_api.py @@ -178,7 +178,9 @@ def test_complex(self): @tests.skip_data class 
TestCoord_ReprStr_nontime(tests.IrisTest): def setUp(self): - self.lat = iris.tests.stock.realistic_4d().coord("grid_latitude")[:10] + cube = iris.tests.stock.realistic_4d() + self.lat = cube.coord("grid_latitude")[:10] + self.height = cube.coord("level_height")[:10] def test_DimCoord_repr(self): self.assertRepr( @@ -187,7 +189,7 @@ def test_DimCoord_repr(self): def test_AuxCoord_repr(self): self.assertRepr( - self.lat, ("coord_api", "str_repr", "aux_nontime_repr.txt") + self.height, ("coord_api", "str_repr", "aux_nontime_repr.txt") ) def test_DimCoord_str(self): @@ -197,14 +199,16 @@ def test_DimCoord_str(self): def test_AuxCoord_str(self): self.assertString( - str(self.lat), ("coord_api", "str_repr", "aux_nontime_str.txt") + str(self.height), ("coord_api", "str_repr", "aux_nontime_str.txt") ) @tests.skip_data class TestCoord_ReprStr_time(tests.IrisTest): def setUp(self): - self.time = iris.tests.stock.realistic_4d().coord("time") + cube = iris.tests.stock.realistic_4d() + self.time = cube.coord("time") + self.fp = cube.coord("forecast_period") def test_DimCoord_repr(self): self.assertRepr( @@ -213,7 +217,7 @@ def test_DimCoord_repr(self): def test_AuxCoord_repr(self): self.assertRepr( - self.time, ("coord_api", "str_repr", "aux_time_repr.txt") + self.fp, ("coord_api", "str_repr", "aux_time_repr.txt") ) def test_DimCoord_str(self): @@ -223,7 +227,7 @@ def test_DimCoord_str(self): def test_AuxCoord_str(self): self.assertString( - str(self.time), ("coord_api", "str_repr", "aux_time_str.txt") + str(self.fp), ("coord_api", "str_repr", "aux_time_str.txt") ) @@ -232,23 +236,28 @@ def test_basic(self): a = iris.coords.AuxCoord( np.arange(10), "air_temperature", units="kelvin" ) - result = ( - "AuxCoord(" - "array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])," - " standard_name='air_temperature'," - " units=Unit('kelvin'))" + result = "\n".join( + [ + "AuxCoord : air_temperature / (kelvin)", + " points: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]", + " shape: (10,)", + " dtype: int64", + " 
standard_name: 'air_temperature'", + ] ) self.assertEqual(result, str(a)) b = iris.coords.AuxCoord( list(range(10)), attributes={"monty": "python"} ) - result = ( - "AuxCoord(" - "array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])," - " standard_name=None," - " units=Unit('unknown')," - " attributes={'monty': 'python'})" + result = "\n".join( + [ + "AuxCoord : unknown / (unknown)", + " points: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]", + " shape: (10,)", + " dtype: int64", + " attributes: {'monty': 'python'}", + ] ) self.assertEqual(result, str(b)) @@ -276,12 +285,15 @@ def test_coord_system(self): units="kelvin", coord_system=iris.coord_systems.GeogCS(6000), ) - result = ( - "AuxCoord(" - "array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])," - " standard_name='air_temperature'," - " units=Unit('kelvin')," - " coord_system=GeogCS(6000.0))" + result = "\n".join( + [ + "AuxCoord : air_temperature / (kelvin)", + " points: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]", + " shape: (10,)", + " dtype: int64", + " standard_name: 'air_temperature'", + " coord_system: GeogCS(6000.0)", + ] ) self.assertEqual(result, str(a)) @@ -292,14 +304,20 @@ def test_bounded(self): units="kelvin", bounds=np.arange(0, 20).reshape(10, 2), ) - result = ( - "AuxCoord(array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])" - ", bounds=array([" - "[ 0, 1],\n [ 2, 3],\n [ 4, 5],\n " - "[ 6, 7],\n [ 8, 9],\n [10, 11],\n " - "[12, 13],\n [14, 15],\n [16, 17],\n " - "[18, 19]])" - ", standard_name='air_temperature', units=Unit('kelvin'))" + result = "\n".join( + [ + "AuxCoord : air_temperature / (kelvin)", + " points: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]", + " bounds: [", + " [ 0, 1],", + " [ 2, 3],", + " ...,", + " [16, 17],", + " [18, 19]]", + " shape: (10,) bounds(10, 2)", + " dtype: int64", + " standard_name: 'air_temperature'", + ] ) self.assertEqual(result, str(a)) @@ -322,23 +340,28 @@ def test_basic(self): a = iris.coords.DimCoord( np.arange(10), "air_temperature", units="kelvin" ) - result = ( - "DimCoord(" - "array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])," - " 
standard_name='air_temperature'," - " units=Unit('kelvin'))" + result = "\n".join( + [ + "DimCoord : air_temperature / (kelvin)", + " points: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]", + " shape: (10,)", + " dtype: int64", + " standard_name: 'air_temperature'", + ] ) self.assertEqual(result, str(a)) b = iris.coords.DimCoord( list(range(10)), attributes={"monty": "python"} ) - result = ( - "DimCoord(" - "array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])," - " standard_name=None," - " units=Unit('unknown')," - " attributes={'monty': 'python'})" + result = "\n".join( + [ + "DimCoord : unknown / (unknown)", + " points: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]", + " shape: (10,)", + " dtype: int64", + " attributes: {'monty': 'python'}", + ] ) self.assertEqual(result, str(b)) @@ -366,12 +389,15 @@ def test_coord_system(self): units="kelvin", coord_system=iris.coord_systems.GeogCS(6000), ) - result = ( - "DimCoord(" - "array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])," - " standard_name='air_temperature'," - " units=Unit('kelvin')," - " coord_system=GeogCS(6000.0))" + result = "\n".join( + [ + "DimCoord : air_temperature / (kelvin)", + " points: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]", + " shape: (10,)", + " dtype: int64", + " standard_name: 'air_temperature'", + " coord_system: GeogCS(6000.0)", + ] ) self.assertEqual(result, str(a)) @@ -382,14 +408,20 @@ def test_bounded(self): units="kelvin", bounds=np.arange(0, 20).reshape(10, 2), ) - result = ( - "DimCoord(array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])" - ", bounds=array([" - "[ 0, 1],\n [ 2, 3],\n [ 4, 5],\n " - "[ 6, 7],\n [ 8, 9],\n [10, 11],\n " - "[12, 13],\n [14, 15],\n [16, 17],\n " - "[18, 19]])" - ", standard_name='air_temperature', units=Unit('kelvin'))" + result = "\n".join( + [ + "DimCoord : air_temperature / (kelvin)", + " points: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]", + " bounds: [", + " [ 0, 1],", + " [ 2, 3],", + " ...,", + " [16, 17],", + " [18, 19]]", + " shape: (10,) bounds(10, 2)", + " dtype: int64", + " standard_name: 'air_temperature'", + ] ) 
self.assertEqual(result, str(a)) diff --git a/lib/iris/tests/unit/coords/test_AncillaryVariable.py b/lib/iris/tests/unit/coords/test_AncillaryVariable.py index e94ad0cf701..51e070a2260 100644 --- a/lib/iris/tests/unit/coords/test_AncillaryVariable.py +++ b/lib/iris/tests/unit/coords/test_AncillaryVariable.py @@ -433,11 +433,17 @@ def test_non_time_values(self): units="m", attributes={"notes": "Measured from sea level"}, ) - expected = ( - "AncillaryVariable(array([2, 5, 9]), " - "standard_name='height', units=Unit('m'), " - "long_name='height of detector', var_name='height', " - "attributes={'notes': 'Measured from sea level'})" + expected = "\n".join( + [ + "AncillaryVariable : height / (m)", + " data: [2, 5, 9]", + " shape: (3,)", + " dtype: int64", + " standard_name: 'height'", + " long_name: 'height of detector'", + " var_name: 'height'", + " attributes: {'notes': 'Measured from sea level'}", + ] ) self.assertEqual(expected, ancillary_var.__str__()) @@ -447,11 +453,20 @@ def test_time_values(self): units="hours since 1970-01-01 01:00", long_name="time of previous valid detection", ) - expected = ( - "AncillaryVariable([1970-01-01 03:00:00, " - "1970-01-01 06:00:00, 1970-01-01 10:00:00], " - "standard_name=None, calendar='gregorian', " - "long_name='time of previous valid detection')" + expected = "\n".join( + [ + ( + "AncillaryVariable : time of previous valid detection / " + "(hours since 1970-01-01 01:00, gregorian calendar)" + ), + ( + " data: [1970-01-01 03:00:00, 1970-01-01 06:00:00, " + "1970-01-01 10:00:00]" + ), + " shape: (3,)", + " dtype: int64", + " long_name: 'time of previous valid detection'", + ] ) self.assertEqual(expected, ancillary_var.__str__()) @@ -466,12 +481,7 @@ def test_non_time_values(self): units="m", attributes={"notes": "Measured from sea level"}, ) - expected = ( - "AncillaryVariable(array([2, 5, 9]), " - "standard_name='height', units=Unit('m'), " - "long_name='height of detector', var_name='height', " - "attributes={'notes': 
'Measured from sea level'})" - ) + expected = "" self.assertEqual(expected, ancillary_var.__repr__()) def test_time_values(self): @@ -481,10 +491,8 @@ def test_time_values(self): long_name="time of previous valid detection", ) expected = ( - "AncillaryVariable(array([2, 5, 9]), standard_name=None, " - "units=Unit('hours since 1970-01-01 01:00', " - "calendar='gregorian'), " - "long_name='time of previous valid detection')" + "" ) self.assertEqual(expected, ancillary_var.__repr__()) diff --git a/lib/iris/tests/unit/coords/test_CellMeasure.py b/lib/iris/tests/unit/coords/test_CellMeasure.py index c5016e6c735..873a257c8e7 100644 --- a/lib/iris/tests/unit/coords/test_CellMeasure.py +++ b/lib/iris/tests/unit/coords/test_CellMeasure.py @@ -93,30 +93,28 @@ def test_copy(self): copy_measure = self.measure.copy(new_vals) self.assertArrayEqual(copy_measure.data, new_vals) - def test_repr_other_metadata(self): - expected = ( - ", long_name='measured_area', " - "var_name='area', attributes={'notes': '1m accuracy'}" - ) - self.assertEqual(self.measure._repr_other_metadata(), expected) - def test___str__(self): - expected = ( - "CellMeasure(array([10., 12., 16., 9.]), " - "measure='area', standard_name='cell_area', " - "units=Unit('m^2'), long_name='measured_area', " - "var_name='area', attributes={'notes': '1m accuracy'})" + expected = "\n".join( + [ + "CellMeasure : cell_area / (m^2)", + " data: [10., 12., 16., 9.]", + " shape: (4,)", + " dtype: float64", + " standard_name: 'cell_area'", + " long_name: 'measured_area'", + " var_name: 'area'", + " attributes: {'notes': '1m accuracy'}", + " measure: 'area'", + ] ) self.assertEqual(self.measure.__str__(), expected) def test___repr__(self): expected = ( - "CellMeasure(array([10., 12., 16., 9.]), " - "measure='area', standard_name='cell_area', " - "units=Unit('m^2'), long_name='measured_area', " - "var_name='area', attributes={'notes': '1m accuracy'})" + "" ) - self.assertEqual(self.measure.__repr__(), expected) + 
self.assertEqual(expected, self.measure.__repr__()) def test__eq__(self): self.assertEqual(self.measure, self.measure) diff --git a/lib/iris/tests/unit/coords/test_Coord.py b/lib/iris/tests/unit/coords/test_Coord.py index 640dbcd1315..43170b6c4e6 100644 --- a/lib/iris/tests/unit/coords/test_Coord.py +++ b/lib/iris/tests/unit/coords/test_Coord.py @@ -883,9 +883,14 @@ def test_short_time_interval(self): coord = DimCoord( [5], standard_name="time", units="days since 1970-01-01" ) - expected = ( - "DimCoord([1970-01-06 00:00:00], standard_name='time', " - "calendar='gregorian')" + expected = "\n".join( + [ + "DimCoord : time / (days since 1970-01-01, gregorian calendar)", + " points: [1970-01-06 00:00:00]", + " shape: (1,)", + " dtype: int64", + " standard_name: 'time'", + ] ) result = coord.__str__() self.assertEqual(expected, result) @@ -895,11 +900,17 @@ def test_short_time_interval__bounded(self): [5, 6], standard_name="time", units="days since 1970-01-01" ) coord.guess_bounds() - expected = ( - "DimCoord([1970-01-06 00:00:00, 1970-01-07 00:00:00], " - "bounds=[[1970-01-05 12:00:00, 1970-01-06 12:00:00],\n" - " [1970-01-06 12:00:00, 1970-01-07 12:00:00]], " - "standard_name='time', calendar='gregorian')" + expected = "\n".join( + [ + "DimCoord : time / (days since 1970-01-01, gregorian calendar)", + " points: [1970-01-06 00:00:00, 1970-01-07 00:00:00]", + " bounds: [", + " [1970-01-05 12:00:00, 1970-01-06 12:00:00],", + " [1970-01-06 12:00:00, 1970-01-07 12:00:00]]", + " shape: (2,) bounds(2, 2)", + " dtype: int64", + " standard_name: 'time'", + ] ) result = coord.__str__() self.assertEqual(expected, result) @@ -908,7 +919,15 @@ def test_long_time_interval(self): coord = DimCoord( [5], standard_name="time", units="years since 1970-01-01" ) - expected = "DimCoord([5], standard_name='time', calendar='gregorian')" + expected = "\n".join( + [ + "DimCoord : time / (years since 1970-01-01, gregorian calendar)", + " points: [5]", + " shape: (1,)", + " dtype: int64", + " 
standard_name: 'time'", + ] + ) result = coord.__str__() self.assertEqual(expected, result) @@ -917,16 +936,31 @@ def test_long_time_interval__bounded(self): [5, 6], standard_name="time", units="years since 1970-01-01" ) coord.guess_bounds() - expected = ( - "DimCoord([5 6], bounds=[[4.5 5.5]\n [5.5 6.5]], " - "standard_name='time', calendar='gregorian')" + expected = "\n".join( + [ + "DimCoord : time / (years since 1970-01-01, gregorian calendar)", + " points: [5, 6]", + " bounds: [", + " [4.5, 5.5],", + " [5.5, 6.5]]", + " shape: (2,) bounds(2, 2)", + " dtype: int64", + " standard_name: 'time'", + ] ) result = coord.__str__() self.assertEqual(expected, result) def test_non_time_unit(self): coord = DimCoord([1.0]) - expected = repr(coord) + expected = "\n".join( + [ + "DimCoord : unknown / (unknown)", + " points: [1.]", + " shape: (1,)", + " dtype: float64", + ] + ) result = coord.__str__() self.assertEqual(expected, result) diff --git a/lib/iris/tests/unit/coords/test__DimensionalMetadata.py b/lib/iris/tests/unit/coords/test__DimensionalMetadata.py index 82bd51a8aff..d403b6c4e29 100644 --- a/lib/iris/tests/unit/coords/test__DimensionalMetadata.py +++ b/lib/iris/tests/unit/coords/test__DimensionalMetadata.py @@ -9,7 +9,22 @@ # importing anything else. 
import iris.tests as tests # isort:skip -from iris.coords import _DimensionalMetadata + +from cf_units import Unit +import numpy as np + +import iris._lazy_data as lazy +from iris.coord_systems import GeogCS +from iris.coords import ( + AncillaryVariable, + AuxCoord, + CellMeasure, + DimCoord, + _DimensionalMetadata, +) +from iris.experimental.ugrid.mesh import Connectivity +from iris.tests.stock import climatology_3d as cube_with_climatology +from iris.tests.stock.mesh import sample_meshcoord class Test___init____abstractmethod(tests.IrisTest): @@ -22,5 +37,1037 @@ def test(self): _ = _DimensionalMetadata(0) +class Mixin__string_representations: + """ + Common testcode for generic `__str__`, `__repr__` and `summary` methods. + + Effectively, __str__ and __repr__ are thin wrappers around `summary`. + These are used by all the subclasses : notably Coord/DimCoord/AuxCoord, + but also AncillaryVariable, CellMeasure and MeshCoord. + + There are a lot of different aspects to consider: + + * different object classes with different class-specific properties + * changing with array sizes + dimensionalities + * masked data + * data types : int, float, string and (special) dates + * for Coords, handling of bounds + * "summary" controls (also can be affected by numpy printoptions). + + NOTE: since the details of formatting are important to us here, the basic + test method is to check printout results against an exact 'snapshot' + embedded (visibly) in the test itself. + + """ + + def repr_str_strings(self, dm, linewidth=55): + """ + Return a simple combination of repr and str printouts. + + N.B. we control linewidth to make the outputs easier to compare. 
+ """ + with np.printoptions(linewidth=linewidth): + result = repr(dm) + "\n" + str(dm) + return result + + def sample_data(self, datatype=float, units="m", shape=(5,), masked=False): + """Make a sample data array for a test _DimensionalMetadata object.""" + # Get an actual Unit + units = Unit(units) + if units.calendar: + # fix string datatypes for date-based units + datatype = float + + # Get a dtype + dtype = np.dtype(datatype) + + # Make suitable test values for type/shape/masked + length = int(np.prod(shape)) + if dtype.kind == "U": + # String content. + digit_strs = [str(i) * (i + 1) for i in range(0, 10)] + if length < 10: + # ['0', '11', '222, '3333', ..] + values = np.array(digit_strs[:length]) + else: + # [... '9999999999', '0', '11' ....] + indices = [(i % 10) for i in range(length)] + values = np.array(digit_strs)[indices] + else: + # numeric content : a simple [0, 1, 2 ...] + values = np.arange(length).astype(dtype) + + if masked: + if np.prod(shape) >= 3: + # Mask 1 in 3 points : [x -- x x -- x ...] + i_firstmasked = 1 + else: + # Few points, mask 1 in 3 starting at 0 [-- x x -- x x -- ...] + i_firstmasked = 0 + masked_points = [(i % 3) == i_firstmasked for i in range(length)] + values = np.ma.masked_array(values, mask=masked_points) + + values = values.reshape(shape) + return values + + # Make a sample Coord, as _DimensionalMetadata is abstract and this is the + # obvious concrete subclass to use for testing + def sample_coord( + self, + datatype=float, + dates=False, + units="m", + long_name="x", + shape=(5,), + masked=False, + bounded=False, + dimcoord=False, + lazy_points=False, + lazy_bounds=False, + *coord_args, + **coord_kwargs, + ): + if masked: + dimcoord = False + if dates: + # Use a pre-programmed date unit. + units = Unit("days since 1970-03-5") + if not isinstance(units, Unit): + # This operation is *not* a no-op, it will wipe calendars ! 
+ units = Unit(units) + values = self.sample_data( + datatype=datatype, units=units, shape=shape, masked=masked + ) + cls = DimCoord if dimcoord else AuxCoord + coord = cls( + points=values, + units=units, + long_name=long_name, + *coord_args, + **coord_kwargs, + ) + if bounded or lazy_bounds: + if shape == (1,): + # Guess-bounds doesn't work ! + val = coord.points[0] + bounds = [val - 10, val + 10] + # NB preserve masked/unmasked : avoid converting masks to NaNs + if np.ma.isMaskedArray(coord.points): + array = np.ma.array + else: + array = np.array + coord.bounds = array(bounds) + else: + coord.guess_bounds() + if lazy_points: + coord.points = lazy.as_lazy_data(coord.points) + if lazy_bounds: + coord.bounds = lazy.as_lazy_data(coord.bounds) + return coord + + def coord_representations(self, *args, **kwargs): + """ + Create a test coord and return its string representations. + + Pass args+kwargs to 'sample_coord' and return the 'repr_str_strings'. + + """ + coord = self.sample_coord(*args, **kwargs) + return self.repr_str_strings(coord) + + def assertLines(self, list_of_expected_lines, string_result): + """ + Assert equality between a result and expected output lines. + + For convenience, the 'expected lines' are joined with a '\\n', + because a list of strings is nicer to construct in code. + They should then match the actual result, which is a simple string. + + """ + self.assertEqual(list_of_expected_lines, string_result.split("\n")) + + +class Test__print_common(Mixin__string_representations, tests.IrisTest): + """ + Test aspects of __str__ and __repr__ output common to all + _DimensionalMetadata instances. + I.E. those from CFVariableMixin, plus values array (data-manager). 
+ + Aspects : + * standard_name: + * long_name: + * var_name: + * attributes + * units + * shape + * dtype + + """ + + def test_simple(self): + result = self.coord_representations() + expected = [ + "", + "AuxCoord : x / (m)", + " points: [0., 1., 2., 3., 4.]", + " shape: (5,)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_minimal(self): + result = self.coord_representations( + long_name=None, units=None, shape=(1,) + ) + expected = [ + "", + "AuxCoord : unknown / (unknown)", + " points: [0.]", + " shape: (1,)", + " dtype: float64", + ] + self.assertLines(expected, result) + + def test_names(self): + result = self.coord_representations( + standard_name="height", long_name="this", var_name="x_var" + ) + expected = [ + "", + "AuxCoord : height / (m)", + " points: [0., 1., 2., 3., 4.]", + " shape: (5,)", + " dtype: float64", + " standard_name: 'height'", + " long_name: 'this'", + " var_name: 'x_var'", + ] + self.assertLines(expected, result) + + def test_bounded(self): + result = self.coord_representations(shape=(3,), bounded=True) + expected = [ + "", + "AuxCoord : x / (m)", + " points: [0., 1., 2.]", + " bounds: [", + " [-0.5, 0.5],", + " [ 0.5, 1.5],", + " [ 1.5, 2.5]]", + " shape: (3,) bounds(3, 2)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_masked(self): + result = self.coord_representations(masked=True) + expected = [ + "", + "AuxCoord : x / (m)", + " points: [0.0, -- , 2.0, 3.0, -- ]", + " shape: (5,)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_dtype_int(self): + result = self.coord_representations(units="1", datatype=np.int16) + expected = [ + "", + "AuxCoord : x / (1)", + " points: [0, 1, 2, 3, 4]", + " shape: (5,)", + " dtype: int16", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_dtype_date(self): + # Note: test with a date 'longer' than the built-in one in + # 
'sample_coord(dates=True)', because it includes a time-of-day + full_date_unit = Unit( + "days since 1892-05-17 03:00:25", calendar="360_day" + ) + result = self.coord_representations(units=full_date_unit) + expected = [ + ( + "" + ), + ( + "AuxCoord : x / (days since 1892-05-17 03:00:25, " + "360_day calendar)" + ), + " points: [", + " 1892-05-17 03:00:25, 1892-05-18 03:00:25,", + " 1892-05-19 03:00:25, 1892-05-20 03:00:25,", + " 1892-05-21 03:00:25]", + " shape: (5,)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_attributes(self): + # NOTE: scheduled for future change, to put each attribute on a line + coord = self.sample_coord( + attributes={ + "array": np.arange(7.0), + "list": [1, 2, 3], + "empty": [], + "None": None, + "string": "this", + "long_long_long_long_long_long_name": 3, + "other": "long_long_long_long_long_long_value", + "float": 4.3, + } + ) + result = self.repr_str_strings(coord) + expected = [ + "", + "AuxCoord : x / (m)", + " points: [0., 1., 2., 3., 4.]", + " shape: (5,)", + " dtype: float64", + " long_name: 'x'", + # At present, some nasty long lines... 
+ ( + " attributes: {'array': array([0., 1., 2., 3., 4., 5., " + "6.]), 'list': [1, 2, 3], 'empty': [], " + "'None': None, 'string': 'this', " + "'long_long_long_long_long_long_name': 3, " + "'other': 'long_long_long_long_long_long_value', " + "'float': 4.3}" + ), + ] + self.assertLines(expected, result) + + def test_lazy_points(self): + result = self.coord_representations(lazy_points=True) + expected = [ + " shape(5,)>", + "AuxCoord : x / (m)", + " points: ", + " shape: (5,)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_lazy_bounds(self): + result = self.coord_representations(lazy_bounds=True) + expected = [ + "", + "AuxCoord : x / (m)", + " points: [0., 1., 2., 3., 4.]", + " bounds: ", + " shape: (5,) bounds(5, 2)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_lazy_points_and_bounds(self): + result = self.coord_representations(lazy_points=True, lazy_bounds=True) + expected = [ + "+bounds shape(5,)>", + "AuxCoord : x / (m)", + " points: ", + " bounds: ", + " shape: (5,) bounds(5, 2)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_scalar(self): + result = self.coord_representations(shape=(1,), bounded=True) + expected = [ + "", + "AuxCoord : x / (m)", + " points: [0.]", + " bounds: [[-10., 10.]]", + " shape: (1,) bounds(1, 2)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_scalar_masked(self): + result = self.coord_representations( + shape=(1,), bounded=True, masked=True + ) + expected = [ + "", + "AuxCoord : x / (m)", + " points: [--]", + " bounds: [[--, --]]", + " shape: (1,) bounds(1, 2)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_length_short(self): + result = self.coord_representations(shape=(2,), bounded=True) + expected = [ + "", + "AuxCoord : x / (m)", + " points: [0., 1.]", + " bounds: [", + " [-0.5, 
0.5],", + " [ 0.5, 1.5]]", + " shape: (2,) bounds(2, 2)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_length_medium(self): + # Where bounds are truncated, but points not. + result = self.coord_representations(shape=(14,), bounded=True) + expected = [ + "", + "AuxCoord : x / (m)", + " points: [", + " 0., 1., 2., 3., 4., 5., 6., 7., 8.,", + " 9., 10., 11., 12., 13.]", + " bounds: [", + " [-0.5, 0.5],", + " [ 0.5, 1.5],", + " ...,", + " [11.5, 12.5],", + " [12.5, 13.5]]", + " shape: (14,) bounds(14, 2)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_length_long(self): + # Completely truncated representations + result = self.coord_representations(shape=(150,), bounded=True) + expected = [ + ( + "" + ), + "AuxCoord : x / (m)", + " points: [ 0., 1., ..., 148., 149.]", + " bounds: [", + " [ -0.5, 0.5],", + " [ 0.5, 1.5],", + " ...,", + " [147.5, 148.5],", + " [148.5, 149.5]]", + " shape: (150,) bounds(150, 2)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_strings(self): + result = self.coord_representations(datatype=str) + expected = [ + "", + "AuxCoord : x / (m)", + " points: [0 , 11 , 222 , 3333 , 44444]", + " shape: (5,)", + " dtype: ", + "AuxCoord : x / (m)", + " points: [", + " 0 , 11 , 222 ,", + " 3333 , 44444 , 555555 ,", + " 6666666 , 77777777 , 888888888 ,", + " 9999999999, 0 , 11 ,", + " 222 , 3333 , 44444 ]", + " shape: (15,)", + " dtype: ", + "AuxCoord : x / (days since 1970-03-5, gregorian calendar)", + " points: [1970-03-05 00:00:00, 1970-03-06 00:00:00]", + " shape: (2,)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_dates_scalar(self): + # Printouts for a scalar date coord. + # Demonstrate that a "typical" datetime coord can print with the date + # value visible in the repr. 
+ long_time_unit = Unit("hours since 2025-03-23 01:00:00") + coord = self.sample_coord( + standard_name="time", + long_name=None, + shape=(1,), + units=long_time_unit, + ) + # Do this one with a default linewidth, not our default reduced one, so + # that we can get the date value in the repr output. + result = self.repr_str_strings(coord, linewidth=None) + expected = [ + ( + "" + ), + ( + "AuxCoord : time / (hours since 2025-03-23 01:00:00, " + "gregorian calendar)" + ), + " points: [2025-03-23 01:00:00]", + " shape: (1,)", + " dtype: float64", + " standard_name: 'time'", + ] + self.assertLines(expected, result) + + def test_dates_bounds(self): + result = self.coord_representations(dates=True, bounded=True) + expected = [ + "", + "AuxCoord : x / (days since 1970-03-5, gregorian calendar)", + " points: [", + " 1970-03-05 00:00:00, 1970-03-06 00:00:00,", + " 1970-03-07 00:00:00, 1970-03-08 00:00:00,", + " 1970-03-09 00:00:00]", + " bounds: [", + " [1970-03-04 12:00:00, 1970-03-05 12:00:00],", + " [1970-03-05 12:00:00, 1970-03-06 12:00:00],", + " [1970-03-06 12:00:00, 1970-03-07 12:00:00],", + " [1970-03-07 12:00:00, 1970-03-08 12:00:00],", + " [1970-03-08 12:00:00, 1970-03-09 12:00:00]]", + " shape: (5,) bounds(5, 2)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_dates_masked(self): + result = self.coord_representations(dates=True, masked=True) + expected = [ + "", + "AuxCoord : x / (days since 1970-03-5, gregorian calendar)", + " points: [", + " 1970-03-05 00:00:00, -- ,", + " 1970-03-07 00:00:00, 1970-03-08 00:00:00,", + " -- ]", + " shape: (5,)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_untypical_bounds(self): + # Check printing when n-bounds > 2 + coord = self.sample_coord() + bounds = coord.points.reshape((5, 1)) + np.array([[-3.0, -2, 2, 3]]) + coord.bounds = bounds + result = self.repr_str_strings(coord) + expected = [ + "", + "AuxCoord : x / (m)", + " 
points: [0., 1., 2., 3., 4.]", + " bounds: [", + " [-3., -2., 2., 3.],", + " [-2., -1., 3., 4.],", + " ...,", + " [ 0., 1., 5., 6.],", + " [ 1., 2., 6., 7.]]", + " shape: (5,) bounds(5, 4)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_multidimensional(self): + # Demonstrate formatting of multdimensional arrays + result = self.coord_representations(shape=(7, 5, 3)) + # This one is a bit unavoidably long .. + expected = [ + "", + "AuxCoord : x / (m)", + " points: [", + " [[ 0., 1., 2.],", + " [ 3., 4., 5.],", + " ...,", + " [ 9., 10., 11.],", + " [ 12., 13., 14.]],", + " ", + " [[ 15., 16., 17.],", + " [ 18., 19., 20.],", + " ...,", + " [ 24., 25., 26.],", + " [ 27., 28., 29.]],", + " ", + " ...,", + " ", + " [[ 75., 76., 77.],", + " [ 78., 79., 80.],", + " ...,", + " [ 84., 85., 86.],", + " [ 87., 88., 89.]],", + " ", + " [[ 90., 91., 92.],", + " [ 93., 94., 95.],", + " ...,", + " [ 99., 100., 101.],", + " [102., 103., 104.]]]", + " shape: (7, 5, 3)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_multidimensional_small(self): + # Demonstrate that a small-enough multidim will print in the repr. 
+ result = self.coord_representations(shape=(2, 2), datatype=int) + expected = [ + "", + "AuxCoord : x / (m)", + " points: [", + " [0, 1],", + " [2, 3]]", + " shape: (2, 2)", + " dtype: int64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_integers_short(self): + result = self.coord_representations(datatype=np.int16) + expected = [ + "", + "AuxCoord : x / (m)", + " points: [0, 1, 2, 3, 4]", + " shape: (5,)", + " dtype: int16", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_integers_masked(self): + result = self.coord_representations(datatype=int, masked=True) + expected = [ + "", + "AuxCoord : x / (m)", + " points: [0 , --, 2 , 3 , --]", + " shape: (5,)", + " dtype: int64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_integers_masked_long(self): + result = self.coord_representations( + shape=(20,), datatype=int, masked=True + ) + expected = [ + "", + "AuxCoord : x / (m)", + " points: [0 , --, ..., 18, --]", + " shape: (20,)", + " dtype: int64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + +class Test__print_Coord(Mixin__string_representations, tests.IrisTest): + """ + Test Coord-specific aspects of __str__ and __repr__ output. 
+ + Aspects : + * DimCoord / AuxCoord + * coord_system + * climatological + * circular + + """ + + def test_dimcoord(self): + result = self.coord_representations(dimcoord=True) + expected = [ + "", + "DimCoord : x / (m)", + " points: [0., 1., 2., 3., 4.]", + " shape: (5,)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_coord_system(self): + result = self.coord_representations(coord_system=GeogCS(1000.0)) + expected = [ + "", + "AuxCoord : x / (m)", + " points: [0., 1., 2., 3., 4.]", + " shape: (5,)", + " dtype: float64", + " long_name: 'x'", + " coord_system: GeogCS(1000.0)", + ] + self.assertLines(expected, result) + + def test_climatological(self): + cube = cube_with_climatology() + coord = cube.coord("time") + coord = coord[:1] # Just to make it a bit shorter + result = self.repr_str_strings(coord) + expected = [ + ( + "" + ), + ( + "DimCoord : time / (days since 1970-01-01 00:00:00-00, " + "gregorian calendar)" + ), + " points: [2001-01-10 00:00:00]", + " bounds: [[2001-01-10 00:00:00, 2011-01-10 00:00:00]]", + " shape: (1,) bounds(1, 2)", + " dtype: float64", + " standard_name: 'time'", + " climatological: True", + ] + self.assertLines(expected, result) + + def test_circular(self): + coord = self.sample_coord(shape=(2,), dimcoord=True) + coord.circular = True + result = self.repr_str_strings(coord) + expected = [ + "", + "DimCoord : x / (m)", + " points: [0., 1.]", + " shape: (2,)", + " dtype: float64", + " long_name: 'x'", + " circular: True", + ] + self.assertLines(expected, result) + + +class Test__print_noncoord(Mixin__string_representations, tests.IrisTest): + """ + Limited testing of other _DimensionalMetadata subclasses. + + * AncillaryVariable + * CellMeasure + * Connectivity + * MeshCoord + + """ + + def test_ancillary(self): + # Check we can print an AncillaryVariable + # Practically, ~identical to an AuxCoord, but without bounds, and the + # array is called 'data'. 
+ data = self.sample_data() + ancil = AncillaryVariable(data, long_name="v_aux", units="m s-1") + result = self.repr_str_strings(ancil) + expected = [ + "", + "AncillaryVariable : v_aux / (m s-1)", + " data: [0., 1., 2., 3., 4.]", + " shape: (5,)", + " dtype: float64", + " long_name: 'v_aux'", + ] + self.assertLines(expected, result) + + def test_cellmeasure(self): + # Check we can print an AncillaryVariable + # N.B. practically, identical to an AuxCoord (without bounds) + # Check we can print an AncillaryVariable + # Practically, ~identical to an AuxCoord, but without bounds, and the + # array is called 'data'. + data = self.sample_data() + cell_measure = CellMeasure( + data, measure="area", long_name="cell_area", units="m^2" + ) + result = self.repr_str_strings(cell_measure) + expected = [ + "", + "CellMeasure : cell_area / (m^2)", + " data: [0., 1., 2., 3., 4.]", + " shape: (5,)", + " dtype: float64", + " long_name: 'cell_area'", + " measure: 'area'", + ] + self.assertLines(expected, result) + + def test_connectivity(self): + # Check we can print a Connectivity + # Like a Coord, but always print : cf_role, src_dim, start_index + data = self.sample_data(shape=(3, 2), datatype=int) + conn = Connectivity( + data, cf_role="edge_node_connectivity", long_name="enc", units="1" + ) + result = self.repr_str_strings(conn) + expected = [ + "", + "Connectivity : enc / (1)", + " data: [", + " [0, 1],", + " [2, 3],", + " [4, 5]]", + " shape: (3, 2)", + " dtype: int64", + " long_name: 'enc'", + " cf_role: 'edge_node_connectivity'", + " start_index: 0", + " src_dim: 0", + ] + self.assertLines(expected, result) + + def test_connectivity__start_index(self): + # Check we can print a Connectivity + # Like a Coord, but always print : cf_role, src_dim, start_index + data = self.sample_data(shape=(3, 2), datatype=int) + conn = Connectivity( + data + 1, + start_index=1, + cf_role="edge_node_connectivity", + long_name="enc", + units="1", + ) + result = self.repr_str_strings(conn) + 
expected = [ + "", + "Connectivity : enc / (1)", + " data: [", + " [1, 2],", + " [3, 4],", + " [5, 6]]", + " shape: (3, 2)", + " dtype: int64", + " long_name: 'enc'", + " cf_role: 'edge_node_connectivity'", + " start_index: 1", + " src_dim: 0", + ] + self.assertLines(expected, result) + + def test_connectivity__src_dim(self): + # Check we can print a Connectivity + # Like a Coord, but always print : cf_role, src_dim, start_index + data = self.sample_data(shape=(3, 2), datatype=int) + conn = Connectivity( + data.transpose(), + src_dim=1, + cf_role="edge_node_connectivity", + long_name="enc", + units="1", + ) + result = self.repr_str_strings(conn) + expected = [ + "", + "Connectivity : enc / (1)", + " data: [", + " [0, 2, 4],", + " [1, 3, 5]]", + " shape: (2, 3)", + " dtype: int64", + " long_name: 'enc'", + " cf_role: 'edge_node_connectivity'", + " start_index: 0", + " src_dim: 1", + ] + self.assertLines(expected, result) + + def test_meshcoord(self): + meshco = sample_meshcoord() + meshco.mesh.long_name = "test_mesh" # For stable printout of the Mesh + result = self.repr_str_strings(meshco) + expected = [ + ( + "" + ), + "MeshCoord : longitude / (degrees_east)", + " mesh: ", + " location: 'face'", + " points: [3100, 3101, 3102]", + " bounds: [", + " [1100, 1101, 1102, 1103],", + " [1104, 1105, 1106, 1107],", + " [1108, 1109, 1110, 1111]]", + " shape: (3,) bounds(3, 4)", + " dtype: int64", + " standard_name: 'longitude'", + " long_name: 'long-name'", + " attributes: {'a': 1, 'b': 'c'}", + " axis: 'x'", + ] + self.assertLines(expected, result) + + +class Test_summary(Mixin__string_representations, tests.IrisTest): + """ + Test the controls of the 'summary' method. 
+ """ + + def test_shorten(self): + coord = self.sample_coord() + expected = self.repr_str_strings(coord) + result = coord.summary(shorten=True) + "\n" + coord.summary() + self.assertEqual(expected, result) + + def test_max_values__default(self): + coord = self.sample_coord() + result = coord.summary() + expected = [ + "AuxCoord : x / (m)", + " points: [0., 1., 2., 3., 4.]", + " shape: (5,)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_max_values__2(self): + coord = self.sample_coord() + result = coord.summary(max_values=2) + expected = [ + "AuxCoord : x / (m)", + " points: [0., 1., ..., 3., 4.]", + " shape: (5,)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_max_values__bounded__2(self): + coord = self.sample_coord(bounded=True) + result = coord.summary(max_values=2) + expected = [ + "AuxCoord : x / (m)", + " points: [0., 1., ..., 3., 4.]", + " bounds: [", + " [-0.5, 0.5],", + " [ 0.5, 1.5],", + " ...,", + " [ 2.5, 3.5],", + " [ 3.5, 4.5]]", + " shape: (5,) bounds(5, 2)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_max_values__0(self): + coord = self.sample_coord(bounded=True) + result = coord.summary(max_values=0) + expected = [ + "AuxCoord : x / (m)", + " points: [...]", + " bounds: [...]", + " shape: (5,) bounds(5, 2)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_linewidth__default(self): + coord = self.sample_coord() + coord.points = coord.points + 1000.003 # Make the output numbers wider + result = coord.summary() + expected = [ + "AuxCoord : x / (m)", + " points: [1000.003, 1001.003, 1002.003, 1003.003, 1004.003]", + " shape: (5,)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + # Show that, when unset, it follows the numpy setting + with np.printoptions(linewidth=35): + result = coord.summary() + expected = [ + 
"AuxCoord : x / (m)", + " points: [", + " 1000.003, 1001.003,", + " 1002.003, 1003.003,", + " 1004.003]", + " shape: (5,)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + def test_linewidth__set(self): + coord = self.sample_coord() + coord.points = coord.points + 1000.003 # Make the output numbers wider + expected = [ + "AuxCoord : x / (m)", + " points: [", + " 1000.003, 1001.003,", + " 1002.003, 1003.003,", + " 1004.003]", + " shape: (5,)", + " dtype: float64", + " long_name: 'x'", + ] + result = coord.summary(linewidth=35) + self.assertLines(expected, result) + + with np.printoptions(linewidth=999): + # Show that, when set, it ignores the numpy setting + result = coord.summary(linewidth=35) + self.assertLines(expected, result) + + def test_convert_dates(self): + coord = self.sample_coord(dates=True) + result = coord.summary() + expected = [ + "AuxCoord : x / (days since 1970-03-5, gregorian calendar)", + " points: [", + ( + " 1970-03-05 00:00:00, 1970-03-06 00:00:00, " + "1970-03-07 00:00:00," + ), + " 1970-03-08 00:00:00, 1970-03-09 00:00:00]", + " shape: (5,)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + result = coord.summary(convert_dates=False) + expected = [ + "AuxCoord : x / (days since 1970-03-5, gregorian calendar)", + " points: [0., 1., 2., 3., 4.]", + " shape: (5,)", + " dtype: float64", + " long_name: 'x'", + ] + self.assertLines(expected, result) + + if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Connectivity.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Connectivity.py index 5d6f48fddaf..c0117ffc79f 100644 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Connectivity.py +++ b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Connectivity.py @@ -95,18 +95,28 @@ def test_src_lengths(self): self.assertArrayEqual(expected, self.connectivity.src_lengths()) def test___str__(self): - expected = ( - 
"Connectivity(cf_role='face_node_connectivity', start_index=1)" + expected = "\n".join( + [ + "Connectivity : my_face_nodes / (unknown)", + " data: [", + " [ 1, 2, 3],", + " [ 4, 5, 6],", + " [ 7, 8, 9],", + " [10, 11, 12]]", + " shape: (4, 3)", + " dtype: int64", + " long_name: 'my_face_nodes'", + " var_name: 'face_nodes'", + " attributes: {'notes': 'this is a test'}", + " cf_role: 'face_node_connectivity'", + " start_index: 1", + " src_dim: 1", + ] ) self.assertEqual(expected, self.connectivity.__str__()) def test___repr__(self): - expected = ( - "Connectivity(array([[ 1, 2, 3], [ 4, 5, 6], [ 7, 8, 9], [10, 11, 12]]), " - "cf_role='face_node_connectivity', long_name='my_face_nodes', " - "var_name='face_nodes', attributes={'notes': 'this is a test'}, " - "start_index=1, src_dim=1)" - ) + expected = "" self.assertEqual(expected, self.connectivity.__repr__()) def test_xml_element(self): @@ -345,3 +355,7 @@ def test_indices_locations_masked(self): "Not all src_locations meet requirement: len>=3", connectivity.validate_indices, ) + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py index 98086600167..1df67deb66d 100644 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py +++ b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py @@ -106,23 +106,32 @@ def test___getstate__(self): self.assertEqual(expected, self.mesh.__getstate__()) def test___repr__(self): - expected = ( - "Mesh(topology_dimension=1, node_coords_and_axes=[(AuxCoord(" - "array([0, 2, 1]), standard_name='longitude', units=Unit(" - "'unknown'), long_name='long_name', var_name='node_lon', " - "attributes={'test': 1}), 'x'), (AuxCoord(array([0, 0, 1]), " - "standard_name='latitude', units=Unit('unknown'), " - "var_name='node_lat'), 'y')], connectivities=Connectivity(" - "cf_role='edge_node_connectivity', start_index=0), " - "edge_coords_and_axes=[(AuxCoord(array([1. 
, 1.5, 0.5]), " - "standard_name='longitude', units=Unit('unknown'), " - "var_name='edge_lon'), 'x'), (AuxCoord(array([0. , 0.5, 0.5]), " - "standard_name='latitude', units=Unit('unknown'), " - "var_name='edge_lat'), 'y')], long_name='my_topology_mesh', " - "var_name='mesh', attributes={'notes': 'this is a test'}, " - "node_dimension='NodeDim', edge_dimension='EdgeDim')" - ) - self.assertEqual(expected, self.mesh.__repr__()) + expected = "" + self.assertEqual(expected, repr(self.mesh)) + + def test___str__(self): + expected = [ + "Mesh : 'my_topology_mesh'", + " topology_dimension: 1", + " node", + " node_dimension: 'NodeDim'", + " node coordinates", + " ", + " ", + " edge", + " edge_dimension: 'EdgeDim'", + ( + " edge_node_connectivity: " + "" + ), + " edge coordinates", + " ", + " ", + " long_name: 'my_topology_mesh'", + " var_name: 'mesh'", + " attributes: {'notes': 'this is a test'}", + ] + self.assertEqual(expected, str(self.mesh).split("\n")) def test___eq__(self): # The dimension names do not participate in equality. @@ -373,33 +382,101 @@ def setUpClass(cls): cls.mesh = mesh.Mesh(**cls.kwargs) def test___repr__(self): - expected = ( - "Mesh(topology_dimension=2, node_coords_and_axes=[(AuxCoord(" - "array([0, 2, 1]), standard_name='longitude', units=Unit(" - "'unknown'), long_name='long_name', var_name='node_lon', " - "attributes={'test': 1}), 'x'), (AuxCoord(array([0, 0, 1]), " - "standard_name='latitude', units=Unit('unknown'), " - "var_name='node_lat'), 'y')], connectivities=[Connectivity(" - "cf_role='face_node_connectivity', start_index=0), Connectivity(" - "cf_role='edge_node_connectivity', start_index=0), Connectivity(" - "cf_role='face_edge_connectivity', start_index=0), Connectivity(" - "cf_role='face_face_connectivity', start_index=0), Connectivity(" - "cf_role='edge_face_connectivity', start_index=0), Connectivity(" - "cf_role='boundary_node_connectivity', start_index=0)], " - "edge_coords_and_axes=[(AuxCoord(array([1. 
, 1.5, 0.5]), " - "standard_name='longitude', units=Unit('unknown'), " - "var_name='edge_lon'), 'x'), (AuxCoord(array([0. , 0.5, 0.5]), " - "standard_name='latitude', units=Unit('unknown'), " - "var_name='edge_lat'), 'y')], face_coords_and_axes=[(AuxCoord(" - "array([0.5]), standard_name='longitude', units=Unit('unknown'), " - "var_name='face_lon'), 'x'), (AuxCoord(array([0.5]), " - "standard_name='latitude', units=Unit('unknown'), " - "var_name='face_lat'), 'y')], long_name='my_topology_mesh', " - "var_name='mesh', attributes={'notes': 'this is a test'}, " - "node_dimension='NodeDim', edge_dimension='EdgeDim', " - "face_dimension='FaceDim')" - ) - self.assertEqual(expected, self.mesh.__repr__()) + expected = "" + self.assertEqual(expected, repr(self.mesh)) + + def test___str__(self): + expected = [ + "Mesh : 'my_topology_mesh'", + " topology_dimension: 2", + " node", + " node_dimension: 'NodeDim'", + " node coordinates", + " ", + " ", + " edge", + " edge_dimension: 'EdgeDim'", + ( + " edge_node_connectivity: " + "" + ), + " edge coordinates", + " ", + " ", + " face", + " face_dimension: 'FaceDim'", + ( + " face_node_connectivity: " + "" + ), + " face coordinates", + " ", + " ", + " optional connectivities", + ( + " face_face_connectivity: " + "" + ), + ( + " face_edge_connectivity: " + "" + ), + ( + " edge_face_connectivity: " + "" + ), + " long_name: 'my_topology_mesh'", + " var_name: 'mesh'", + " attributes: {'notes': 'this is a test'}", + ] + self.assertEqual(expected, str(self.mesh).split("\n")) + + # Test some different options of the str() operation here. 
+ def test___str__noedgecoords(self): + mesh_kwargs = self.kwargs.copy() + del mesh_kwargs["edge_coords_and_axes"] + alt_mesh = mesh.Mesh(**mesh_kwargs) + expected = [ + "Mesh : 'my_topology_mesh'", + " topology_dimension: 2", + " node", + " node_dimension: 'NodeDim'", + " node coordinates", + " ", + " ", + " edge", + " edge_dimension: 'EdgeDim'", + ( + " edge_node_connectivity: " + "" + ), + " face", + " face_dimension: 'FaceDim'", + ( + " face_node_connectivity: " + "" + ), + " face coordinates", + " ", + " ", + " optional connectivities", + ( + " face_face_connectivity: " + "" + ), + ( + " face_edge_connectivity: " + "" + ), + ( + " edge_face_connectivity: " + "" + ), + " long_name: 'my_topology_mesh'", + " var_name: 'mesh'", + " attributes: {'notes': 'this is a test'}", + ] + self.assertEqual(expected, str(alt_mesh).split("\n")) def test_all_connectivities(self): expected = mesh.Mesh2DConnectivities( @@ -569,6 +646,79 @@ def test_face_node(self): self.assertEqual(self.FACE_NODE, self.mesh.face_node_connectivity) +class Test__str__various(TestMeshCommon): + # Some extra testing for the str() operation : based on 1D meshes as simpler + def setUp(self): + # All the tests here want modified meshes, so use standard setUp to + # create afresh for each test, allowing them to modify it. + super().setUp() + # Mesh kwargs with topology_dimension=1 and all applicable + # arguments populated - this tests correct property setting. 
+ self.kwargs = { + "topology_dimension": 1, + "node_coords_and_axes": ( + (self.NODE_LON, "x"), + (self.NODE_LAT, "y"), + ), + "connectivities": [self.EDGE_NODE], + "long_name": "my_topology_mesh", + "var_name": "mesh", + "attributes": {"notes": "this is a test"}, + "node_dimension": "NodeDim", + "edge_dimension": "EdgeDim", + "edge_coords_and_axes": ( + (self.EDGE_LON, "x"), + (self.EDGE_LAT, "y"), + ), + } + self.mesh = mesh.Mesh(**self.kwargs) + + def test___repr__basic(self): + expected = "" + self.assertEqual(expected, repr(self.mesh)) + + def test___repr__varname(self): + self.mesh.long_name = None + expected = "" + self.assertEqual(expected, repr(self.mesh)) + + def test___repr__noname(self): + self.mesh.long_name = None + self.mesh.var_name = None + expected = "" + self.assertRegex(repr(self.mesh), expected) + + def test___str__noattributes(self): + self.mesh.attributes = None + self.assertNotIn("attributes", str(self.mesh)) + + def test___str__emptyattributes(self): + self.mesh.attributes.clear() + self.assertNotIn("attributes", str(self.mesh)) + + def test___str__units_stdname(self): + # These are usually missing, but they *can* be present. + mesh_kwargs = self.kwargs.copy() + mesh_kwargs["standard_name"] = "height" # Odd choice ! + mesh_kwargs["units"] = "m" + alt_mesh = mesh.Mesh(**mesh_kwargs) + result = str(alt_mesh) + # We expect these to appear at the end. + expected = "\n".join( + [ + " edge coordinates", + " ", + " ", + " standard_name: 'height'", + " long_name: 'my_topology_mesh'", + " var_name: 'mesh'", + " units: Unit('m')", + " attributes: {'notes': 'this is a test'}", + ] + ) + self.assertTrue(result.endswith(expected)) + + class TestOperations1D(TestMeshCommon): # Tests that cannot re-use an existing Mesh instance, instead need a new # one each time. 
@@ -1175,3 +1325,7 @@ def test_minimum_coords(self): mesh.Mesh, **kwargs, ) + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py index 740258b77c0..8c2e652c314 100644 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py +++ b/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py @@ -11,12 +11,13 @@ # importing anything else. import iris.tests as tests # isort:skip +import re import unittest.mock as mock import dask.array as da import numpy as np -from iris._lazy_data import is_lazy_data +from iris._lazy_data import as_lazy_data, is_lazy_data from iris.common.metadata import BaseMetadata from iris.coords import AuxCoord, Coord from iris.cube import Cube @@ -268,42 +269,116 @@ def setUp(self): def _expected_elements_regexp( self, - mesh_strstyle=True, - standard_name=True, - long_name=True, + standard_name="longitude", + long_name="long-name", attributes=True, + location="face", + axis="x", ): - regexp = r"^MeshCoord\(mesh=" - if mesh_strstyle: - regexp += r"Mesh\('test_mesh'\)" - else: - regexp += "" - regexp += r", location='face', axis='x', shape=\(3,\)" + # Printed name is standard or long -- we don't have a case with neither + coord_name = standard_name or long_name + # Construct regexp in 'sections' + # NB each consumes upto first non-space in the next line + regexp = f"MeshCoord : {coord_name} / [^\n]+\n *" + regexp += r"mesh: \\n *" + regexp += f"location: '{location}'\n *" + # Now some optional sections : whichever comes first will match + # arbitrary content leading up to it. 
+ matched_any_upto = False if standard_name: - regexp += ", standard_name='longitude'" - regexp += r", units=Unit\('degrees_east'\)" + regexp += ".*" + matched_any_upto = True + regexp += f"standard_name: '{standard_name}'\n *" if long_name: - regexp += ", long_name='long-name'" + if not matched_any_upto: + regexp += ".*" + matched_any_upto = True + regexp += f"long_name: '{long_name}'\n *" if attributes: - regexp += r", attributes={'a': 1, 'b': 'c'}" - regexp += r"\)$" + # if we expected attributes, they should come next + # TODO: change this when each attribute goes on a new line + if not matched_any_upto: + regexp += ".*" + matched_any_upto = True + regexp += "attributes: {[^}]*}\n *" + # After those items, expect 'axis' next + # N.B. this FAILS if we had attributes when we didn't expect them + regexp += f"axis: '{axis}'$" # N.B. this is always the end + + # Compile regexp, also allowing matches across newlines + regexp = re.compile(regexp, flags=re.DOTALL) return regexp def test_repr(self): + # A simple check for the condensed form. + result = repr(self.meshcoord) + expected = ( + "" + ) + self.assertEqual(expected, result) + + def test_repr_lazy(self): + # Displays lazy content (and does not realise!). + self.meshcoord.points = as_lazy_data(self.meshcoord.points) + self.meshcoord.bounds = as_lazy_data(self.meshcoord.bounds) + self.assertTrue(self.meshcoord.has_lazy_points()) + self.assertTrue(self.meshcoord.has_lazy_bounds()) + + result = repr(self.meshcoord) + self.assertTrue(self.meshcoord.has_lazy_points()) + self.assertTrue(self.meshcoord.has_lazy_bounds()) + + expected = ( + "+bounds shape(3,)>" + ) + self.assertEqual(expected, result) + + def test_repr__nameless_mesh(self): + # Check what it does when the Mesh doesn't have a name. 
+ self.mesh.long_name = None + assert self.mesh.name() == "unknown" result = repr(self.meshcoord) - re_expected = self._expected_elements_regexp(mesh_strstyle=False) + re_expected = ( + r".MeshCoord: longitude / \(degrees_east\) " + r"mesh\(.Mesh object at 0x[^>]+.\) location\(face\) " + ) self.assertRegex(result, re_expected) def test__str__(self): + # Basic output contains mesh, location, standard_name, long_name, + # attributes, mesh, location and axis + result = str(self.meshcoord) + re_expected = self._expected_elements_regexp() + self.assertRegex(result, re_expected) + + def test__str__lazy(self): + # Displays lazy content (and does not realise!). + self.meshcoord.points = as_lazy_data(self.meshcoord.points) + self.meshcoord.bounds = as_lazy_data(self.meshcoord.bounds) + result = str(self.meshcoord) - re_expected = self._expected_elements_regexp(mesh_strstyle=True) + self.assertTrue(self.meshcoord.has_lazy_points()) + self.assertTrue(self.meshcoord.has_lazy_bounds()) + + self.assertIn("points: ", result) + self.assertIn("bounds: ", result) + re_expected = self._expected_elements_regexp() self.assertRegex(result, re_expected) def test_alternative_location_and_axis(self): meshcoord = sample_meshcoord(mesh=self.mesh, location="edge", axis="y") result = str(meshcoord) - re_expected = r", location='edge', axis='y'" + re_expected = self._expected_elements_regexp( + standard_name="latitude", + long_name=None, + location="edge", + axis="y", + attributes=None, + ) self.assertRegex(result, re_expected) + # Basic output contains standard_name, long_name, attributes def test_str_no_long_name(self): mesh = self.mesh From d156e81a92eb40bbc9fdc6424c85e37811b2bbad Mon Sep 17 00:00:00 2001 From: Bill Little Date: Wed, 26 Jan 2022 09:20:29 +0000 Subject: [PATCH 21/69] update test (#3917) (#4527) * update test (#3917) * review comments * Update lib/iris/tests/test_io_init.py Co-authored-by: Will Benfold <69585101+wjbenfold@users.noreply.github.com> Co-authored-by: Will Benfold 
<69585101+wjbenfold@users.noreply.github.com> --- lib/iris/tests/test_io_init.py | 45 +++++++++++++++------------------- 1 file changed, 20 insertions(+), 25 deletions(-) diff --git a/lib/iris/tests/test_io_init.py b/lib/iris/tests/test_io_init.py index c8b88754b53..d33b76ddeb0 100644 --- a/lib/iris/tests/test_io_init.py +++ b/lib/iris/tests/test_io_init.py @@ -12,40 +12,42 @@ import iris.tests as tests # isort:skip from io import BytesIO -import pathlib +from pathlib import Path import iris.fileformats as iff import iris.io class TestDecodeUri(tests.IrisTest): - def test_decode_uri(self): + def test_decode_uri__str(self): tests = { - "/data/local/someDir/PP/COLPEX/COLPEX_16a_pj001.pp": ( + (uri := "/data/local/someDir/PP/COLPEX/COLPEX_16a_pj001.pp"): ( "file", - "/data/local/someDir/PP/COLPEX/COLPEX_16a_pj001.pp", + uri, ), - r"C:\data\local\someDir\PP\COLPEX\COLPEX_16a_pj001.pp": ( + (uri := r"C:\data\local\someDir\PP\COLPEX\COLPEX_16a_pj001.pp"): ( "file", - r"C:\data\local\someDir\PP\COLPEX\COLPEX_16a_pj001.pp", + uri, ), - "file:///data/local/someDir/PP/COLPEX/COLPEX_16a_pj001.pp": ( - "file", - "///data/local/someDir/PP/COLPEX/COLPEX_16a_pj001.pp", + ( + uri := "file:///data/local/someDir/PP/COLPEX/COLPEX_16a_pj001.pp" + ): ( + uri[:4], + uri[5:], ), - "http://www.somehost.com:8080/resource/thing.grib": ( - "http", - "//www.somehost.com:8080/resource/thing.grib", + (uri := "http://www.somehost.com:8080/resource/thing.grib"): ( + uri[:4], + uri[5:], ), - "/data/local/someDir/2013-11-25T13:49:17.632797": ( + (uri := "/data/local/someDir/2013-11-25T13:49:17.632797"): ( "file", - "/data/local/someDir/2013-11-25T13:49:17.632797", + uri, ), } - for uri, pair in tests.items(): - self.assertEqual(pair, iris.io.decode_uri(uri)) + for uri, expected in tests.items(): + self.assertEqual(expected, iris.io.decode_uri(uri)) - def test_decode_uri_path_object(self): + def test_decode_uri__path(self): tests = { (uri := "/data/local/someDir/PP/COLPEX/COLPEX_16a_pj001.pp"): ( 
"file", @@ -55,20 +57,13 @@ def test_decode_uri_path_object(self): "file", uri, ), - ( - uri := "file:///data/local/someDir/PP/COLPEX/COLPEX_16a_pj001.pp" - ): (uri[:4], uri[7:]), - (uri := "http://www.somehost.com:8080/resource/thing.grib"): ( - uri[:4], - uri[6:], - ), (uri := "/data/local/someDir/2013-11-25T13:49:17.632797"): ( "file", uri, ), } for uri, expected in tests.items(): - self.assertEqual(expected, iris.io.decode_uri(pathlib.Path(uri))) + self.assertEqual(expected, iris.io.decode_uri(Path(uri))) class TestFileFormatPicker(tests.IrisTest): From 633ed17dd32673136e9ba9a352ce9353b1a64918 Mon Sep 17 00:00:00 2001 From: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> Date: Wed, 26 Jan 2022 11:39:38 +0000 Subject: [PATCH 22/69] Improved use of UGRID terminology (#4498) * Improved use of UGRID terminology. * Differentiate location and location element in Connectivities. * Differentiate location and location element in Connectivities. * Various Connectivity rephrasing following review. * Adjust Connectivity validation tests for new phrasing. * Fix Connectivity line breaks. * Review actions on Connectivity docstrings. * Review actions on Connectivity docstrings. * Review actions on Connectivity docstrings. * Fix new tests to cope with Connectivity renaming. 
--- lib/iris/common/metadata.py | 2 +- lib/iris/experimental/ugrid/load.py | 22 +- lib/iris/experimental/ugrid/mesh.py | 285 +++++++++--------- lib/iris/experimental/ugrid/metadata.py | 13 +- lib/iris/fileformats/netcdf.py | 18 +- .../unit/common/mixin/test_CFVariableMixin.py | 6 +- .../unit/coords/test__DimensionalMetadata.py | 16 +- .../ugrid/mesh/test_Connectivity.py | 70 +++-- .../unit/experimental/ugrid/mesh/test_Mesh.py | 10 +- .../experimental/ugrid/mesh/test_MeshCoord.py | 18 +- .../ugrid/mesh/test_Mesh__from_coords.py | 2 +- .../metadata/test_ConnectivityMetadata.py | 46 +-- .../fileformats/netcdf/test_Saver__ugrid.py | 4 +- 13 files changed, 265 insertions(+), 247 deletions(-) diff --git a/lib/iris/common/metadata.py b/lib/iris/common/metadata.py index 29ae910e386..cb5f53f5f43 100644 --- a/lib/iris/common/metadata.py +++ b/lib/iris/common/metadata.py @@ -204,7 +204,7 @@ def func(field): lambda field: field not in ( "circular", - "src_dim", + "location_axis", "node_dimension", "edge_dimension", "face_dimension", diff --git a/lib/iris/experimental/ugrid/load.py b/lib/iris/experimental/ugrid/load.py index 5e42f2e6060..6c802e00d46 100644 --- a/lib/iris/experimental/ugrid/load.py +++ b/lib/iris/experimental/ugrid/load.py @@ -280,7 +280,7 @@ def _build_aux_coord(coord_var, file_path): return coord, axis -def _build_connectivity(connectivity_var, file_path, location_dims): +def _build_connectivity(connectivity_var, file_path, element_dims): """ Construct a :class:`~iris.experimental.ugrid.mesh.Connectivity` from a given :class:`~iris.experimental.ugrid.cf.CFUGridConnectivityVariable`, @@ -301,10 +301,10 @@ def _build_connectivity(connectivity_var, file_path, location_dims): dim_names = connectivity_var.dimensions # Connectivity arrays must have two dimensions. 
assert len(dim_names) == 2 - if dim_names[1] in location_dims: - src_dim = 1 + if dim_names[1] in element_dims: + location_axis = 1 else: - src_dim = 0 + location_axis = 0 standard_name, long_name, var_name = get_names( connectivity_var, None, attributes @@ -319,7 +319,7 @@ def _build_connectivity(connectivity_var, file_path, location_dims): units=attr_units, attributes=attributes, start_index=start_index, - src_dim=src_dim, + location_axis=location_axis, ) return connectivity, dim_names[0] @@ -423,20 +423,20 @@ def _build_mesh(cf, mesh_var, file_path): raise ValueError(message) # Used for detecting transposed connectivities. - location_dims = (edge_dimension, face_dimension) + element_dims = (edge_dimension, face_dimension) connectivity_args = [] for connectivity_var in mesh_var.cf_group.connectivities.values(): connectivity, first_dim_name = _build_connectivity( - connectivity_var, file_path, location_dims + connectivity_var, file_path, element_dims ) assert connectivity.var_name == getattr(mesh_var, connectivity.cf_role) connectivity_args.append(connectivity) # If the mesh_var has not supplied the dimension name, it is safe to # fall back on the connectivity's first dimension's name. - if edge_dimension is None and connectivity.src_location == "edge": + if edge_dimension is None and connectivity.location == "edge": edge_dimension = first_dim_name - if face_dimension is None and connectivity.src_location == "face": + if face_dimension is None and connectivity.location == "face": face_dimension = first_dim_name standard_name, long_name, var_name = get_names(mesh_var, None, attributes) @@ -480,12 +480,12 @@ def _build_mesh_coords(mesh, cf_var): """ # TODO: integrate with standard saving API when no longer 'experimental'. # Identify the cube's mesh dimension, for attaching MeshCoords. 
- locations_dimensions = { + element_dimensions = { "node": mesh.node_dimension, "edge": mesh.edge_dimension, "face": mesh.face_dimension, } - mesh_dim_name = locations_dimensions[cf_var.location] + mesh_dim_name = element_dimensions[cf_var.location] # (Only expecting 1 mesh dimension per cf_var). mesh_dim = cf_var.dimensions.index(mesh_dim_name) diff --git a/lib/iris/experimental/ugrid/mesh.py b/lib/iris/experimental/ugrid/mesh.py index 9e8074c83a5..5cd4102f1b0 100644 --- a/lib/iris/experimental/ugrid/mesh.py +++ b/lib/iris/experimental/ugrid/mesh.py @@ -94,8 +94,8 @@ class Connectivity(_DimensionalMetadata): """ A CF-UGRID topology connectivity, describing the topological relationship - between two lists of dimensional locations. One or more connectivities - make up a CF-UGRID topology - a constituent of a CF-UGRID mesh. + between two types of mesh element. One or more connectivities make up a + CF-UGRID topology - a constituent of a CF-UGRID mesh. See: https://ugrid-conventions.github.io/ugrid-conventions @@ -124,7 +124,7 @@ def __init__( units=None, attributes=None, start_index=0, - src_dim=0, + location_axis=0, ): """ Constructs a single connectivity. @@ -132,17 +132,23 @@ def __init__( Args: * indices (numpy.ndarray or numpy.ma.core.MaskedArray or dask.array.Array): - The index values describing a topological relationship. Constructed - of 2 dimensions - the list of locations, and within each location: - the indices of the 'target locations' it relates to. - Use a :class:`numpy.ma.core.MaskedArray` if :attr:`src_location` - lengths vary - mask unused index 'slots' within each - :attr:`src_location`. Use a :class:`dask.array.Array` to keep - indices 'lazy'. + 2D array giving the topological connection relationship between + :attr:`location` elements and :attr:`connected` elements. + The :attr:`location_axis` dimension indexes over the + :attr:`location` dimension of the mesh - i.e. its length matches + the total number of :attr:`location` elements in the mesh. 
The + :attr:`connected_axis` dimension can be any length, corresponding + to the highest number of :attr:`connected` elements connected to a + :attr:`location` element. The array values are indices into the + :attr:`connected` dimension of the mesh. If the number of + :attr:`connected` elements varies between :attr:`location` + elements: use a :class:`numpy.ma.core.MaskedArray` and mask the + :attr:`location` elements' unused index 'slots'. Use a + :class:`dask.array.Array` to keep indices 'lazy'. * cf_role (str): Denotes the topological relationship that this connectivity - describes. Made up of this array's locations, and the indexed - 'target location' within each location. + describes. Made up of this array's :attr:`location`, and the + :attr:`connected` element type that is indexed by the array. See :attr:`UGRID_CF_ROLES` for valid arguments. Kwargs: @@ -166,14 +172,14 @@ def __init__( Either ``0`` or ``1``. Default is ``0``. Denotes whether :attr:`indices` uses 0-based or 1-based indexing (allows support for Fortran and legacy NetCDF files). - * src_dim (int): - Either ``0`` or ``1``. Default is ``0``. Denotes which dimension - of :attr:`indices` varies over the :attr:`src_location`\\ s (the - alternate dimension therefore varying within individual - :attr:`src_location`\\ s). (This parameter allows support for fastest varying index being + * location_axis (int): + Either ``0`` or ``1``. Default is ``0``. Denotes which axis + of :attr:`indices` varies over the :attr:`location` elements (the + alternate axis therefore varying over :attr:`connected` elements). + (This parameter allows support for fastest varying index being either first or last). E.g. for ``face_node_connectivity``, for 10 faces: - ``indices.shape[src_dim] = 10``. + ``indices.shape[location_axis] == 10``. 
""" @@ -190,15 +196,15 @@ def validate_arg_vs_list(arg_name, arg, valid_list): validate_arg_vs_list("start_index", start_index, [0, 1]) # indices array will be 2-dimensional, so must be either 0 or 1. - validate_arg_vs_list("src_dim", src_dim, [0, 1]) + validate_arg_vs_list("location_axis", location_axis, [0, 1]) validate_arg_vs_list("cf_role", cf_role, Connectivity.UGRID_CF_ROLES) self._metadata_manager.start_index = start_index - self._metadata_manager.src_dim = src_dim + self._metadata_manager.location_axis = location_axis self._metadata_manager.cf_role = cf_role - self._tgt_dim = 1 - src_dim - self._src_location, self._tgt_location = cf_role.split("_")[:2] + self._connected_axis = 1 - location_axis + self._location, self._connected = cf_role.split("_")[:2] super().__init__( values=indices, @@ -233,25 +239,25 @@ def cf_role(self): return self._metadata_manager.cf_role @property - def src_location(self): + def location(self): """ Derived from the connectivity's :attr:`cf_role` - the first part, e.g. - ``face`` in ``face_node_connectivity``. Refers to the locations - listed by the :attr:`src_dim` of the connectivity's :attr:`indices` - array. + ``face`` in ``face_node_connectivity``. Refers to the elements that + vary along the :attr:`location_axis` of the connectivity's + :attr:`indices` array. """ - return self._src_location + return self._location @property - def tgt_location(self): + def connected(self): """ Derived from the connectivity's :attr:`cf_role` - the second part, e.g. - ``node`` in ``face_node_connectivity``. Refers to the locations indexed + ``node`` in ``face_node_connectivity``. Refers to the elements indexed by the values in the connectivity's :attr:`indices` array. 
""" - return self._tgt_location + return self._connected @property def start_index(self): @@ -266,47 +272,48 @@ def start_index(self): return self._metadata_manager.start_index @property - def src_dim(self): + def location_axis(self): """ - The dimension of the connectivity's :attr:`indices` array that varies - over the connectivity's :attr:`src_location`\\ s. Either ``0`` or ``1``. + The axis of the connectivity's :attr:`indices` array that varies + over the connectivity's :attr:`location` elements. Either ``0`` or ``1``. **Read-only** - validity of :attr:`indices` is dependent on - :attr:`src_dim`. Use :meth:`transpose` to create a new, transposed - :class:`Connectivity` if a different :attr:`src_dim` is needed. + :attr:`location_axis`. Use :meth:`transpose` to create a new, transposed + :class:`Connectivity` if a different :attr:`location_axis` is needed. """ - return self._metadata_manager.src_dim + return self._metadata_manager.location_axis @property - def tgt_dim(self): + def connected_axis(self): """ - Derived as the alternate value of :attr:`src_dim` - each must equal - either ``0`` or ``1``. - The dimension of the connectivity's :attr:`indices` array that varies - within the connectivity's individual :attr:`src_location`\\ s. + Derived as the alternate value of :attr:`location_axis` - each must + equal either ``0`` or ``1``. The axis of the connectivity's + :attr:`indices` array that varies over the :attr:`connected` elements + associated with each :attr:`location` element. """ - return self._tgt_dim + return self._connected_axis @property def indices(self): """ The index values describing the topological relationship of the connectivity, as a NumPy array. Masked points indicate a - :attr:`src_location` shorter than the longest :attr:`src_location` - described in this array - unused index 'slots' are masked. 
+ :attr:`location` element with fewer :attr:`connected` elements than + other :attr:`location` elements described in this array - unused index + 'slots' are masked. **Read-only** - index values are only meaningful when combined with an appropriate :attr:`cf_role`, :attr:`start_index` and - :attr:`src_dim`. A new :class:`Connectivity` must therefore be + :attr:`location_axis`. A new :class:`Connectivity` must therefore be defined if different indices are needed. """ return self._values - def indices_by_src(self, indices=None): + def indices_by_location(self, indices=None): """ - Return a view of the indices array with :attr:`src_dim` **always** as - the first index - transposed if necessary. Can optionally pass in an + Return a view of the indices array with :attr:`location_axis` **always** as + the first axis - transposed if necessary. Can optionally pass in an identically shaped array on which to perform this operation (e.g. the output from :meth:`core_indices` or :meth:`lazy_indices`). @@ -318,7 +325,7 @@ def indices_by_src(self, indices=None): Returns: A view of the indices array, transposed - if necessary - to put - :attr:`src_dim` first. + :attr:`location_axis` first. """ if indices is None: @@ -330,20 +337,20 @@ def indices_by_src(self, indices=None): f"got shape={indices.shape} ." ) - if self.src_dim == 0: + if self.location_axis == 0: result = indices - elif self.src_dim == 1: + elif self.location_axis == 1: result = indices.transpose() else: - raise ValueError("Invalid src_dim.") + raise ValueError("Invalid location_axis.") return result def _validate_indices(self, indices, shapes_only=False): # Use shapes_only=True for a lower resource, less thorough validation # of indices by just inspecting the array shape instead of inspecting - # individual masks. So will not catch individual src_locations being - # unacceptably small. + # individual masks. 
So will not catch individual location elements + # having unacceptably low numbers of associated connected elements. def indices_error(message): raise ValueError("Invalid indices provided. " + message) @@ -372,43 +379,43 @@ def indices_error(message): len_req_fail = False if shapes_only: - src_shape = indices_shape[self.tgt_dim] + location_shape = indices_shape[self.connected_axis] # Wrap as lazy to allow use of the same operations below # regardless of shapes_only. - src_lengths = _lazy.as_lazy_data(np.asarray(src_shape)) + location_lengths = _lazy.as_lazy_data(np.asarray(location_shape)) else: # Wouldn't be safe to use during __init__ validation, since - # lazy_src_lengths requires self.indices to exist. Safe here since + # lazy_location_lengths requires self.indices to exist. Safe here since # shapes_only==False is only called manually, i.e. after # initialisation. - src_lengths = self.lazy_src_lengths() - if self.src_location in ("edge", "boundary"): - if (src_lengths != 2).any().compute(): + location_lengths = self.lazy_location_lengths() + if self.location in ("edge", "boundary"): + if (location_lengths != 2).any().compute(): len_req_fail = "len=2" else: - if self.src_location == "face": + if self.location == "face": min_size = 3 - elif self.src_location == "volume": - if self.tgt_location == "edge": + elif self.location == "volume": + if self.connected == "edge": min_size = 6 else: min_size = 4 else: raise NotImplementedError - if (src_lengths < min_size).any().compute(): + if (location_lengths < min_size).any().compute(): len_req_fail = f"len>={min_size}" if len_req_fail: indices_error( - f"Not all src_locations meet requirement: {len_req_fail} - " + f"Not all {self.location}s meet requirement: {len_req_fail} - " f"needed to describe '{self.cf_role}' ." ) def validate_indices(self): """ Perform a thorough validity check of this connectivity's - :attr:`indices`. 
Includes checking the sizes of individual - :attr:`src_location`\\ s (specified using masks on the - :attr:`indices` array) against the :attr:`cf_role`. + :attr:`indices`. Includes checking the number of :attr:`connected` + elements associated with each :attr:`location` element (specified using + masks on the :attr:`indices` array) against the :attr:`cf_role`. Raises a ``ValueError`` if any problems are encountered, otherwise passes silently. @@ -426,8 +433,8 @@ def __eq__(self, other): if isinstance(other, Connectivity): # Account for the fact that other could be the transposed equivalent # of self, which we consider 'safe' since the recommended - # interaction with the indices array is via indices_by_src, which - # corrects for this difference. (To enable this, src_dim does + # interaction with the indices array is via indices_by_location, which + # corrects for this difference. (To enable this, location_axis does # not participate in ConnectivityMetadata to ConnectivityMetadata # equivalence). if hasattr(other, "metadata"): @@ -436,22 +443,22 @@ def __eq__(self, other): if eq: eq = ( self.shape == other.shape - and self.src_dim == other.src_dim + and self.location_axis == other.location_axis ) or ( self.shape == other.shape[::-1] - and self.src_dim == other.tgt_dim + and self.location_axis == other.connected_axis ) if eq: eq = array_equal( - self.indices_by_src(self.core_indices()), - other.indices_by_src(other.core_indices()), + self.indices_by_location(self.core_indices()), + other.indices_by_location(other.core_indices()), ) return eq def transpose(self): """ Create a new :class:`Connectivity`, identical to this one but with the - :attr:`indices` array transposed and the :attr:`src_dim` value flipped. + :attr:`indices` array transposed and the :attr:`location_axis` value flipped. 
Returns: A new :class:`Connectivity` that is the transposed equivalent of @@ -467,7 +474,7 @@ def transpose(self): units=self.units, attributes=self.attributes, start_index=self.start_index, - src_dim=self.tgt_dim, + location_axis=self.connected_axis, ) return new_connectivity @@ -510,11 +517,11 @@ def has_lazy_indices(self): """ return super()._has_lazy_values() - def lazy_src_lengths(self): + def lazy_location_lengths(self): """ - Return a lazy array representing the lengths of each - :attr:`src_location` in the :attr:`src_dim` of the connectivity's - :attr:`indices` array, accounting for masks if present. + Return a lazy array representing the number of :attr:`connected` + elements associated with each of the connectivity's :attr:`location` + elements, accounting for masks if present. Accessing this method will never cause the :attr:`indices` values to be loaded. Similarly, calling methods on, or indexing, the returned Array @@ -524,26 +531,28 @@ def lazy_src_lengths(self): :attr:`indices` have already been loaded. Returns: - A lazy array, representing the lengths of each :attr:`src_location`. + A lazy array, representing the number of :attr:`connected` + elements associated with each :attr:`location` element. """ - src_mask_counts = da.sum( - da.ma.getmaskarray(self.indices), axis=self.tgt_dim + location_mask_counts = da.sum( + da.ma.getmaskarray(self.indices), axis=self.connected_axis ) - max_src_size = self.indices.shape[self.tgt_dim] - return max_src_size - src_mask_counts + max_location_size = self.indices.shape[self.connected_axis] + return max_location_size - location_mask_counts - def src_lengths(self): + def location_lengths(self): """ - Return a NumPy array representing the lengths of each - :attr:`src_location` in the :attr:`src_dim` of the connectivity's - :attr:`indices` array, accounting for masks if present. 
+ Return a NumPy array representing the number of :attr:`connected` + elements associated with each of the connectivity's :attr:`location` + elements, accounting for masks if present. Returns: - A NumPy array, representing the lengths of each :attr:`src_location`. + A NumPy array, representing the number of :attr:`connected` + elements associated with each :attr:`location` element. """ - return self.lazy_src_lengths().compute() + return self.lazy_location_lengths().compute() def cube_dims(self, cube): """Not available on :class:`Connectivity`.""" @@ -556,7 +565,7 @@ def xml_element(self, doc): element.setAttribute("cf_role", self.cf_role) element.setAttribute("start_index", self.start_index) - element.setAttribute("src_dim", self.src_dim) + element.setAttribute("location_axis", self.location_axis) return element @@ -582,8 +591,8 @@ class Mesh(CFVariableMixin): AXES = ("x", "y") #: Valid range of values for ``topology_dimension``. TOPOLOGY_DIMENSIONS = (1, 2) - #: Valid mesh locations. - LOCATIONS = ("edge", "node", "face") + #: Valid mesh elements. + ELEMENTS = ("edge", "node", "face") def __init__( self, @@ -634,12 +643,12 @@ def __init__( self.attributes = attributes # based on the topology_dimension, create the appropriate coordinate manager - def normalise(location, axis): + def normalise(element, axis): result = str(axis).lower() if result not in self.AXES: - emsg = f"Invalid axis specified for {location} coordinate {coord.name()!r}, got {axis!r}." + emsg = f"Invalid axis specified for {element} coordinate {coord.name()!r}, got {axis!r}." raise ValueError(emsg) - return f"{location}_{result}" + return f"{element}_{result}" if not isinstance(node_coords_and_axes, Iterable): node_coords_and_axes = [node_coords_and_axes] @@ -1377,17 +1386,17 @@ def connectivities( :class:`~iris.experimental.ugrid.mesh.Connectivity`. 
* contains_node (bool): - Contains the ``node`` location as part of the + Contains the ``node`` element as part of the :attr:`~iris.experimental.ugrid.metadata.ConnectivityMetadata.cf_role` in the list of objects to be matched. * contains_edge (bool): - Contains the ``edge`` location as part of the + Contains the ``edge`` element as part of the :attr:`~iris.experimental.ugrid.metadata.ConnectivityMetadata.cf_role` in the list of objects to be matched. * contains_face (bool): - Contains the ``face`` location as part of the + Contains the ``face`` element as part of the :attr:`~iris.experimental.ugrid.metadata.ConnectivityMetadata.cf_role` in the list of objects to be matched. @@ -1478,17 +1487,17 @@ def connectivity( :class:`~iris.experimental.ugrid.mesh.Connectivity`. * contains_node (bool): - Contains the ``node`` location as part of the + Contains the ``node`` element as part of the :attr:`~iris.experimental.ugrid.metadata.ConnectivityMetadata.cf_role` in the list of objects to be matched. * contains_edge (bool): - Contains the ``edge`` location as part of the + Contains the ``edge`` element as part of the :attr:`~iris.experimental.ugrid.metadata.ConnectivityMetadata.cf_role` in the list of objects to be matched. * contains_face (bool): - Contains the ``face`` location as part of the + Contains the ``face`` element as part of the :attr:`~iris.experimental.ugrid.metadata.ConnectivityMetadata.cf_role` in the list of objects to be matched. @@ -1745,17 +1754,17 @@ def remove_connectivities( :class:`~iris.experimental.ugrid.mesh.Connectivity`. * contains_node (bool): - Contains the ``node`` location as part of the + Contains the ``node`` element as part of the :attr:`~iris.experimental.ugrid.metadata.ConnectivityMetadata.cf_role` in the list of objects to be matched for potential removal. 
* contains_edge (bool): - Contains the ``edge`` location as part of the + Contains the ``edge`` element as part of the :attr:`~iris.experimental.ugrid.metadata.ConnectivityMetadata.cf_role` in the list of objects to be matched for potential removal. * contains_face (bool): - Contains the ``face`` location as part of the + Contains the ``face`` element as part of the :attr:`~iris.experimental.ugrid.metadata.ConnectivityMetadata.cf_role` in the list of objects to be matched for potential removal. @@ -2091,12 +2100,12 @@ def _remove(self, **kwargs): return result - def _setter(self, location, axis, coord, shape): + def _setter(self, element, axis, coord, shape): axis = axis.lower() - member = f"{location}_{axis}" + member = f"{element}_{axis}" # enforce the UGRID minimum coordinate requirement - if location == "node" and coord is None: + if element == "node" and coord is None: emsg = ( f"{member!r} is a required coordinate, cannot set to 'None'." ) @@ -2123,22 +2132,22 @@ def _setter(self, location, axis, coord, shape): self._members[member] = coord - def _shape(self, location): - coord = getattr(self, f"{location}_x") + def _shape(self, element): + coord = getattr(self, f"{element}_x") shape = coord.shape if coord is not None else None if shape is None: - coord = getattr(self, f"{location}_y") + coord = getattr(self, f"{element}_y") if coord is not None: shape = coord.shape return shape @property def _edge_shape(self): - return self._shape(location="edge") + return self._shape(element="edge") @property def _node_shape(self): - return self._shape(location="node") + return self._shape(element="node") @property def all_members(self): @@ -2155,7 +2164,7 @@ def edge_x(self): @edge_x.setter def edge_x(self, coord): self._setter( - location="edge", axis="x", coord=coord, shape=self._edge_shape + element="edge", axis="x", coord=coord, shape=self._edge_shape ) @property @@ -2165,7 +2174,7 @@ def edge_y(self): @edge_y.setter def edge_y(self, coord): self._setter( - 
location="edge", axis="y", coord=coord, shape=self._edge_shape + element="edge", axis="y", coord=coord, shape=self._edge_shape ) @property @@ -2179,7 +2188,7 @@ def node_x(self): @node_x.setter def node_x(self, coord): self._setter( - location="node", axis="x", coord=coord, shape=self._node_shape + element="node", axis="x", coord=coord, shape=self._node_shape ) @property @@ -2189,7 +2198,7 @@ def node_y(self): @node_y.setter def node_y(self, coord): self._setter( - location="node", axis="y", coord=coord, shape=self._node_shape + element="node", axis="y", coord=coord, shape=self._node_shape ) def _add(self, coords): @@ -2365,7 +2374,7 @@ def __init__( @property def _face_shape(self): - return self._shape(location="face") + return self._shape(element="face") @property def all_members(self): @@ -2382,7 +2391,7 @@ def face_x(self): @face_x.setter def face_x(self, coord): self._setter( - location="face", axis="x", coord=coord, shape=self._face_shape + element="face", axis="x", coord=coord, shape=self._face_shape ) @property @@ -2392,7 +2401,7 @@ def face_y(self): @face_y.setter def face_y(self, coord): self._setter( - location="face", axis="y", coord=coord, shape=self._face_shape + element="face", axis="y", coord=coord, shape=self._face_shape ) def add( @@ -2512,24 +2521,20 @@ def add(self, *connectivities): # Validate shapes. proposed_members = {**self._members, **add_dict} - locations = set( - [ - c.src_location - for c in proposed_members.values() - if c is not None - ] + elements = set( + [c.location for c in proposed_members.values() if c is not None] ) - for location in locations: + for element in elements: counts = [ - len(c.indices_by_src(c.lazy_indices())) + len(c.indices_by_location(c.lazy_indices())) for c in proposed_members.values() - if c is not None and c.src_location == location + if c is not None and c.location == element ] # Check is list values are identical. 
if not counts.count(counts[0]) == len(counts): message = ( f"Invalid Connectivities provided - inconsistent " - f"{location} counts." + f"{element} counts." ) raise ValueError(message) @@ -2584,13 +2589,16 @@ def filters( instance for instance in members if instance.cf_role == cf_role ] - def location_filter(instances, loc_arg, loc_name): + def element_filter(instances, loc_arg, loc_name): if loc_arg is False: filtered = [ instance for instance in instances if loc_name - not in (instance.src_location, instance.tgt_location) + not in ( + instance.location, + instance.connected, + ) ] elif loc_arg is None: filtered = instances @@ -2599,8 +2607,7 @@ def location_filter(instances, loc_arg, loc_name): filtered = [ instance for instance in instances - if loc_name - in (instance.src_location, instance.tgt_location) + if loc_name in (instance.location, instance.connected) ] return filtered @@ -2610,7 +2617,7 @@ def location_filter(instances, loc_arg, loc_name): (contains_edge, "edge"), (contains_face, "face"), ): - members = location_filter(members, arg, loc) + members = element_filter(members, arg, loc) # No need to actually modify filtering behaviour - already won't return # any face cf-roles if none are present. @@ -2792,10 +2799,10 @@ def __init__( # NOTE: currently *not* included in metadata. In future it might be. self._mesh = mesh - if location not in Mesh.LOCATIONS: + if location not in Mesh.ELEMENTS: msg = ( f"'location' of {location} is not a valid Mesh location', " - f"must be one of {Mesh.LOCATIONS}." + f"must be one of {Mesh.ELEMENTS}." ) raise ValueError(msg) # Held in metadata, readable as self.location, but cannot set it. @@ -3037,7 +3044,7 @@ def _construct_access_arrays(self): # Data can be real or lazy, so operations must work in Dask, too. 
indices = bounds_connectivity.core_indices() # Normalise indices dimension order to [faces/edges, bounds] - indices = bounds_connectivity.indices_by_src(indices) + indices = bounds_connectivity.indices_by_location(indices) # Normalise the start index indices = indices - bounds_connectivity.start_index diff --git a/lib/iris/experimental/ugrid/metadata.py b/lib/iris/experimental/ugrid/metadata.py index 94128cdf508..ae0b7879084 100644 --- a/lib/iris/experimental/ugrid/metadata.py +++ b/lib/iris/experimental/ugrid/metadata.py @@ -28,9 +28,9 @@ class ConnectivityMetadata(BaseMetadata): """ - # The "src_dim" member is stateful only, and does not participate in + # The "location_axis" member is stateful only, and does not participate in # lenient/strict equivalence. - _members = ("cf_role", "start_index", "src_dim") + _members = ("cf_role", "start_index", "location_axis") __slots__ = () @@ -53,7 +53,7 @@ def _combine_lenient(self, other): A list of combined metadata member values. """ - # Perform "strict" combination for "cf_role", "start_index", "src_dim". + # Perform "strict" combination for "cf_role", "start_index", "location_axis". def func(field): left = getattr(self, field) right = getattr(other, field) @@ -82,9 +82,10 @@ def _compare_lenient(self, other): """ # Perform "strict" comparison for "cf_role", "start_index". - # The "src_dim" member is not part of lenient equivalence. + # The "location_axis" member is not part of lenient equivalence. members = filter( - lambda member: member != "src_dim", ConnectivityMetadata._members + lambda member: member != "location_axis", + ConnectivityMetadata._members, ) result = all( [ @@ -112,7 +113,7 @@ def _difference_lenient(self, other): A list of difference metadata member values. """ - # Perform "strict" difference for "cf_role", "start_index", "src_dim". + # Perform "strict" difference for "cf_role", "start_index", "location_axis". 
def func(field): left = getattr(self, field) right = getattr(other, field) diff --git a/lib/iris/fileformats/netcdf.py b/lib/iris/fileformats/netcdf.py index 77134259ad9..100ab29daaa 100644 --- a/lib/iris/fileformats/netcdf.py +++ b/lib/iris/fileformats/netcdf.py @@ -972,9 +972,9 @@ def __setitem__(self, keys, arr): self.target[keys] = arr -# NOTE : this matches :class:`iris.experimental.ugrid.mesh.Mesh.LOCATIONS`, +# NOTE : this matches :class:`iris.experimental.ugrid.mesh.Mesh.ELEMENTS`, # but in the preferred order for coord/connectivity variables in the file. -MESH_LOCATIONS = ("node", "edge", "face") +MESH_ELEMENTS = ("node", "edge", "face") class Saver: @@ -1422,7 +1422,7 @@ def _add_mesh(self, cube_or_mesh): mesh_dims = self._mesh_dims[mesh] # Add all the element coordinate variables. - for location in MESH_LOCATIONS: + for location in MESH_ELEMENTS: coords_meshobj_attr = f"{location}_coords" coords_file_attr = f"{location}_coordinates" mesh_coords = getattr(mesh, coords_meshobj_attr, None) @@ -1460,7 +1460,7 @@ def _add_mesh(self, cube_or_mesh): last_dim = f"{cf_mesh_name}_{loc_from}_N_{loc_to}s" # Create if it does not already exist. if last_dim not in self._dataset.dimensions: - length = conn.shape[1 - conn.src_dim] + length = conn.shape[1 - conn.location_axis] self._dataset.createDimension(last_dim, length) # Create variable. @@ -1470,7 +1470,7 @@ def _add_mesh(self, cube_or_mesh): # when it is first created. 
loc_dim_name = mesh_dims[loc_from] conn_dims = (loc_dim_name, last_dim) - if conn.src_dim == 1: + if conn.location_axis == 1: # Has the 'other' dimension order, =reversed conn_dims = conn_dims[::-1] if iris.util.is_masked(conn.core_indices()): @@ -1494,7 +1494,7 @@ def _add_mesh(self, cube_or_mesh): _setncattr(cf_mesh_var, cf_conn_attr_name, cf_conn_name) # If the connectivity had the 'alternate' dimension order, add the # relevant dimension property - if conn.src_dim == 1: + if conn.location_axis == 1: loc_dim_attr = f"{loc_from}_dimension" # Should only get here once. assert loc_dim_attr not in cf_mesh_var.ncattrs() @@ -1813,7 +1813,7 @@ def record_dimension(names_list, dim_name, length, matching_coords=[]): # NOTE: one of these will be a cube dimension, but that one does not # get any special handling. We *do* want to list/create them in a # definite order (node,edge,face), and before non-mesh dimensions. - for location in MESH_LOCATIONS: + for location in MESH_ELEMENTS: # Find if this location exists in the mesh, and a characteristic # coordinate to identify it with. 
# To use only _required_ UGRID components, we use a location @@ -1850,7 +1850,9 @@ def record_dimension(names_list, dim_name, length, matching_coords=[]): (dim_length,) = dim_element.shape else: # extract source dim, respecting dim-ordering - dim_length = dim_element.shape[dim_element.src_dim] + dim_length = dim_element.shape[ + dim_element.location_axis + ] # Name it for the relevant mesh dimension location_dim_attr = f"{location}_dimension" dim_name = getattr(mesh, location_dim_attr) diff --git a/lib/iris/tests/unit/common/mixin/test_CFVariableMixin.py b/lib/iris/tests/unit/common/mixin/test_CFVariableMixin.py index 1fa579ef94d..88a88be5674 100644 --- a/lib/iris/tests/unit/common/mixin/test_CFVariableMixin.py +++ b/lib/iris/tests/unit/common/mixin/test_CFVariableMixin.py @@ -286,13 +286,15 @@ def test_class_cellmeasuremetadata(self): ) def test_class_connectivitymetadata(self): - self.args.update(dict(cf_role=None, start_index=None, src_dim=None)) + self.args.update( + dict(cf_role=None, start_index=None, location_axis=None) + ) metadata = ConnectivityMetadata(**self.args) self.item.metadata = metadata expected = metadata._asdict() del expected["cf_role"] del expected["start_index"] - del expected["src_dim"] + del expected["location_axis"] self.assertEqual(self.item._metadata_manager.values, expected) self.assertIsNot( self.item._metadata_manager.attributes, metadata.attributes diff --git a/lib/iris/tests/unit/coords/test__DimensionalMetadata.py b/lib/iris/tests/unit/coords/test__DimensionalMetadata.py index d403b6c4e29..9a09c4e807e 100644 --- a/lib/iris/tests/unit/coords/test__DimensionalMetadata.py +++ b/lib/iris/tests/unit/coords/test__DimensionalMetadata.py @@ -821,7 +821,7 @@ def test_cellmeasure(self): def test_connectivity(self): # Check we can print a Connectivity - # Like a Coord, but always print : cf_role, src_dim, start_index + # Like a Coord, but always print : cf_role, location_axis, start_index data = self.sample_data(shape=(3, 2), datatype=int) 
conn = Connectivity( data, cf_role="edge_node_connectivity", long_name="enc", units="1" @@ -839,13 +839,13 @@ def test_connectivity(self): " long_name: 'enc'", " cf_role: 'edge_node_connectivity'", " start_index: 0", - " src_dim: 0", + " location_axis: 0", ] self.assertLines(expected, result) def test_connectivity__start_index(self): # Check we can print a Connectivity - # Like a Coord, but always print : cf_role, src_dim, start_index + # Like a Coord, but always print : cf_role, location_axis, start_index data = self.sample_data(shape=(3, 2), datatype=int) conn = Connectivity( data + 1, @@ -867,17 +867,17 @@ def test_connectivity__start_index(self): " long_name: 'enc'", " cf_role: 'edge_node_connectivity'", " start_index: 1", - " src_dim: 0", + " location_axis: 0", ] self.assertLines(expected, result) - def test_connectivity__src_dim(self): + def test_connectivity__location_axis(self): # Check we can print a Connectivity - # Like a Coord, but always print : cf_role, src_dim, start_index + # Like a Coord, but always print : cf_role, location_axis, start_index data = self.sample_data(shape=(3, 2), datatype=int) conn = Connectivity( data.transpose(), - src_dim=1, + location_axis=1, cf_role="edge_node_connectivity", long_name="enc", units="1", @@ -894,7 +894,7 @@ def test_connectivity__src_dim(self): " long_name: 'enc'", " cf_role: 'edge_node_connectivity'", " start_index: 0", - " src_dim: 1", + " location_axis: 1", ] self.assertLines(expected, result) diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Connectivity.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Connectivity.py index c0117ffc79f..d447957918a 100644 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Connectivity.py +++ b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Connectivity.py @@ -29,28 +29,30 @@ def setUp(self): "var_name": "face_nodes", "attributes": {"notes": "this is a test"}, "start_index": 1, - "src_dim": 1, + "location_axis": 1, } self.connectivity = 
Connectivity(**self.kwargs) def test_cf_role(self): self.assertEqual(self.kwargs["cf_role"], self.connectivity.cf_role) - def test_src_location(self): + def test_location(self): expected = self.kwargs["cf_role"].split("_")[0] - self.assertEqual(expected, self.connectivity.src_location) + self.assertEqual(expected, self.connectivity.location) - def test_tgt_location(self): + def test_connected(self): expected = self.kwargs["cf_role"].split("_")[1] - self.assertEqual(expected, self.connectivity.tgt_location) + self.assertEqual(expected, self.connectivity.connected) def test_start_index(self): self.assertEqual( self.kwargs["start_index"], self.connectivity.start_index ) - def test_src_dim(self): - self.assertEqual(self.kwargs["src_dim"], self.connectivity.src_dim) + def test_location_axis(self): + self.assertEqual( + self.kwargs["location_axis"], self.connectivity.location_axis + ) def test_indices(self): self.assertArrayEqual( @@ -58,7 +60,7 @@ def test_indices(self): ) def test_read_only(self): - attributes = ("indices", "cf_role", "start_index", "src_dim") + attributes = ("indices", "cf_role", "start_index", "location_axis") for attribute in attributes: self.assertRaisesRegex( AttributeError, @@ -70,10 +72,10 @@ def test_read_only(self): ) def test_transpose(self): - expected_dim = 1 - self.kwargs["src_dim"] + expected_dim = 1 - self.kwargs["location_axis"] expected_indices = self.kwargs["indices"].transpose() new_connectivity = self.connectivity.transpose() - self.assertEqual(expected_dim, new_connectivity.src_dim) + self.assertEqual(expected_dim, new_connectivity.location_axis) self.assertArrayEqual(expected_indices, new_connectivity.indices) def test_lazy_indices(self): @@ -87,12 +89,14 @@ def test_core_indices(self): def test_has_lazy_indices(self): self.assertFalse(self.connectivity.has_lazy_indices()) - def test_lazy_src_lengths(self): - self.assertTrue(is_lazy_data(self.connectivity.lazy_src_lengths())) + def test_lazy_location_lengths(self): + 
self.assertTrue( + is_lazy_data(self.connectivity.lazy_location_lengths()) + ) - def test_src_lengths(self): + def test_location_lengths(self): expected = [4, 4, 4] - self.assertArrayEqual(expected, self.connectivity.src_lengths()) + self.assertArrayEqual(expected, self.connectivity.location_lengths()) def test___str__(self): expected = "\n".join( @@ -110,7 +114,7 @@ def test___str__(self): " attributes: {'notes': 'this is a test'}", " cf_role: 'face_node_connectivity'", " start_index: 1", - " src_dim: 1", + " location_axis: 1", ] ) self.assertEqual(expected, self.connectivity.__str__()) @@ -123,13 +127,13 @@ def test_xml_element(self): doc = minidom.Document() connectivity_element = self.connectivity.xml_element(doc) self.assertEqual(connectivity_element.tagName, "connectivity") - for attribute in ("cf_role", "start_index", "src_dim"): + for attribute in ("cf_role", "start_index", "location_axis"): self.assertIn(attribute, connectivity_element.attributes) def test___eq__(self): equivalent_kwargs = self.kwargs equivalent_kwargs["indices"] = self.kwargs["indices"].transpose() - equivalent_kwargs["src_dim"] = 1 - self.kwargs["src_dim"] + equivalent_kwargs["location_axis"] = 1 - self.kwargs["location_axis"] equivalent = Connectivity(**equivalent_kwargs) self.assertFalse( np.array_equal(equivalent.indices, self.connectivity.indices) @@ -160,16 +164,18 @@ def test_copy(self): copy_connectivity = self.connectivity.copy(new_indices) self.assertArrayEqual(new_indices, copy_connectivity.indices) - def test_indices_by_src(self): + def test_indices_by_location(self): expected = self.kwargs["indices"].transpose() - self.assertArrayEqual(expected, self.connectivity.indices_by_src()) + self.assertArrayEqual( + expected, self.connectivity.indices_by_location() + ) - def test_indices_by_src_input(self): + def test_indices_by_location_input(self): expected = as_lazy_data(self.kwargs["indices"].transpose()) - by_src = self.connectivity.indices_by_src( + by_location = 
self.connectivity.indices_by_location( self.connectivity.lazy_indices() ) - self.assertArrayEqual(expected, by_src) + self.assertArrayEqual(expected, by_location) class TestAltIndices(tests.IrisTest): @@ -220,14 +226,14 @@ def test_start_index(self): ValueError, "Invalid start_index .", Connectivity, **kwargs ) - def test_src_dim(self): + def test_location_axis(self): kwargs = { "indices": np.linspace(1, 9, 9, dtype=int).reshape((-1, 3)), "cf_role": "face_node_connectivity", - "src_dim": 2, + "location_axis": 2, } self.assertRaisesRegex( - ValueError, "Invalid src_dim .", Connectivity, **kwargs + ValueError, "Invalid location_axis .", Connectivity, **kwargs ) def test_cf_role(self): @@ -285,7 +291,7 @@ def test_indices_locations_edge(self): } self.assertRaisesRegex( ValueError, - "Not all src_locations meet requirement: len=2", + "Not all edges meet requirement: len=2", Connectivity, **kwargs, ) @@ -297,7 +303,7 @@ def test_indices_locations_face(self): } self.assertRaisesRegex( ValueError, - "Not all src_locations meet requirement: len>=3", + "Not all faces meet requirement: len>=3", Connectivity, **kwargs, ) @@ -309,7 +315,7 @@ def test_indices_locations_volume_face(self): } self.assertRaisesRegex( ValueError, - "Not all src_locations meet requirement: len>=4", + "Not all volumes meet requirement: len>=4", Connectivity, **kwargs, ) @@ -321,7 +327,7 @@ def test_indices_locations_volume_edge(self): } self.assertRaisesRegex( ValueError, - "Not all src_locations meet requirement: len>=6", + "Not all volumes meet requirement: len>=6", Connectivity, **kwargs, ) @@ -331,11 +337,11 @@ def test_indices_locations_alt_dim(self): kwargs = { "indices": np.linspace(1, 9, 9, dtype=int).reshape((3, -1)), "cf_role": "volume_face_connectivity", - "src_dim": 1, + "location_axis": 1, } self.assertRaisesRegex( ValueError, - "Not all src_locations meet requirement: len>=4", + "Not all volumes meet requirement: len>=4", Connectivity, **kwargs, ) @@ -352,7 +358,7 @@ def 
test_indices_locations_masked(self): connectivity = Connectivity(**kwargs) self.assertRaisesRegex( ValueError, - "Not all src_locations meet requirement: len>=3", + "Not all faces meet requirement: len>=3", connectivity.validate_indices, ) diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py index 1df67deb66d..6bd7ea54662 100644 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py +++ b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py @@ -210,7 +210,7 @@ def test_connectivities(self): for kwargs in negative_kwargs: self.assertEqual([], func(**kwargs)) - def test_connectivities_locations(self): + def test_connectivities_elements(self): # topology_dimension-specific results. Method intended to be overridden. positive_kwargs = ( {"contains_node": True}, @@ -270,7 +270,7 @@ def test_coords(self): for kwargs in negative_kwargs: self.assertNotIn(self.NODE_LON, func(**kwargs)) - def test_coords_locations(self): + def test_coords_elements(self): # topology_dimension-specific results. Method intended to be overridden. all_expected = { "node_x": self.NODE_LON, @@ -522,7 +522,7 @@ def test_connectivity(self): contains_face=False, ) - def test_connectivities_locations(self): + def test_connectivities_elements(self): kwargs_expected = ( ( {"contains_node": True}, @@ -578,7 +578,7 @@ def test_connectivities_locations(self): for item in expected: self.assertIn(item, result) - def test_coords_locations(self): + def test_coords_elements(self): all_expected = { "node_x": self.NODE_LON, "node_y": self.NODE_LAT, @@ -732,7 +732,7 @@ def setUp(self): @staticmethod def new_connectivity(connectivity, new_len=False): """Provide a new connectivity recognisably different from the original.""" - # NOTE: assumes non-transposed connectivity (src_dim=0). + # NOTE: assumes non-transposed connectivity (location_axis=0). 
if new_len: shape = (connectivity.shape[0] + 1, connectivity.shape[1]) else: diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py index 8c2e652c314..8f4222ca7bd 100644 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py +++ b/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py @@ -61,7 +61,7 @@ def test_fail_bad_mesh(self): sample_meshcoord(mesh=mock.sentinel.odd) def test_valid_locations(self): - for loc in Mesh.LOCATIONS: + for loc in Mesh.ELEMENTS: meshcoord = sample_meshcoord(location=loc) self.assertEqual(meshcoord.location, loc) @@ -536,12 +536,12 @@ def _make_test_meshcoord( lazy_sources=False, location="face", inds_start_index=0, - inds_src_dim=0, + inds_location_axis=0, facenodes_changes=None, ): # Construct a miniature face-nodes mesh for testing. # NOTE: we will make our connectivity arrays with standard - # start_index=0 and src_dim=0 : We only adjust that (if required) when + # start_index=0 and location_axis=0 : We only adjust that (if required) when # creating the actual connectivities. 
face_nodes_array = np.array( [ @@ -626,26 +626,26 @@ def lazify(arr): inds_start_index + ( face_nodes_array.transpose() - if inds_src_dim == 1 + if inds_location_axis == 1 else face_nodes_array ), cf_role="face_node_connectivity", long_name="face_nodes", start_index=inds_start_index, - src_dim=inds_src_dim, + location_axis=inds_location_axis, ) edge_node_conn = Connectivity( inds_start_index + ( edge_nodes_array.transpose() - if inds_src_dim == 1 + if inds_location_axis == 1 else edge_nodes_array ), cf_role="edge_node_connectivity", long_name="edge_nodes", start_index=inds_start_index, - src_dim=inds_src_dim, + location_axis=inds_location_axis, ) self.mesh = Mesh( @@ -729,9 +729,9 @@ def test_edge_bounds(self): # NB simpler than faces : no possibility of missing points self.assertArrayAlmostEqual(result, expected) - def test_bounds_connectivity__src_dim_1(self): + def test_bounds_connectivity__location_axis_1(self): # Test with a transposed indices array. - self._make_test_meshcoord(inds_src_dim=1) + self._make_test_meshcoord(inds_location_axis=1) self._check_expected_bounds_values() def test_bounds_connectivity__start_index_1(self): diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh__from_coords.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh__from_coords.py index aee5018e5b3..edd34f94a1a 100644 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh__from_coords.py +++ b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh__from_coords.py @@ -217,7 +217,7 @@ def test_mixed_shapes(self): mesh = self.create() self.assertArrayEqual( - mesh.face_node_connectivity.src_lengths(), [4, 4, 3] + mesh.face_node_connectivity.location_lengths(), [4, 4, 3] ) self.assertEqual(mesh.node_coords.node_x.points[-1], 0.0) self.assertEqual(mesh.node_coords.node_y.points[-1], 0.0) diff --git a/lib/iris/tests/unit/experimental/ugrid/metadata/test_ConnectivityMetadata.py b/lib/iris/tests/unit/experimental/ugrid/metadata/test_ConnectivityMetadata.py index 
f119f53729f..af92e69b080 100644 --- a/lib/iris/tests/unit/experimental/ugrid/metadata/test_ConnectivityMetadata.py +++ b/lib/iris/tests/unit/experimental/ugrid/metadata/test_ConnectivityMetadata.py @@ -29,7 +29,7 @@ def setUp(self): self.attributes = mock.sentinel.attributes self.cf_role = mock.sentinel.cf_role self.start_index = mock.sentinel.start_index - self.src_dim = mock.sentinel.src_dim + self.location_axis = mock.sentinel.location_axis self.cls = ConnectivityMetadata def test_repr(self): @@ -41,12 +41,12 @@ def test_repr(self): attributes=self.attributes, cf_role=self.cf_role, start_index=self.start_index, - src_dim=self.src_dim, + location_axis=self.location_axis, ) fmt = ( "ConnectivityMetadata(standard_name={!r}, long_name={!r}, " "var_name={!r}, units={!r}, attributes={!r}, cf_role={!r}, " - "start_index={!r}, src_dim={!r})" + "start_index={!r}, location_axis={!r})" ) expected = fmt.format( self.standard_name, @@ -56,7 +56,7 @@ def test_repr(self): self.attributes, self.cf_role, self.start_index, - self.src_dim, + self.location_axis, ) self.assertEqual(expected, repr(metadata)) @@ -69,7 +69,7 @@ def test__fields(self): "attributes", "cf_role", "start_index", - "src_dim", + "location_axis", ) self.assertEqual(self.cls._fields, expected) @@ -87,14 +87,14 @@ def setUp(self): attributes=sentinel.attributes, cf_role=sentinel.cf_role, start_index=sentinel.start_index, - src_dim=sentinel.src_dim, + location_axis=sentinel.location_axis, ) self.dummy = sentinel.dummy self.cls = ConnectivityMetadata - # The "src_dim" member is stateful only, and does not participate in + # The "location_axis" member is stateful only, and does not participate in # lenient/strict equivalence. 
- self.members_no_src_dim = filter( - lambda member: member != "src_dim", self.cls._members + self.members_no_location_axis = filter( + lambda member: member != "location_axis", self.cls._members ) def test_wraps_docstring(self): @@ -140,7 +140,7 @@ def test_op_lenient_same_none(self): self.assertTrue(rmetadata.__eq__(lmetadata)) def test_op_lenient_same_members_none(self): - for member in self.members_no_src_dim: + for member in self.members_no_location_axis: lmetadata = self.cls(**self.values) right = self.values.copy() right[member] = None @@ -152,10 +152,10 @@ def test_op_lenient_same_members_none(self): self.assertFalse(lmetadata.__eq__(rmetadata)) self.assertFalse(rmetadata.__eq__(lmetadata)) - def test_op_lenient_same_src_dim_none(self): + def test_op_lenient_same_location_axis_none(self): lmetadata = self.cls(**self.values) right = self.values.copy() - right["src_dim"] = None + right["location_axis"] = None rmetadata = self.cls(**right) with mock.patch("iris.common.metadata._LENIENT", return_value=True): @@ -173,7 +173,7 @@ def test_op_lenient_different(self): self.assertFalse(rmetadata.__eq__(lmetadata)) def test_op_lenient_different_members(self): - for member in self.members_no_src_dim: + for member in self.members_no_location_axis: lmetadata = self.cls(**self.values) right = self.values.copy() right[member] = self.dummy @@ -185,10 +185,10 @@ def test_op_lenient_different_members(self): self.assertFalse(lmetadata.__eq__(rmetadata)) self.assertFalse(rmetadata.__eq__(lmetadata)) - def test_op_lenient_different_src_dim(self): + def test_op_lenient_different_location_axis(self): lmetadata = self.cls(**self.values) right = self.values.copy() - right["src_dim"] = self.dummy + right["location_axis"] = self.dummy rmetadata = self.cls(**right) with mock.patch("iris.common.metadata._LENIENT", return_value=True): @@ -214,7 +214,7 @@ def test_op_strict_different(self): self.assertFalse(rmetadata.__eq__(lmetadata)) def test_op_strict_different_members(self): - for 
member in self.members_no_src_dim: + for member in self.members_no_location_axis: lmetadata = self.cls(**self.values) right = self.values.copy() right[member] = self.dummy @@ -226,10 +226,10 @@ def test_op_strict_different_members(self): self.assertFalse(lmetadata.__eq__(rmetadata)) self.assertFalse(rmetadata.__eq__(lmetadata)) - def test_op_strict_different_src_dim(self): + def test_op_strict_different_location_axis(self): lmetadata = self.cls(**self.values) right = self.values.copy() - right["src_dim"] = self.dummy + right["location_axis"] = self.dummy rmetadata = self.cls(**right) with mock.patch("iris.common.metadata._LENIENT", return_value=False): @@ -247,7 +247,7 @@ def test_op_strict_different_none(self): self.assertFalse(rmetadata.__eq__(lmetadata)) def test_op_strict_different_members_none(self): - for member in self.members_no_src_dim: + for member in self.members_no_location_axis: lmetadata = self.cls(**self.values) right = self.values.copy() right[member] = None @@ -259,10 +259,10 @@ def test_op_strict_different_members_none(self): self.assertFalse(lmetadata.__eq__(rmetadata)) self.assertFalse(rmetadata.__eq__(lmetadata)) - def test_op_strict_different_src_dim_none(self): + def test_op_strict_different_location_axis_none(self): lmetadata = self.cls(**self.values) right = self.values.copy() - right["src_dim"] = None + right["location_axis"] = None rmetadata = self.cls(**right) with mock.patch("iris.common.metadata._LENIENT", return_value=False): @@ -311,7 +311,7 @@ def setUp(self): attributes=sentinel.attributes, cf_role=sentinel.cf_role, start_index=sentinel.start_index, - src_dim=sentinel.src_dim, + location_axis=sentinel.location_axis, ) self.dummy = sentinel.dummy self.cls = ConnectivityMetadata @@ -508,7 +508,7 @@ def setUp(self): attributes=sentinel.attributes, cf_role=sentinel.cf_role, start_index=sentinel.start_index, - src_dim=sentinel.src_dim, + location_axis=sentinel.location_axis, ) self.dummy = sentinel.dummy self.cls = ConnectivityMetadata 
diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_Saver__ugrid.py b/lib/iris/tests/unit/fileformats/netcdf/test_Saver__ugrid.py index 87c2df7d45a..a914dd3314a 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/test_Saver__ugrid.py +++ b/lib/iris/tests/unit/fileformats/netcdf/test_Saver__ugrid.py @@ -184,7 +184,7 @@ def mesh_location_size(mesh, location): if conn is None: result = 0 else: - result = conn.shape[conn.src_dim] + result = conn.shape[conn.location_axis] return result @@ -705,7 +705,7 @@ def test_connectivity_dim_order(self): # Get the face-node and edge-node connectivities face_nodes_conn = mesh.face_node_connectivity edge_nodes_conn = mesh.edge_node_connectivity - # Transpose them : N.B. this sets src_dim=1, as it should be. + # Transpose them : N.B. this sets location_axis=1, as it should be. nodesfirst_faces_conn = face_nodes_conn.transpose() nodesfirst_edges_conn = edge_nodes_conn.transpose() # Make a new mesh with both face and edge connectivities 'transposed'. From 9c407ee007ea7a83489b8386316e81e91a9a9f40 Mon Sep 17 00:00:00 2001 From: Ruth Comer <10599679+rcomer@users.noreply.github.com> Date: Wed, 26 Jan 2022 13:38:42 +0000 Subject: [PATCH 23/69] correct oktas (#4535) --- docs/src/whatsnew/latest.rst | 4 ++++ lib/iris/fileformats/um_cf_map.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 35628c4355a..328bad46d27 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -172,6 +172,10 @@ This document explains the changes made to Iris for this release #. `@wjbenfold`_ stopped :meth:`iris.coord_systems.GeogCS.as_cartopy_projection` from assuming the globe to be the Earth (:issue:`4408`, :pull:`4497`) +#. 
`@rcomer`_ corrected the ``long_name`` mapping from UM stash code ``m01s09i215`` + to indicate cloud fraction greater than 7.9 oktas, rather than 7.5 + (:issue:`3305`, :pull:`4535`) + 💣 Incompatible Changes ======================= diff --git a/lib/iris/fileformats/um_cf_map.py b/lib/iris/fileformats/um_cf_map.py index f3d392fc5fd..8aee67ae3ec 100644 --- a/lib/iris/fileformats/um_cf_map.py +++ b/lib/iris/fileformats/um_cf_map.py @@ -681,7 +681,7 @@ 'm01s09i212': CFName(None, 'cloud_base_altitude_assuming_only_consider_cloud_area_fraction_greater_than_4p5_oktas', 'kft'), 'm01s09i213': CFName(None, 'cloud_base_altitude_assuming_only_consider_cloud_area_fraction_greater_than_5p5_oktas', 'kft'), 'm01s09i214': CFName(None, 'cloud_base_altitude_assuming_only_consider_cloud_area_fraction_greater_than_6p5_oktas', 'kft'), - 'm01s09i215': CFName(None, 'cloud_base_altitude_assuming_only_consider_cloud_area_fraction_greater_than_7p5_oktas', 'kft'), + 'm01s09i215': CFName(None, 'cloud_base_altitude_assuming_only_consider_cloud_area_fraction_greater_than_7p9_oktas', 'kft'), 'm01s09i216': CFName(None, 'cloud_area_fraction_assuming_random_overlap', '1'), 'm01s09i217': CFName(None, 'cloud_area_fraction_assuming_maximum_random_overlap', '1'), 'm01s09i218': CFName(None, 'cloud_area_fraction_assuming_only_consider_surface_to_1000_feet_asl', '1'), From fc095fdfb04bf3860eeebb0486bd2c84d2fdac6f Mon Sep 17 00:00:00 2001 From: Bill Little Date: Wed, 26 Jan 2022 16:59:40 +0000 Subject: [PATCH 24/69] docs: remove duplicate first contributor entry (#4537) --- docs/src/whatsnew/latest.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 328bad46d27..e18a75a4826 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -27,9 +27,9 @@ This document explains the changes made to Iris for this release 📢 Announcements ================ -#. 
Welcome to `@wjbenfold`_, `@tinyendian`_, `@larsbarring`_, `@akuhnregnier`_, - `@bsherratt`_ and `@aaronspring`_ who made their first contributions to Iris. - The first of many we hope! +#. Welcome to `@wjbenfold`_, `@tinyendian`_, `@larsbarring`_, `@bsherratt`_ and + `@aaronspring`_ who made their first contributions to Iris. The first of + many we hope! #. Congratulations to `@wjbenfold`_ who has become a core developer for Iris! 🎉 From 0d527bb2b2d5ca9935ec5069d39581f48168a003 Mon Sep 17 00:00:00 2001 From: Ruth Comer <10599679+rcomer@users.noreply.github.com> Date: Thu, 27 Jan 2022 07:16:51 +0000 Subject: [PATCH 25/69] Update latest.rst (#4542) --- docs/src/whatsnew/latest.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index e18a75a4826..786c4e5b0ff 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -131,7 +131,7 @@ This document explains the changes made to Iris for this release (:issue:`4342`, :pull:`4345`) #. `@larsbarring`_ fixed :class:`~iris.coord_systems.GeoCS` to handle spherical ellipsoid - parameter inverse_flattening = 0 (:issue: `4146`, :pull:`4348`) + parameter inverse_flattening = 0 (:issue:`4146`, :pull:`4348`) #. 
`@pdearnshaw`_ fixed an error in the call to :class:`cftime.datetime` in :mod:`~iris.fileformats.pp_save_rules` that prevented the saving to PP of climate From c4c246d402b4f70b57024177c6269065c7c1183b Mon Sep 17 00:00:00 2001 From: Will Benfold <69585101+wjbenfold@users.noreply.github.com> Date: Thu, 27 Jan 2022 10:47:17 +0000 Subject: [PATCH 26/69] Deprecate intersection_of_cubes (#4541) --- docs/src/whatsnew/latest.rst | 5 +++++ lib/iris/analysis/maths.py | 30 ++++++++++++++++++++++++++++-- 2 files changed, 33 insertions(+), 2 deletions(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 786c4e5b0ff..a963bee516b 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -220,6 +220,11 @@ This document explains the changes made to Iris for this release ``as_compatible_shape(src_cube, target_cube)`` replace with ``Resolve(src_cube, target_cube)(target_cube.core_data())``. (:pull:`4513`) +#. `@wjbenfold`_ deprecated :func:`iris.analysis.maths.intersection_of_cubes` in + preference for :meth:`iris.cube.CubeList.extract_overlapping`. The + :func:`~iris.analysis.maths.intersection_of_cubes` function will be removed in + a future release of Iris. (:pull:`4541`) + 🔗 Dependencies =============== diff --git a/lib/iris/analysis/maths.py b/lib/iris/analysis/maths.py index 571a66b756b..107d964ed4e 100644 --- a/lib/iris/analysis/maths.py +++ b/lib/iris/analysis/maths.py @@ -19,6 +19,7 @@ import numpy as np from numpy import ma +from iris._deprecation import warn_deprecated import iris.analysis from iris.common import SERVICES, Resolve from iris.common.lenient import _lenient_client @@ -138,10 +139,35 @@ def intersection_of_cubes(cube, other_cube): An instance of :class:`iris.cube.Cube`. Returns: - A pair of :class:`iris.cube.Cube` instances in a tuple corresponding - to the original cubes restricted to their intersection. 
+ A pair of :class:`iris.cube.Cube` instances in a tuple corresponding to + the original cubes restricted to their intersection. + + .. deprecated:: 3.2.0 + + Instead use :meth:`iris.cube.CubeList.extract_overlapping`. For example, + rather than calling + + .. code:: + + cube1, cube2 = intersection_of_cubes(cube1, cube2) + + replace with + + .. code:: + + cubes = CubeList([cube1, cube2]) + coords = ["latitude", "longitude"] # Replace with relevant coords + intersections = cubes.extract_overlapping(coords) + cube1, cube2 = (intersections[0], intersections[1]) """ + wmsg = ( + "iris.analysis.maths.intersection_of_cubes has been deprecated and will " + "be removed, please use iris.cube.CubeList.extract_overlapping " + "instead. See intersection_of_cubes docstring for more information." + ) + warn_deprecated(wmsg) + # Take references of the original cubes (which will be copied when # slicing later). new_cube_self = cube From f718d42f36c10207543db184b0193b93d212649c Mon Sep 17 00:00:00 2001 From: Ruth Comer <10599679+rcomer@users.noreply.github.com> Date: Thu, 27 Jan 2022 13:06:24 +0000 Subject: [PATCH 27/69] Lockfile update and avoid Sphinx 4.4 warnings (#4546) * Updated environment lockfiles * follow suggestions * fix properly Co-authored-by: Lockfile bot --- docs/src/further_topics/index.rst | 3 +- docs/src/further_topics/metadata.rst | 5 +- docs/src/whatsnew/1.4.rst | 3 +- docs/src/whatsnew/3.0.rst | 5 +- docs/src/whatsnew/3.1.rst | 5 +- docs/src/whatsnew/latest.rst | 5 +- docs/src/whatsnew/latest.rst.template | 6 +- requirements/ci/nox.lock/py38-linux-64.lock | 78 +++++++++++---------- 8 files changed, 53 insertions(+), 57 deletions(-) diff --git a/docs/src/further_topics/index.rst b/docs/src/further_topics/index.rst index dc162d6a1e2..ef6436b0efa 100644 --- a/docs/src/further_topics/index.rst +++ b/docs/src/further_topics/index.rst @@ -13,7 +13,7 @@ that may be of interest to the more advanced or curious user. .. 
hint:: If you wish further documentation on any specific topics or areas of Iris - that are missing, then please let us know by raising a `GitHub Documentation Issue`_ + that are missing, then please let us know by raising a :issue:`GitHub Documentation Issue` on `SciTools/Iris`_. @@ -22,5 +22,4 @@ that may be of interest to the more advanced or curious user. * :doc:`lenient_maths` -.. _GitHub Documentation Issue: https://github.com/SciTools/iris/issues/new?assignees=&labels=New%3A+Documentation%2C+Type%3A+Documentation&template=documentation.md&title= .. _SciTools/iris: https://github.com/SciTools/iris diff --git a/docs/src/further_topics/metadata.rst b/docs/src/further_topics/metadata.rst index 79e9c164a0d..689c983e511 100644 --- a/docs/src/further_topics/metadata.rst +++ b/docs/src/further_topics/metadata.rst @@ -38,8 +38,8 @@ Collectively, the aforementioned classes will be known here as the Iris .. hint:: If there are any `CF Conventions`_ metadata missing from Iris that you - care about, then please let us know by raising a `GitHub Issue`_ on - `SciTools/iris`_ + care about, then please let us know by raising a :issue:`GitHub Issue` + on `SciTools/iris`_ Common Metadata @@ -990,7 +990,6 @@ values. All other metadata members will be left unaltered. .. _CF Conventions: https://cfconventions.org/ .. _Cell Measures: https://cfconventions.org/Data/cf-conventions/cf-conventions-1.8/cf-conventions.html#cell-measures .. _Flags: https://cfconventions.org/Data/cf-conventions/cf-conventions-1.8/cf-conventions.html#flags -.. _GitHub Issue: https://github.com/SciTools/iris/issues/new/choose .. _mapping: https://docs.python.org/3/glossary.html#term-mapping .. _namedtuple: https://docs.python.org/3/library/collections.html#collections.namedtuple .. 
_namedtuple._make: https://docs.python.org/3/library/collections.html#collections.somenamedtuple._make diff --git a/docs/src/whatsnew/1.4.rst b/docs/src/whatsnew/1.4.rst index 858f985ec6e..989198296ce 100644 --- a/docs/src/whatsnew/1.4.rst +++ b/docs/src/whatsnew/1.4.rst @@ -182,8 +182,7 @@ Cubes With no Vertical Coord can now be Exported to GRIB -------------------------------------------------------- Iris can now export cubes with no vertical coord to GRIB. -The solution is still under discussion: See -https://github.com/SciTools/iris/issues/519. +The solution is still under discussion: See :issue:`519`. .. _simple_cfg: diff --git a/docs/src/whatsnew/3.0.rst b/docs/src/whatsnew/3.0.rst index 77458c70e93..771a6029542 100644 --- a/docs/src/whatsnew/3.0.rst +++ b/docs/src/whatsnew/3.0.rst @@ -35,8 +35,8 @@ This document explains the changes made to Iris for this release :ref:`incompatible changes ` and :ref:`deprecations `. - And finally, get in touch with us on `GitHub`_ if you have any issues or - feature requests for improving Iris. Enjoy! + And finally, get in touch with us on :issue:`GitHub` if you have + any issues or feature requests for improving Iris. Enjoy! v3.0.1 (27 Jan 2021) @@ -617,7 +617,6 @@ v3.0.4 (22 July 2021) .. _xxHash: https://github.com/Cyan4973/xxHash .. _PyKE: https://pypi.org/project/scitools-pyke/ .. _@owena11: https://github.com/owena11 -.. _GitHub: https://github.com/SciTools/iris/issues/new/choose .. _readthedocs: https://readthedocs.org/ .. _CF Conventions and Metadata: https://cfconventions.org/ .. _flake8: https://flake8.pycqa.org/en/stable/ diff --git a/docs/src/whatsnew/3.1.rst b/docs/src/whatsnew/3.1.rst index 165e20d9bc2..bd046a0a24c 100644 --- a/docs/src/whatsnew/3.1.rst +++ b/docs/src/whatsnew/3.1.rst @@ -25,8 +25,8 @@ This document explains the changes made to Iris for this release * Multiple improvements to developer guide documentation. See entries in the :ref:`"Documentation" section `, below. 
- And finally, get in touch with us on `GitHub`_ if you have any issues or - feature requests for improving Iris. Enjoy! + And finally, get in touch with us on :issue:`GitHub` if you have + any issues or feature requests for improving Iris. Enjoy! 📢 Announcements @@ -315,7 +315,6 @@ This document explains the changes made to Iris for this release .. _blacken-docs: https://github.com/asottile/blacken-docs .. _conda-lock: https://github.com/conda-incubator/conda-lock .. _deprecated numpy 1.20 aliases for builtin types: https://numpy.org/doc/1.20/release/1.20.0-notes.html#using-the-aliases-of-builtin-types-like-np-int-is-deprecated -.. _GitHub: https://github.com/SciTools/iris/issues/new/choose .. _Met Office: https://www.metoffice.gov.uk/ .. _numpy: https://numpy.org/doc/stable/release/1.20.0-notes.html .. |pre-commit.ci| image:: https://results.pre-commit.ci/badge/github/SciTools/iris/main.svg diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index a963bee516b..7c1fd330755 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -20,8 +20,8 @@ This document explains the changes made to Iris for this release and attached to a cube. * We've also dropped support for ``Python 3.7``. - And finally, get in touch with us on `GitHub`_ if you have any issues or - feature requests for improving Iris. Enjoy! + And finally, get in touch with us on :issue:`GitHub` if you have + any issues or feature requests for improving Iris. Enjoy! 📢 Announcements @@ -350,7 +350,6 @@ This document explains the changes made to Iris for this release .. comment Whatsnew resources in alphabetical order: -.. _GitHub: https://github.com/SciTools/iris/issues/new/choose .. _NEP-29: https://numpy.org/neps/nep-0029-deprecation_policy.html .. _UGRID: http://ugrid-conventions.github.io/ugrid-conventions/ .. 
_sort-all: https://github.com/aio-libs/sort-all diff --git a/docs/src/whatsnew/latest.rst.template b/docs/src/whatsnew/latest.rst.template index ced07780692..79c578ca655 100644 --- a/docs/src/whatsnew/latest.rst.template +++ b/docs/src/whatsnew/latest.rst.template @@ -18,8 +18,8 @@ This document explains the changes made to Iris for this release * N/A - And finally, get in touch with us on `GitHub`_ if you have any issues or - feature requests for improving Iris. Enjoy! + And finally, get in touch with us on :issue:`GitHub` if you have + any issues or feature requests for improving Iris. Enjoy! NOTE: section below is a template for bugfix patches @@ -109,4 +109,4 @@ NOTE: section above is a template for bugfix patches .. comment Whatsnew resources in alphabetical order: -.. _GitHub: https://github.com/SciTools/iris/issues/new/choose + diff --git a/requirements/ci/nox.lock/py38-linux-64.lock b/requirements/ci/nox.lock/py38-linux-64.lock index 6128e30e692..364b656f5db 100644 --- a/requirements/ci/nox.lock/py38-linux-64.lock +++ b/requirements/ci/nox.lock/py38-linux-64.lock @@ -12,7 +12,7 @@ https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.36.1-hea4e1c9 https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-11.2.0-h5c6108e_11.tar.bz2#2dcb18a9a0fa31f4f29e5a9b3eade394 https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-11.2.0-he4da1e4_11.tar.bz2#0bf83958e788f1e75ba26154cb702afe https://conda.anaconda.org/conda-forge/linux-64/mpi-1.0-mpich.tar.bz2#c1fcff3417b5a22bbc4cf6e8c23648cf -https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.27-ha770c72_3.tar.bz2#49210aaa9080888f9f9b460c70202bd3 +https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.28-ha770c72_0.tar.bz2#56594fdd5a80774a80d546fbbccf2c03 https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 
https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-11.2.0-h69a702a_11.tar.bz2#4ea2f9f83b617a7682e8aa05dcb37c6a https://conda.anaconda.org/conda-forge/linux-64/libgomp-11.2.0-h1d223b6_11.tar.bz2#1d16527c76842bf9c41e9399d39d8097 @@ -22,12 +22,12 @@ https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-11.2.0-h1d223b6_11.tar https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.3-h516909a_0.tar.bz2#1378b88874f42ac31b2f8e4f6975cb7b https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.18.1-h7f98852_0.tar.bz2#f26ef8098fab1f719c91eb760d63381a -https://conda.anaconda.org/conda-forge/linux-64/expat-2.4.2-h9c3ff4c_0.tar.bz2#0fb039650fa638f258fdc9e9ef125f52 +https://conda.anaconda.org/conda-forge/linux-64/expat-2.4.3-h9c3ff4c_0.tar.bz2#bd783d12b65023e333bb7016de41570b https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 -https://conda.anaconda.org/conda-forge/linux-64/geos-3.10.1-h9c3ff4c_1.tar.bz2#17a5f413039ce1e105fab5df9c668eb5 +https://conda.anaconda.org/conda-forge/linux-64/geos-3.10.2-h9c3ff4c_0.tar.bz2#fe9a66a351bfa7a84c3108304c7bcba5 https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.1-h36c2ea0_2.tar.bz2#626e68ae9cc5912d6adb79d318cf962d https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h58526e2_1001.tar.bz2#8c54672728e8ec6aa6db90cf2806d220 -https://conda.anaconda.org/conda-forge/linux-64/icu-68.2-h9c3ff4c_0.tar.bz2#6618c9b191638993f2a818c6529e1b49 +https://conda.anaconda.org/conda-forge/linux-64/icu-69.1-h9c3ff4c_0.tar.bz2#e0773c9556d588b062a4e1424a6a02fa https://conda.anaconda.org/conda-forge/linux-64/jbig-2.1-h7f98852_2003.tar.bz2#1aa0cee79792fa97b7ff4545110b60bf https://conda.anaconda.org/conda-forge/linux-64/jpeg-9d-h36c2ea0_0.tar.bz2#ea02ce6037dbe81803ae6123e5ba1568 
https://conda.anaconda.org/conda-forge/linux-64/lerc-3.0-h9c3ff4c_0.tar.bz2#7fcefde484980d23f0ec24c11e314d2e @@ -36,6 +36,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.8-h7f98852_0.tar.bz https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.16-h516909a_0.tar.bz2#5c0f338a513a2943c659ae619fca9211 +https://conda.anaconda.org/conda-forge/linux-64/libllvm13-13.0.0-hf817b99_0.tar.bz2#b10bb2ebebfffa8800fa80ad3285719e https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.0-h7f98852_0.tar.bz2#39b1328babf85c7c3a61636d9cd50206 https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2#6e8cc2173440d77708196c5b93771680 @@ -47,7 +48,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.2.1-h7f98852_0.ta https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.11-h36c2ea0_1013.tar.bz2#dcddf696ff5dfcab567100d691678e18 https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.3-h9c3ff4c_1.tar.bz2#fbe97e8fa6f275d7c76a09e795adc3e6 https://conda.anaconda.org/conda-forge/linux-64/mpich-3.4.3-h846660c_100.tar.bz2#1bb747e2de717cb9a6501d72539d6556 -https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.2-h58526e2_4.tar.bz2#509f2a21c4a09214cd737a480dfd80c9 +https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.3-h9c3ff4c_0.tar.bz2#fb31bcb7af058244479ca635d20f0f4a https://conda.anaconda.org/conda-forge/linux-64/nspr-4.32-h9c3ff4c_1.tar.bz2#29ded371806431b0499aaee146abfc3e https://conda.anaconda.org/conda-forge/linux-64/openssl-1.1.1l-h7f98852_0.tar.bz2#de7b38a1542dbe6f41653a8ae71adc53 
https://conda.anaconda.org/conda-forge/linux-64/pcre-8.45-h9c3ff4c_0.tar.bz2#c05d1820a6d34ff07aaaab7a9b7eddaa @@ -64,9 +65,10 @@ https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.0-h7f98852_3.tar.bz2# https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.5-h516909a_1.tar.bz2#33f601066901f3e1a85af3522a8113f9 https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae https://conda.anaconda.org/conda-forge/linux-64/gettext-0.19.8.1-h73d1719_1008.tar.bz2#af49250eca8e139378f8ff0ae9e57251 -https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-12_linux64_openblas.tar.bz2#4f93ba28c628a2c27cf39c055e6b219c +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-13_linux64_openblas.tar.bz2#8a4038563ed92dfa622bd72c0d8f31d3 https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h7f98852_6.tar.bz2#c7c03a2592cac92246a13a0732bd1573 https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.0.9-h7f98852_6.tar.bz2#28bfe0a70154e6881da7bae97517c948 +https://conda.anaconda.org/conda-forge/linux-64/libclang-13.0.0-default_hc23dcda_0.tar.bz2#7b140452b5bc91e46410b84807307249 https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.10-h9b69904_4.tar.bz2#390026683aef81db27ff1b8570ca1336 https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 @@ -75,20 +77,19 @@ https://conda.anaconda.org/conda-forge/linux-64/readline-8.1-h46c0cb4_0.tar.bz2# https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.27.27-hc3e0081_3.tar.bz2#a47110f41fcbf88fcdf8549d7f69a6d8 https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.3-hd9c2040_1000.tar.bz2#9e856f78d5c80d5a78f61e72d1d473a3 https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.11-h36c2ea0_1013.tar.bz2#cf7190238072a41e9579e4476a6a60b8 
-https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.1-ha95c52a_0.tar.bz2#4eec219a4bd69c11579601804cec5baf +https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.2-ha95c52a_0.tar.bz2#5222b231b1ef49a7f60d40b363469b70 https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.0.9-h7f98852_6.tar.bz2#9e94bf16f14c78a36561d5019f490d22 https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h10796ff_3.tar.bz2#21a8d66dc17f065023b33145c42652fe -https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-12_linux64_openblas.tar.bz2#2e5082d4a9a18c21100e6ce5b6bcb4ec +https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-13_linux64_openblas.tar.bz2#b17676dbd6688396c3a3076259fb7907 https://conda.anaconda.org/conda-forge/linux-64/libglib-2.70.2-h174f98d_1.tar.bz2#d03a54631298fd1ab732ff65f6ed3a07 -https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-12_linux64_openblas.tar.bz2#9f401a6807a97e0c859d7522ae3d51ec -https://conda.anaconda.org/conda-forge/linux-64/libllvm11-11.1.0-hf817b99_2.tar.bz2#646fa2f7c60b69ee8f918668e9c2fd31 +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-13_linux64_openblas.tar.bz2#018b80e8f21d8560ae4961567e3e00c9 https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.43.0-h812cca2_1.tar.bz2#d0a7846b7b3b8fb0d8b36904a53b8155 https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.37-h21135ba_2.tar.bz2#b6acf807307d033d4b7e758b4f44b036 https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.10.0-ha56f1ee_2.tar.bz2#6ab4eaa11ff01801cffca0a27489dc04 https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.3.0-h6f004c6_2.tar.bz2#34fda41ca84e67232888c9a885903055 -https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.9.12-h72842e0_0.tar.bz2#bd14fdf5b9ee5568056a40a6a2f41866 +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.9.12-h885dcf4_1.tar.bz2#d1355eaa48f465782f228275a0a69771 https://conda.anaconda.org/conda-forge/linux-64/libzip-1.8.0-h4de3113_1.tar.bz2#175a746a43d42c053b91aa765fbc197d 
-https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.27-hfa10184_3.tar.bz2#7cd299934880b05703ee86a62325982f +https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.28-hfa10184_0.tar.bz2#aac17542e50a474e2e632878dc696d50 https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.37.0-h9cd32fc_0.tar.bz2#eb66fc098824d25518a79e83d12a81d6 https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.11-h27826a3_1.tar.bz2#84e76fb280e735fec1efd2d21fd9cb27 https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.7.2-h7f98852_0.tar.bz2#12a61e640b8894504326aadafccbb790 @@ -97,13 +98,12 @@ https://conda.anaconda.org/conda-forge/linux-64/brotli-1.0.9-h7f98852_6.tar.bz2# https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d https://conda.anaconda.org/conda-forge/linux-64/freetype-2.10.4-h0708190_1.tar.bz2#4a06f2ac2e5bfae7b6b245171c3f07aa https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.6-h04a7f16_0.tar.bz2#b24a1e18325a6e8f8b6b4a2ec5860ce2 -https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.18.5-h9f60fe5_2.tar.bz2#6221115a24700aa8598ae5aa1574902d +https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.18.5-h9f60fe5_3.tar.bz2#511aa83cdfcc0132380db5daf2f15f27 https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h64030ff_2.tar.bz2#112eb9b5b93f0c02e59aea4fd1967363 https://conda.anaconda.org/conda-forge/linux-64/krb5-1.19.2-hcc1bbae_3.tar.bz2#e29650992ae593bc05fc93722483e5c3 -https://conda.anaconda.org/conda-forge/linux-64/libclang-11.1.0-default_ha53f305_1.tar.bz2#b9b71585ca4fcb5d442c5a9df5dd7e98 https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.2.1-h3452ae3_0.tar.bz2#6d4bf6265d998b6c975c26a6a24062a2 https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.0.3-he3ba5ed_0.tar.bz2#f9dbabc7e01c459ed7a1d1d64b206e9b -https://conda.anaconda.org/conda-forge/linux-64/nss-3.73-hb5efdd6_0.tar.bz2#a5b91a14292ac34bac1f0506a3772fd5 
+https://conda.anaconda.org/conda-forge/linux-64/nss-3.74-hb5efdd6_0.tar.bz2#136876ca50177058594f6c2944e95c40 https://conda.anaconda.org/conda-forge/linux-64/python-3.8.12-hb7a2778_2_cpython.tar.bz2#148ea076514259c7f562fbfba956a693 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h7f98852_1.tar.bz2#536cc5db4d0a3ba0630541aec064b5e4 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.10-h7f98852_1003.tar.bz2#f59c1242cc1dd93e72c2ee2b360979eb @@ -117,19 +117,19 @@ https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2 https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.4-pyhd8ed1ab_0.tar.bz2#7b50d840543d9cdae100e91582c33035 https://conda.anaconda.org/conda-forge/noarch/filelock-3.4.2-pyhd8ed1ab_1.tar.bz2#d3f5797d3f9625c64860c93fc4359e64 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.13.1-hba837de_1005.tar.bz2#fd3611672eb91bc9d24fd6fb970037eb -https://conda.anaconda.org/conda-forge/noarch/fsspec-2021.11.1-pyhd8ed1ab_0.tar.bz2#a510ec93fdb50775091d2afba98a8acb -https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.18.5-hf529b03_2.tar.bz2#3cf866063f2803944ddaee8b1d6da531 -https://conda.anaconda.org/conda-forge/noarch/idna-3.1-pyhd3deb0d_0.tar.bz2#9c9aea4b8391264477df484f798562d0 +https://conda.anaconda.org/conda-forge/noarch/fsspec-2022.1.0-pyhd8ed1ab_0.tar.bz2#188e095f4dc38887bb48b065734b9e8d +https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.18.5-hf529b03_3.tar.bz2#524a9f1718bac53a6cf4906bcc51d044 +https://conda.anaconda.org/conda-forge/noarch/idna-3.3-pyhd8ed1ab_0.tar.bz2#40b50b8b030f5f2f22085c062ed013dd https://conda.anaconda.org/conda-forge/noarch/imagesize-1.3.0-pyhd8ed1ab_0.tar.bz2#be807e7606fff9436e5e700f6bffb7c6 https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 https://conda.anaconda.org/conda-forge/linux-64/libcurl-7.81.0-h2574ce0_0.tar.bz2#1f8655741d0269ca6756f131522da1e8 
-https://conda.anaconda.org/conda-forge/linux-64/libpq-13.5-hd57d9b9_1.tar.bz2#a0f425d61c7df890d6381ea352c3f1d7 +https://conda.anaconda.org/conda-forge/linux-64/libpq-14.1-hd57d9b9_1.tar.bz2#a7024916bfdf33a014a0cc803580c9a1 https://conda.anaconda.org/conda-forge/noarch/locket-0.2.0-py_2.tar.bz2#709e8671651c7ec3d1ad07800339ff1d https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 https://conda.anaconda.org/conda-forge/noarch/nose-1.3.7-py_1006.tar.bz2#382019d5f8e9362ef6f60a8d4e7bce8f https://conda.anaconda.org/conda-forge/noarch/olefile-0.46-pyh9f0ad1d_1.tar.bz2#0b2e68acc8c78c8cc392b90983481f58 https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff -https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.6-pyhd8ed1ab_0.tar.bz2#3087df8c636c5a00e694605c39ce4982 +https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.7-pyhd8ed1ab_0.tar.bz2#727e2216d9c47455d8ddc060eb2caad9 https://conda.anaconda.org/conda-forge/noarch/pyshp-2.1.3-pyh44b312d_0.tar.bz2#2d1867b980785eb44b8122184d8b42a6 https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.8-2_cp38.tar.bz2#bfbb29d517281e78ac53e48d21e6e860 https://conda.anaconda.org/conda-forge/noarch/pytz-2021.3-pyhd8ed1ab_0.tar.bz2#7e4f811bff46a5a6a7e0094921389395 @@ -143,41 +143,43 @@ https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.3-py_0.ta https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 https://conda.anaconda.org/conda-forge/noarch/toolz-0.11.2-pyhd8ed1ab_0.tar.bz2#f348d1590550371edfac5ed3c1d44f7e https://conda.anaconda.org/conda-forge/noarch/wheel-0.37.1-pyhd8ed1ab_0.tar.bz2#1ca02aaf78d9c70d9a81a3bed5752022 +https://conda.anaconda.org/conda-forge/noarch/zipp-3.7.0-pyhd8ed1ab_0.tar.bz2#947f7f41958eabc0f6e886557512bb76 
https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py38h578d9bd_1003.tar.bz2#db8b471d9a764f561a129f94ea215c0a https://conda.anaconda.org/conda-forge/noarch/babel-2.9.1-pyh44b312d_0.tar.bz2#74136ed39bfea0832d338df1e58d013e -https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-h6cf1ce9_1008.tar.bz2#a43fb47d15e116f8be4be7e6b17ab59f +https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-ha00ac49_1009.tar.bz2#d1dff57b8731c245d3247b46d002e1c9 https://conda.anaconda.org/conda-forge/linux-64/certifi-2021.10.8-py38h578d9bd_1.tar.bz2#52a6cee65a5d10ed1c3f0af24fb48dd3 https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.0-py38h3931269_0.tar.bz2#9c491a90ae11d08ca97326a0ed876f3a https://conda.anaconda.org/conda-forge/linux-64/curl-7.81.0-h2574ce0_0.tar.bz2#3a95d393b490f82aa406f1892fad84d9 https://conda.anaconda.org/conda-forge/linux-64/docutils-0.16-py38h578d9bd_3.tar.bz2#a7866449fb9e5e4008a02df276549d34 https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.12.1-mpi_mpich_h9c45103_3.tar.bz2#4f1a733e563d27b98010b62888e149c9 +https://conda.anaconda.org/conda-forge/linux-64/importlib-metadata-4.10.1-py38h578d9bd_0.tar.bz2#26da12e39b1b93e82fb865e967d0cbe0 https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.3.2-py38h1fd1430_1.tar.bz2#085365abfe53d5d13bb68b1dda0b439e -https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h6ad9fb6_0.tar.bz2#45142dc44fcd04934f9ad68ce205e54d +https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h3cfcdeb_1.tar.bz2#37d7568c595f0cfcd0c493f5ca0344ab https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.0.1-py38h497a2fe_1.tar.bz2#1ef7b5f4826ca48a15e2cd98a5c3436d https://conda.anaconda.org/conda-forge/linux-64/mpi4py-3.1.3-py38he865349_0.tar.bz2#b1b3d6847a68251a1465206ab466b475 -https://conda.anaconda.org/conda-forge/linux-64/numpy-1.22.0-py38h6ae9a64_0.tar.bz2#0731ced21afb9adab62eb7aaf7abaf1e 
+https://conda.anaconda.org/conda-forge/linux-64/numpy-1.22.0-py38h6ae9a64_1.tar.bz2#e06061f06c024fab614ace4bbb3e34df https://conda.anaconda.org/conda-forge/noarch/packaging-21.3-pyhd8ed1ab_0.tar.bz2#71f1ab2de48613876becddd496371c85 https://conda.anaconda.org/conda-forge/noarch/partd-1.2.0-pyhd8ed1ab_0.tar.bz2#0c32f563d7f22e3a34c95cad8cc95651 https://conda.anaconda.org/conda-forge/linux-64/pillow-6.2.1-py38hd70f55b_1.tar.bz2#80d719bee2b77a106b199150c0829107 https://conda.anaconda.org/conda-forge/noarch/pockets-0.9.1-py_0.tar.bz2#1b52f0c42e8077e5a33e00fe72269364 -https://conda.anaconda.org/conda-forge/linux-64/proj-8.2.0-h277dcde_0.tar.bz2#7ba8c7a9bf1c2fedf4a6d6dc92839baf +https://conda.anaconda.org/conda-forge/linux-64/proj-8.2.1-h277dcde_0.tar.bz2#f2ceb1be6565c35e2db0ac948754751d https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-4.19.18-py38h709712a_8.tar.bz2#11b72f5b1cc15427c89232321172a0bc https://conda.anaconda.org/conda-forge/linux-64/pysocks-1.7.1-py38h578d9bd_4.tar.bz2#9c4bbee6f682f2fc7d7803df3996e77e https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-2.0.2-py38h497a2fe_1.tar.bz2#977d03222271270ea8fe35388bf13752 https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py38h497a2fe_3.tar.bz2#131de7d638aa59fb8afbce59f1a8aa98 -https://conda.anaconda.org/conda-forge/linux-64/qt-5.12.9-hda022c4_4.tar.bz2#afebab1f5049d66baaaec67d9ce893f0 -https://conda.anaconda.org/conda-forge/linux-64/setuptools-60.3.1-py38h578d9bd_0.tar.bz2#adb15768b02db40b8f7234bfe0a6e7fc +https://conda.anaconda.org/conda-forge/linux-64/qt-5.12.9-ha98a1a1_5.tar.bz2#9b27fa0b1044a2119fb1b290617fe06f +https://conda.anaconda.org/conda-forge/linux-64/setuptools-59.8.0-py38h578d9bd_0.tar.bz2#5c6cd89b15e0059af9dd07cc60a6afbd https://conda.anaconda.org/conda-forge/linux-64/tornado-6.1-py38h497a2fe_2.tar.bz2#63b3b55c98b4239134e0be080f448944 
https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-14.0.0-py38h497a2fe_0.tar.bz2#8da7787169411910df2a62dc8ef533e0 https://conda.anaconda.org/conda-forge/linux-64/virtualenv-20.4.7-py38h578d9bd_1.tar.bz2#37717ce393db8536ae2b613839af4274 https://conda.anaconda.org/conda-forge/linux-64/brotlipy-0.7.0-py38h497a2fe_1003.tar.bz2#9189b42c42b9c87b2b2068cbe31901a8 https://conda.anaconda.org/conda-forge/linux-64/cftime-1.5.1.1-py38h6c62de6_1.tar.bz2#d4a47fd2bbc8292a322d462734b0ada5 https://conda.anaconda.org/conda-forge/linux-64/cryptography-36.0.1-py38h3e25421_0.tar.bz2#acc14d0d71dbf74f6a15f2456951b6cf -https://conda.anaconda.org/conda-forge/noarch/dask-core-2021.12.0-pyhd8ed1ab_0.tar.bz2#e572bf40b1e8783fed2526ecb5f5209e +https://conda.anaconda.org/conda-forge/noarch/dask-core-2022.1.0-pyhd8ed1ab_0.tar.bz2#e7d934ff2c617f0bfc62ab77c160f093 https://conda.anaconda.org/conda-forge/linux-64/editdistance-s-1.0.0-py38h1fd1430_2.tar.bz2#482431310c7b3320a31c8c6ce82a7a15 https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.28.5-py38h497a2fe_0.tar.bz2#f611d0be8205d5b0566f9c97e7d66ae3 -https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-2.9.1-h83ec7ef_1.tar.bz2#9a9e823b2e31e84e5ce06f54ffce9d70 +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-3.2.0-hb4a5f5f_0.tar.bz2#d03d53e6bcb97e6a97a1659fb38aa76e https://conda.anaconda.org/conda-forge/noarch/jinja2-3.0.3-pyhd8ed1ab_0.tar.bz2#036d872c653780cb26e797e2e2f61b4c https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.8.1-mpi_mpich_h319fa22_1.tar.bz2#7583fbaea3648f692c0c019254bc196c https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py38h6c62de6_1006.tar.bz2#829b1209dfadd431a11048d6eeaf5bef @@ -185,36 +187,36 @@ https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.6.0-pyhd8ed1ab_0.tar.bz2 https://conda.anaconda.org/conda-forge/linux-64/pandas-1.3.5-py38h43a58ef_0.tar.bz2#171cc96da3b1a0ebd4bf2b5586b7cda3 
https://conda.anaconda.org/conda-forge/noarch/pip-21.3.1-pyhd8ed1ab_0.tar.bz2#e4fe2a9af78ff11f1aced7e62128c6a8 https://conda.anaconda.org/conda-forge/noarch/pygments-2.11.2-pyhd8ed1ab_0.tar.bz2#caef60540e2239e27bf62569a5015e3b -https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.3.0-py38hdd21e9b_0.tar.bz2#ceb8ec641cd5faa40b568f8ca008b6dc +https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.3.0-py38h5383654_1.tar.bz2#5b600e019fa7c33be73bdb626236936b https://conda.anaconda.org/conda-forge/linux-64/pyqt-impl-5.12.3-py38h0ffb2e6_8.tar.bz2#acfc7625a212c27f7decdca86fdb2aba https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.2.post0-py38h6c62de6_1.tar.bz2#a350e3f4ca899e95122f66806e048858 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.2.0-py38h6c62de6_1.tar.bz2#2953d3fc0113fc6ffb955a5b72811fb0 https://conda.anaconda.org/conda-forge/linux-64/scipy-1.7.3-py38h56a6a73_0.tar.bz2#2d318049369bb52d2687b0ac2be82751 -https://conda.anaconda.org/conda-forge/linux-64/shapely-1.8.0-py38h800f7b8_4.tar.bz2#5fb9a3af4ebd8b21ca099e107306be72 +https://conda.anaconda.org/conda-forge/linux-64/shapely-1.8.0-py38h596eeab_5.tar.bz2#ec3b783081e14a9dc0eb5ce609649728 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-napoleon-0.7-py_0.tar.bz2#0bc25ff6f2e34af63ded59692df5f749 https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.0.1-py38h6c62de6_2.tar.bz2#350322b046c129e5802b79358a1343f7 https://conda.anaconda.org/conda-forge/noarch/identify-2.3.7-pyhd8ed1ab_0.tar.bz2#ae1a5e834fbca62ee88ab55fb276be63 https://conda.anaconda.org/conda-forge/noarch/imagehash-4.2.1-pyhd8ed1ab_0.tar.bz2#01cc8698b6e1a124dc4f585516c27643 https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.5.1-py38hf4fb855_0.tar.bz2#47cf0cab2ae368e1062e75cfbc4277af -https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.5.3-mpi_mpich_h1364a43_6.tar.bz2#9caa0cf923af3d037897c6d7f8ea57c0 
+https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.5.4-mpi_mpich_h1364a43_0.tar.bz2#b6ba4f487ef9fd5d353ff277df06d133 https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.5.8-nompi_py38h2823cc8_101.tar.bz2#1dfe1cdee4532c72f893955259eb3de9 -https://conda.anaconda.org/conda-forge/linux-64/pango-1.48.10-hb8ff022_1.tar.bz2#f67c24bfd760cd50c285556ee7507853 +https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.3-h9967ed3_0.tar.bz2#37f1c68380bc5dfe0f5bb2655e207a73 https://conda.anaconda.org/conda-forge/noarch/pyopenssl-21.0.0-pyhd8ed1ab_0.tar.bz2#8c49efecb7dca466e18b06015e8c88ce https://conda.anaconda.org/conda-forge/linux-64/pyqtchart-5.12-py38h7400c14_8.tar.bz2#78a2a6cb4ef31f997c1bee8223a9e579 https://conda.anaconda.org/conda-forge/linux-64/pyqtwebengine-5.12.1-py38h7400c14_8.tar.bz2#857894ea9c5e53c962c3a0932efa71ea -https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.20.1-py38h2f98cf7_5.tar.bz2#8f989133575134016a0def90ae965e85 +https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.20.2-py38ha217159_3.tar.bz2#d7461e191f7a0522e4709612786bdf4e https://conda.anaconda.org/conda-forge/linux-64/esmf-8.2.0-mpi_mpich_h4975321_100.tar.bz2#56f5c650937b1667ad0a557a0dff3bc4 -https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h539f30e_1.tar.bz2#606777b4da3664d5c9415f5f165349fd -https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.52.5-hc3c00ef_1.tar.bz2#9cd526f006d048eb912e09c5982393ea +https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 +https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.52.5-h0a9e6e8_2.tar.bz2#aa768fdaad03509a97df37f81163346b https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.0-pyhd8ed1ab_0.tar.bz2#9113b4e4fa2fa4a7f129c71a6f319475 -https://conda.anaconda.org/conda-forge/linux-64/pre-commit-2.16.0-py38h578d9bd_0.tar.bz2#61e1e83f0eccef5e449db03c340ab6c2 
+https://conda.anaconda.org/conda-forge/linux-64/pre-commit-2.17.0-py38h578d9bd_0.tar.bz2#839ac9dba9a6126c9532781a9ea4506b https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.12.3-py38h578d9bd_8.tar.bz2#88368a5889f31dff922a2d57bbfc3f5b https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.8-pyhd8ed1ab_1.tar.bz2#53f1387c68c21cecb386e2cde51b3f7c https://conda.anaconda.org/conda-forge/linux-64/esmpy-8.2.0-mpi_mpich_py38h9147699_101.tar.bz2#5a9de1dec507b6614150a77d1aabf257 -https://conda.anaconda.org/conda-forge/linux-64/graphviz-2.50.0-h85b4f2f_1.tar.bz2#bc6418fd87ea67cf14417337ced3daa2 +https://conda.anaconda.org/conda-forge/linux-64/graphviz-2.50.0-h8e749b2_2.tar.bz2#8c20fd968c8b6af73444b1199d5fb0cb https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.5.1-py38h578d9bd_0.tar.bz2#0d78be9cf1c400ba8e3077cf060492f1 https://conda.anaconda.org/conda-forge/noarch/requests-2.27.1-pyhd8ed1ab_0.tar.bz2#7c1c427246b057b8fa97200ecdb2ed62 -https://conda.anaconda.org/conda-forge/noarch/sphinx-4.3.2-pyh6c4a22f_0.tar.bz2#e8ffaea0961c0d7a6767f2394042043d +https://conda.anaconda.org/conda-forge/noarch/sphinx-4.4.0-pyh6c4a22f_1.tar.bz2#a9025d14c2a609e0d895ad3e75b5369c https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.4.0-pyhd8ed1ab_0.tar.bz2#80fd2cc25ad45911b4e42d5b91593e2f https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.10.1-pyhd8ed1ab_0.tar.bz2#4918585fe5e5341740f7e63c61743efb https://conda.anaconda.org/conda-forge/noarch/sphinx-panels-0.6.0-pyhd8ed1ab_0.tar.bz2#6eec6480601f5d15babf9c3b3987f34a From 41c7a71c452e7209f4398883cefde6411dfc2ba3 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Thu, 27 Jan 2022 13:40:14 +0000 Subject: [PATCH 28/69] Add deprecation on routine usage. 
(#4545) --- lib/iris/experimental/raster.py | 23 +++++++++++++++++++---- 1 file changed, 19 insertions(+), 4 deletions(-) diff --git a/lib/iris/experimental/raster.py b/lib/iris/experimental/raster.py index f16dcbef5a4..7c952934695 100644 --- a/lib/iris/experimental/raster.py +++ b/lib/iris/experimental/raster.py @@ -24,10 +24,10 @@ import iris.coord_systems wmsg = ( - "iris.experimental.raster has been deprecated and will be removed in a " - "future release. If you make use of this functionality, please contact " - "the Iris Developers to discuss how to retain it (which may involve " - "reversing the deprecation)." + "iris.experimental.raster is deprecated since version 3.2, and will be " + "removed in a future release. If you make use of this functionality, " + "please contact the Iris Developers to discuss how to retain it (which may " + "involve reversing the deprecation)." ) warn_deprecated(wmsg) @@ -105,6 +105,14 @@ def export_geotiff(cube, fname): """ Writes cube data to raster file format as a PixelIsArea GeoTiff image. + .. deprecated:: 3.2.0 + + This method is scheduled to be removed in a future release, and no + replacement is currently planned. + If you make use of this functionality, please contact the Iris + Developers to discuss how to retain it (which could include reversing + the deprecation). + Args: * cube (Cube): The 2D regularly gridded cube slice to be exported. The cube must have regular, contiguous bounds. @@ -116,6 +124,13 @@ def export_geotiff(cube, fname): http://www.remotesensing.org/geotiff/spec/geotiff2.5.html#2.5.2.2 """ + wmsg = ( + "iris.experimental.raster.export_geotiff has been deprecated, and will " + "be removed in a future release. Please consult the docstring for " + "details." 
+ ) + warn_deprecated(wmsg) + if cube.ndim != 2: raise ValueError("The cube must be two dimensional.") From 9dca7ebaff7e4c29f9e3a9ea7d7cc250916cb909 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Thu, 27 Jan 2022 17:49:22 +0000 Subject: [PATCH 29/69] Improve Coord and Cube attributes printout (#4528) * Print each attribute on a separate line. * Quote strings in cube attribute displays. * Doctest changes for quoted strings in cube attributes. * Clip long strings in attribute printouts, for Coord and Mesh as for Cube. --- docs/src/further_topics/lenient_maths.rst | 16 ++++---- docs/src/further_topics/metadata.rst | 6 +-- docs/src/userguide/cube_maths.rst | 6 +-- docs/src/userguide/cube_statistics.rst | 14 +++---- .../interpolation_and_regridding.rst | 4 +- docs/src/userguide/iris_cubes.rst | 4 +- docs/src/userguide/loading_iris_cubes.rst | 4 +- docs/src/userguide/navigating_a_cube.rst | 8 ++-- docs/src/userguide/subsetting_a_cube.rst | 6 +-- lib/iris/_representation/cube_summary.py | 2 +- lib/iris/common/resolve.py | 40 +++++++++---------- lib/iris/coords.py | 23 +++++++++-- lib/iris/cube.py | 14 +++---- lib/iris/experimental/ugrid/mesh.py | 18 ++++++++- .../0d_str.txt | 2 +- .../1d_str.txt | 2 +- .../2d_str.txt | 2 +- .../3d_str.txt | 2 +- .../4d_str.txt | 2 +- .../results/cdm/str_repr/0d_cube.__str__.txt | 2 +- .../cdm/str_repr/0d_cube.__unicode__.txt | 2 +- .../cdm/str_repr/cell_methods.__str__.txt | 2 +- .../cdm/str_repr/missing_coords_cube.str.txt | 2 +- .../results/cdm/str_repr/similar.__str__.txt | 2 +- .../unicode_attribute.__unicode__.txt | 2 +- .../coord_api/str_repr/aux_nontime_str.txt | 3 +- .../tests/results/derived/no_orog.__str__.txt | 2 +- .../results/derived/removed_orog.__str__.txt | 2 +- .../results/derived/removed_sigma.__str__.txt | 2 +- lib/iris/tests/test_coord_api.py | 6 ++- .../unit/coords/test_AncillaryVariable.py | 3 +- .../tests/unit/coords/test_CellMeasure.py | 3 +- .../unit/coords/test__DimensionalMetadata.py | 29 +++++++++----- 
.../ugrid/mesh/test_Connectivity.py | 3 +- .../unit/experimental/ugrid/mesh/test_Mesh.py | 25 ++++++++++-- .../experimental/ugrid/mesh/test_MeshCoord.py | 3 +- .../cube_printout/test_CubePrintout.py | 6 +-- .../cube_summary/test_CubeSummary.py | 6 ++- 38 files changed, 173 insertions(+), 107 deletions(-) diff --git a/docs/src/further_topics/lenient_maths.rst b/docs/src/further_topics/lenient_maths.rst index 643bd37e76b..818efe47632 100644 --- a/docs/src/further_topics/lenient_maths.rst +++ b/docs/src/further_topics/lenient_maths.rst @@ -84,10 +84,10 @@ represents the output of an low-resolution global atmospheric ``experiment``, forecast_reference_time 2009-09-09 17:10:00 time 2009-09-09 17:10:00 Attributes: - Conventions CF-1.5 + Conventions 'CF-1.5' STASH m01s00i004 - experiment-id RT3 50 - source Data from Met Office Unified Model 7.04 + experiment-id 'RT3 50' + source 'Data from Met Office Unified Model 7.04' Consider also the following :class:`~iris.cube.Cube`, which has the same global spatial extent, and acts as a ``control``, @@ -103,9 +103,9 @@ spatial extent, and acts as a ``control``, model_level_number 1 time 2009-09-09 17:10:00 Attributes: - Conventions CF-1.7 + Conventions 'CF-1.7' STASH m01s00i004 - source Data from Met Office Unified Model 7.04 + source 'Data from Met Office Unified Model 7.04' Now let's subtract these cubes in order to calculate a simple ``difference``, @@ -129,8 +129,8 @@ Now let's subtract these cubes in order to calculate a simple ``difference``, forecast_reference_time 2009-09-09 17:10:00 time 2009-09-09 17:10:00 Attributes: - experiment-id RT3 50 - source Data from Met Office Unified Model 7.04 + experiment-id 'RT3 50' + source 'Data from Met Office Unified Model 7.04' Note that, cube maths automatically takes care of broadcasting the dimensionality of the ``control`` up to that of the ``experiment``, in order to @@ -218,7 +218,7 @@ time perform **strict** cube maths instead, Scalar coordinates: time 2009-09-09 17:10:00 
Attributes: - source Data from Met Office Unified Model 7.04 + source 'Data from Met Office Unified Model 7.04' Although the numerical result of this strict cube maths operation is identical, it is not as rich in metadata as the :ref:`lenient alternative `. diff --git a/docs/src/further_topics/metadata.rst b/docs/src/further_topics/metadata.rst index 689c983e511..1b81f7055c2 100644 --- a/docs/src/further_topics/metadata.rst +++ b/docs/src/further_topics/metadata.rst @@ -120,10 +120,10 @@ For example, given the following :class:`~iris.cube.Cube`, Cell methods: mean time (6 hour) Attributes: - Conventions CF-1.5 - Model scenario A1B + Conventions 'CF-1.5' + Model scenario 'A1B' STASH m01s03i236 - source Data from Met Office Unified Model 6.05 + source 'Data from Met Office Unified Model 6.05' We can easily get all of the associated metadata of the :class:`~iris.cube.Cube` using the ``metadata`` property: diff --git a/docs/src/userguide/cube_maths.rst b/docs/src/userguide/cube_maths.rst index 78490cd749d..e8a1744a44f 100644 --- a/docs/src/userguide/cube_maths.rst +++ b/docs/src/userguide/cube_maths.rst @@ -63,9 +63,9 @@ but with the data representing their difference: forecast_reference_time 1859-09-01 06:00:00 height 1.5 m Attributes: - Conventions CF-1.5 - Model scenario E1 - source Data from Met Office Unified Model 6.05 + Conventions 'CF-1.5' + Model scenario 'E1' + source 'Data from Met Office Unified Model 6.05' .. 
note:: diff --git a/docs/src/userguide/cube_statistics.rst b/docs/src/userguide/cube_statistics.rst index ac66ff4e53b..980f1e132f4 100644 --- a/docs/src/userguide/cube_statistics.rst +++ b/docs/src/userguide/cube_statistics.rst @@ -53,8 +53,8 @@ For instance, suppose we have a cube: forecast_reference_time 2009-11-19 04:00:00 Attributes: STASH m01s00i004 - source Data from Met Office Unified Model - um_version 7.3 + source 'Data from Met Office Unified Model' + um_version '7.3' In this case we have a 4 dimensional cube; @@ -84,8 +84,8 @@ we can pass the coordinate name and the aggregation definition to the mean model_level_number Attributes: STASH m01s00i004 - source Data from Met Office Unified Model - um_version 7.3 + source 'Data from Met Office Unified Model' + um_version '7.3' Similarly other analysis operators such as ``MAX``, ``MIN`` and ``STD_DEV`` @@ -143,8 +143,8 @@ These areas can now be passed to the ``collapsed`` method as weights: mean grid_longitude, grid_latitude Attributes: STASH m01s00i004 - source Data from Met Office Unified Model - um_version 7.3 + source 'Data from Met Office Unified Model' + um_version '7.3' Several examples of area averaging exist in the gallery which may be of interest, including an example on taking a :ref:`global area-weighted mean @@ -229,7 +229,7 @@ Printing this cube now shows that two extra coordinates exist on the cube: Cell methods: mean month, year Attributes: - Conventions CF-1.5 + Conventions 'CF-1.5' STASH m01s00i024 diff --git a/docs/src/userguide/interpolation_and_regridding.rst b/docs/src/userguide/interpolation_and_regridding.rst index 5573c4aa8ee..f590485606b 100644 --- a/docs/src/userguide/interpolation_and_regridding.rst +++ b/docs/src/userguide/interpolation_and_regridding.rst @@ -79,7 +79,7 @@ Let's take the air temperature cube we've seen previously: mean over years time Attributes: STASH m01s16i203 - source Data from Met Office Unified Model + source 'Data from Met Office Unified Model' We can 
interpolate specific values from the coordinates of the cube: @@ -98,7 +98,7 @@ We can interpolate specific values from the coordinates of the cube: mean over years time Attributes: STASH m01s16i203 - source Data from Met Office Unified Model + source 'Data from Met Office Unified Model' As we can see, the resulting cube is scalar and has longitude and latitude coordinates with the values defined in our sample points. diff --git a/docs/src/userguide/iris_cubes.rst b/docs/src/userguide/iris_cubes.rst index 64a9bfd8229..d13dee369c1 100644 --- a/docs/src/userguide/iris_cubes.rst +++ b/docs/src/userguide/iris_cubes.rst @@ -172,8 +172,8 @@ output as this is the quickest way of inspecting the contents of a cube. Here is forecast_reference_time 2009-11-19 04:00:00 Attributes: STASH m01s00i004 - source Data from Met Office Unified Model - um_version 7.3 + source 'Data from Met Office Unified Model' + um_version '7.3' Using this output we can deduce that: diff --git a/docs/src/userguide/loading_iris_cubes.rst b/docs/src/userguide/loading_iris_cubes.rst index ae2f807fe9c..fb938975e8b 100644 --- a/docs/src/userguide/loading_iris_cubes.rst +++ b/docs/src/userguide/loading_iris_cubes.rst @@ -100,8 +100,8 @@ list indexing can be used: forecast_reference_time 2009-11-19 04:00:00 Attributes: STASH m01s00i004 - source Data from Met Office Unified Model - um_version 7.3 + source 'Data from Met Office Unified Model' + um_version '7.3' Notice that the result of printing a **cube** is a little more verbose than it was when printing a **list of cubes**. 
In addition to the very short summary diff --git a/docs/src/userguide/navigating_a_cube.rst b/docs/src/userguide/navigating_a_cube.rst index 74b47b258e1..c5924a61c65 100644 --- a/docs/src/userguide/navigating_a_cube.rst +++ b/docs/src/userguide/navigating_a_cube.rst @@ -33,9 +33,9 @@ We have already seen a basic string representation of a cube when printing: forecast_reference_time 2006-06-15 00:00:00 time 2006-06-15 00:00:00 Attributes: - Conventions CF-1.5 + Conventions 'CF-1.5' STASH m01s16i222 - source Data from Met Office Unified Model 6.01 + source 'Data from Met Office Unified Model 6.01' This representation is equivalent to passing the cube to the :func:`str` function. This function can be used on @@ -169,9 +169,9 @@ We can add and remove coordinates via :func:`Cube.add_dim_coord>> print(cube2) air_temperature / (K) (longitude: 49; latitude: 37) @@ -130,10 +130,10 @@ class Resolve: Cell methods: mean time (6 hour) Attributes: - Conventions CF-1.5 - Model scenario E1 + Conventions 'CF-1.5' + Model scenario 'E1' STASH m01s03i236 - source Data from Met Office Unified Model 6.05 + source 'Data from Met Office Unified Model 6.05' >>> print(data.shape) (240, 37, 49) @@ -153,9 +153,9 @@ class Resolve: Cell methods: mean time (6 hour) Attributes: - Conventions CF-1.5 + Conventions 'CF-1.5' STASH m01s03i236 - source Data from Met Office Unified Model 6.05 + source 'Data from Met Office Unified Model 6.05' Secondly, creating an *empty* ``resolver`` instance, that may be called *multiple* times with *different* :class:`~iris.cube.Cube` operands and *different* ``data``, @@ -2413,10 +2413,10 @@ def mapped(self): Cell methods: mean time (6 hour) Attributes: - Conventions CF-1.5 - Model scenario A1B + Conventions 'CF-1.5' + Model scenario 'A1B' STASH m01s03i236 - source Data from Met Office Unified Model 6.05 + source 'Data from Met Office Unified Model 6.05' >>> print(cube2) air_temperature / (K) (longitude: 49; latitude: 37) Dimension coordinates: @@ -2430,10 +2430,10 
@@ def mapped(self): Cell methods: mean time (6 hour) Attributes: - Conventions CF-1.5 - Model scenario E1 + Conventions 'CF-1.5' + Model scenario 'E1' STASH m01s03i236 - source Data from Met Office Unified Model 6.05 + source 'Data from Met Office Unified Model 6.05' >>> Resolve().mapped is None True >>> resolver = Resolve(cube1, cube2) @@ -2481,10 +2481,10 @@ def shape(self): Cell methods: mean time (6 hour) Attributes: - Conventions CF-1.5 - Model scenario A1B + Conventions 'CF-1.5' + Model scenario 'A1B' STASH m01s03i236 - source Data from Met Office Unified Model 6.05 + source 'Data from Met Office Unified Model 6.05' >>> print(cube2) air_temperature / (K) (longitude: 49; latitude: 37) Dimension coordinates: @@ -2498,10 +2498,10 @@ def shape(self): Cell methods: mean time (6 hour) Attributes: - Conventions CF-1.5 - Model scenario E1 + Conventions 'CF-1.5' + Model scenario 'E1' STASH m01s03i236 - source Data from Met Office Unified Model 6.05 + source 'Data from Met Office Unified Model 6.05' >>> Resolve().shape is None True >>> Resolve(cube1, cube2).shape diff --git a/lib/iris/coords.py b/lib/iris/coords.py index 6e9bd6c8407..b236d407dae 100644 --- a/lib/iris/coords.py +++ b/lib/iris/coords.py @@ -571,10 +571,25 @@ def reindent_data_string(text, n_indent): # work for all those defined so far. show = val is not None and val is not False if show: - # add a section for this property (metadata item) - # TODO: modify to do multi-line attribute output - add_output(newline_indent) - add_output(f"{name}: {val!r}", section=name) + if name == "attributes": + # Use a multi-line form for this. 
+ add_output(newline_indent) + add_output("attributes:", section="attributes") + max_attname_len = max(len(attr) for attr in val.keys()) + for attrname, attrval in val.items(): + attrname = attrname.ljust(max_attname_len) + if isinstance(attrval, str): + # quote strings + attrval = repr(attrval) + # and abbreviate really long ones + attrval = iris.util.clip_string(attrval) + attr_string = f"{attrname} {attrval}" + add_output(newline_indent + indent + attr_string) + else: + # add a one-line section for this property + # (aka metadata field) + add_output(newline_indent) + add_output(f"{name}: {val!r}", section=name) return "\n".join(output_lines) diff --git a/lib/iris/cube.py b/lib/iris/cube.py index 2cd29682dd1..8183dd385c6 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -745,7 +745,7 @@ class Cube(CFVariableMixin): mean over years time Attributes: STASH m01s16i203 - source Data from Met Office Unified Model + source 'Data from Met Office Unified Model' See the :doc:`user guide` for more information. 
@@ -3636,7 +3636,7 @@ def collapsed(self, coords, aggregator, **kwargs): mean month, year mean longitude Attributes: - Conventions CF-1.5 + Conventions 'CF-1.5' STASH m01s00i024 @@ -3871,7 +3871,7 @@ def aggregated_by(self, coords, aggregator, **kwargs): mean month, year mean year Attributes: - Conventions CF-1.5 + Conventions 'CF-1.5' STASH m01s00i024 """ @@ -4076,8 +4076,8 @@ def rolling_window(self, coord, aggregator, window, **kwargs): Attributes: STASH m01s00i024 source \ -Data from Met Office Unified Model - um_version 7.6 +'Data from Met Office Unified Model' + um_version '7.6' >>> print(air_press.rolling_window('time', iris.analysis.MEAN, 3)) @@ -4102,8 +4102,8 @@ def rolling_window(self, coord, aggregator, window, **kwargs): Attributes: STASH m01s00i024 source \ -Data from Met Office Unified Model - um_version 7.6 +'Data from Met Office Unified Model' + um_version '7.6' Notice that the forecast_period dimension now represents the 4 possible windows of size 3 from the original cube. diff --git a/lib/iris/experimental/ugrid/mesh.py b/lib/iris/experimental/ugrid/mesh.py index 5cd4102f1b0..974a5630463 100644 --- a/lib/iris/experimental/ugrid/mesh.py +++ b/lib/iris/experimental/ugrid/mesh.py @@ -29,7 +29,7 @@ from ...config import get_logger from ...coords import AuxCoord, _DimensionalMetadata from ...exceptions import ConnectivityNotFoundError, CoordinateNotFoundError -from ...util import array_equal, guess_coord_axis +from ...util import array_equal, clip_string, guess_coord_axis from .metadata import ConnectivityMetadata, MeshCoordMetadata, MeshMetadata # Configure the logger. @@ -1053,7 +1053,21 @@ def line(text, i_indent=0): else: show = val is not None if show: - line(f"{name}: {val!r}", 1) + if name == "attributes": + # Use a multi-line form for this. 
+ line("attributes:", 1) + max_attname_len = max(len(attr) for attr in val.keys()) + for attrname, attrval in val.items(): + attrname = attrname.ljust(max_attname_len) + if isinstance(attrval, str): + # quote strings + attrval = repr(attrval) + # and abbreviate really long ones + attrval = clip_string(attrval) + attr_string = f"{attrname} {attrval}" + line(attr_string, 2) + else: + line(f"{name}: {val!r}", 1) result = "\n".join(lines) return result diff --git a/lib/iris/tests/results/cdm/TestStockCubeStringRepresentations/0d_str.txt b/lib/iris/tests/results/cdm/TestStockCubeStringRepresentations/0d_str.txt index a6738e654f3..a4c1157df26 100644 --- a/lib/iris/tests/results/cdm/TestStockCubeStringRepresentations/0d_str.txt +++ b/lib/iris/tests/results/cdm/TestStockCubeStringRepresentations/0d_str.txt @@ -10,4 +10,4 @@ air_potential_temperature / (K) (scalar cube) surface_altitude 413.93686 m time 2009-09-09 17:10:00 Attributes: - source Iris test case \ No newline at end of file + source 'Iris test case' \ No newline at end of file diff --git a/lib/iris/tests/results/cdm/TestStockCubeStringRepresentations/1d_str.txt b/lib/iris/tests/results/cdm/TestStockCubeStringRepresentations/1d_str.txt index 95f7e7b57e0..7d43a997dac 100644 --- a/lib/iris/tests/results/cdm/TestStockCubeStringRepresentations/1d_str.txt +++ b/lib/iris/tests/results/cdm/TestStockCubeStringRepresentations/1d_str.txt @@ -13,4 +13,4 @@ air_potential_temperature / (K) (grid_longitude: 100) sigma 0.9994238, bound=(1.0, 0.99846387) time 2009-09-09 17:10:00 Attributes: - source Iris test case \ No newline at end of file + source 'Iris test case' \ No newline at end of file diff --git a/lib/iris/tests/results/cdm/TestStockCubeStringRepresentations/2d_str.txt b/lib/iris/tests/results/cdm/TestStockCubeStringRepresentations/2d_str.txt index c4184d199ac..9adeb35c738 100644 --- a/lib/iris/tests/results/cdm/TestStockCubeStringRepresentations/2d_str.txt +++ 
b/lib/iris/tests/results/cdm/TestStockCubeStringRepresentations/2d_str.txt @@ -13,4 +13,4 @@ air_potential_temperature / (K) (grid_latitude: 100; grid_longitude: 100) sigma 0.9994238, bound=(1.0, 0.99846387) time 2009-09-09 17:10:00 Attributes: - source Iris test case \ No newline at end of file + source 'Iris test case' \ No newline at end of file diff --git a/lib/iris/tests/results/cdm/TestStockCubeStringRepresentations/3d_str.txt b/lib/iris/tests/results/cdm/TestStockCubeStringRepresentations/3d_str.txt index af81d4e9914..dc5e71433f8 100644 --- a/lib/iris/tests/results/cdm/TestStockCubeStringRepresentations/3d_str.txt +++ b/lib/iris/tests/results/cdm/TestStockCubeStringRepresentations/3d_str.txt @@ -13,4 +13,4 @@ air_potential_temperature / (K) (model_level_number: 70; grid_latitude: 100; forecast_period 0.0 hours time 2009-09-09 17:10:00 Attributes: - source Iris test case \ No newline at end of file + source 'Iris test case' \ No newline at end of file diff --git a/lib/iris/tests/results/cdm/TestStockCubeStringRepresentations/4d_str.txt b/lib/iris/tests/results/cdm/TestStockCubeStringRepresentations/4d_str.txt index afcdedf100b..52adc03efb6 100644 --- a/lib/iris/tests/results/cdm/TestStockCubeStringRepresentations/4d_str.txt +++ b/lib/iris/tests/results/cdm/TestStockCubeStringRepresentations/4d_str.txt @@ -13,4 +13,4 @@ air_potential_temperature / (K) (time: 6; model_level_number: 70; grid_latit Scalar coordinates: forecast_period 0.0 hours Attributes: - source Iris test case \ No newline at end of file + source 'Iris test case' \ No newline at end of file diff --git a/lib/iris/tests/results/cdm/str_repr/0d_cube.__str__.txt b/lib/iris/tests/results/cdm/str_repr/0d_cube.__str__.txt index 6a3276d8618..02e9849d38b 100644 --- a/lib/iris/tests/results/cdm/str_repr/0d_cube.__str__.txt +++ b/lib/iris/tests/results/cdm/str_repr/0d_cube.__str__.txt @@ -8,4 +8,4 @@ air_temperature / (K) (scalar cube) time 1998-12-01 00:00:00 Attributes: STASH m01s16i203 - source Data 
from Met Office Unified Model \ No newline at end of file + source 'Data from Met Office Unified Model' \ No newline at end of file diff --git a/lib/iris/tests/results/cdm/str_repr/0d_cube.__unicode__.txt b/lib/iris/tests/results/cdm/str_repr/0d_cube.__unicode__.txt index 6a3276d8618..02e9849d38b 100644 --- a/lib/iris/tests/results/cdm/str_repr/0d_cube.__unicode__.txt +++ b/lib/iris/tests/results/cdm/str_repr/0d_cube.__unicode__.txt @@ -8,4 +8,4 @@ air_temperature / (K) (scalar cube) time 1998-12-01 00:00:00 Attributes: STASH m01s16i203 - source Data from Met Office Unified Model \ No newline at end of file + source 'Data from Met Office Unified Model' \ No newline at end of file diff --git a/lib/iris/tests/results/cdm/str_repr/cell_methods.__str__.txt b/lib/iris/tests/results/cdm/str_repr/cell_methods.__str__.txt index ba93542e518..ffb6a62daf6 100644 --- a/lib/iris/tests/results/cdm/str_repr/cell_methods.__str__.txt +++ b/lib/iris/tests/results/cdm/str_repr/cell_methods.__str__.txt @@ -14,4 +14,4 @@ air_temperature / (K) (latitude: 73; longitude: 96) percentile longitude (6 minutes, This is another test comment) Attributes: STASH m01s16i203 - source Data from Met Office Unified Model \ No newline at end of file + source 'Data from Met Office Unified Model' \ No newline at end of file diff --git a/lib/iris/tests/results/cdm/str_repr/missing_coords_cube.str.txt b/lib/iris/tests/results/cdm/str_repr/missing_coords_cube.str.txt index 1b86bd6597d..0ac5bd3c8af 100644 --- a/lib/iris/tests/results/cdm/str_repr/missing_coords_cube.str.txt +++ b/lib/iris/tests/results/cdm/str_repr/missing_coords_cube.str.txt @@ -11,4 +11,4 @@ air_potential_temperature / (K) (-- : 6; -- : 70; grid_latitude: 100; grid_l Scalar coordinates: forecast_period 0.0 hours Attributes: - source Iris test case \ No newline at end of file + source 'Iris test case' \ No newline at end of file diff --git a/lib/iris/tests/results/cdm/str_repr/similar.__str__.txt 
b/lib/iris/tests/results/cdm/str_repr/similar.__str__.txt index fc274ed4c1d..394e52e5c90 100644 --- a/lib/iris/tests/results/cdm/str_repr/similar.__str__.txt +++ b/lib/iris/tests/results/cdm/str_repr/similar.__str__.txt @@ -15,4 +15,4 @@ air_temperature / (K) (latitude: 73; longitude: 96) time 1998-12-01 00:00:00 Attributes: STASH m01s16i203 - source Data from Met Office Unified Model \ No newline at end of file + source 'Data from Met Office Unified Model' \ No newline at end of file diff --git a/lib/iris/tests/results/cdm/str_repr/unicode_attribute.__unicode__.txt b/lib/iris/tests/results/cdm/str_repr/unicode_attribute.__unicode__.txt index 29c181345c4..594ad116888 100644 --- a/lib/iris/tests/results/cdm/str_repr/unicode_attribute.__unicode__.txt +++ b/lib/iris/tests/results/cdm/str_repr/unicode_attribute.__unicode__.txt @@ -2,4 +2,4 @@ thingness / (1) (foo: 11) Dimension coordinates: foo x Attributes: - source ꀀabcd޴ \ No newline at end of file + source 'ꀀabcd\u07b4' \ No newline at end of file diff --git a/lib/iris/tests/results/coord_api/str_repr/aux_nontime_str.txt b/lib/iris/tests/results/coord_api/str_repr/aux_nontime_str.txt index 0361e88eef7..3030ea962a9 100644 --- a/lib/iris/tests/results/coord_api/str_repr/aux_nontime_str.txt +++ b/lib/iris/tests/results/coord_api/str_repr/aux_nontime_str.txt @@ -11,4 +11,5 @@ DimCoord : level_height / (m) shape: (10,) bounds(10, 2) dtype: float32 long_name: 'level_height' - attributes: {'positive': 'up'} \ No newline at end of file + attributes: + positive 'up' \ No newline at end of file diff --git a/lib/iris/tests/results/derived/no_orog.__str__.txt b/lib/iris/tests/results/derived/no_orog.__str__.txt index e277b5d2765..de139592a6e 100644 --- a/lib/iris/tests/results/derived/no_orog.__str__.txt +++ b/lib/iris/tests/results/derived/no_orog.__str__.txt @@ -13,4 +13,4 @@ air_potential_temperature / (K) (time: 6; model_level_number: 70; grid_latit Scalar coordinates: forecast_period 0.0 hours Attributes: - source Iris 
test case \ No newline at end of file + source 'Iris test case' \ No newline at end of file diff --git a/lib/iris/tests/results/derived/removed_orog.__str__.txt b/lib/iris/tests/results/derived/removed_orog.__str__.txt index 0c24cded808..a9ef3bd0171 100644 --- a/lib/iris/tests/results/derived/removed_orog.__str__.txt +++ b/lib/iris/tests/results/derived/removed_orog.__str__.txt @@ -12,4 +12,4 @@ air_potential_temperature / (K) (time: 6; model_level_number: 70; grid_latit Scalar coordinates: forecast_period 0.0 hours Attributes: - source Iris test case \ No newline at end of file + source 'Iris test case' \ No newline at end of file diff --git a/lib/iris/tests/results/derived/removed_sigma.__str__.txt b/lib/iris/tests/results/derived/removed_sigma.__str__.txt index 94e850ec629..c3eaf484833 100644 --- a/lib/iris/tests/results/derived/removed_sigma.__str__.txt +++ b/lib/iris/tests/results/derived/removed_sigma.__str__.txt @@ -12,4 +12,4 @@ air_potential_temperature / (K) (time: 6; model_level_number: 70; grid_latit Scalar coordinates: forecast_period 0.0 hours Attributes: - source Iris test case \ No newline at end of file + source 'Iris test case' \ No newline at end of file diff --git a/lib/iris/tests/test_coord_api.py b/lib/iris/tests/test_coord_api.py index 3445b089e8e..87270b524c3 100644 --- a/lib/iris/tests/test_coord_api.py +++ b/lib/iris/tests/test_coord_api.py @@ -256,7 +256,8 @@ def test_basic(self): " points: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]", " shape: (10,)", " dtype: int64", - " attributes: {'monty': 'python'}", + " attributes:", + " monty 'python'", ] ) self.assertEqual(result, str(b)) @@ -360,7 +361,8 @@ def test_basic(self): " points: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]", " shape: (10,)", " dtype: int64", - " attributes: {'monty': 'python'}", + " attributes:", + " monty 'python'", ] ) self.assertEqual(result, str(b)) diff --git a/lib/iris/tests/unit/coords/test_AncillaryVariable.py b/lib/iris/tests/unit/coords/test_AncillaryVariable.py index 
51e070a2260..4d520ac4149 100644 --- a/lib/iris/tests/unit/coords/test_AncillaryVariable.py +++ b/lib/iris/tests/unit/coords/test_AncillaryVariable.py @@ -442,7 +442,8 @@ def test_non_time_values(self): " standard_name: 'height'", " long_name: 'height of detector'", " var_name: 'height'", - " attributes: {'notes': 'Measured from sea level'}", + " attributes:", + " notes 'Measured from sea level'", ] ) self.assertEqual(expected, ancillary_var.__str__()) diff --git a/lib/iris/tests/unit/coords/test_CellMeasure.py b/lib/iris/tests/unit/coords/test_CellMeasure.py index 873a257c8e7..0bd66c6e980 100644 --- a/lib/iris/tests/unit/coords/test_CellMeasure.py +++ b/lib/iris/tests/unit/coords/test_CellMeasure.py @@ -103,7 +103,8 @@ def test___str__(self): " standard_name: 'cell_area'", " long_name: 'measured_area'", " var_name: 'area'", - " attributes: {'notes': '1m accuracy'}", + " attributes:", + " notes '1m accuracy'", " measure: 'area'", ] ) diff --git a/lib/iris/tests/unit/coords/test__DimensionalMetadata.py b/lib/iris/tests/unit/coords/test__DimensionalMetadata.py index 9a09c4e807e..fd10a6f2643 100644 --- a/lib/iris/tests/unit/coords/test__DimensionalMetadata.py +++ b/lib/iris/tests/unit/coords/test__DimensionalMetadata.py @@ -320,8 +320,11 @@ def test_attributes(self): "empty": [], "None": None, "string": "this", - "long_long_long_long_long_long_name": 3, - "other": "long_long_long_long_long_long_value", + "long_long_long_long_long_name": 3, + "other": ( + "long_long_long_long_long_long_long_long_" + "long_long_long_long_long_long_long_long_value" + ), "float": 4.3, } ) @@ -333,15 +336,19 @@ def test_attributes(self): " shape: (5,)", " dtype: float64", " long_name: 'x'", - # At present, some nasty long lines... + " attributes:", + " array [0. 1. 2. 3. 4. 5. 
6.]", + " list [1, 2, 3]", + " empty []", + " None None", + " string 'this'", + " long_long_long_long_long_name 3", ( - " attributes: {'array': array([0., 1., 2., 3., 4., 5., " - "6.]), 'list': [1, 2, 3], 'empty': [], " - "'None': None, 'string': 'this', " - "'long_long_long_long_long_long_name': 3, " - "'other': 'long_long_long_long_long_long_value', " - "'float': 4.3}" + " other " + "'long_long_long_long_long_long_long_long_" + "long_long_long_long_long_long..." ), + " float 4.3", ] self.assertLines(expected, result) @@ -920,7 +927,9 @@ def test_meshcoord(self): " dtype: int64", " standard_name: 'longitude'", " long_name: 'long-name'", - " attributes: {'a': 1, 'b': 'c'}", + " attributes:", + " a 1", + " b 'c'", " axis: 'x'", ] self.assertLines(expected, result) diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Connectivity.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Connectivity.py index d447957918a..9a81c79d449 100644 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Connectivity.py +++ b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Connectivity.py @@ -111,7 +111,8 @@ def test___str__(self): " dtype: int64", " long_name: 'my_face_nodes'", " var_name: 'face_nodes'", - " attributes: {'notes': 'this is a test'}", + " attributes:", + " notes 'this is a test'", " cf_role: 'face_node_connectivity'", " start_index: 1", " location_axis: 1", diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py index 6bd7ea54662..f39f3706ee1 100644 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py +++ b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py @@ -129,7 +129,8 @@ def test___str__(self): " ", " long_name: 'my_topology_mesh'", " var_name: 'mesh'", - " attributes: {'notes': 'this is a test'}", + " attributes:", + " notes 'this is a test'", ] self.assertEqual(expected, str(self.mesh).split("\n")) @@ -427,7 +428,8 @@ def test___str__(self): ), " long_name: 
'my_topology_mesh'", " var_name: 'mesh'", - " attributes: {'notes': 'this is a test'}", + " attributes:", + " notes 'this is a test'", ] self.assertEqual(expected, str(self.mesh).split("\n")) @@ -474,7 +476,8 @@ def test___str__noedgecoords(self): ), " long_name: 'my_topology_mesh'", " var_name: 'mesh'", - " attributes: {'notes': 'this is a test'}", + " attributes:", + " notes 'this is a test'", ] self.assertEqual(expected, str(alt_mesh).split("\n")) @@ -696,6 +699,19 @@ def test___str__emptyattributes(self): self.mesh.attributes.clear() self.assertNotIn("attributes", str(self.mesh)) + def test__str__longstringattribute(self): + self.mesh.attributes["long_string"] = ( + "long_x_10_long_x_20_long_x_30_long_x_40_" + "long_x_50_long_x_60_long_x_70_long_x_80_" + ) + result = str(self.mesh) + # Note: initial single-quote, but no final one : this is correct ! + expected = ( + "'long_x_10_long_x_20_long_x_30_long_x_40_" + "long_x_50_long_x_60_long_x_70..." + ) + self.assertIn(expected + ":END", result + ":END") + def test___str__units_stdname(self): # These are usually missing, but they *can* be present. 
mesh_kwargs = self.kwargs.copy() @@ -713,7 +729,8 @@ def test___str__units_stdname(self): " long_name: 'my_topology_mesh'", " var_name: 'mesh'", " units: Unit('m')", - " attributes: {'notes': 'this is a test'}", + " attributes:", + " notes 'this is a test'", ] ) self.assertTrue(result.endswith(expected)) diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py index 8f4222ca7bd..ce99a8b4be5 100644 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py +++ b/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py @@ -300,7 +300,8 @@ def _expected_elements_regexp( if not matched_any_upto: regexp += ".*" matched_any_upto = True - regexp += "attributes: {[^}]*}\n *" + # match 'attributes:' followed by N*lines with larger indent + regexp += "attributes:(\n [^ \n]+ +[^ \n]+)+\n " # After those items, expect 'axis' next # N.B. this FAILS if we had attributes when we didn't expect them regexp += f"axis: '{axis}'$" # N.B. 
this is always the end diff --git a/lib/iris/tests/unit/representation/cube_printout/test_CubePrintout.py b/lib/iris/tests/unit/representation/cube_printout/test_CubePrintout.py index 8370c719f03..40a932b9e0c 100644 --- a/lib/iris/tests/unit/representation/cube_printout/test_CubePrintout.py +++ b/lib/iris/tests/unit/representation/cube_printout/test_CubePrintout.py @@ -142,7 +142,7 @@ def test_columns_long_attribute(self): " Attributes:", ( " very_very_very_very_very_long_name " - "longish string extends beyond dim columns" + "'longish string extends beyond dim columns'" ), ] self.assertEqual(rep, expected) @@ -442,7 +442,7 @@ def test_section_cube_attributes(self): " Attributes:", " list [3]", " number 1.2", - " string four five in a string", + " string 'four five in a string'", " z_tupular (6, (7, 8))", ] self.assertEqual(rep, expected) @@ -464,7 +464,7 @@ def test_section_cube_attributes__string_extras(self): " Attributes:", " escaped 'escaped\\tstring'", ( - " long this is very very very " + " long 'this is very very very " "very very very very very very very very very very..." ), ( diff --git a/lib/iris/tests/unit/representation/cube_summary/test_CubeSummary.py b/lib/iris/tests/unit/representation/cube_summary/test_CubeSummary.py index c8af3437e66..3e411c020dd 100644 --- a/lib/iris/tests/unit/representation/cube_summary/test_CubeSummary.py +++ b/lib/iris/tests/unit/representation/cube_summary/test_CubeSummary.py @@ -183,7 +183,11 @@ def test_attributes(self): attribute_section = rep.scalar_sections["Attributes:"] attribute_contents = attribute_section.contents - expected_contents = ["a: 1", "b: two", "c: ' this \\n that\\tand.'"] + expected_contents = [ + "a: 1", + "b: 'two'", + "c: ' this \\n that\\tand.'", + ] # Note: a string with \n or \t in it gets "repr-d". # Other strings don't (though in coord 'extra' lines, they do.) 
From 29407a1d03950f0f9f5dd7cf8ccee3bbee856f0b Mon Sep 17 00:00:00 2001 From: Bill Little Date: Thu, 27 Jan 2022 18:13:45 +0000 Subject: [PATCH 30/69] docs: fix docs/conf.py plot_gallery warning (#4539) --- docs/src/conf.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/src/conf.py b/docs/src/conf.py index 5a436f86cbe..39fd6dc3c96 100644 --- a/docs/src/conf.py +++ b/docs/src/conf.py @@ -329,8 +329,10 @@ def _dotv(version): "gallery_dirs": ["generated/gallery"], # filename pattern for the files in the gallery "filename_pattern": "/plot_", - # filename patternt to ignore in the gallery + # filename pattern to ignore in the gallery "ignore_pattern": r"__init__\.py", + # force gallery building, unless overridden (see src/Makefile) + "plot_gallery": "'True'", } # ----------------------------------------------------------------------------- From 831a76795baac80fcea55705993cc41d50ee1d64 Mon Sep 17 00:00:00 2001 From: Ruth Comer <10599679+rcomer@users.noreply.github.com> Date: Thu, 27 Jan 2022 18:37:38 +0000 Subject: [PATCH 31/69] Update latest.rst (#4550) --- docs/src/whatsnew/latest.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 7c1fd330755..31b22f499a0 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -325,8 +325,8 @@ This document explains the changes made to Iris for this release #. `@lbdreyer`_ corrected the license PyPI classifier. (:pull:`4435`) -#. `@aaronspring `_ exchanged `dask` with - `dask-core` in testing environments reducing the number of dependencies +#. `@aaronspring `_ exchanged ``dask`` with + ``dask-core`` in testing environments reducing the number of dependencies installed for testing. (:pull:`4434`) #. 
`@wjbenfold`_ prevented github action runs in forks (:issue:`4441`, From 224f1228b0532d0f0da861f1605c420153750d78 Mon Sep 17 00:00:00 2001 From: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> Date: Fri, 28 Jan 2022 13:30:11 +0000 Subject: [PATCH 32/69] Mesh Documentation (#4504) * UGRID docs stub pages. * UGRID docs index page. * Fleshed out UGRID docs stubs. * Correct UGRID docs linkage. * Minor adjustments to ugrid/index.rst. * UGRID data model explanation. * UGRID partner packages docs page. * Detail on Iris UGRID data model. * UGRID data model docs tweaks. * Reduced emphasis on UGRID in docs. * UGRID partner packages rephrasing. * Remove UGRID partner package comment. * Reduced emphasis on UGRID in docs. * Comment about UGRID data model diagrams. * Start documenting UGRID operations examples. * Improved linking between UGRID doc pages. * UGRID docs review actions. * Example UGRID operations. * More UGRID operations documenting. * UGRID docs review actions from @pp-mo. * UGRID docs review actions from @wjbenfold. * Minor UGRID docs corrections. * UGRID docs updates for new Connectivity names. * More example UGRID operations. * UGRID region extraction example tweaks. * UGRID docs review actions from @lbdreyer. * Second round UGRID docs review actions from @wjbenfold. * UGRID docs review action from @stephenworsley. * Add UGRID Operations Summary subheading. * Mention the unstructured dimension more in UGRID docs. * UGRID docs plotting example. * UGRID docs tidy-up. * UGRID docs plotting example. * UGRID docs lat-lon switcheroo. * UGRID docs image fix. * UGRID data model MeshCoord clarification. * Corrected UGRID docs example coords. * UGRID docs more Mesh and Cube printouts. * UGRID docs be more explicit about region extraction. * UGRID docs be more generic about low level languages. * UGRID docs cubed-sphere example explicitly 2-dimensional. * Make code-examples copybutton respect line continuations. * Sphinx copybutton fix. 
Co-authored-by: Patrick Peglar --- docs/src/common_links.inc | 1 + docs/src/conf.py | 4 +- docs/src/further_topics/index.rst | 1 + docs/src/further_topics/ugrid/data_model.rst | 566 ++++++++++ .../ugrid/images/data_structured_grid.svg | 1 + .../ugrid/images/data_ugrid_mesh.svg | 1 + .../ugrid/images/geovistalogo.svg | 573 ++++++++++ .../ugrid/images/iris-esmf-regrid.svg | 93 ++ .../ugrid/images/plotting_basic.png | Bin 0 -> 42338 bytes .../ugrid/images/plotting_global.png | Bin 0 -> 133531 bytes .../ugrid/images/ugrid_edge_data.svg | 1 + .../ugrid/images/ugrid_element_centres.svg | 1 + .../ugrid/images/ugrid_node_independence.svg | 1 + .../ugrid/images/ugrid_variable_faces.svg | 1 + docs/src/further_topics/ugrid/index.rst | 54 + docs/src/further_topics/ugrid/operations.rst | 995 ++++++++++++++++++ .../further_topics/ugrid/partner_packages.rst | 100 ++ docs/src/index.rst | 1 + docs/src/whatsnew/latest.rst | 19 +- 19 files changed, 2403 insertions(+), 10 deletions(-) create mode 100644 docs/src/further_topics/ugrid/data_model.rst create mode 100644 docs/src/further_topics/ugrid/images/data_structured_grid.svg create mode 100644 docs/src/further_topics/ugrid/images/data_ugrid_mesh.svg create mode 100644 docs/src/further_topics/ugrid/images/geovistalogo.svg create mode 100644 docs/src/further_topics/ugrid/images/iris-esmf-regrid.svg create mode 100644 docs/src/further_topics/ugrid/images/plotting_basic.png create mode 100644 docs/src/further_topics/ugrid/images/plotting_global.png create mode 100644 docs/src/further_topics/ugrid/images/ugrid_edge_data.svg create mode 100644 docs/src/further_topics/ugrid/images/ugrid_element_centres.svg create mode 100644 docs/src/further_topics/ugrid/images/ugrid_node_independence.svg create mode 100644 docs/src/further_topics/ugrid/images/ugrid_variable_faces.svg create mode 100644 docs/src/further_topics/ugrid/index.rst create mode 100644 docs/src/further_topics/ugrid/operations.rst create mode 100644 
docs/src/further_topics/ugrid/partner_packages.rst diff --git a/docs/src/common_links.inc b/docs/src/common_links.inc index eb1ea60b7aa..67fc493e3e5 100644 --- a/docs/src/common_links.inc +++ b/docs/src/common_links.inc @@ -37,6 +37,7 @@ .. _test-iris-imagehash: https://github.com/SciTools/test-iris-imagehash .. _using git: https://docs.github.com/en/github/using-git .. _requirements/ci/: https://github.com/SciTools/iris/tree/main/requirements/ci +.. _CF-UGRID: https://ugrid-conventions.github.io/ugrid-conventions/ .. comment diff --git a/docs/src/conf.py b/docs/src/conf.py index 39fd6dc3c96..19f22e808f6 100644 --- a/docs/src/conf.py +++ b/docs/src/conf.py @@ -200,7 +200,9 @@ def _dotv(version): # -- copybutton extension ----------------------------------------------------- # See https://sphinx-copybutton.readthedocs.io/en/latest/ -copybutton_prompt_text = ">>> " +copybutton_prompt_text = r">>> |\.\.\. " +copybutton_prompt_is_regexp = True +copybutton_line_continuation_character = "\\" # sphinx.ext.todo configuration ----------------------------------------------- # See https://www.sphinx-doc.org/en/master/usage/extensions/todo.html diff --git a/docs/src/further_topics/index.rst b/docs/src/further_topics/index.rst index ef6436b0efa..81bff2f7641 100644 --- a/docs/src/further_topics/index.rst +++ b/docs/src/further_topics/index.rst @@ -20,6 +20,7 @@ that may be of interest to the more advanced or curious user. * :doc:`metadata` * :doc:`lenient_metadata` * :doc:`lenient_maths` +* :ref:`ugrid` .. _SciTools/iris: https://github.com/SciTools/iris diff --git a/docs/src/further_topics/ugrid/data_model.rst b/docs/src/further_topics/ugrid/data_model.rst new file mode 100644 index 00000000000..4a2f64f6279 --- /dev/null +++ b/docs/src/further_topics/ugrid/data_model.rst @@ -0,0 +1,566 @@ +.. include:: ../../common_links.inc + +.. _ugrid model: + +The Mesh Data Model +******************* + +.. 
important:: + + This page is intended to summarise the essentials that Iris users need + to know about meshes. For exhaustive details on UGRID itself: + `visit the official UGRID conventions site`__. + +Evolution, not revolution +========================= +Mesh support has been designed wherever possible to fit within the existing +Iris model. Meshes concern only the spatial geography of data, and can +optionally be limited to just the horizontal geography (e.g. X and Y). Other +dimensions such as time or ensemble member (and often vertical levels) +retain their familiar structured format. + +The UGRID conventions themselves are designed as an addition to the existing CF +conventions, which are at the core of Iris' philosophy. + +What's Different? +================= + +The mesh format represents data's geography using an **unstructured +mesh**. This has significant pros and cons when compared to a structured grid. + +.. contents:: + :local: + +The Detail +---------- +.. + The diagram images are SVG's, so editable by any graphical software + (e.g. Inkscape). They were originally made in MS PowerPoint. + + Uses the IBM Colour Blind Palette (see + http://ibm-design-language.eu-de.mybluemix.net/design/language/resources/color-library + ) + +Structured Grids (the old world) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Assigning data to locations using a structured grid is essentially an act of +matching coordinate arrays to each dimension of the data array. The data can +also be represented as an area (instead of a point) by including a bounds array +for each coordinate array. :numref:`data_structured_grid` visualises an +example. + +.. _data_structured_grid: +.. figure:: images/data_structured_grid.svg + :alt: Diagram of how data is represented on a structured grid + :align: right + :width: 1280 + + Data on a structured grid. + + 1D coordinate arrays (pink circles) are combined to construct a structured + grid of points (pink crosses). 
2D bounds arrays (blue circles) can also be + used to describe the 1D boundaries (blue lines) at either side of each + rank of points; each point therefore having four bounds (x+y, upper+lower), + together describing a quadrilateral area around that point. Data from the + 2D data array (orange circles) can be assigned to these point locations + (orange diamonds) or area locations (orange quads) by matching the relative + positions in the data array to the relative spatial positions - see the + black outlined shapes as examples of this in action. + +Unstructured Meshes (the new world) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +A mesh is made up of different types of **element**: + +.. list-table:: + :widths: 15, 15, 70 + + * - 0D + - ``node`` + - The 'core' of the mesh. A point position in space, constructed from + 2 or 3 coordinates (2D or 3D space). + * - 1D + - ``edge`` + - Constructed by connecting 2 nodes. + * - 2D + - ``face`` + - Constructed by connecting 3 or more nodes. + * - 3D + - ``volume`` + - Constructed by connecting 4 or more nodes (which must each have 3 + coordinates - 3D space). + +Every node in the mesh is defined by indexing the 1-dimensional X and Y (and +optionally Z) coordinate arrays (the ``node_coordinates``) - e.g. +``(x[3], y[3])`` gives the position of the fourth node. Note that this means +each node has its own coordinates, independent of every other node. + +Any higher dimensional element - an edge/face/volume - is described by a +sequence of the indices of the nodes that make up that element. E.g. a +triangular face made from connecting the first, third and fourth nodes: +``[0, 2, 3]``. These 1D sequences combine into a 2D array enumerating **all** +the elements of that type - edge/face/volume - called a **connectivity**. +E.g. we could make a mesh of 4 nodes, with 2 triangles described using this +``face_node_connectivity``: ``[[0, 2, 3], [3, 2, 1]]`` (note the shared nodes). + +.. 
note:: More on Connectivities: + + * The element type described by a connectivity is known as its + **location**; ``edge`` in ``edge_node_connectivity``. + * According to the UGRID conventions, the nodes in a face should be + listed in "anti-clockwise order from above". + * Connectivities also exist to connect the higher dimensional elements, + e.g. ``face_edge_connectivity``. These are optional conveniences to + speed up certain operations and will not be discussed here. + +.. important:: + + **Meshes are unstructured**. The mesh elements - represented in the + coordinate and connectivity arrays detailed above - are enumerated + along a single **unstructured dimension**. An element's position along + this dimension has nothing to do with its spatial position. + +A data variable associated with a mesh has a **location** of either ``node``, +``edge``, ``face`` or ``volume``. The data is stored in a 1D array with one +datum per element, matched to its element by matching the datum index with the +coordinate or connectivity index along the **unstructured dimension**. So for +an example data array called ``foo``: +``foo[3]`` would be at position ``(x[3], y[3])`` if it were node-located, or at +``faces[3]`` if it were face-located. :numref:`data_ugrid_mesh` visualises an +example of what is described above. + +.. _data_ugrid_mesh: +.. figure:: images/data_ugrid_mesh.svg + :alt: Diagram of how data is represented on an unstructured mesh + :align: right + :width: 1280 + + Data on an unstructured mesh + + 1D coordinate arrays (pink circles) describe node positions in space (pink + crosses). A 2D connectivity array (blue circles) describes faces by + connecting four nodes - by referencing their indices - into a face outline + (blue outlines on the map). 
Data from the 1D data array (orange circles) + can be assigned to these node locations (orange diamonds) or face locations + (orange quads) by matching the indices in the data array to the indices in + the coordinate arrays (for nodes) or connectivity array (for faces). See + the black outlined shapes as examples of index matching in action, and the + black stippled shapes to demonstrate that relative array position confers + no relative spatial information. + +---- + +The mesh model also supports edges/faces/volumes having associated 'centre' +coordinates - to allow point data to be assigned to these elements. 'Centre' is +just a convenience term - the points can exist anywhere within their respective +elements. See :numref:`ugrid_element_centres` for a visualised example. + +.. _ugrid_element_centres: +.. figure:: images/ugrid_element_centres.svg + :alt: Diagram demonstrating mesh face-centred data. + :align: right + :width: 1280 + + Data can be assigned to mesh edge/face/volume 'centres' + + 1D *node* coordinate arrays (pink circles) describe node positions in + space (pink crosses). A 2D connectivity array (blue circles) describes + faces by connecting four nodes into a face outline (blue outlines on the + map). Further 1D *face* coordinate arrays (pink circles) describe a + 'centre' point position (pink stars) for each face enumerated in the + connectivity array. + +Mesh Flexibility +++++++++++++++++ +Above we have seen how one could replicate data on a structured grid using +a mesh instead. But the utility of a mesh is the extra flexibility it offers. +Here are the main examples: + +Every node is completely independent - every one can have unique X and +Y (and Z) coordinate values. See :numref:`ugrid_node_independence`. + +.. _ugrid_node_independence: +..
figure:: images/ugrid_node_independence.svg + :alt: Diagram demonstrating the independence of each mesh node + :align: right + :width: 300 + + Every mesh node is completely independent + + The same array shape and structure used to describe the node positions + (pink crosses) in a regular grid (left-hand maps) is equally able to + describe **any** position for these nodes (e.g. the right-hand maps), + simply by changing the array values. The quadrilateral faces (blue + outlines) can therefore be given any quadrilateral shape by re-positioning + their constituent nodes. + +Faces and volumes can have variable node counts, i.e. different numbers of +sides. This is achieved by masking the unused 'slots' in the connectivity +array. See :numref:`ugrid_variable_faces`. + +.. _ugrid_variable_faces: +.. figure:: images/ugrid_variable_faces.svg + :alt: Diagram demonstrating mesh faces with variable node counts + :align: right + :width: 300 + + Mesh faces can have different node counts (using masking) + + The 2D connectivity array (blue circles) describes faces by connecting + nodes (pink crosses) to make up a face (blue outlines). The faces can use + different numbers of nodes by shaping the connectivity array to accommodate + the face with the most nodes, then masking unused node 'slots' + (black circles) for faces with fewer nodes than the maximum. + +Data can be assigned to lines (edges) just as easily as points (nodes) or +areas (faces). See :numref:`ugrid_edge_data`. + +.. _ugrid_edge_data: +.. figure:: images/ugrid_edge_data.svg + :alt: Diagram demonstrating data assigned to mesh edges + :align: right + :width: 300 + + Data can be assigned to mesh edges + + The 2D connectivity array (blue circles) describes edges by connecting 2 + nodes (pink crosses) to make up an edge (blue lines). Data can be assigned + to the edges (orange lines) by matching the indices of the 1D data array + (not shown) to the indices in the connectivity array. + +.. 
_ugrid implications: + +What does this mean? +-------------------- +Meshes can represent much more varied spatial arrangements +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +The highly specific way of recording position (geometry) and shape +(topology) allows meshes to represent essentially **any** spatial arrangement +of data. There are therefore many new applications that aren't possible using a +structured grid, including: + +* `The UK Met Office's LFRic cubed-sphere `_ +* `Oceanic model outputs `_ + +.. todo: + a third example! + +Mesh 'payload' is much larger than with structured grids +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Coordinates are recorded per-node, and connectivities are recorded per-element. +This is opposed to a structured grid, where a single coordinate value is shared +by every data point/area along that line. + +For example: representing the surface of a cubed-sphere using a mesh leads to +coordinates and connectivities being **~8 times larger than the data itself**, +as opposed to a small fraction of the data size when dividing a spherical +surface using a structured grid of longitudes and latitudes. + +This further increases the emphasis on lazy loading and processing of data +using packages such as Dask. + +.. note:: + + The large, 1D data arrays associated with meshes are a very different + shape to what Iris users and developers are used to. It is suspected + that optimal performance will need new chunking strategies, but at time + of writing (``Jan 2022``) experience is still limited. + +.. todo: + Revisit when we have more information. + +Spatial operations on mesh data are more complex +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Detail: :doc:`operations` + +Indexing a mesh data array cannot be used for: + +#. Region selection +#. 
Neighbour identification + +This is because - unlike with a structured data array - relative position in +a mesh's 1-dimensional data arrays has no relation to relative position in +space. We must instead perform specialised operations using the information in +the mesh's connectivities, or by translating the mesh into a format designed +for mesh analysis such as VTK. + +Such calculations can still be optimised to avoid them slowing workflows, but +the important take-away here is that **adaptation is needed when working with mesh +data**. + + +How Iris Represents This +======================== + +.. + Include API links to the various classes + + Include Cube/Mesh printout(s) + +.. seealso:: + + Remember this is a prose summary. Precise documentation is at: + :mod:`iris.experimental.ugrid`. + +.. note:: + + At time of writing (``Jan 2022``), neither 3D meshes nor 3D elements + (volumes) are supported. + +The Basics +---------- +The Iris :class:`~iris.cube.Cube` has several new members: + +* | :attr:`~iris.cube.Cube.mesh` + | The :class:`iris.experimental.ugrid.Mesh` that describes the + :class:`~iris.cube.Cube`\'s horizontal geography. +* | :attr:`~iris.cube.Cube.location` + | ``node``/``edge``/``face`` - the mesh element type with which this + :class:`~iris.cube.Cube`\'s :attr:`~iris.cube.Cube.data` is associated. +* | :meth:`~iris.cube.Cube.mesh_dim` + | The :class:`~iris.cube.Cube`\'s **unstructured dimension** - the one that + indexes over the horizontal :attr:`~iris.cube.Cube.data` positions. + +These members will all be ``None`` for a :class:`~iris.cube.Cube` with no +associated :class:`~iris.experimental.ugrid.Mesh`. + +This :class:`~iris.cube.Cube`\'s unstructured dimension has multiple attached +:class:`iris.experimental.ugrid.MeshCoord`\s (one for each axis e.g. +``x``/``y``), which can be used to infer the points and bounds of any index on +the :class:`~iris.cube.Cube`\'s unstructured dimension. + +..
testsetup:: ugrid_summaries + + import numpy as np + + from iris.coords import AuxCoord, DimCoord + from iris.cube import Cube + from iris.experimental.ugrid import Connectivity, Mesh + + node_x = AuxCoord( + points=[0.0, 5.0, 0.0, 5.0, 8.0], + standard_name="longitude", + units="degrees_east", + ) + node_y = AuxCoord( + points=[3.0, 3.0, 0.0, 0.0, 0.0], + standard_name="latitude", + units="degrees_north", + ) + + edge_node_c = Connectivity( + indices=[[0, 1], [0, 2], [1, 3], [1, 4], [2, 3], [3, 4]], + cf_role="edge_node_connectivity", + ) + + face_indices = np.ma.masked_equal([[0, 1, 3, 2], [1, 4, 3, 999]], 999) + face_node_c = Connectivity( + indices=face_indices, cf_role="face_node_connectivity" + ) + + def centre_coords(conn): + indexing = np.ma.filled(conn.indices, 0) + x, y = [ + AuxCoord( + node_coord.points[indexing].mean(axis=conn.connected_axis), + node_coord.standard_name, + units=node_coord.units, + ) + for node_coord in (node_x, node_y) + ] + return [(x, "x"), (y, "y")] + + my_mesh = Mesh( + long_name="my_mesh", + topology_dimension=2, + node_coords_and_axes=[(node_x, "x"), (node_y, "y")], + connectivities=[edge_node_c, face_node_c], + edge_coords_and_axes=centre_coords(edge_node_c), + face_coords_and_axes=centre_coords(face_node_c), + ) + + vertical_levels = DimCoord([0, 1, 2], "height") + + def location_cube(conn): + location = conn.location + mesh_coord_x, mesh_coord_y = my_mesh.to_MeshCoords(location) + data_shape = (conn.shape[conn.location_axis], len(vertical_levels.points)) + data_array = np.arange(np.prod(data_shape)).reshape(data_shape) + + return Cube( + data=data_array, + long_name=f"{location}_data", + units="K", + dim_coords_and_dims=[(vertical_levels, 1)], + aux_coords_and_dims=[(mesh_coord_x, 0), (mesh_coord_y, 0)], + ) + + edge_cube = location_cube(edge_node_c) + face_cube = location_cube(face_node_c) + +.. 
doctest:: ugrid_summaries + + >>> print(edge_cube) + edge_data / (K) (-- : 6; height: 3) + Dimension coordinates: + height - x + Mesh coordinates: + latitude x - + longitude x - + + >>> print(edge_cube.location) + edge + + >>> print(edge_cube.mesh_dim()) + 0 + + >>> print(edge_cube.mesh.summary(shorten=True)) + + +The Detail +---------- +How UGRID information is stored +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +* | :class:`iris.experimental.ugrid.Mesh` + | Contains all information about the mesh. + | Includes: + + * | :attr:`~iris.experimental.ugrid.Mesh.topology_dimension` + | The maximum dimensionality of shape (1D=edge, 2D=face) supported + by this :class:`~iris.experimental.ugrid.Mesh`. Determines which + :class:`~iris.experimental.ugrid.Connectivity`\s are required/optional + (see below). + + * 1-3 collections of :class:`iris.coords.AuxCoord`\s: + + * | **Required**: :attr:`~iris.experimental.ugrid.Mesh.node_coords` + | The nodes that are the basis for the mesh. + * | Optional: :attr:`~iris.experimental.ugrid.Mesh.edge_coords`, + :attr:`~iris.experimental.ugrid.Mesh.face_coords` + | For indicating the 'centres' of the edges/faces. + | **NOTE:** generating a :class:`~iris.experimental.ugrid.MeshCoord` from + a :class:`~iris.experimental.ugrid.Mesh` currently (``Jan 2022``) + requires centre coordinates for the given ``location``; to be rectified + in future. + + * 1 or more :class:`iris.experimental.ugrid.Connectivity`\s: + + * | **Required for 1D (edge) elements**: + :attr:`~iris.experimental.ugrid.Mesh.edge_node_connectivity` + | Define the edges by connecting nodes. + * | **Required for 2D (face) elements**: + :attr:`~iris.experimental.ugrid.Mesh.face_node_connectivity` + | Define the faces by connecting nodes. + * Optional: any other connectivity type. See + :attr:`iris.experimental.ugrid.mesh.Connectivity.UGRID_CF_ROLES` for the + full list of types. + +.. 
doctest:: ugrid_summaries + + >>> print(edge_cube.mesh) + Mesh : 'my_mesh' + topology_dimension: 2 + node + node_dimension: 'Mesh2d_node' + node coordinates + + + edge + edge_dimension: 'Mesh2d_edge' + edge_node_connectivity: + edge coordinates + + + face + face_dimension: 'Mesh2d_face' + face_node_connectivity: + face coordinates + + + long_name: 'my_mesh' + +* | :class:`iris.experimental.ugrid.MeshCoord` + | Described in detail in `MeshCoords`_. + | Stores the following information: + + * | :attr:`~iris.experimental.ugrid.MeshCoord.mesh` + | The :class:`~iris.experimental.ugrid.Mesh` associated with this + :class:`~iris.experimental.ugrid.MeshCoord`. This determines the + :attr:`~iris.cube.Cube.mesh` attribute of any :class:`~iris.cube.Cube` + this :class:`~iris.experimental.ugrid.MeshCoord` is attached to (see + `The Basics`_) + + * | :attr:`~iris.experimental.ugrid.MeshCoord.location` + | ``node``/``edge``/``face`` - the element detailed by this + :class:`~iris.experimental.ugrid.MeshCoord`. This determines the + :attr:`~iris.cube.Cube.location` attribute of any + :class:`~iris.cube.Cube` this + :class:`~iris.experimental.ugrid.MeshCoord` is attached to (see + `The Basics`_). + +.. _ugrid MeshCoords: + +MeshCoords +~~~~~~~~~~ +Links a :class:`~iris.cube.Cube` to a :class:`~iris.experimental.ugrid.Mesh` by +attaching to the :class:`~iris.cube.Cube`\'s unstructured dimension, in the +same way that all :class:`~iris.coords.Coord`\s attach to +:class:`~iris.cube.Cube` dimensions. This allows a single +:class:`~iris.cube.Cube` to have a combination of unstructured and structured +dimensions (e.g. horizontal mesh plus vertical levels and a time series), +using the same logic for every dimension. + +:class:`~iris.experimental.ugrid.MeshCoord`\s are instantiated using a given +:class:`~iris.experimental.ugrid.Mesh`, ``location`` +("node"/"edge"/"face") and ``axis``. 
The process interprets the +:class:`~iris.experimental.ugrid.Mesh`\'s +:attr:`~iris.experimental.ugrid.Mesh.node_coords` and if appropriate the +:attr:`~iris.experimental.ugrid.Mesh.edge_node_connectivity`/ +:attr:`~iris.experimental.ugrid.Mesh.face_node_connectivity` and +:attr:`~iris.experimental.ugrid.Mesh.edge_coords`/ +:attr:`~iris.experimental.ugrid.Mesh.face_coords` +to produce a :class:`~iris.coords.Coord` +:attr:`~iris.coords.Coord.points` and :attr:`~iris.coords.Coord.bounds` +representation of all the :class:`~iris.experimental.ugrid.Mesh`\'s +nodes/edges/faces for the given axis. + +The method :meth:`iris.experimental.ugrid.Mesh.to_MeshCoords` is available to +create a :class:`~iris.experimental.ugrid.MeshCoord` for +every axis represented by that :class:`~iris.experimental.ugrid.Mesh`, +given only the ``location`` argument + +.. doctest:: ugrid_summaries + + >>> for coord in edge_cube.coords(mesh_coords=True): + ... print(coord) + MeshCoord : latitude / (degrees_north) + mesh: + location: 'edge' + points: [3. , 1.5, 1.5, 1.5, 0. , 0. ] + bounds: [ + [3., 3.], + [3., 0.], + [3., 0.], + [3., 0.], + [0., 0.], + [0., 0.]] + shape: (6,) bounds(6, 2) + dtype: float64 + standard_name: 'latitude' + axis: 'y' + MeshCoord : longitude / (degrees_east) + mesh: + location: 'edge' + points: [2.5, 0. , 5. 
, 6.5, 2.5, 6.5] + bounds: [ + [0., 5.], + [0., 0.], + [5., 5.], + [5., 8.], + [0., 5.], + [5., 8.]] + shape: (6,) bounds(6, 2) + dtype: float64 + standard_name: 'longitude' + axis: 'x' + + +__ CF-UGRID_ \ No newline at end of file diff --git a/docs/src/further_topics/ugrid/images/data_structured_grid.svg b/docs/src/further_topics/ugrid/images/data_structured_grid.svg new file mode 100644 index 00000000000..2f3a1ce342a --- /dev/null +++ b/docs/src/further_topics/ugrid/images/data_structured_grid.svg @@ -0,0 +1 @@ +23, 28-19,-21101525-5-15-20-30xyCoordinate ArraysxyCoordinate Arrays23, 28-19, -21xyBounds Arraysderive point locationsassign data using dimensional indices,position in array == relative spatial positionderive area locations & shapesPoint DataArea DataData Array(bounded coordsalways have points too)my_variable* x+yare not lons+lats, just a demonstration! \ No newline at end of file diff --git a/docs/src/further_topics/ugrid/images/data_ugrid_mesh.svg b/docs/src/further_topics/ugrid/images/data_ugrid_mesh.svg new file mode 100644 index 00000000000..ab7302346b7 --- /dev/null +++ b/docs/src/further_topics/ugrid/images/data_ugrid_mesh.svg @@ -0,0 +1 @@ +5, 7, 8, 14`xy1212`node_coordinates`every node has its own x + y coordinatesderive node locations1515xy`node_coordinates`[5][7][8][14]construct faces by connecting nodesderive ‘corner’ node locationsassign data using 1D indexing,position in array unrelated to spatial positionmatch indices with facesmatch indices with nodesNode DataFace Data12Data Arraymy_variable12 ×4`face_node_connectivity`face_nodes \ No newline at end of file diff --git a/docs/src/further_topics/ugrid/images/geovistalogo.svg b/docs/src/further_topics/ugrid/images/geovistalogo.svg new file mode 100644 index 00000000000..4c68f0ee3ff --- /dev/null +++ b/docs/src/further_topics/ugrid/images/geovistalogo.svg @@ -0,0 +1,573 @@ + + + + + + + + + + + + + + + + + + + + + + + + Cartographic rendering and mesh analytics powered by PyVista. 
+ GeoVista + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + GeoVista + + \ No newline at end of file diff --git a/docs/src/further_topics/ugrid/images/iris-esmf-regrid.svg b/docs/src/further_topics/ugrid/images/iris-esmf-regrid.svg new file mode 100644 index 00000000000..e70a9386a7f --- /dev/null +++ b/docs/src/further_topics/ugrid/images/iris-esmf-regrid.svg @@ -0,0 +1,93 @@ + + + + + + + + + + image/svg+xml + + + + + + + + + Iris + + diff --git a/docs/src/further_topics/ugrid/images/plotting_basic.png b/docs/src/further_topics/ugrid/images/plotting_basic.png new file mode 100644 index 0000000000000000000000000000000000000000..ba2b0b3329d8930508ece378d1317940f964f58e GIT binary patch literal 42338 zcmXV1c_38p*MIKJFf-Q4PGsytmXIYeiR@d1ETJS(*`tNI3R$ugMN-C6*|J3_WuyhA zkZ2)GNMxyOVP@XP_xH{pW$xVjEay4rbI$pkbCc|C&A2(lH~;|L7Um`g0KmaNv4DVq z|7m}^=mx;u?G`3`oucw)ht4+3nVwx`>92e{|EkXK7cRP=m=@pL#6QKyo 
zna?I$f~~av#b|ffiBMA8pS8<-7hb%iE@BdoXcdlOEzp=gOwvuWM@@fA@2C@J-%dgM;rKhqg*c2-0b$DMG1fY5Q5XF36v2{=I+Z*sN|~ zzlyT3de|hF^6TwBzP`l|9J4hI3S_43FB&uV-;D`3Vl)W&YbX4meSysEt4HJMiV6BS|4 zq~jDr;`n~dau!AQ-S}Dh^VK_TJ-wUfCg1$gLkv>b5qlSy{^(${sn_@!^%;q5$KoG%%rL)VY%=-E=$@rf@BSi+sp zs?5c^6w*I@Qg32<2`jH{q}`^J+Gu*;83=za5k^-$%@JQ-{Ri>Q6)9v-oCyj z?+{zPpRUn7q*EDhkzLe(9-%TA(iUv?$&I#Y(@vTa)`O1rE!tsp(@g zwhjv?f4Od^8XcEBhNFlU4QR+#Q%=YN z`z!{bkoqG+9m&A12}I|4>XY8@sXBbUVqZRJAB}PEIg7nUL_IK0fhwxmBHEwX$zmOd zI1a=P|KUFC!yU`i0a&s?J9ThSws&RCu<=))rVRORAHVH(dzp77~P-_Mg_%!A5b1>w0K%Q9l=Og17VWXKwnY1)BYUdDQ~ zsk)X{-Q?xq{?CDh?=@}9<>r1JdpYo~eH!t9c3^nzh>85KNg$ZbRTereyTT7fEqF*q zY04Mdk&F}^?a8&+XMZfj6F}OBI8l1$bp=F4-%R2`8`G)KY8_M`}l?LgdH|&dmi@AJ?h=r z`?t@How$D|r{1RcW{T1JVAXy^wTCM8fniBCzNWNv!n#gBqvOWYw_j8*b8+w9vfWgf zCHs0kQ;jH9IK_ffB;Fa^m6jha86aKcJ)ym7Vv$Nxr@_SqIyCH13$!ias z)#lF0H=lpR98{kUqUd3}3*8hXJ0IZ7O5apP@XWE+yr{7`(7!xo zGgZACu72E6z}*j@T^%4p`u7(4K9yn%HpcrRHtNFpn9lkw_5%%$vK$l^(X z?wEFjW`gP7(OEexTS(2;y9IL0mgT3@nC{uhs}DnY`M|!R6PZ)I%zZP0zlI(~U`GlS zWL1qHKQ@_6)4_EnN);=uZQeyZ+U}Bcv}MN6S!^1%;o* zDQ%YLGOUS(zVvKy+9;0V)Q%Ls+{d;;xb)&p6k$?4iRn4wuxRA@;3KbPeCK)VM^2S; zZi{3zJ#;?7=TGVNJS1^z*3))GOclp>ww`hbV$B>e&7L!kpb4bP`4}nH39Q?xOq+zi zw>>xR^JsSg-TJpJQhtELhn+67#y(N89&8r*)3)PjLuB;eO?1N# zFkxw!6|zF_?!KMOM5dM!65vrSa(6n=s;>N+Q21U!`)5tReHM)>HVQPGM95*%>KEZN zJ2;-~A2u`OVHIVhaj|s#MwPRcucg1KsTK2g{y~w|X;gEvAAep*kv~Ye7!ga3BO?Uf9;&P;NdVyk;kb zbkBtHbk7-6a$ljoE5R?JB7H?%e=07>;Surkylr?*j4)6bt6{r2#19FyHw&3_ zRdOrk;HS9w(BN=|wQLz?F({<&#n{`M7f|DHbdx%E{{xYKHbFi`{4L^JT0yT8r&RCq zcex7U_Su4qNzA1!R1P9~zmqZcoRc9V3ns#k+oCvnWr?S*a`!-sQs6|xJ{|}WgzMys zDNIh&4GR<29i`tXoZ5?cYprS#|ER?j%!#q}54L+OT7o*m3-5Zj2TS#Kb}nIGG#XI` z)Ul+Cppe!3ziBuaV~cWD*XU<>vdT`|} zrFprf-3oi43tT*lQZ%q5#p|~0;}&SnGimeBT;v=)frp4@4<<3)udHT1PlR5V?l#z!jiWiY zT61QAESck&_+{shpew5(LChZH?i%=MKjFEO`#%IN5^)l7ZryzPf2tO6;g-gsFzGwTlj!kqnEA; zPq#s2D4HWL+6`lf-GUSz(k?y_q~ca*PUyL?DEX3+bo3hNyL*cWdRq91=%qmwacRW= z&Ltw{S{~Qzqj3Ob4pnvhgkE?4l-kejN0P$SBygE0r 
zvcdv<##~G1{&XSNLWK!OJZa=D24`tCQ%1^jUgxcU6tXhH!&Z%wrX-&&Xz{z2xc&MQ zeta$?KG)1kxajg~ zt+WEYK$GXT!zkL=RNWL6+17bd#QYwj5sH$x7&TQss~k2CF=}JgPybpl+AjV)<1H>P zm%BTTd9}|f?FW|Wj5K+_x^gus$-*at=v&YBMh|SfY^b6t?PoF5R_>E_C&Z3YMJ4D= z{qJ?WPcpG81}n{lTblEDp-*3Z6K{d);uB47qJGAd8=XiFZehUwt1yr_yV?@IXr!kn z$k-kt%EiK>1&Vn8KUba>EV}%A9`^nr-;+(h+W9V4=z6o7YORgUN|8~**Y|Gc6}sCe z)$-ljObgD)0>kZF7%oZcoE zNU%%5suw@wjmj$q7pq9#i`i*v!a^hZXU?=Ek6+2cuw%hfx2+^L=?3%4&mvaT>hPL* z%dH-&c^eB{9Cy;BL_iok5`+#sp!1x;_;$<+-jk#U;tr@Pi7_s_jDC<%eM@M#I7-D; zb%@h_HM*0tX4oPgny%J4+9{pW^d1nI1Y2yX)Zx0!!JyzrmVY5DDXHW0!{YQ%c2|4A(X8(jfiCin}HqzVC?YomMOZi(Xk84K^OjRcGN*0ZBaqX$ht<9={ z;!~P1LcbR4+dXfaO0pKn#hG8zL_&e&RiGOX9)IVtaVA1u%;-xDM~G;)>aOxDgdy96c|tIbS+Mf;xBfnK*Je`fD? z=E-nc(T?(D8&B-b1Qx&JfF9~)4m@PdSf$j$2s74c9v1%IEhwoYA%l_usrI3>WIyZy z@te5nYgm(e_ts@(WHK{n()RO=?BoNNO(IyF->CM}|NH`Cj=bB?5=aGv0}>xFsxUk- z8LVo_q%_U=^3nIgr^;Qb(##s$kvvnAi5pIfRa}hiOa!&>_<-?wOPtzwVP2l2q5>!O zO_p*pARt+s9wG}WUF-ZEcCNi&8~F5jET5jh+U?E?>+E&bdZ=w|MgU~r-HEgj5#ed7rTH;TQ&8)Q5)D;A;z&L2DZL;R z3}k@1_XcanS0#bIG6%z^7uXfCiu+PuSvKE(sQH+u`Pro^wL%MYD#QSS$cVsL*^QSy z)YE@#ep1!--C0=vefu_kAQI7H-nzkaoSD7k30d?W#ptF;V+L28J~5*cPSw`dHV)<> zkbSi6D@255`d9 zd;LG&)0^p1-+0{*MX;d0!Arm4d!8ba!ZvZ>2D@n{XewxV+j+$Bqux%Uw`V9H*_dY} z?c3Cp|JQ+Thp_%zKF>mq^P>w1EDPYuW}ePRb`lupqTGnzHGOtWoaWErqj5G^0jY)# ztP_#`Z>6&k2=P5FQhX?##P}hNP$F)_p9BXmmI=&|Ow_Qk!7yUPoCgMgk9|J%6#;a*6D>FTCT?yby#?H|kcGz#|p0h8!rwHoLU=?g^Y@YtQduwB~)X3dH z_^k%E8e=hOb}cAM6KQ(|*||E-RK>DKy-qrK^A zA!r=(%~uS~2Vr-wJOpi_V3bPV9mfP<-1?}mXAQG!-DyFfKZeP=>fX?a8 z_GNzQLQp5Chw{Vz*zF(0e-+1*%p71Vj15x2I^CO+_a7fAV%${hNME5XK7b{t`+ z6X9Mtvlw6@{^b))gF9h7ZSr!cBCgCXC_KDwHjt1J_|oY4ERNhaI_jOkDsrGY6b$S1 zdDTQL0{Uas_u!K!XNmv5>X+6=f&Y& zdLoRX9WftSePU~w-x0#To&;|{F0P*nh&?_ReXw1Op>t`JI$l^WOes-$gn8|DNVvUp z?_K?8Pnvd6+RTAl+k%nX%0!jNi)1Ec0iO7Pd!`C5VBPJ@i)6~P@B3PL5?EFQ=zvNK za#grCuv3_sOmJ!f9KP_-sURPRdddQ!tj*Ba%nX+|iAVg7w*H2+mAjk4y5 z72Aig|5#|6lKjmoHlD>6;`6m+13{JCns(=`XM&GmU{5G;HwQ{dg;XJ?*|l}aZW9(| zAiSBImgYQSPRDM@7uGcUc2j$aubti^inxIN_l>r=?fuRB#T4qfvuw%*F)Qq#_)-1& 
z4Xd$6qnp?)D|UDnAX3iCC9t+LlW-oa?~`U>SEeK>Iv=%&q#xk+ZH^p`qvOT=XZ`6b_B(%SSK@#ebS|uMwA>p3UbL&l)3uilB1x_7hGM^TKG$mD6_7=T7>Rm{H7096 z#@;n)ohi+C#$@Wt@fy+?M7G2lYPQW4JD!cwyOe^-w#!Nky0ZS}wafRJ9*gM0&#hm~2A7p5U390^gpFC# z9)#ChnsfckAK!GL<*n|*obpQ4*7Un#qDQ&mpG@oID5%YT8qObz7Um#|38Spu=e9sx zlo|AFFJSu}?TFs$X%T*& z#*}c_vz9NCuehc#V-KS&(=)sGz1lr2ejrdexY$04`Z5NX?WY~-(TVxn1r+0OG}Tm7 z)?qHz<*s;MFTJ$}kuEIZ%0K+RuP)Oy@?l_LJZ-H;+J43yST~*hs-{**O<+A}PaR5{ zL|laf%DQ2MZd?dw*Yx&k?{J%Ex>2(Zy}(`vz8LP805J{lxJs=ueXu^L9ueo^hr&MjSLEB8~t#fqMWy>ZVl#=W5Rpfdej8*Rt^Y#cdG#T`a4AY7mh}1 zKO_qd|B+scOdcXhf{`j+e*3cTS4@wtWlSQ{XTs1y3lNHW$O8R$JgoBtqoXWms!>=|V=6=H7u z{8raT=Poo4&qoFeNCMR3e%^(_?3U20>Z*!L8bKo?vXta|Uw%M%sBz_dAxjbB18fPb zt_RyB?HDK4lXD?t0$Q9dzWrJ!-d68l<3mUaKl8ifCs4+{OHHVdR3grYY8A3nLMw(# zgIjzKc_QJQfC56pP?dzg?1$|zFSfe2m~K*M_R1171Cd{sZ95+Cc76YElqv?G*HH9P z)Pk$HvNwQ_F8fU9bAkoR=a1Re{dts{a`fMj1#sJrgc_^~%L2h}+!gsA+^>s`QjKYz zwQjLr8CkZEFlJiZt?`d~BC(ue7x7m?#rKnoY8CPzVqi907V$S+Gh!9Ki6?bVwdC(P z+HJsx_L_rkb*Qs-*RaJ}8%>^$JQR#QVOVEd2_M6;UK0JQ_Sisd^}X17qyqG=5T13e zpJuNv87-4)+WN_I2PvKszi|Cw!kac$f`@~6w zvE+|&vl_O|6d`ji8(5&Ew55CN+>yW?p~8nCx$R+M>!eTXfch1-iE^ zO@N4!pk&%P9=a?l1(V|x>V+#yPaIhvD0p?K+~LWbUEuC@b0Wu(u!?Cu)+6-u!L^|d z1V==7zck`nkeLixe9uP#_HGh-6!%D+W(VF<7<25D(2&6@JmHQmqNR^(ac*i8na9x?tWowP?h2PNs@X;sH2hz%f(q zdW(JWwQ}4(h1CSTohKAej;^NbSzx~U$CuH4C zAyOP5ghXzxV}QiK`m=E&dX-bMXv<%aa9{}2WbMp0J1I-~!JNoeW%?9LKHrZIrfVAw z!t!2)auzl!mtC!##KLrkD0^>xuLk~hh9?rZ+ zfN&LC9Il9{i!<$BF=ie#9KP)Wmtda)@yRkV6mZ1VhRY%9NzAWee=e&9JY^FB;LO=C zd}PghocXL^J;_8ldE5ELQESYL$JfAA6aN}iENv|S{DnD@rm9ZYm|8jduo4-@2PJ9Z zLGi>HV^FA5I=v+nOksxQQbeA?Dnyt(zpkEgJJaM=}+Fgfg>*o-NIl znB^4Hvj$_{ifp?|0`kJL8#sESh1@C&;yc!B%6+ zw$eM;yh!F(WiHn zh+xSm;qeXNl!vq{F^PWLP#mqWz^6>3r}pR1pO4g#iR=7qdE7ABiP15&jdXge{$HF1(Tux z5_>y%17h}}y>G)%n_YMKjneL60-ls4#AO1L9w_=?o4M)y5Rbzv2H-CjAU&doqUSKjF+H9e^3mU2* zI8d?=dyw+*co*N@N#sI4jv$MGfL;o|ddFK?SksTE+=a1QQYhIjJ_9)YVhBR@vQ-d| z9Eah~x8;L?!Xt#4CYNmj0+4zEEuhDat+)(rzJnwD@Y=m(#0|AyJQM&*=Ea*he;SA4 
zT@Ux_$YwiG#>>J&HVlA980rbw@Q~ihqANs7@qg;oexw0G#R2FRX|n(;^Mrm^<>33c zYar_N>orLLP*rs`+@Ekh`aBL@9+vL`_7a)!>Z_bcj6IvLA(8Zo8`-c0(%*7C?;5Z2$ynTHI2$;L+I0TX}Lsr~pF zDpcEsh{(eL#E~|MJgBvKqpwu$Fj_Rn2lQ_W;2TVe9UFJ60lLh)N$|i4NebqSy(*TZaysowp$~4W+4<5VCOvYdiAe{-Gje~8h-S-*7{=$JM23_wQ;``JkPD2hopig#z!wb5b zAPVxvUdp1>P5tJc@n)oRx*7?9ZdI5$xK&^d{Am#U)BAzPi-B zli&LO-`|H!)BA_tfmPj&`4bFY7~xnbh|hYP+DDfS3x4`-0o3?`RsuT!Q)12-Zay6x z<(eI&5vK2#UP(U`;#|oJE`p{;CF0tuwHjcnB`zqWk zT%X6J4Tioz{aeC<``r%XfVXFLh$A)_Qt8eO-nyBM6jh_qMwzeAH~=N`rZr}&9F>0X z19UkdYzD-ULr^i6j23QpoMR30L%l-#!+_JM8HU2Jg9Fyw^)RU%eB%picDsDpU|CU$ zh92_ZNfak{R4I}3nepqNn4B$rJ&4Dgs^^(`+}qpF0hqKdL*2{-?FF~NK$@53$imJv z@~ZN56!lactTEsLg|nuux#E+vo^4vX}`}Atxvg4nt;UV_srjAJuN$j4MdWdK5Ex z#Ff2{c&DGWn!=9iYkdHu*Nlj&e{EtkK-zYU$G(o6{7anP=^1NgW)_dl1ZncXIe}Hm z+IzpcUjaOUb{M$vfGuDm0h~)~!0cuOef=;>-CP8kUe>^Nc@L3P>dGcygQ zBeb#PAk^c902_!JhXkG#v;M^q8u9}?a*K8M;%RTfdG{2W2A%zT>&r8aa4T~OIsi`N zM|aq_9i%*i6J&!IC9J)`O_X_DkPIlMf}4a_X#H;Dh%?4Ja5aBTGF)^BK9d5is_~yV z>g*lHBL`B1b|gQMHjrShh;f5{2dLT$+3E3=)TC+Z#sf^K98#@I>>b;5gob6M2)S=6 zAD-mx=a#e*>0BJ4N=UH`qYuvpdRf`XqdM-fw^uJqq%zXe(#}P%c)`79yvPL5`r=5? 
zw!$;(DNsv2FZ{eNU(oXAiygRj-Dt}+HP>v@qDm~H_O!yie(jCHkp42}TKZUYnM2BK zH5W;M+~KT1iPA&$@a5_{`L^Y)w$1$|hl+f0=8w4+f?Wx8%OJde zyMm-k^oAiM12-el(_o>2qofH(jyZa0zI%9-S~WxUhsp<1iDbO)@>CmS9?JIT8XU?R zF0K_K#?Zs&=J~`xD}MJPMAVj1sqsepq{?M z1GYT5*#XPY9exk}e%JHqx@Q9Jk`sJnE*n{p8<(&qxyc$Q^&sK>(;s&k0Sg68KSj{~ z=!FVQz-e|$CcBe3^F9-A`rz;$!<{>i=+X}UYKIAtH}NM<|89p8?TZW2+niZ*a8`jL zc-E7}pYXB#wbe%HcT3eLW79=xuJ*65BH$x@+UK|3*ZF%k&iz=yR295$4Zr;1HOG$r zbF|bGb$A*N7b|?_&wno;&GDF@kM&W9OA0^D`da(*?Sf9dS~{$1c=r z?OMq1dn-#xB~rr6tHxQcg5=RrDs;Bje(%ZcxjiKbNOzKn`#!;-V`{j_0)^5@zR0{{ zaBnKr5+5G0N?rOCHJAUWswQkx957;o0pOL?2Wc#8YwJok?K6E_Bscj!mj_}gth(q_ z4mrPSchU8r1F4#zM#(?UP$a#|jqoXVU#3Nqd;YhhfX{(e)muQ~qZ|5Fv zZBlH}wr6uX&r8X|%0@h3US9nM%S3XlHwue*K$g3X^;i6GX|)6NV!g`HTva6bZ>;(p z)9RQlehUK&vd%sI(x!AAW|l7{GVhxF$ydsDsr%0ggZw^H&(fMjwVByUEq7t3-_wb% zptAWZOBM_T^Wfn_W)d}MF9bv1yJ2A8(p~(HO%!>M*YM5h;8)~VA)Q&?`TShkK@&Z2 z0-&~{+vesz?Rxw1Hr9Xq9zK=L=;3iecaMX5L{emLe{mD_eaC5bSY3Kdnktgt9Jv(6 z7O%H&opyVRXeF*42(Sj@v8+d5sI$!efm^fDmtij_#imf4-l|2u`z(vr{a;3N;u$}! zNfh-+NWog(YGOs+U?n+>{9906e?I_2mw%-D-7H}5U;!DveTNF{0K8Jv~;UC9AjU{ zRNz=EGSc?svllx^fq_F4|KKeZTyZ+{`t&Wi5;bLiMMTkwRYY7H@S;YS55%h~MQ`;6 zVDa$=yYI^;uePTgASZ?+5@)}xm+h}f9J_7AY!R;T5YUnTnqSW97zYM zR?fwZEYktXBPJ?(GcQl;^Umb?bp0`Ggx3M0i`V%}r014X`C8|fD{5*!hCsbo<9V53 zGhPa_#Ln|TEkg$blsYB82OZ-hi!C*5W4tLOB3*fCC?Dp99L^sa=$LO zKeW6gQQ+QVWCdFHh6C(W;;2u5Lo6%;V?Vhqm<28W!bHLxZ5Qnu^=)a24B_n_e%(3~Ej9LIMKS96u;y3c^?(p`l4o_*mxl=hmf^rrI9bp9e^3h^eAp|Vxq z?Tne8;r5R*uR9_cEOC$$BMWZ@&qfY=-|Zm{axt-{msg{ngbLdmz%D`S0&)6>UBe!o z>xjFP6(-{s;5kYp_XAU*)9hTd-3gaPbxioj#Obo41huv7mj@}2I%8L}xtvUS;8dP# z-9}=o_gzc0l%xS?q4-FNj8I*)6aaO!wCj}b5wYWx7S1AO{U7q?+LRT)Ls1!)$qeTI zFZW$)^Sim{eE+9U)30o^Q~2QGL1Ut6tgrJI@Jj)VMgXk*6qFTsR4jz4J+A}&z@Hi% ziR&t)WWmZ+0B(4mdHJyz0o@^koLs75Pl-7^-m7wl35xF!S24n&goNp*?>PaEe3-Lj zmb0`IO2dgqOJCn@jZmoo&>h^v?7BCJ8E~|VF0U?Gw$?}kxWFGIx zYIgq=e=c|kdFrp2R#r=hyYE?RttK1^B{UGRfR^FE^uE7{RbKT}>Rj~*1K)67?o3a? 
zwM%lSH``yh%ACMGi|I|GYc3vwSvcSi^ZN{Tu<&I;{VGRyaf5xJkm??Z4PMLF zumV!*|BqaN3*zaF4)*W@^U`-Wnv&o9I|mtLXWiQX?ERyRr(F^Uoukgcc@xj4Z*jD* zr9D~uvK7`As!Dl3O;U3^c)X!)Xl-;tHBomMPX3yD9s9DNg|nyS@bysg+@=K4G|P|6 zt9&ai&07HxwH2`f4$tA;7hGQNh*f(sCblBO)Ni~6x)H7vv1i=X#MAJ^DavC3g`2*k zO$U4De@7?(zQjphsWw9R2mk^7{Ds5I+Mv2}%jnzltz#|okXU$Dn zb1s%|IFXmLEqB+^L$pl;iBW72w-48hRl>3!on{*${NN~xouhFBK+3s#^=d~)hnwA1 zcY^G3Lf=NQfEB11yzoib2U^i z(S$t@j?UTEw$ITqtDY7O`^9c*9@n~{gZ2x$vXwY;Z%QJR4276m&87JK$H*yEyB+S4 zQrWA6kYoXRY4aQNPvGDK!H37+2drSM`B5chZo!G?d=yBc5uC4L0G;~}13fBt52LaO zWOt*7Fof!h7I0binLFhu2QAaOWfOv)?uSx`ZJde?%aj3Qd1le3J#09hv;U@ucsEP> zvt`2X^vBONK|}-Q2|O$KweS8=JX&@Guzg0}@==2Eo43qY^Z84tsN$Sd5dV{M_CN!Y z6u|qc1BNolOVxnEGYCEVm}K8%I0EOk`6j@1_w3rR1`K!G+A5?8QCFyRct7QX+b?Wo z7I2*hew$E35}9LTch8EZ%ax&xyc97j0LP&VJJ1d*a+BpC_@c*Do{z%j6!@X-<1ERM z2P*5Es@GWhYHMtg{s6d({Zu$tTrJW1lnz1EE-4<>TEXse(M1w>b5*DzI9D^|h+r`^ zb=-EN|G^t{8IzpIgaRlkD(V^a#{xGKL6|UB7gC7!_wQlg;3t1hKBZq!d9l>6QGa6{ zcFpnDrEQ88<5b&Sjm;385BSD7>lmiKwMWb&;jteXAy+SDT9nuU4Ts@=5U>V4{S(*3Jxvu_`zSi zevl$W?L#C1*|%Z~1SnzU*j=wSoBLLbF4wHb$7lo51=Ri4=bRsW-o>etm{|+t}2UMQhxC%8TB&p>w|n1S!k6K-LN^+=UJ ziTYC<#nDxzABpr(1$&$TOYh_K#T3=_ZL1>I>o?(BGJWwlTHNVLc$^7J@*tt}g~j<_ z&)jSA-mHe^Qpk*e`zD^qYspC8uflvZ-8my>qs*xx0vBkeThf-c_ux3WUE2i}=cABE8L!$BppRv_=p3%szbS?yQ3%j@x#N=@gZgI?d3ba{|wIK5z{_M|oz z+Pb=eUBittvjagtlKrmEqN;y3(BvG^*nt#5|>@}I6e z{#z-x{_AP>zn@5>TJkY>(Oz5A4M8<>(-XgG+TtO7zBMcutO-5!NL|Z==GU9#69wjA z$C+5l=^37T+Gc5C*tUnlLxv*O7=X&mu_l6;V`|Lj7tTXiy(iX=co6P=cMMd$cQZg5 z0BeC{*yRgQ^9NtPh6=h1{HS8|74S9d%;G@ug^na88K(6SV(OvFZBW*8;c z-onD9GQ~)PM03~UNub1x~+f|Pk9}~%S zksL|4b^1*P-LNDNRAjp68wce*l^xFKK}$6CEMaYlkOe9@9Ju{)Jj0-#0_@*ycDKS0 zO|M=S^vF^q%-8{WzZ40i=pSIoy zpdhcCe=I*zD*$sGTx2WQHeeb}q#K@}kNDwf6`-#RXr?g}a4Z2oI9EJ`qy@L^%Pe@1 zfVNppxHBEKsJ6MjyPIg0zWLY0Rc!cR-=Y(oLZEG-g$#w@1nGA=9|gcyd*HO4pCN!= z6Je(J<$!8FMG%hH1K{-*#_Q?Z2y}bs9QMA*1d zmH-q2dTbnk!M%%WGH`}ZW)NrDOjwqi`*y3#*?fjVVjPFRW$e<}WUa`gw{V4*Hiyxc 
z#)EE-1eW%+4VY^V?0~yHII|sqv_M@v4H9iVE!&`mpS$0#gF8WvoO4fv+3E;ue_NT&XyGwPt_7o^yS8@gJb$(TJe3^Shf5V;&sG zgVnK|mBE;fhHo%n02Bo+2Ta}jZ|6;^IWPqE_1Oj}QjntBzgadAgw0|wh;Xetw*)Dj z0wE|)a9N!f&|$On!3_9i5IQtaAE1RKeg&=)1VD38-IUoBA%UeFaEkrEWk>v8`J%rY zhC@)|_R?!rmGQWdcTaAZ%)~yMMHwCNxDG-6rSU9%!;@X|u+8w|*Qb2I|E=xiAU%U$ zft>wv!@0xDawQHdY&g=!a;{@>FFwL9A6J440RH=7tL2{<`hRgE@P|^jTGs?M&Y@^c zO-;gk0;o=d6ubIBIEE(9Oox(zAJI0Jke(Knv>6ltF#X+Qv+)xEQ+pN`#5d*u0u(SR zT1cgF!WzVz9vo#e^@55LyqU{V(jn8P*!mNPTEJ^)S`qWL{(b(<-@zGJ3{Z!*01<VADfs;AG;|p)i?zy|C6RjaTGe~=|Di@j7=&t)F^Ms78CoLx zItK@9vfLZ|jDgOFqUd-sC1|%dOQ>pd(1oiyM2f(g!X>7=Ff|?856K1-Y~@V`eT*u% z`m)%EtMFZ?muscLMvqNFBwUd_0S6`xa*%GWzh-J0kR|{lyJ2Ja_t#wLw#}Fh@<;Rx zye5#(+01hm9O|a-dk?*5XS)VW3QGZlJ=GI>P@ixt_Kx&okgGirZRh-bQyQl5o0&}| z=^S`FVY>xB34D|@L38zmrllEwE@;Pbsi(c^G8G#il23QRUmtH4549fW{7*{gyXhnU zNuC@IU54$=oV=6keATvQ{9rHCssb5!I(K2c<05><7#eir6`qktFf#u;dCFlXqHf|8 z{ugU@Ys-h_gY?=C;-n~25whrT+3TDhoVVh`Pf@h}$?y^nJW7DdYitY`Vum}p<32RlXqX%~EW^iWgiN&hIno~InL z;Ooi1$%SSsod4}i9QMk#u|7WV&=X>8Z#aKbe&l1hf^(%<3NveW`NPdQPL9JA=%B`2 z`aw@N#do;pRxk+O3(;mo*gb^$c5)}pM;10;K;H&$ZhXI)%+DBt!wOjV7|9?M&;+%> zxRSt1Q1$43Ia>!6Byk;esq{GZEPJ2>Mzc=vHaFqR>r1ICZ*rgUSF3ewF1niyb|<@1 zA47woX~TXf>qfOeeZ>3#Q{c6P07uf3>yBF>Sru~*q))(jU5!u1+;dS2ovb`K4Ix(`2Y zAxR(PF@ba?=q~mfTmM+@q7!_>c@g##q|E(YXiGQYg}Zq=?V^61u~aTuocvjIA2Z?f zliBkQM`&5v`SzC&X&&EoMs+3iExGA|@2e}K&<&0xT>k2DsUR#_t@)iST2T74^pRSt zrbII@eW3F3@};gYFi_+jJ3 zpNj}+@+YuQXVzb^@=krEdNj-F$^)@-T_R}$`0dA+>zn85GriwD!!FS#FZ#5X31)1;-BDhrseJ=(ZIp(Vz`Htu!iGCUY`Y@LHuw3P>ZEmdPM<6#+CdxiCF4M=CHL}5G1)NfSLVJy(rS-{ z4s;K1HHdGI$!jhiFKNE;w$I0OOj_F|r!!!Tm71Pu>31MK(}cHql_DR$`3f6a?*F%)(K+rWns1YcJ>1*bGWO7p0@vL z40?yNmpiA2uh0Cs&MI$Nb$XDtb1K^k9T!l%BdJ~$f`PEts)oqIH(E6z$-FGLYSv^+ z-MI26WG;lO!TNN5a?6A@E&sPm*W>&{Pu`@9uHv~I*l;;f^XG^E2C7R+?x z2ic-ej2M)k`)1OgYY>|HEQ4A0S*PB``C$4+v&V|s`(1>eXa9xEV!3m23zd1keU}H_ zGWcL2A{t$X=vsMc7HyQ@i8ZN&cZ1#yklEA56)#9Nhl<#e_!7CXLB zJLV|`>!8Fk6oicfo+IpFR9U}xhZ(&|ZYkI0;p5n__mvgP+a+$_uY8ofrv{4gctio# 
zH3NE`&Mk9&r;r1j>%Kc)RR!hS%w0QIU-d;k;#0*blUH8a_?|omuDP5%E{8cojlDX0 zv^H6gJSGo1bgSl^Wz7Y zeQ62vR$;-$R2bt7Kl^6U8a!<^>QMMTJMrvd<3Y;dC8O(pi|qFa zTjAZ*nGXsFvSAoPe2%~(SZ5fo+Dl5;C!K0Cc^4po~F+bH!M*I+)YJKA60 zA-BHgr%K2FG4-W^P=0UN&mdcglvMU)2~pW1%upiPN(eEEP_`o3&6H3SGPW#@HCd-f zcEb9zRz&&d3UQr2i7w}Wo?G-R z2?WNZ$q~A&pY__$6o>_GlaJid5S^$+&~HR1urWYsOz}|Ly`PN#R6$O?4QtQ|`8~#= z)2w)sVQyx5&YL+}`duuwRxA>LxmweY*|-AhzUQ~q{rWnil;4INx0bD5%%9^|ATwZ) zZI;QB(hFVBRCEPJNRhuK9^7Gv&`?DpRL5?{V32$5Ki6?lX_n;>Gp4oVqfxm(WFbl|2byEJMT0+Ur5=@G`BRgSgGB*9oefA zU3wHCY{$o$Y&{-_GecGd-q6u1wz^-fMK16O27gykA^`VS=RlRM>0P*vId?)NM;Xz#V!=y*aTwhcxB6Q@RA zkEo65vy}%0Y)ns{i6Em!gQB(kp!=Et4mVeGlgr1f zDd2+hXL>;#YPx1tw=&_%Um>Lavkyq*(1ejoZ$goY1w&Rfx?tqUY^?xDa~L6UiLb-8 zOx_jUFc}ajShB-iW686BSH18C z=D(O&`*o|U2aJhXujj7D4%&La0j0N_8S=>r25C_wlWnrA%?b4d2cgR(xJ~!lmH*Nj zkt%=Oi$5D{?@>HWErfL|*o}=UnF$P;0Ku4V*FtnW&tiP=zz70)C+{UodvfBpH*tuj zNhfcl;%;F;8d6n0D8=%v1dqD(;O-g+?Zn^6w%``tUioXWy2t3!ju-s$nl07@K*ykN zkvnZ*A06>{cftUJ!TfhErGvfEfO&-we(uu=ee(~6p3*cEN8Wqm=d;u_ee2${aqFfX z%R_ft6u_2_zp-hKO$c|``_x+JIA z+Ih}(=|SVBfJ2Ajn$6zw(};ao)K4%k1~4S)@$XKg&-3Vw+3q>sYbnhue%$ zwzW4^`MCOQ8i>n?NViu|M3MtkE_9jy0&Xkw18K&?x1n?^^O+=7MJMJHqaA zKjXA>r2?pN(j&eYYo_xjM&}()CB-vCeqie8LmWb2){miE6{BI>97eZbQ%Go)qKT(J zCtk+izVqYq$nV+5#i}fS5-wygu|l@8&TPV9wzn_l@U8rI1-+G+nTe!LL27Erw)1>M zqikEpTZJ=NMV@Z+)nrw>O(WDghm^E$tie%_(x`ybP$;0*E40Aab4wW?C;;7u0HNV`iAijbajrSIGi^KGp8&oB}n>tP%h5z-tS3u&^R!Vh0Ox@md3<7_Q_HPX)p6f zsCjww4Yh5|pz~i>CoJ~po;2=K{t1<36hR^m6`NCXFPP~ihccaE@+gqMN!Ul9rI9}c z&1yLNV*g5S@Tz{=t2GVxR0yoAIC&HxINpE-iM9?2cZ?Na&{DL-4u6H8apX-^jaF`_ z16kZ-TD1_7b|Pt6+|IMS zmh$(5zEYfOT9w2oCvT2hiN0iDPiJSfjxS~6;_qubhFA}*Ti4qBQXWeFt7qEksNGPp zOwIZ7$5K-_a>is3^KeRH?^SWq*|$p4l6TpUK!`Jyqc9msSl%0EG?W;H8&UOeznrV9 zXEwLM6W&RPw87+ooQ+Eg{=daeMU37H^6j9_O6}*SZ_irN+Met;O`fS9X6T2@Y}bZ$gahthMY;eo2+DVmCm}=Ld4Xp&e7(kCohj`p^TYKK(DB zr~NEgP}xHWrpsNq<>vgrnkaI5h%MOR&fbn=+l^K-D@cStK22kM(5KGGMTYe5>0vpQ_X z%M5_C_lcWvi{kY^J*r=1svp)WXIN4r;e zZ6NWh=wG;uBjaF<{t<_<(V}^?ks`irW}6~7J=(lUv>-F-y7u-m6Dmkr7VdgBBKMCS 
z2ap;1w?Z#W=A16Hz!*B?;&r4&$3##i|HnV}Cw*=nr#saD(`8(eS0p&M%0NtMB>;au z3wdtC!bB`0usqW3(ofskmn6LJjsrOx{Q+OklcBROVwSfg*SHEs?|j>d>H7BYa$ofX zgkU#CK%*cZy8ta*HE_z8W0Law6P`(HD|+&T6nSsJpmEz=dzAA&fE}($E2%V_ z@o!!t!YqoPphi;!Pem__J|jA3HhaT_GuG~)ZG<6(>?C?`rB z#67-{#V?%S2CvOqiil=-7tNS<^uU_&XUQ{tsQm1A=Lc*rL0Wy*e)(jumV#^TkJ0gI z?g;nO)})CnQveFqv3sD8Kjtuw?S8=W-Kl`d$;ambs4tYanMH&4+{zzcJ{t`J0;k4j zKaGo~rMLp|r8}PyW-{YK)tC1K*cSH+dYf2ckG_uKy#Y|LZ!JTLh^m5gCR^G;VUo%T zTf@j?jw%M3d!5X)MQ5M=Z9fVP7#PWa|H!3KLW?Q^p{!MW60nJ#d$bS=kdJ|2)1XWd z(n;o_5KcmeSTaCMm_Ix18e7QF#!f3RoVaiEDbDUf*#|ymaq&tUK(k>D-W048c*yy) z0?>Rfpsxqkv4$3NA>s0AqkpR{Z&|S3cbc_9*5fKHsqq$)ObifHX7E(4OI9mpPlIhe z^ZFWq@QmZs;wSDLt099>F49kM{D_lqrke23uTbT4MRSUzNcu}Lv%Mr{a$Fg29yV0B zI;+|iKaQQA(P#%^r92fT)#fjl-bGiwDLjkeWA zDoNE5-H}A;EsRX=Cz{kzjvjMR9|Q+C(B%pqTs#K^+iFiFu3;cnbT!2>?}J3CF3L^& zV|69|pt^b;e}K0T@!z}}+W}DwnbRw|4bFeE+?RBghoI=D?~K>NX-HaeLVtkDQ<69| zHO4Ny#2AX#&)bW$`ugDPsjgL+t{8LM)eC#Zq59(tfUjr_;@b0pGWY7%)G0czS;2=4 zx|xDH<7E`h>?FqIB!UIMUH3v%BF1mIjne!nS z2!8<* zITN~xa^S+mz&hS3o28Da2KLSBS(>%%JEQ)o?d(USzC2AW9#t|k%W%`=?>1A7k}pQq zuktn!z8e-ZREsrrub;DO&}3#1o=Oi|gqSN^>+jOD#2uhmSEx-Y?Ns<1V`YQvpn)y1 z&{>n~bCHyYmHC;IXLq0$iMF_Xc^G6rB@P+3roQ=>%lL0&0r~SfBUd3yQk6vt<7E*j zV)kypm1~@7{Cx>J+k~1~MODH!qM|X{aV& z6wQ5xrqc|{2L6%}SOFWk<_wK@wG(^e(2JB7@o)2n?{feefA>R%Xhk;TN_O3-&#;Bo(oL=KJpaF2ZCCx$KSq_D z-tP*$Z6Vsn$DfyAbg6lx)q=s`(Y<0vgsvmZ_RZnCfJkE2c_DQPHzu&yN+uI<4iyhkD2iOUh!e-;mc5!KF^1gMfQ&~&$Z;?qWuMJ5{}u&|Es zvO@pc%u!ft8AHoCy!lQ`cs@`ZT^Q-#<6mr&|^wA^lKD5+M?=x_HSzJ8+(X~M* z6u4{|l!uDrx zd-5`Nq<+;k(90?^?sT1=_YWSdTZgALuY)ZJ+{Ugf)GQZO?Tq`oi`+a&OHNsUJ?Bto zG}hy&%sJhAzeqAEkA6`(vZz3&P2CbM+9(&O3h9Bep`f)2-8%14lGmI%h3YvthITeZ zR1`ZfJ29@O1|8{|yPl6@FQTaMT{?(0<7!wmwFWK{_ z>>Px3le*0+E*qK!R(~x~BD3bo?_{Njl%6*>!{g@I<8WrkYSx9T8K{@>L2PN2Ve~Qn zN%Ebe>d?3WgQRwA-6lag1MZ4E&feSi)5)ws;h~|?9~4F27!ZOtOsRCr2%I1Ubcn2j`-KM4@$o@*lgdtXK*C^Zr%XH5LTbz_5zw% z6-hbavIB8%CEYf4KZz0pG^@()NO z`#DFPpM}NpCG($zx&GNdmz}qhEp8&=`Oz|u^q7gE)-Ac_Gk+v3#1AUj 
z5OI=tAWUbJ9QdRY2q6xfggP!jOBj7Nhd+s*1D-Z;47wk9sHNrZ8ky}lew=o^H0yr# zbF|kLaWX7gdS5Y>`Fv)AaFGlHK!<+(2q4dRh0>o8!93-?Bd2ohE~S}Oum7jlk+1oc z#X@W?WT0m8X@k?J7i5T_B&Owx*@x@}%v6w?<(w^NXko$px&L11<DA0n zK#xA{67lAq2Q`W`g(AEH%pI+c-vapl$w;}eKl_CZS{OolP9xn)I@Fr`nXGSNJrFV z*Op3jA13#bgt)lFp9SJaQ3%1h@!!#Qf<(^e7kPgu>nJM{oXAN(>dg%eFlzIdigOi9 z&BZxCPI8G?d^VnN-UFlYx`4lK_QfV^ZU%kSJujRr4JI=S1WLLnKYKfvye|Q5H#k&u z0Xj$g-@D~VIV$G#H~7=1M^@`2J+ZOYGS27;{(D6*ooo^b> zMc>bk-o&-myp^(sLCu8%h2}v8ODxLA@LZMWi|Dx*DQhnGdQJoAX3A9hvMEAC>TW?r zPv`*gYYKaekx&!y`gIByh z^GEESqtVB0z8$X2jNIJZM@F}l(Ux)S%SZ2azJI3ivgE6Fug)@HMv88W0KQocH>7U) zI=TBj=Z}HdM8|)|NscX+Q#$l~lSpE6j(_X0NSd>%#sN~h<4PdOFniS3V2`>|<*zLRb7rq8;rlD5hsoK8#kyTLPm zkpn(nNxN9?v*F6Zzizh2TxWgq-#Y&JCD^wcfKr06((4mv6Hyv|9vS3nl9sG>j)r>V z+Zwl+lT#^p;ANt)j;e#XyY6eT+wD%g^<+ak93CqPCS?K3N)F)*b59%e7d0S{RBp7C zp3YL~?bB^#Gy&nk%MR+8QuhHQ$?@(Qw>=|5YTFyNizimmLzNHY`1K)$Zwz^SrU=RO zLOz=qpSmN&Bg^#|XAky}$&h&3A65UBj3n%DL&uu3^1><98ek4R0P4%^E#eJ5CnH=^ zL#(Vqvf~I_^_P_w5d zqA&gCfGn0VacA6a7db?`nJw;{qELggp% z#Yb1=uUxUQUG8&1G{i()OMchv2O!L7iE}j2L`dl$q~Sc;3rcq|Q6-^E7~JnW2~Pqk zWYe&FsrAN0@iDnh_S8i@$NE z7=|{J#6x~(>G>1VrJC;RIK4KhOdszKi#!yOCGqL(@dJ5PhyR*Ey6C}>R-e>(nR~KX z5h}ldG&j*@6s3sk5W5GNpTGs?Umm95lMX!5F;S+7&#us%fyKwbw{zo-HPa`cm)soC zQ0%h%oYLFR3Y}j!!wT47gqizer~xG{QuUmw7t-HG8G3ty3JS!KUN&@SaA94W5<1Q2 zz#<{)y&S+`l=4ik@HjG#RV5n@A}22=RARyiQ1{D*`OgA51fq)$-yuL_NWxiJU~+$S=b2VK?!mwvzAXk#5Ma5HMUGoT(6<& z_{0K=B6pg`kpTww5V;1yLOU=cLXtPSurx<#F^TR$eVC$$2k1d7h75J)zciL=4%ArC zza{fmRcdjrLl{-X7^U|uRr#EOJ%ReaO#_Fw)S_~d@`y1ur%VNL?pOQ#><`pE-jwU`2ojSqA0X7qCl zFlK~SB5u|12Xj(}gs9ng?ql9RNpUAT%?H6Qv55qn-46>^`vT+I5eF|(N+vqW80b4F zlrfh0=$AdSsi_(U4&_&8&m!C zcMvx;C=NR|0$SdH;><9x$DejZyavA!PilR?EASLr|3(-*J&bam4rG{=TN8(!O4(Io zf*zPO$W%_L2bv^b1kf#UQ(Dd_HsGT3ID)QU?m23c>$QtdUd4an;)Z;=ZQC!+m3m!) 
z*v8=!;FxKX6(zH`e$i^jO_x%Jkajm zUw!m}8fHls)zX9RVW3OD80agGIdR;Qh<*ycv^^%{At$p_5OSsTNTPssT zU`DGc%`Lj9WSSK=WrDT zEaw<7U5?a`;t_(F6c`j<_`@W(eIW7c!o<28W2mEM@)?>t!U)rWQASMg1G%?{8ZOVu2&cCM16MXwBU3C_ z;C*qZ(f;0`CP`%f8uuL+c@-7Kt5+W$OoOi722=i=Mpc`&o^GIOGnv^e=o#;lJW(w2 zt6Bo_8c_k_e_m&`D$6wMg0UI{I%SJXHp+0nL+dKulH zKa%?aK(XD$ryq$!k1w+TVU094;%AsslA7hI#U}~xH+dML&wfL}#L%jUb8gMas@gp> z$@;u~F9?#k0}Dv`10ij?z*!ov<_;vuL8{y|tatQj09^G{Ek1OX(-^+cON|uB(EXx> zmQ#584S4(_ZTk5YCgC9tHTQ9c8#U=#Ptl|7DN>ZOcAS}QZxceDV!yL+50-X1*K+Sd zgFoe~gPo$|T@CEMX2HtIXdWuUXpCsf;e8Goo!8#Sc>o2VC-B?#@sM^*2m(-scXnrt zj*~*a?)Qcqk1^fmOxEjC8Hove1&ZO9HpS9q#FC%wA4=N{=uKiCQviOt3 zF#W$`R-gxUHdmVxQ8eJ55xLLAi1_6O)ZDc9v~?&ZE~cBaMI3o5+GVv;qXqP*HVK)i zo)9KsQ`S@PHCuB;qA9}5?mIIimHb6YNx3#Yh-$E`^DUQ%m5b5K8>A~-KEUSD7hMQO ze=3B_k0atA;x!^Ww7&!XKwyuR z$YFRT+51Sj6UgsD{<1#}?x)y6wgttgo+GuXpj!+XXGsWHT}Cls(8=VTDZ=fljM9;p zvH#Pa)Xw2X&t5>^S~h`wF@>FN%L8K-*K%^K$8)w}?Te1vG;z6m@>9`@O$AnpE%nT3 z;bl}(g@0Y%k1H7{i!g5JlR4|D%=0ZN($28{uF@_8==+o5sP}GFn!Y4b#JMN=EOiE? zembbifre|gVOT{HKjN6-5uiC0=6OR3GPVJ{u)xK+@{rK=IParoqtDF!nF5y+13t9{ zlS`sRaVGb5uAjTwuX;fL?YR~P%W`XU?bQTD$rLGox!rxp z{}l$E@pz$qD34L_?WthZLw@XNJ8>O#imVe=a;Q(L&2kd`H<76&BcCB}-$db$(Nk!g z)tNgj;jSx=-+8c`bEp02j$`VTW!4`VdpA5D*XOK7eFtCmF6_Xw>;fg~rr`)Hok;D_Zf9k8ZF=>jvR6HFNj@kxv`giwK&|HuGDks(=5cAA1 z=m`_}a8#8v%Ur}bvgTo4+Nkuh@utk{Vyf_7c_wnI^SEtX5xs|V&*T7;pUH;4Y{PS( zbz^vtNQpom$n5FIGDia*YCysdrs($kwRVzLYja#t8`zjSkT7!6`Qj-p?`RmHotSmT zkYSqze(sq8qgo!rQs?6|Fo@*DEgYZ3&XxDWlS)QbOzjjLL9wUu8c3dKGyg#2xPb~s z>AzHk0jV%O@Zs!eEE^2W%-nmT`FF`msjY1tzea-3an!z<7P3nM)51a|Et|0CNcqX;vS zNG-7nMv$aO6-uE_X)}BeeI#Q1j63b&@$auDF888sRg~43>h9VA%6ZU4pOV;5l30Q& zPQ%@zp_CbtUgG=V2aw(X5a*?lSNV6#&QRnJkYGMSmE#mctG+l>bpbjq}~5zceSGD9QisrlV3QF zd4Yw7^(XGlfAJ3*Du!F|lO!l|jzD|x`CzqgvN~-SQ;|&Cr`mM=$D)(M`0v^SX8?&g z{zjActywAFmjw$S^>)N2dox9DB!2*?-p!?!T$YfXydaxlK&#TUWN2Y;FdDI3o0FdBtZ94{C(m;{aB+Bv z>bjCI^@SbiEjeYuDVSlO$B+L`=UR|U6da(x8T^2ir$EV{QbP#DcryIoc-i0f1eybN zn?q+}515%+D6g&O|F13|1mfMRJJ`)RWxT~%VvfQ|aFv7mo3rScXQ@oO@5PzSeE+h2 
zceo_nKuTJ6pjHQUNim{t4HTmI9G-X?21dQktd5{5ic(!gD9Qi|GhQyNZ(EKqwadq# zmej4K3s60~8=pBLLo zYqSoRog-I>5&c;X&DGgbkklbhbjCIfD zw^cs}#!^B`uTRUOTF=@TWh;hQxCDqxTGm7j6aD-;?o*k==d2onAwAD$XH2${%k10w@Y4igL;*@Av8K1v)n`ILGzjUdO9!ya^S zZZT)K8QjywpUYqORutUoGMvgUnH$HsRE;q-H2nDCn6k%3jYnd}bwX{QXtVj7cc}FV zvC_nZD(i?%RJ?iVWDNL*jKlr(`S<@Yp|NRL9MH|Eaq8(<3z)CcTiM?!!&9pO1E}W~p9Nk@q;W9^`(u5j zq{K#jyPF(Hj<+Jr!(Se-DWk~o%GFaGUJvhCWq8oVGg01do!T2w?LaGA%o7cm=EZKT z#{S2<)40jPR-Sikq38X`gQlXx$4~>wANXz8Q3o?ZBef}W`bJO9-Y@17nbWW@1%15i z?d{9PyzyH)aH*HK-TiuzHOtm(m6SYb)Fy`hN5c!(x9vUW{i5(FTIp9My`-y#i|#x3 z89o@OdgB!R8n_jTVY>sm^eu<0`86Aw9Q6%RKRv7dEB$hYtTA}>XyG*I%wj>IG9nJgmuLF*Hf;H>1-GY z=tAKaD*b<-O0N-|O2Q$@1Ub~=uMxKh&SJ2m0^o;n0fggQ^Igc=fW5UKa*KEERw|Z$ zFrVw$k-8sq76xm22rU zLfeGedfgzd=G9Pf4z7w8Di^ph+7!XNjPnG0AEmxizSNd~Y{DEjxZfzDQPFctjCf-YiR1Nv+h3k4mS` zyKgM~Rba_|nePK>?g3{>Y7n9=%z(`p^OmbvE)vsvCxH#P$NFO;Xg~Gk-_{vGNgVYg zlks$Fh37mG6qKiB)%Nr9|DIR{{py{bs4uGO>Vf1jZ~A%|{2_`;nph824j_7Bk<~sM z=6FAw>LI>~-QO5P)xEuWEPlD@x_g!D#%P7ykpEf@eF;ZfdULe1i{q;u_$xUK9?;g< z*a)6vubwk#{cl3;%Ry^z_4yP}xvG`ozIgFLREOfATX{V*>c?^s3n;u2@Y~sO(&idF z|1z~$Mf}_YW`ZP8yRjP^8&wH1d$iTEuGH(^gM&516wj`j%{E-&*mYX%-HCVPQXDmc zG>9#+Z?$$-?%eyQzOvo`+GNWbp&JGasVpOXhgh`IM5GOYY@U z6z~$>1s_*++S}xHt6aRU^=@ygjMk!~;nytS9YD&THz@kpRa7yzWEZJ2H;aKEG{b2d zy!c&hVzyfaAzo4Zq*k7mLB#*$3sg;MI!gR!@dBM zmB&AynGA*UA;gkWQ_c8M#o{9{wG;^?PsqMHYur+6mRvD&mr)YWB<0J zjg5Sa^JXK;vRV*z_rOG&8@)NrFI+0+be%uatwbEIFS}PUkMybaGqcq;&v}lHTp;;f zU-{k;6|~)8ns3&M3tICp6Dteo&wq8$6BYEWzyd!LdA*iY=njJGaSo2v?+wSJG${WB z{a1brrl|w7T>Lw+2L6%HEm3}NXqohC^hI+)zFy>w&q!2t*z`^K{k5PG*IWI1zv>!tG`kMc8?Q{J zZ!zVk-+^lb{hS?R4VZm1R!1(L_qf)?U_5rmW-|s{ng!o@OTiqxm-zBd=+M}9R!HCh zUl8Iy9U``JsJ(oP5{K2Qwc34RmHU80vP<#PXU9dJ5JxML^8HqRH0wR2TQUkgbAE-| zWFv&7KJyL%;eveVMzM5$Ic?njXv%f6n9y}nrj>gQsTsb}$|-YgCC|&1`P_iUyLLtB zay(9RBMPYt<>~D(&H|adWX5Zw9HYzcgOCD*W%o(9qNV``031RxR=-?GOZ`?Z>5-RX zy5zv$TwMCAeYdt*0>E@{?BJod?lT>Y^kpb(G^__DSJ86@dav($Hi~gly&WnUBgU)NT2g?7bSBwVJiS zwL`Ya=;nV%<^DNka&%prnaMp*E2}14q91h8i+c@mUy_o-kC!){whGv7$N^*&dEYwG 
zD78%-FlE$6#6Ab^S>{Kj7~E3oTEHOiLUS2|xFyJ@YCnY6rMOkxG2q~2?ii=)!lXKR zSemR#9&*GYshbI1MfvnAidU{|bb*Wu+?0i-^yS30#&`|vzZ^+|J-6S*P74X$Ww6&t zzs}@c#wGc}YxS$t=8%AO$|*!}5V>k15Tq7ri&+@i^2z_M1D6E+_Lx1!z=lp4bS>%l z#fuE+dC?pwIlkE{fmPf3BM;yF6lLQxrR_cDHUBsJ&U|NJYbW?z!}05*AfMv3@zMM_ zG!q360zS-0g&=xpq^aKHd$Sm$!)1VNp15Okm7L<)SxcuO6Cd{5MXCRKK4p=UviWKE zAs=A37F7`|$6k%3qG;wus*AJSgYfbcdXiD2j-c=CsAs3jUV~8W!t@;?@FwxH)|IVa zldEYYRK@DA5u2A&M}$;qOdZc@-QC?_KfIn)Mk4oa<9s$j;xggcsj)8)y#ERCO^ad) zy2w+T!7|RT`dGGKtpOQeN!rt6Y-q(7$J4N|g`Z~TuCgyYwhGWnknl11ZoL4W(l3|k zPplEN~8>w4aOE)FFSt z$lFnc#!if$ZIUZ&BB36F8`0&sq5TYg1YXc-m7mD;&gu82Nz)gKmzLrU)3zBWSz+=i ziu7G`m{=`hkmuCJEhdqh*bA_Ql#@nkKQ=ra0-v`{@CQDXZ~CxOU{NA_jef9C++Uou z(PG_L@$9mybhy59oNjVpw`>eNViZ%_>Ci1`Y=t>e0%k9!k?5~j-J?}(a- z7d^(eTpJ2+9(u)28ai(t>}~PNJopZdzU$APTXgCK{eR~PxB`dj&i>uTn|bU``Gvpy zCiW-rt4bp-+ZnvnE)<<0A8GR_+I;PP zkRR|@4%7?aTt$qymZcsX(5ZO(E|$KB?U^|=*O%J@t>!|ggN>Oe4@2!11nr08S#xPQqjFA@CKoNW=D)6;F$nZ`tXlo0zSNTWYKQ34@AvBaUnZ}K$@ z$j3YYZvAW2qaBoRhYJ`23jm=f9ZTEJQ676~vWb!M4z>lCBNilwAoQ~F@ATI4;SR0p z{_nrlC|~Oq=blKesc*h*VZiB(4UkPzNKn?*zo>8-^Btsdfs|fs(C$>2d+pv_4mQXS zJoNRYZ<()v5u)}IP!=_QrPWjr<2T+L1#M1+3Dpwft>Q0P{aSrBB0#I>ByKO6WA{Sj zK14+&yFfQUNhiU>`ny4BRQfp3ve4D;94qp!25h9(cN* z!vD-@prZhE3LKhB)B(}k9O>CnoRd&P^up~Q%xjQI=GJ5!axdJNgT7oA%dMj3qN$6ZNh zvhvZhL60gnb;P?W+bv!z!F1uGaEnW~6BV5Y;>>OKmXUPI;c3emh`i`lLmHR|L4n8o z;fn?i<(plN^5B-6&g$*r_QR@H9={&qv%j}BkJ`9{1b9F`5T~GB@(%u+uyjJ&pziP| zXJhVw)ZleVr>guOMh#R_I8ZJCTR;u23GS^p!ms=9_#GaaX~$X`X&L;%O}y;=sF-@? 
z{#;<)VTg3-vXS#)zk|=QfTx+DkL5(E zQWm6T@oGHOkfC zOCmzZFP^%@M4!#Wgq&DW&zGTIw)1)3gBZ1wXI`_^Bp0+XSi4i2dsA(rYJy4(gC9Iv z>b&_$zZ>%hc&S^2-v@sYg+9Y&kTdw1(;qhcuZ9)rh2)Ey5pbOdSb34 znVf*y3v~~49xEIBn(S6N70RbUS@@gO$$Enjbix}X?c@GyTPq+d2US%YpRLygirh0A zaB}lq_?69ej7gF*=yz{{q#d9OwWB+p^PPwO3fPl(Hm)sXVM0LO;ZHeWp$P%2UBF43 z75FUW=Fqz6d+@f8hws`zd2D;w8D~Icv?#W31-)X&Voh6JXz59i^Y=)FH) z@N2OR>sK*k3vgX}McTJFUKi_aL%-LD^0A44-7V|l2t973w$V6R;YpaO-p-ce(Nf#9 z_tRQ=5AXtz8Mzc|^6DE|I0gH+-my7m4}xuQ`t)$zT3DtL9-GABSm((EOW1jQzVwCm}g5J!}=P5o1g8MpD$&^c}sG= zM+R2AdkkysiIeyW)H|7h+B;DAd*-LN-B`(=CJ}$h-vL;31orb4yABC`cPF3zydRFV zZAX#D-*Ur}8YAI^_DozIY5vbyKEC9brIXVfwoX)%d{B(^hoBgc@@*uKZ<`-xjvn_P zgX^A%{-F|E;Cg)v^)d z6FL7OEH?00_YNm*r+xtkszg`t3jsn?-Z4BenF{lIHp1-ODuUhs*HNN-Rn zA7W$wtNzMwjAg4!EL%A=g)baNr@Cn9->H6TGwS0Shf54&HwXWG3XjSaFraQQ2>{ z^lRh4>PlD{aVaqXAlo8H6;Sjw`u-s>1`rtvNXlz>rz4KT4}yVxah-*4EiOW+6WSIQ zR8O#@`+poYJn2+20L{o9WppgvS8`l~!tuY90)Ef{!XGEfhtzo;H5-pWnIe`f`hVb6OFlmmecvp>9T^O*nHoIZo?=S&rJJ_bq> z)3mzp)+wT-q6qj21-qZ6mq`T3nz*6JMkX|c#ISst7n%(TE%pFNRc%t4zN>`hb+dE6hQP!l$u9sSMxo_`LbrpusprZ%rGe+;q@YCVaZq@6L?gW`7c&8{Z@~E?CRuh zyW&B0#UOI%W#C(%y8>uiSU9MN8mxXct8zohf7}^I8O%M=(5W*B?)< zrQL(>=*OAq>3@Emf8vp>rb}ct*i_A2Af3V=!PJLP$*5(Jgx2nlZGU`vn7TC!q15*N z93pWb+km^d_NDYpIE`q66S9yQrI8dc2Wm#Jtxe$IT5X$mz%z_>t{B`GqD_F8hu?1U zR35U&1nvwuVhpz0>t|Mt)D~xFi?{Cx1-{JP=Bp+QSdm9X=iMGh9;1)k>oq_nT(Wuv z%){n+{Sp7w(u86z$r+)R=sji;py4DVxS4=W=hDqxxBzA&pA6*Z#>xAi?UEpyx?#O$ z6V>eT#G<-3TT9=Edzza`3L-(z_WNGMjAR{`#JdvWzRhvOE zSrA~V#rcKKo%5T&KH&@M5KytbesD-Y8gWQ=bq9cA-wuGH|Ba0A`~qIt2K77Ko#jUX zveCqddc|_bes7cTh%Wl9{SX&Ifr2|UEbI^ru3jM|e|ATWhDFl7`r0mT}vL(AnvO1!y?3JCkGP2J)H-2xwzdg?9-uHRG zU+>rR`Fg#cPe3)Z!$CZl_O4(51nMPCrR%xG2Y@33drZxZ0e{T zTqY!N4zrZJbDGI9XC+a`dEu}rdX>_9)4BXTQNyyW*5 z_0MZ>|AkY3O$-ggmJhiyq^)FHv+6?Sh};^FK25p+Bs=f@$>s)g03KX-0E&7ftbEiHcVgzad?blPaqi9F3;r?GF5JGII_=psdjeO;oAVr=Djxe4Pg~aB_#ox1Pd@>G zCj@NQgd3|}3k!Zq`qKbt0TyLbfb@(0GEeK^$&cX*Skf-xL|w872nd@3lr8!pq-$T6 
z!i^h5;9h&RVexqGfCBE9BK^?k?0~8qNHtLpTrCwa>Noxeh^ymGY6jr*#ksAhTWIo+GfIEaBAm{5}tNY(^95&46ciwB;FtkS?W~xci&Mu z%$jKaQ|UcRZ5r+{es}d8^(3zJ0xa_WURzgV@#T(rQs2Cxi3zCLkipmM&u*Ynj&*ts z;ajaR5b$zYK?QFKo&+8pR6pzA1Sl7D7g+JaAmn+_DQ7==1O zo*4u#k+4;Z+{WGhrUD0lK=6T|y$A5LgkB`Y+^?07TiX5*M?J|5r?DAmMdXrR- zK2|z;p@4&bIuz%;aiQr^cS@7t9Vl_;uX;V;9pCs*t#pf7;7W&aQO0Cp{VQs)_j}`Z z$j21x&d$yORgZ&1WDG5N1H@CmzJC84v%tqZ5j7_F!fgfIkV39eB|Kwc>Z~@EG+35Q5C+|AFBb zJ^U_MDUyOmegPbQ^RH)2Uz$GkyMw#!%W7Gf=5W`bR< zuvW=U7D0z~i(+9{1FVf9L~|0Uxik0ijd?NUZ!4g~0I?Q~H?VdL9J3yPBbhHlE>ANFIZ}48 z0)ZX$CsTkR_5PLrlD`q)`IGmNH9QZ7wVrw%7t4dol| zFt)u9;`Uu|;6Q>c?=fCcY=EVJjHwaiRjIj_U|qmL&9NIujw7lQ=IGbBPDO$ivTR!R zvM8h*7bZt>eBk2d03$w~zR=(vJ_pkltHW!qJ zm%dUPwAWr0%XMd1F3*WhOx^3}>Vx?`O@vBMdsMezuQrr_V*=W*&z9$Vp8T8JXSxG4 z(4}9&MC!6(t$Z(VPP?>vuV9v)=r~AY(YIwB6X-hSW{Sfl?4xsgBfrPAH$n=SSnzg&HLuUAuMH^+`t3H=v)9YoEkFVI-Ej9Ni z@7uI5+n|lqL1(K1r%Xm}^LT)~+7Tj*=Yr&x_*;%80UMmH=%(m}V`MzSO)UI!7{?-)aK|^I!eK|I2{$`Ex0}{>Y`a_3rJAtqd|)9{ zIV-pwvJ;Nt-g`p4M5$wQG?Dr61Qp>&Z&%yaC`?Gvu{*F1j0j(3W_dKuy+&Es$FscP zJZ2Tm(er%&2d#xX{Q9W~4S>zEZ*HInKN@k` zJh6mDKXklt^5T|#(qZc1&hI;O3!fWD=zk+kI4oqSwj5nFfjGIE_w zeWNstQ2OBeF07?PFTF4h2%Ca`XyhYHkLbHx@^+jodN3jWSGJCBkmL#^fjR++C@4wq zCdKI^L;G=hM{okyRX9fs1sO&sM?_+J3D+g>L)ATV^kagW%IZA!lOBJWQb5v-O=J!s zWye#5Dk3JZ1e2IA9GljofnX$bAyfH3pr5*Wp*KLQ>)q+y(WNxvsol0PXO4s&L7UfS zr_6EQ;}7EBhvfr(=2+TRsY*QLmg_mx`>aRj2Rn;x%$_3m1m~0$F2PMl3wdq0hy78q zp(Yn+vDFAiGm*MiDE(WdaOJ985^|68((o>d4M-P1hA<#JYP=ZF;UQAA1{>d8iFyzm zeVRdH*55ClDzf51&=)$_RBiJYfq(@u8S+st6?WQ)L#Ko>yr{E#0TqbnIl7R45n)oc zGx9^~qaJG;+eaG718YTI5m&An4Ik$Q^!Ovh_;1x6tzNi#U>w{pQX@$H!|`+2!6#Y> zFjA26k)(fot+tD2OWJm%%7%GFg}4tXsrnh>2m~HK#?$77HmE>8SZG#hVY%~HJf04X zl0Cw@viy8&9Q$tka~fsLZroTJ-yvxsgX+71DG#ap+p{!!lgcY;wok+xjJ&rxvCYiR zsfg9k6)_~3*5J{FWakY)pQEyyt^ zo(A%2YF*2JY30mblH{b6mum*F{jBhPr%IeEF~@d6%&$^y3~8aWrZ;h!PDxj7+4WIs z7K<(^9ZZElY;|aU#eiuf*-Ti#`E0wqY}zgdS=^oCmlI;OUZ?Z^qlBdi1#H}6Y!EGHKFuHk2{;u!1Jq}+AXOo#g#T=s4^ln z&j<_;R09M|^;duxLMUbrQG3Q+R&lb| ziNE!69L|&vC~H5aW5C23q96hzcnJ{1A$EwQw 
zbp30kY~TabX~%-dJoPq;7myM>c_OHq2&cmcYrRn3R9;MmK~Fl`N;*8%cFJ*1X|4Ga z^GBKzx(N%h4Y5xvdWp3JI<$7rZ_44B?h(G5TiyN6($8Pe%h%9!I=*gH@^Y{6M7m_r3Juo8-36mNp=vC=_U95xz4V9D!c? zt1uANv7PD-j_1cR?6fx{_f^}E;ytuAVMqW1op!2Q+Mb}8ZdWD5Pc=^NW?vN_M&4^~ z>*{*nXrkg_6?SywL?c*aUAu3-aUOfNyV$HJbv2h`c&aek;zy?JrKJDdqViuQ*w!pn zwd_wd5jI9fM$XB6cOHq}#0bm>51e#4{Qh=EZfx#GmzBFr$?=GtXG7$-*jsCdS=+Z& z+435!9`96_`a^&|%f_jh9$7b8<(V0t5Oc@k%STS`(Qh2T@AvdZEm4w2S?C&$dmn6d zn~}bpabg;qW{(CFwf5nrQj3Mm z{92NnY(Nl@;DqGf?S?*|-N&5U8}`=Z_cBgzE7AXLVe1 zV7tJ8aeah`6xcf??dHGU<$foM42?c0$AW1y< zaQJc={zx7gT;Xf-KINh6pSv^X#FSC645yk*C{XgJZk_}G5!7V znV)T2Hm4Uh{H#pp?RqNRC4BcQLIk~o!QRbm`kT`fO^Llk)H%lq!JX}aGH;&A&ZYZ& z#>3-}VjA`#dCDS|HM)Z;f#osxdjF@YwX`fWP`(9wo?=4IS<*^Qktud3rgBRRsB}&D z&Ymt!k2G&8DZ?zq(&t&d0X0Dox|I{iD){3q&G@60JK6IW zdKh`ljI_hzmlslsr}maj(9`(pjXm3R=QY*r8~AGGidyM=(x9; z4Iwuht;=^^g)cdXt(2Y|7iu)G0?B}J-d}G;pCOwx1;TVYK`N(()}N6{UB&^&5{@By8~6a> zzE<>jjZXKacQS-lG>6{wEp$i~ zA;}a{x$%#B2=wYcbE>ji8cro1@g}a0v!o@Zc&yda){VVAOeodM37SMuG z&##1_SLlanwG5bsa9TmXWy@s&_^YD0y**(9iY95w?A6)RRy2Z0pVc1x?tUcu42NNO zaQLnBGptuLiMbWw)Tl&Yw!sN0%!6wa5-%Oju~=+aV!j1Jl4CaH7asOqHKtkOzkC62 zOLEW3&c66r@;&bxR~Awy*2>boF88j)ptZWN<&|*LV)mZ87n&o~NXX-Ob;5xqfHQYz z1i%{w8s}{>8q|tX$eIc!3;x8>L?OcypBedZ-z?+LL{ZN+{-fpy`X33+16jRyo_MTG zLK?sN4A&qe{s!B(WQFLe+wLj--zW@Mi6>?NMPNRbgBbVgCk7}X$e z@*Xn0pJp=+_)5HZy$ADmDK)B)+Dc?iD_%-tp{W-E-?6I&W z^&N&b@3v|e^xwZtHyZZqug;ko(rC_@K+>=}G^Xqf2<(MF5NR>UlR=rXbmCz;#7T>q zu}C>R1TR}KsK0H$LVs-%erFZozsX=4S<@$|!j@$7M;Mn;F=|4IQt-Q_!2={at-U5K z`t02bjskf4GI|b9<($C26YItsqMZKV|#hL=R8b{-;@+!+osnlwq845pnb zD5h(n(_WPZhr#kBIOs!fNaY~6bAEYpQzGHmLhDvSKTMpBuUkF*W+XzcaIL2@wKSzV zlX}05#K=;eq#@XP0d@2R6^HaiWx|KuI>7D777X5z@fX+kN-BtuXPLflD;STLs4*RM zJHr^wb4@Gap`|7RjTr%;sC}4~{T91EVB*=)I(G z3uu+k*mmL{Sm*>0!-zdhjiUT_RupkpSmEq1BPr9lD~r=K;t_rgdD>?TA!#4Jq|+jR zE6(?(ZVp^hzdPkrezP3@N{m;kpXa4x*LwfO3)j!5+D+F5QkMf!=GYbk zYgYW?^+DS`UU1+KG_Az`SUk}68s5}z9Ep!tD4g4I*-1_x5U*s)2_Cj6ZOn^HA% z_+;hy4qcYqb(7;ybuc&iv%%C_X3KOy#sh6aO+wq91P=HT+o*3-zY@60nip!i{jCIfvk@w;0%m 
zHOfzMYMjpRftm+(>uikXvR0fbVjhf?NEydT$W8HS-Ve^7wYJeuSIrCIJt`BqN}C4W zt*ev`3d??}oUuz4`rc2e>cvyn=DGK~$5jVpVh z1j2(=)Tsnj#oTndOzPBcI%In;a5u&DoD|vu;J?n8L{KunEzkEx!GSiHAJFZ0U+9AY zzp~{?oO}C>pc;c99sOveL|lJRc7e*XF8;C!LmfBcv{9aroA+lQ5HSyaKoGCMy#RS` zHiHy}ZmIt^0r!2ME+i?MS1_^8hDObSMvyLI1=Nj%)*aQLNy!jhU5Ma#222ZZQ{K)5 z0qtd(7#->@Es`EM!~zTuUmzL05}i|X-qGoKPpFlC(v++mD6)A#bce7MNv4DJKrQ!b z%QP8esMqUTSVXvS~K)U%9pe=zfnD7#$uG@rR*kq5BDoH0GX+6OaG0??k zNRh}b$0^2|e!AJkeT8aOr8E4I1GZ%8T)C~1}TFU%^=iaqCj5?LN?CVISbGKi% z?%bBz|167ck(2VW)%x_z?ab}hOtMeK_+ue)FKD5jl}^^r`bpDyCVu@nhC)`Aqf?|n zUU~Pm9K+&LYUco)i%C=0*SmF+O!ddFd%iJSerO=nisR%(_V@|0{O{w3>%q*x$=@*5 zXPkZp{P$aqOHr24+W+^@_SD?}_n+zlL%{!ClB44VAxGMD8Bm4Le23R7g4g z)u81j<*O7^eU$`JKwh z-yl)nPgCnyoukYMW6U7+l7GbLkJ~z33jQP@_I_zCe7}4C$mg)O&xLR>#Q6 zZ)%Tr9ljx9HDjR;#yN%RWFN#|2a8ej%l0AdqT(LwHuQy&;X0ecDiY`8lXTY}?^7Lm z?#DG23?^B;woj@LWj?Z#`*s4H z(mOfh>~jhg$X>b#2vI&VhT}a}32QH>_0Z;?7FEkD`S11%Pj-3?SLfavm|Qcd85Aez zR)^ON08g9*D-vlZQ7xcYTQv;4C=0r67m+pf~sGk5&%7X-96E0!S+M_*dd z%R<0&NPKmyF(Y{?F_f!*FbSVGk93<65eF9%wmy9SnB1LPgn#O(g1xq(^|1S^sh$tD zXA81mmuMPF03w@+H1fLFyKl%lZ65?HO|f@{ zhulW9jA?eyZc-ps(>WNmH&lwG8b_W}9%iv|7oH#dt7##s~m1U=X2k+}`&HfOb_X!O!W(&gJ?sts?Ps>OnrV&s5iU8V9LEu_2T3(>js z%k*tYA#A%FxmCt3OSz9?*}dn*+~@CROPQ3Ske?reYhVbe@)Vr1@COn3_VFRBTRd5I zNwT$7*(|HRzkyZF*Xi`snn4t2PU`ffgdU0r+*;PG zdq>e^szy}pmOgAJ4tz<=msLsG zQ!rFHC;Km1@KutRK*Dpap!hHK-I$46b4}y4K(An0zH)?ngX{ zI!fVn+ug5`HQ>*p2Xr$^^L08ZNV_VE(9G6IuB#8w2$Y=!WW9Jt`^2R*D>PPnL( z^SY=NLg+4+f?mI)wkke_lnqBqU1QpA4n0}J1J-426N@iv`sSROQjh;h&^#oG_3-6O z;;!sK;y^*Zb$e)!JWwgltX^6Pba5h-^Q%SAg`eSHTbs#735QYwpKvThGaS1U86z?q zUK#9=iep6m8(O|nG0cZ01a7L6(OhM)5f;4K7B*bbv6N3&r04;o)&1{ z9lzj{a!|cJ5BSvuuARNu1OQhD4Q)Oil8)?%)L6IiFyqO#rnkRk`Xo`iLJJmKnTP&YE8R(XJckJdWms-qoTrsNA;286K?(q z&08UfE1cnH3X&$SL(6|COJ?ogN~f1f`XG$i^p9VCPrtgV}^N0rVbz|=7{&pM*fr{7AXA#2Mm zal{@cpdo4`SK~cO>so`yUTw)m(rRK7y>dYt>0KnG&@{F6jz*MD6&~f`P@MxWhKWd} z9_(MLm&D4R+%YP9;k|j^Dr0KqX^Gs(*R5ZQB0~wi&$A^qH$wZ69Xmytgsc?{1^xM^ 
zYV()APwX{=oYaEQUj0!w@)0?{PnHn^&wxCGa}A$`ZI=0Z^Nip71aR0?mjsJA^U`sX ztP}QRZJBh&s6L0@X)0#Fv+^zWT#PW)8lF1F~^jiUH|yb)SQ#X5A<_$ z(_`Yf%wdkN>SndNWP>ZZE1I@T5S^;Nh0GcL(09vnC{-Me@E>?AOyv&$lt* z&5i==CLa0n>C7vm!my@40k}UGT2eHOC?-<+$@#Q0cWDu^S5X{Lp`a<8vWc`VZHwq8 zRu{aR?8)6^E>Ka-q};TO-n&at3A-1Q@k`p>8~^wY291tdUIxrMw<86LZ8g*}csYa9N)q8p z$d4DF>m9)M1DgOv6-vk)!&o*T_;}`NTqRs1v^ApwU?~PKDebEoq zwRf2@S4hc>d+OR8OEq)z>Qxh3l~Je$4$HQ7(S6R)uO$1j`t5ads;#V$?=o2kKpzjp zXuOPfm)a;4%j+rk6$ zor72iiUcv^0G{B%#VrS}0pFJ^b7wQOp!!_gIg@AaoAU3p^TT7z(!E;W)PH6G?@)X~ zH&_XCeFf$2S3{$39#bxE!gE(uhx^40cMvQ7V>ZeL%eu*R3T@Zbf2)!h$Mh8vG-qaC zYcEMP#BJ(Q-1~(s5&t~$Aec1>5$kYXvCr_dAlhJ zhUU1_;VpFzp$DC8h9&1Wv0i6{J;WhKZa~FKYRqm=h@)lJ420iF@X;fm7RkuItgc9f zZwWbKQx=8b0R;bd0QI_i`U37Vnbg^ob(LDo9Y4Q!JNrRQV6xP4!#BZz7ZMZKUD+qM zf+gqb?_i_0992oR)?tX0TAW{7rr)gj{5NBzlmqG00a)QOaY)V@OO-;N(XeCZQ6Jr* zJ9j&cBIfI=`V}7lwZf9+<|gt`>8f}{q^f5}%{PRe`USFlyX2o-+GDOVzd-cbGfb_; zLE>}8aqo8qCw95nNU{;=oGSzv%QeEGG12dSA@4G)82?eX_xg)#zV##h?F_`kvIP<)DGiN1Uy~!X2HQ*ROiadK zc4?;>59dvZ<)tw2E!%u4vcy*YOB_(@sBY_hYqU<&B_`0>toT$Oa{~@UU*uD(n7rOG z7AfZ&Y@bxlxpz~qywmKaGB2tUe)W7y|DBQQko&0*M&vO~)DMeMoB=?5Sq_Gp$e0K& zkos5y63I`$p-UrEA+z2#ihVB_cM*Ggc&Ec+R&;_5690)zNJFES5gW_7$y?ejvPUZp zmPIf2cLjXuzUef3-TAVfBeXNV2gKgJWl~;htn$U1DXbf%ASV^8p7uJ5{ZHmCQD&V4v=nN7z=6Qii5 zG8x-;cm3@XatG)w8z)2-c$252#zk3+$kHouHVA!iinxU!sP6XRN|q+8za<)xDuqdL zglJ#&J}?C@ zNIU{hY?ATh#1vu(8w6);+@wqdzH*T6JmvCNaYERydG-;Fr6KM=r~o&&%uuSI-PtbU41CYEwsq@yiCK zCn#TJ_~OWv@%TdN*hazy|8Sw1go}I+^d{cHS~^-@ILpa(RWsG_qAs(fnOmi(-!Xv$ z_KQ;)KDT1JVW>j9t5#Hv5c2lOD`hPVnnsFgyYeI+CL!NeGmla(0&giC0#@<%U>^DV zo`^VZGw%p%J0JaM2f>PqCa^zs!Xy$hQMa}2?N1hb@xr6AdYF6D2kCSXYRS;E?gfOW+gogB30&?WN z#i7vor~OEpUkSCy^H1w3=DX`zO{v-&7f4^&WDVc%bhVnZHtzp~{04|rVf>{RMFQ&0 zC*MUg>a~3gyb}}?%jta?o*Fnh5d#Bawr$akP>d6pnNUnl6a=$aDnR$x{~C6X`4+Om}Rqt6|_NW6`m2U$FHZf8WAe5wwqX_@rB(t1ej7X&;pJmB^o^@(S5A83QQM$@O+ zr;nKMmx4C?A;*z5%!k6in$X;Ic>-oM(kIwpQTI1lRypI>Xl1pwzhB0YX?Zg`oRCYg zc(kt_gx-%tKPT!bQ#7`s)9~3{lk!w_;MyL`imo+i+f-LA8)BJ^y(dl);~J1A59bDY 
zH~#|Rdt>G*6S~n%049CG++@n;j6YAXtR_ENuoQyng^K9EtV;SapXjb*<5s%{najQI zYnVYbR*c%|*;#{aZUI51uuHhf#O=g8Lg+~-f91qxkLu`Q;3C~4y}#@?>7U?7+EZuh%H#?svz=qo@i5?ydp_|b^~ibaT`az+Gg8@=Q3p7 zJpU$w;^0Y&(Bt!Ar}7oR(~g`$3td17+^IGV)L&ugQTaudLS3ML^pNx>f`9Ads}Fl4 zW$cFc4@1)FA9M)wNWXbBP4r(goc|a?nc{E!_7q2RR8>)3P@gijxR)qO7B+O56;j- z=HnvvAD2+MfBKiU$7`C&fnAc3e@cCgiLQd%+SPyo(Yr{MS*!@XnwixCJy_inYCZx* z^^8y>dHy~c{IvHmu<@@9A1L*rA+N?EtNN#utkF|k(429?%>{AN>#TC^NJzh#Ar;Y_ z6XS2%{7*u|W?gkSEV!Gz6CZSWX-e`! z<0xBLIPvlB#^-~NjH!LYWU(BIg=Z;3#+Au&FdMzdsX83ZKkaAEedOhpcu|_x0I_9V zqPCmk?A6)%?wohR!N2s~pT#vqJ@eUbc($;qP&v#XYq!G!#C)#`2g_P`3o++K+0&g% zF#BtgO2*K;z2+o!IQU^uYtyZnqthX18_@qg{m1-3IX-g~2G#a2j{bvMs0@G@`1^@$ z=S=`(w5b}q@Kyi_)!Ro5#DJGI$GQI)Uu2Zog{Kb5sTq=8Lp`d`PzgfDtx=>m6Ma`Q zXC4wG2@hEnpF5M#Pk<}ULEMreCCY!j*1{v4L#+aZP8it`lyo|j;6M8WL;n|&+sO)m zXpQXHY==4URvtN$4mN$717$`81oj_%v(3VJR!)lkBP*Z5`~~rCQnVlWu1knoC+ccy z#WJ3dg{*o*PPm$ypS#Y$x37ZJ2+KF2qF&_C>Ah~VjLvY2e)P}KhU?BJX36`%WL7Nv z^&{Gv+e3(7#aMi^zgV6oV*`;G^j+7bD9G!S{V>og)T)NKeWeiOyYH+xzT1aRoYx~p zkJp*hN2IL9xLCoDgX{#z18-}Lqf32#n)%Mz47AFU-BU8iQ4t%b?E8`99zU*)^!|m$ z8#fh3s(tJui!qRL)5(qP7L&W0TD7XAb_CcJtL6KqZD0^{#7e67n~K)1HI-day+_#p zh^JTIs&ouwst4;*c+EBO^=1dGgIR>J>CeHvrhcYF${WAn`B2Lhx9PAa+W8XD0igfBIe`a+66fAzo;mros`9uaT#oE+ zvEWA%tGT_|-iJJ&IQ$BQufds{;d(UF=Tatj|NXlBq4J>FYu}% zM&rky-CNF6tVl$^1lM!Y8~cN}lSPq;qD`(SRm~(HQMrNwpSJcciQC4uub(z`a(&Q< zlI_OY2ncr6LqylHaw6B*a883RzvVazmCY4b2zeSL$0o8!o8D*EW^-kVBF_;A#vyV^ zmz{1mI!BZ|PctHokDI?Sv*L|eS!Lbh-lcALncg%rN9(VDnG(H0_reF$(0O#fmx*#2 zNlrJUVhwcEnelnI2^cUo@Pe7|e~rbe>k$MBjdtn^I+1V_@XkhiyOS2w+$?g*Z^Vx2 zzB{FLvf>}ut?W?;p(wKO$mN~}&C`Bd^&jj%Ox7d-3Ka7WYoi4nDCqTlEL>#HHc9@y%^f zxRFbi-mDZlVZd${Yeaf8g#N8XM@-7M$RohOaMc;U`S}J)mlFKJOPS^KFU%M-usL+q zIDYUbdTC2{`}%xbpvL2w%rb_k;7%{TNJlf3~rbe2Q@o)q(tD)%Uw^N;s2@} zULfWkZ}Mb74H1+`==^bW>0mbP&j?g3=PTJNmDu?1O6A3@)y#C)iy-vpY#H3 z^tLXP-D%+Z54{1IgZqBL(!8j5(pnbxdtj5&?B0(z#L?Pi@J9Q4>Vdh zG2h@wF0**bL3>=L4P-tBv22M#!bZ=NmTcL|3TY1inY#6Lk8N}mVdJCK`9x#hjlB|21PB848OT=PNhybgZ$P!T7x~p+$$Qer2ym|$jIor)2m*`~lT;0< 
z2J`cHL*CXlwZ@}%(P&%lYNbJ!-G1b%cx}TbUs**UNE3P#*SC;7l|dH=$TRM#bn?8~ z&nqms`g^k~tW-Xi&4m2d^|v-WRL+sypqN5h8OE=ydtKG;J%-!q*LlMWzoB(OUO+%r zucjhoS;J3db;^Fe8dCyMk(R^Q084XlKgEqxG%*JQ?>zM_kHHmjBJ>Q?4ajsAgM9wP z>GeNwT1eJ`gSKs+9KhDO=fJ-IV|X-=%_5&>$?DgFgSZd03e zXxCIihnk^NrR((h*XE*gZqSFyFg{HkVflPV?7qP~$Hq_ESNG4nVc79Wn`Il37=2+Z28oroJR?B`UiG}gX)X0c^4J9V@Mc8Az{V*T$ zPMT z2N(L#nMU=0AB{sQb1MBvb&{^cy7fO5FGYEc^T7NBi5%P3-rI^kC8YyaucEJ}DsCNR zp_e0Vpr1blA2L&&i9_d%QP(P1NBMMXpw)h1;-`@y2p#Grh~u&p?-`$;Vuv|Hb>`!> zw|Km&W906u`KWq#0&FQf{aNGe!sv%VI)*M*zn39tgsZ7F%ysW!sH$u8oIuEAkz?SO zjqU-;Q+YrLqIkcFZF2m>_VhvQr+<*0XUJ?Kkqgx4Zs0y($NTV30~w|xG`XU0%COd{vYig$G~wG_oWMTUxN48F zL|y%%Hw_;B9Nb3#o%gU*_K`g9oE-pPKR?4)3Ja(F6eh0g_!>7t%NT=}I#-S2G_d){ z;^LmivGR@lk}T_0vL^YXN&A-9b5&`V)3m9JvZM>yAU}dQoyI_idN_aj&>kTTHE<`z zGIuqR$6sQWP}$`D?4v{{4CNBwe&r@0a=!MO!Vw^n3~0bH(M5S~3^BLz!5^Mc`L9YD z!lwR0mnNY}trK+_MVJ79v;$Z^Er16!&S2zFkFEjhe8KOX zQ|=ueYTq&|h(I?=F{Pgjd~n6*PyQ7TO{uA=cKl@}?6S5r@RSa7cHh%xfbY?kt5>wl zA*kyvujq3Dv)1kxvC?TfMtt+T`crMli_h zsFb1}zMd=mUi2{^Xxxk3T(FUHGMMBxaOMdolGPT0_I~=jdM<0wV|0y&zQq>DZj*W>*oU*5k40Xu{26mJi}kBzF;g*9JFl>FpH*~C*|OZAgt zRI|{!Gu?G=IKPi$fL8l!p}_&$;$%I~=L!Dol=q-7F(0Sy22XY;3p&DoC6+*zMxJqC z;z~W;9k1^+R3|&{f)xBK?F+h~Nb7L&e1SG)6KBg16Odhmr^{$Xl2`chIa49&SW{BH zJ3xEJlGGqFxD^+`}c#Li2)c1n%5 z0mP=dAC4M^8(jz-)>bWa*vbD+5H5_Gz^mY{t=~a`&Bt4FH93Zqy2K^ITA0CwF36Ao zCWi;gwLK2wHz~h)Qy}^;-7XyT)54v7?90yLrWMDFTQiYHsc%eu?U?jD@ovJfq8qh# z^#~bk#wq_5G_^tBobz_SFYvqvCtQ|9zEfvLN8NmYUVNGw8B_?Pvtx>+R3*xx4x*|K zwEgO(@iWM^SK=fA7dV9FDA4#|+=_pk5*WmxGIy!Z`_arm1?m@ltD~cviB@nP*#`de z#iG_8hy^XK$CK=^CxC)8a;ANI6mWOGP27>zCEi^@(_tdQDt-Gnsi%hGonl%pU=%qK()!!9BG8JJ{w5u;;)??(hp9ucqd8 z+%bhTJUSb+^hAk)*v^X*gfwt;wV1xlvq7#JfwjDYQrQ-$w-5vnTzBSgOzCygYb};m zBzgUl*==Y|m=Xl*5L(BHUfgfTdIVolBU0mlPXZca_zQt4WdBXSqbS{n>txa~ux847 z2BkJ%Nzpo&YlidPMR-z$zTg8=?gM~QWRUYRJn?NvvjU(h_Qu!d!S4~xRR;gIAU*gF zSo1SOar;4HUXt)vbf32dq`xT2s#$2Rh6DZqe-Z!aL)T#szQM8v1a*>OEY>IiX&5BD zdT90r5EBQyhkzRxG<^kgrBD;6E%uq}>5kWGeIb`}YK`Hq*$|;sRiLJCAkW 
ze*^9ZuSZ8KZ*tL29aNu}Ux7nhSEnTo>E$WceiByrP{VKT4oedN5mKw@aH1xqx(})Q zn78#RA$|ztoN{_p*OKWETc}(=&Y!k|Y*|HA03I_vEpp>)d|Ex19yxftJ#mP5gnU)} zb3`tWnhW%i{vnALR_;+%sKN$306&zZqdjW&)@6V_F%L@F81WRC!a;UyN`*qwk zJ~+$S3Y>}pW=LE4383QyXQBA!63wTZW+4q5`S8G75Y$$ON6`DY*ujA(r62Wt?F~Cm z>9=?0txouiZ1svYh!eTNQPGRD`()53tIp8vPS(uQ0sRIP`IS@4x?XgcFU7FkAM)W` z=yseeKXO=;)E>j!@^?w!#3!TgHj@EkHX69ClEPCtZ+xBn+l%RG*OKOq)CZHisX1sx zcT(F^;UI@h=;;h{dcqMfXUi1H0lnCS`A{BRL+nR>e(@~aobxc$N~g{%|DAAYKEqrx z#Fx{jI#SMuR^*x4`Z_~b{@2{cK_FCl3V_v}a3-Ldbw48W9ibGFo$_ev{f7Bu_5j;c z_Fmr#<)V8c?eS9X_Y#mhiQnRn{7&bNv4gKb0lpG~ad;mO>ycHd2XSC(X#bSOd5R}Y zWY5COy=?s{FimS+&)n3skq={Wma92fkOAu8!h+a5`l8PUpus#pU7lYw+FO-O=tC#| zvERwEh?%kGSb)>pfx`QF7G>M66ml?^9*mH1VO!y=J#iOaPvZJp-rcZT$+z-D>OGX9 zSu=S@Q_-Kq(kqa5bdJ3R{mie6gE@Cp#XoMb;e)G{Z`YU-YF)V>IHLHn3 zXQ%`4RIC*}$X>{=&1zBrp66Z^DTZT?JCa-p8=w@^^PDmKt~iVVjp(Mv9oR2z2-eZc z!O+i%b~9Y3f1h$G7P>B1i^xCRGO~Qesjm{2+ttd>ETmx)GHG@Ba% zp;q1uEcF?(ZaOi2NDOF!guxg>srgX7A<1r-1lXV_VdejfGs$ckV*0k2&!C<( zrpqXVgoVI1`JfJiROdsSjV);wJuMAap_Ze7?iR9ElVl9;f`5=>>6?Wz5|#f>wWH2t z*+sl$8!%_1koSO|kgjywcYfb9$Bs15SWHme^ZD@;;)ee!^UHVaZlJ&S@YbE3o;>on zNHOZ7I7@*CdUY|ah5K;*w!%BE!vc1{7HEq)@NdO$0O%U(6@Ld-cZyY;cwNr$qGT~* z2h$BRj*cx*mJBik>tOx$#xtr=3rmIsg=;@X#2)^9nOv#Eon5^6z!r(4a!KK^yp77;bXF z_{o+p)u>Ra(SLiRlg;x#&smU|;ugT(&xazFEJ{fV>m%Z0v1hxJo zab2+axECu4H&B|hsD~OR9o=6I`n8am)PHA2Xb1X0ULI3@n0UQQ{TcwyogUB1WRL}1 zOmX4XlN;{bfEw61rPix?XW8BgL}2s&+RGNt&1isA0W$XxB)3B}BBVq<2e6Z<0}?7J8YN#H;3$|NI2GoX zy9{}Z94&ua8VZDyfByR>hw-*8Yo!LZWdAAhX;H_Nl@F|eP5q_r!(Qu5p5S7fBAIG&< z;9Do$mXd9;9G!C(Q%YcuVU8I^=YsA<2mpwLm-DBlME4>5aF*OLzE%#MWN_F)lk5b5 zgFgzp;vqwbdl^(A>tVgnTIq(~EuL{fox>b()^S;I?{IFt)?eB$_=56QF|x>EooitO zl9r!^%$bHP@#7a_hPKTz$kW%s1dMz&+d7TH&?}@ISgIt2x&Mb^r=3Z_z zHl8m^W=NfzXJm&&8;P99+z6~?^UR-CR4v1ae}VDHK$3bu|kBUu2Hi{p~0$n z>n^fw->_rB>=ycz;&=VgBJE7F)spNnT-@?&##J1;Vf1`C9u=HWTLRjPxTA`GRm%3?Zz@hPsZqjw$ZTu~h6i-3Sy> zp3EI0O5B$uSH^C8_;s&@Q0#@k^Rwynw5vvDtvWhJKk(X$-tD;ofXpKI!K_yy@HtD* zTT9nhlvf1pw-dFB 
z_x<{KY=qOx`QcR7qf$QrKQ@mF2FsSTlM(0wlmoZxdr{I#5?{(|d2=7Yrr=jgGe(Mj zYJ!j${_&_U3%4zVQM>Qgu=r;D_c79zF$DeC&yg41bAOTXmjn23zpdZAg(09ThsFYJ z`=iyPk+Z_@Jz5Dn2E&VZpZ5@^xX=@5x@$ynsyO#ZrkoAPrf82uQEfK9>M%2jmh-UD zEPXGa{YZ13n)SHb0YddC@Sqt?Ktn0h1|YXu@chZ{E}hG~3p}OPNm#LPX8aCC@50CP zYd1CNc78?g9svzv`%spZNk=}wNehT&cD0Co6Fhc<-+_HCoHKQE$%l;=^;x(PF#?@S z ztl}QzWbJJDLBjVezLKXID2sh+>aK+l1=`wbam*+bEER9*aRDot$ApT^&t7eEMa-gh|iWLg9%I*AZ$y7|8;EZT1<()aZ zvENGn)am8HoB}V(bb59cGR27>{G0*Z$in$HAy%;QF6C*U0rc`%3t=(LfFPWOWW|md z+h@p%5hm;!S->$v=}{Ny`M;cdCeMc2N=OJQ=>=MNQ;9o^AII$scq|5l9GkMQ_0#_3 z0GTP+4*;KGi?ODF9ieBtLb;SbmuLXd^=E6{Pn8bkJ$OZ?)5*UmaxAFq7dL^j-xTLU zDZ+aL-ZKcWi>SvIz8JY;FD;+xDtCy=m6>AF@PJ~zSgfYwH6LnpJo)pFs3G=}g*p93mcZ2XZ!Lw|}}!L}*>6jHY*oTkn_bK9j6k zt@8vf5-o&=(Yy}`(E-CQ@GGQ=GY{5h__hkc$Qb4ffOMsNO9-=kO`aGD6C0&^>O~Xk zY;9?C7RaCt)z~JB!2vp(oy-o%Z%)!(6=cq?u?ldxL7(l;(eyK>R3`{%LicBX&ATZAcI5A4|K{_hvqpCnh$+8;| z{uy4jJFZP+o3wM+B|MUX3{7J#d;pO_}n&`KUq` zgRW0X5zQqwwr2gB{f%3N$&4f5N12Huim5|y#sRlU&My^$KA54>Q4xB;_(IGBKKpCk zGWLELgELbE(Q+0w1ITN?N}CQj-IAggmJ7>sdcfMzJtX>r7tH#@POucf6nCzr5joga zTKxA)nO;G^f)%hdWVJK=;*)|slF)Z}A6T8Rklw>8F}CwP>&&?s`L>;RO|6(Hqe&@_ z55yrB*C2pK&Hj&=?pSMfnuHHwx#eO%ouH@cUX^UI8=aZ5>{?pHl3&MkcRC*?gi;+E zQ7bo-MZpKqK&HS4~>CKAHNFRtQ@fyUWflbD>v=i2>!B{Z71sWq3x zD1pAi8>Y!geA7>gZo{1TEmkuZaC+tl^NPFVHqKH4_EgCZA1yy&+;bRuOX1JpTgB8q zyQh{n$rBI7^c>yT#Beoo4VTl_6Ph& z(n2Bj$4KZZZqDbJO@%dTCR4U;DY1(%po@+lyNSe{F}soKTS;X)47UA9%Z%W2Co#cW zchXZ535i~?IDLr8u69EQ)WBO@4p2t}eXuKtb7_;AWI>a1<9h^SiOapnV#gn02E{=B zlVHyvAq^^!~k>Vx%%>tNriemloDOE*BO37FYmFMh$8=t>%;91 zOs=u;)+SSj2UaJTFAQ)y=J^y|Ev@rD8!rD~BZZp9<=wcw%JN4WFg}t{huPnDTsk$V zsO`Q;m@}-?+J4x133|zPbMt93Vst>}#au#!IQUEp!ek}I+}%l>O?b?hFqnB{6D*vmZrqQ120j%Oz;#bOv?BN6QjWdJ zx82j${FiTo=5|;&yfN&N*d-E?a_->AyMM2qyiM{`R!NYUO_quQlJJq6uTNY&vXbG{7LC^F)e2a&%uBXK=q+_S}(n$4i7NQVx@xc zOMzM+7E@LcEQi{fpTE6k7ysGf{kT0XAWAK&F9Uvg3*Y|7t@&x2WAs{vCIVOg@Nwo5 zT;gu8=rt~Qj;mPQDp;QVYyJ0&1odX)p`uB82t_C|;?pO;>z4u7)xQH@q8QrNor)#O zs6cTv`OEfeeBiHU$0$?MBGDee#PaRmErev#$n0uYh6;(n>+_!(_2Ht_>0>b6Fv&Rs 
zFs~0Qvd3^Mlc02*Bz4_#FGyTJQrva_prSbY%)~ikP&S3YSmsd*jRb`@^M|u%G02s* zi~c6wq?AX79ns|ar<$2XytQWh8388u9QePVuVHQ6$ltyIwm{%V(N(tv3H4gXaNh4V z`eL$Fg`|WVFdoY;m3si^?|J67I>hU z#W*u5vp(G7YrAYLJIzf_>_`8#z`r2JTHr~6gjjGSd5&n0`kO)>29m8zJ_NSy>RUlv--!}IFW z#SdHGz|0E0skG^oF{{dW;X3e9#0JU)qXj z7DZbolBTPCd??QNW~<{;?I{X(^ZK*XJ$xvm`rsd-G;@%)FYCejB)@mt>|*YsCV|zz zDFEXelBLjKSHfZ?;hnI?^fLY9QOh8`2OIiZ2-Sb% zkIm$oaZ7v?N#5cTQR?wHq@zHE)84M z7zJ~ts@qCFoTR$gz8U`g0H0=g=HJk+lx|gSf&0SBN#Ac!BCdbp!4)u|$`0cj`q|cn zk7Kmo=N#tCxNY~xKfJ%#T*fYY4J3vo29DWX+F)z~b;e1gp7q*!+ei6k{b|&L`^B`< zbaQeu9mId{kBS~8*x9Lx2WLRXzg@4}V;^=)1Xn7ZYZc`E(GX3IDGVE(lV0D%`AaX3 z`vXFg=OCLpl(xURE!p7Yb}M6ZM(SKAjCJ~qce;!EycL5T?Iif1>q8$pRPDFr`GDa6 zW`StrBK+g4gXoi3R?K;@V=f$_s|<=gfwf(1G!Ji%G}Y!tm>E@Xa@buK*x+0(u9v8K zvoX#%=2*4zf;h>p^_8NXbGRj%j?c++3n7Z_J0xRf50UKi13N{PbC7sNexRDH2TUzZ zmPRDRQ>QBOYf!ZW)D~bFUU1*1W~nY6@HOaFFEGswhy`A^w?n99{@(k-HcwXRbpF*A zE&g2)z#PXfE8;kQWhyfC*MCTbC5rriG+lQ%)otAW9YR~rh=42|<$?5?L zsLDy7!x@Y{+~p<#2=ZQ2)P)eVHwiJj3DNVlN9is-6Nn)>55NSg3gT~rGYd_olXWIz z`mC4lJz^{e9bF(tcX&}>Znj>O*7WNolKa5Rw71=(c5^48 zES9OpA0PovFX+r@pW*3xd4=;FN^kM0mB88k)#qvyaPx%kvwOCFIWgt7bq2EjrVbUa zAb0-2RM6J}JLvHCf8TLRf-$^2e&~w%J~#fNe}x|lMoi;rNrTJ5T2f5tVpsr)EU0jPcbavOKCXep^Tg)jM>R-02x>h#hIS(VIk;7=2gHaWG7)ly?Qb++a zd+#beN(5HHB;ZxUXru-TBo(tgGU!xTrzn$K()lGBac>_r59=IqrghoyWqRjteGG@5 zILVc^Zhj2jX(^4UA;#{%zB2$eUF*&WsGiuZ`G-<=X4GKref5{I9wB+%w%)v|^wrKU z1s52wGGztozA9`{$DgF_gT+llgR;2MdNiFZ(R9z519b4|k)_syoF9xmpDauJ0^(E% zWqgh(i=AtN$1ywWY|miEGCu+qJqFbdY)c=8Swe&mG{+8=1ri2e>LFexSV3>>b%?FIG!7-ILYQ)E4)8W zl^?QCP*>hvi7UARP#GUrN77aWzA2N6!XywnlQMRX#iDFbW@3Keg&bWeNn&WPcV-7? 
zguYz0^K+Z!-E#TNM|U1dQ_t6ppNvNdr7K7sEy@^E+jX>p44{di1>;QnT@ENOmKP#@ zB0i{ily1oF5nIBcTA4B}?}&@!gyn8O3CuRf=O2{l!j)6P{?%!78+RcWa!h^}T9zo5 zDy}D$`W!uUYx4U0q(l-NeIbhY5UwUtw_#4S|vhO0qb7+oS{e}*_NY?LY4hN14 zKu2^*0ZOMT#&P$oo4MSL@?XaI`kGC{e%RAvn*RF8NEh-Wrh3^@9TeC}EQ8BH@L$>fN22~@GJO#ljs;bLN zJVXyzQODRRChL7Hv7KM@Pb2FR7{J36N^Mh~AoW*wF z_TzV%@4)J;)jMS_p4!mjfJ~DjnA?*4${3k{jb>8@)QO;DoRlmhI8Dijo-VT(%@-&u zKRY6h(r{cI8vX(svAqZ?g*`}aT@>Sp@u9x7VH-#>FWEL6{cIzOL>v0M<;AV0+$#to~>fj{Dr0rl){UY+GmlaQ-Qmg=&-uh zvZei{63d!Po$q^mpj<+v<;lk}(4Lw)!O;2#3%)EJdsOeM9E5Q}p5q|hZ~x{seoBAHZs{naN%hXZ+YbiL{?^hDlZKZ+HW|u08e(jc4g;06C{vmN?ELCwpv+DO$eH-! z66A1@BI9AF)0v@Xsyw{bx0h}L27ZsVuTV1$UEC(PYGDt0!F{Bj=ui*9vV(ne9J8i5 z1qwrt2|UskLPZ~qyas|rO`bGvD9k5j$up8RJywD>lpMzpa1n4?Z^eH*fx>$3?;@b}e%+EhHe|_RP>pOlc!;=#vxKPw&e<|hIRyF>jC>aC1;n`6rPe;0?T&7u)>9wKJBn5 zRHhRyt>mtq+8 z8Q~jVYX>|+3(*mzh51qahl^?@9KQ{_GP^nsUyhq+y_-}e8GgI;1?WmXkPFzWtPoT2 z)ELh`(-qjySviYu8rs99V~ECk+)m88D26_!q*43(y8}Y|x&UB$IqeD!8$X8&DwaG+ zN+{xUIfEg$_>Vd5CD7`;qpvuRi3N~RNMPpiOd6XJeHu*a=o&BaY)WKdlV6-7Ol-l@heD)Q{>AP`$H+@rt7=NoF&VX zJ&Jq+ymkWL2W)>#O?3?SiyBppCjP{}aHRwj)sM8lnm_I`RKZd3)o=a+yrITK-c=S&Rs1mDbS<_} ziTK7*V^VrsZ7F9(c>nH;Qy*Fb;(92d5}N`9b5Brk?-GR7pcC4Z3irmjf%CU%UFJde zv{JrazEqEgmNcM>_KdEStrg+r2jvZqyCI&~@=fm!fg%^BPj`EeRjsRqEbm~J?w^fK z3RG`=5Qu#Txvh#e`av2d{Vu#uHL~Ny<2j&%9q2deMSEb>=o#jji1pb7{5+}3v zyL?qVCL$b;t}ARmH*$c=jSPlL7iA#!R_P?g8kh$j%h=>ZT6bvVtSW|~I-kF|6!m%a z9xp6z!gZhP2_0S|PGK}&$4(FfuBqpx5Q-IkBa6;Qfcu+G<_~*}`@0O}P>Ify`QNB} z*qIar_2iORCe_$9x@+C+JP88Q51%yjx9wCwen<91btCgW(^2OL+F*>LZ-q@d(~zw! 
z5mQ(#Oy?si=LfVG==M*wJwimS_&mxJkJU`RNCnlU#uNO@le~{tX2{DMe2`*PANqjF zZeO&?9Ax8fH8uasX*xP8jK#`i+W>XHue{)*O=cmXiNVxhsR8^ zd0!0(R6+l`oytz}tGj+u?z~?bGKozYM7t`{k7xHDT2rWQ{nDxPq%mCu3C7Tfmde-R zWMEy>(+cAw1@|y&jXfBG+7YsNN&FmZ+&;oCkiGdzoQQ3F`CubjkZOgYoeQg0u3^koc5gcPs!s}9?NeVo{m4a2G>&WnHnW;x&nH{Wvt4Ts$-hNb}$=IgdHm@AFq4L zzW3zbtt|8_MbKQsj>*Gli3|JZJ@z*vuGFZE$ri5p6zIlj;E|V!8cE37ueZy#ppN7x zXhM=M)px-*7IrB&C-GBtTk0G)4Gs78jD|G>e2ajz-%|P_ND<@@J(=ww$$WPK_TwS80rg8ltu&UuvCJ}oyZ7v>f)5!k*N)r~FRhJdc* zHRX2CKeKyC--xJtq>%UIXH{?+%Mv12R60_@Qgj$vi}Fq9heWs596#s|*hsTzniI7a zFYHqReSeaew9<#z=HE%0vp=&6SGX&IGe{eEe=^}TgkCZC zdMtcZIxGy`{W&ME)7g)AT6+6Vd*mow^Guxq(N$}8@v_kVzR;1@qehd}4u9)&fd5gk zadIaI@A(5Q=UhyblLsh2cH|uCg@t39T1EJkbSlcjEAM8wE8w8(oEg-AGty31@;O=t z+qV)M`UtQn1kPG7wCSF8W9n4&cl%n6Ocf+hY?j(`Mn9BZxY3(S_I5IR<2^oo65MU& zETma;J*f&@?tuMGkEbMQL}cY@Po~A_K1v14io8dO2N&u4&TE~_!MEy{iTUX&IR#C5 zj@PQH&TAj#BZiq6?sMA*rynWCLa^*7f|?-T&J`sW^M0J6Hk#BC;()3~n!v4YgpddVbqg>zcmRLnn!|}>mE%nsh{q}Q@!^UT)hNQZ zH^+36Z3@eSJ7SjAVs=-&&(TrVv}wp;ue%I0;@EvLf)18;9wh!nPvnSDzY@o7nC{KK zCu}g&)P<{3-NBBUSoN9SlL;KV7yF}Rq7?SkL%e!GPz}?g^UilS$@izA%5DGl@nNUKtN}i_Wta%dJSF*krs8Xq*C+ zTZ)WFHnF=d`oN4NOEON1D)zsY-4N+>bJN2F+|LUvkF8{*+|}D*aSVdwLr9w*_Ao{C z8i(_*^6gYGGJ2f8LTsX?$0K;+ZXEwa8xCrus94~yigDP)=D;0OBJ(!1PQ;Y|Sh#$1 z`$-A{W!2+Q>!PN(B^gu_ik4htun(;uNr(z29f5{F#~=$IXgQP{2sh zGy7|Q9JRT;w(+@g?lU!A*(YVww~V+XvgXkz7h2~QcJ=$UPpDE=G{VO=xIX&ctc{A$ z;Cu>Atxh+r?JO2|rntV`pD7KpHl3)5n+s^2zJTupMaet&STBWU#_e<8!cq33bK6~1 zCc+$Txw!Z*lLpTO=LT6A(l(h~o8+0a3ZaA-2@FQ>Qf785JCd)stxG5$i9V*J)jalb zV)UoS=Zkv%lFST%arS}_nyfdN4;Wv-5tEn~+89*XkM7|*%Xho?mX2L@YameS#K_zt zu~SyFD%JG3Qg;I1lMA_Aeo*>8w$A(u`YlcKF(dmi_3vylz-|oz@ z#JTV&19ajBO{mz0Bs|{d(*A{cl=1`a%lLc~cW=mUp5xHOLe2_->Y2=3P*LKhc!LZU zof!1v+R@vJ1>Zl~M0q3YQg~=UnId6)Z*0F`;bQ_V&oTm~N&zrM+WtT}_QVhurnZ>T z?+2#v=HhcJg8~Yd;-X&M6E)0iOYk;uiI6+_vG-YB($3v?e_Kj}o*Pe86yu}L50)C@ zu*>DP$Gtr47^7VYI}_;ZWu{+Gu6N`fVwlo%Q{QQ}fuzkp8@y=*s~P8XFN@DfFQJZv zp9zlmwq?&{ndyUJcc}Ug#my*S{H2i(`W@I8O``!XN6@=mGLx)&G~{X~h?&Wh2lVo4 
zX}$w;SDo)|hz~oQ{_|xH<-4)i9g9~}TBqau3MM5UUf>eaI9)O^a+}nfKE(n59>()9 z56H*BSoQS+9Hz;s^(P=zkHmQEJp4?aBkZ!HoKWaY{11&TT_#zat179rtXY2H)}Y#| zZYQsuPks#7(&hc0bcL6^YLq_yaSvm!mPBsgl_!8m6*-CyAr9#(W}k$-A$|d1967eH zayP~Cj2a};EfD-|M8L->dvZA^3wCOTTBC0j^k?Ul;|5p0wv()VX0Y=F#=@_@j@NPy?LP})@|2g*qU@_=YoU*3b^E(SdvUuf1Zy1B?DY)Ho&4`x?|iY}6=UQ?he%nd) zY1UAA!L+NFlW9Bf1w7V$dy7QZQ~JoGzbLEwxR$JUVtm~mi$xl^!D1F23#_|uqe-#v@*G>n>oWmzzM+D&rx_>AJ&2T!7R83+D zo^%B}eAgQT?(J(Py6R;8Yik#i+)5dP?!U3;;Mw55*7sm5@+dAg5XmHQGRPiK^T5^p zn^01|jrs-LCVyXmH`6@P$Wh>tf2|oF_%=oRmej!;r8==btb0O{a9u9>t#cz(2;`ZmNzQX{Fnn9~RL zmtZPr{|$)TeFEyisK@Kv-quh4>JFqFbvPRjNAiPzZ?P6BL@3GWoKV|czLH6hE_r3b zO7+%-N1GBokkTlIcJzCpS$ofvhAio~#iBCa{ z$Yo@{4z^-(PwbV04zeQ!J~i`&Uxe}b#8C0i!=>|<4NUbmH$8roC(8rOMKT!hjTTCp za>>j4enk_!`J7viDeUTlWu1lgb%%v`)1=ZE$OMlP=*(T&+oFb=Y9BKNal}YHZEu7G zYvyf~^!`xz5oL3I!l|=KJ}3AiOQ}?qM$czSJ{Bc^J#mZFN$xnErYhgIV<`&7bWLWum!G54*|+4;;=u*IigTa?Jit%T;wA zz$W`W_q8d{e13M&e=l8ep&|21Zd)MPy!(lQft1b9K^C2!c8r10o$oPR+fe0s%D{yf za@b6!_{c4u(`0rWR=@tO%0_OxgT(3UFy69mz6xeK)b2#0YU?s&K6sH|dJp|1PvnZ| zHAc5ja2{_Ltw)2hn+s2dZ$ZXBooNYgo*kcvfd4|kOXsh)i7C7FpGb`Ir|yKnTszi` z`+N?53Xdj0HnQf8B@bH@BMFbapp-I@=1VZg%of5Wm2su!PFfUhQptKuwmSXkMs!(( zz3qhEZv0HR?hi_&?}LN#rq^PR@>hCnslIPw*~dp83Sw)cYr%ZWB8V-B21})Wd;6C9 zTyR1{ozeYBOpJ;l9M4FFXME86KxtGBH@RrhJPeJ{wdIoEQ3^uH-{CK5v~#gu=MQ3q z3}cb&j6_==U26XH%iN&qQrj)qvc(w4>^4I8Pjn?{s6&`84*0k)9S|NU|8~;vXXXQ~ zO45z%NnfS#z)I82GJVp@+^AaN=mu?Zocv zIoO>(BynNtU}yY|&ef_NXuTN29!DEx7x3dfA(l2Z3+c~UlOlv)?sxuXdCrZ2heq zM4JsqX#bR>U5_CIP`r5>nMcEBVGb+*TMEs<*9LjFLLXGDj5XswIdHFX;|GB zV}w0M!`)f0r~ta}=2pU4MC5==_66?Xm|_hCJnoU^Zl* z6WvdUjs=1oK{m>leW}DORSIs3m*iU6aaFcJ)r~5OcIj*)`tsh>!DeD!w6{RyHD+(< z1@qaN+VLq_c8|=;5JN4*$~MK@R)Wts@#0OLjtu%pml#f zO3%hFg6QhaJ4&dhSd}rT5k|$Ndz2rMqDMB7oS#u3AOb6S!?+!SYb3M3k;em2Nd@-x z@`qvPe|@NU6vL!h$pOB))`4kFvd}9d8Vul5+=K>N*!BDja>y`NN_EPQpe!|12b@o6 zKXx}kjrgEM&cisWiOw(VT~&OI#l!j97kS-!3n)6bs?{drf83sl)H!ll|Eb;*Uvc-= zzB8$WQw$KyN7;0ks$prepXXoqn|nsNhk>n7XzXJm+>p?ESLyZMNsI|zB-4L)+GiMk 
zu?DN>tp+Z906(0$s?@0Zy|JO=KI}Fh^{eFP>?uzlaHSr!T^52-MJZV6!s3&R_z+aM zvx$hQJ2OQf5+G>D(q1{i#1)n#SGgIh1iGJUW>nR0SIB6nCab+`7~^fbcpM2-<%RyX zt*6EF91QQ_uv>qxdJf7xk16~BCL=~snEA)Scmxd-)lWr#?)A}Ov#Cw_5ZlcNbrMG- z{>ihP&2qizm+b1fEl9u}bX%1wuqb<*+4159DVV@+S(+R*>_G(~B)zzyffHA4xCJ(j--6j$D?HhV&3ENT3R6_ufAj61C`bvr4CD^gl6OpZcE)aO zAou({=ZxE5r!>pgn|C}i4OPI5FEfIY?Q&0x(tpXv$>Dp01i?j@m=i@ncqDSJN*F3} zuSy7bzliqrAAWoH({Jxj%BPoA%DSp!3p$s|?JOs<)AXb_0a#MXEm1~FtzbQJlDFwd zPkskWYwvohUn*8p?Ld#Q8~q^3LAhG7C2yxkWg}BzV8{-9Ul@+J<0b-gn-qQDy}nlN z?JOI8QegI!RT`Psk+XnR?LJ9Gx|9Kc`jWL@-EE9ky2^K~fYiqlk6u3*i5S;@l&UlM z7L@YXdI}W-rT1k+gZn4$hs{KrTZiG{O4I1z)eMXKTn_1ZU<<^!tTBx)xOwPhn7ST{ z8b1x}_TOB(Le&z0x^`9{78+jvSG|V+V21|^L?Z5!%Ok0XE!==4PO)l_*XgBf-D!r|r~^wM^J zY?H%#VQNB$!yh-5M%QOQt&a0)H*3n$UbW9w6co6?Z2$JTk> zecH;ltFjeYb93LgWUr1-Dqi2X^QVVYNDDdRmTvV*$e&E|4CuTCf$`X6sAlQUdT=Q% zWR&Lh`O(KmYM7%O9r=Wp0#JRm$?-EtoI@yVH<5{&FTIV2b8mr-AT=vlf52Lr z6P`Nfap`*<_8Zx1sX7aJsumfifVrl5M;tTsfakC+=86(-(qMB@8H%9{!D9|O_j|zl zkFF-(=hkL>^4WKsI^>iL+MC9iG z{Ku{KBE)X4Df?EVC_cI$Te1*dJ=E#jFvgWbF!iD9AFhAOc&EqIWJ+}T#_HJ#fIeuu zDe?2qbEpwAMW5mcggw)z1m8;&oty`e)$$5^YaZ#VDtk~xNp2Q;H#@CaW%NkB$5i&} z0zQgXQwUw$lM(~n7r^|2{f@M*aym^d-+oo8M6j#j{s~}(8ZEx@Wb0MiltMjfzpi@r zZD(ch0{l4$JeGnJBa40&*1wyA%ruVFD&C!)eJ%<7s2d4Z?TrtyC63;`TfL6?#<0|h z_wn3M?yBlX+2jOq zI3MY4&uH-Q!=|_iQR*liEQ*8dS$hqN@~dXsJE{3lR#T*jQe>+LSY)}elp$Vwj1kqH z2IRjP5MeD|=iYpMeAJAa^j1H;nw{!+2D!%AU%Y<5TJY64%s#oGANznP@B`ZkJZnW> zR=G4{9iTkh&o}u7N^Z+e3#}v=dOeHZ$U!=L5^X6J7_&lkgrB9OFDDPqGIoJahwsgjuK|9Sn z+!~^wW1yY7W3HS@)(e|z3p~L|(*bXDJvs}LB>R;Pb`jR(S`n2_Drbk@GjhH5#LTh* z^rNPAM>9RvlRK4l$J|K+05k>t4${V++Q?NSb0cE6W`CPArI+vs5!&o0QjE~zX*7P4(4rLhvgBA_y z7feij7ledf0SUO#@wP+b^~Cv$KGx2gYsw8`dto0;?~;UdZ+w;BnfL!qT?o`VKR$8) z_@rTs1spi&kK}gNWkDm}$(%$}8;O)s7$t)0RhY+|cEG-_Tt*%C=Qt4j$9Cz5l8OZe zd$uYIzG{m^l^73>2At7hrD~Bx{<=-ZnP;CB+#8QjUDJvlNt8ualqhD92e~&+5e*q} zkw}8G;_8>P_o?nAYVeP1aG@xNYmYEPCG)za)O2B;*MyXc1aqu^EM!^_R9o=ty+?}s zn%pgLA3M%m=D*+meWgKYmv^m`vrSy6@Mnb5>gy`LF#{uSJzndw3$jNe8T^nzM567v 
zdpkG89djww!{y6wo=GH0HC%sB(_6s=N|O42y)rEj?f`nFwfsKpz-%)$-(w?N-lln0 zvFZYoAmr>s8Ms1HnUMXJZj04|9J$BFes(SyhDgH>1SKLI%T8jR83~yTN}1hV5xb(3 zH~~D0rgpO>jpZ@}dQ;?T-rR(grYT&xYqP>DU?Q=tr);as0U!S-`ItYHKak9Qe_Z(M zWLk^_uPUI&c4|u1nUaJTFSXOKu_7lq{0xmvi6k!QlJP21xd45n{pS;dW*>I8k^Sy3 zyMSU~S;Z0Hs!2TL^7^|0=EGpSQ&i080$RCTPy$B;g!jxN2XFHTx%w4RZ`w=;iz840 zb$S$_bY(=%J$(2~WA#uJa$Md3KuD^?9{wL3ib_YXE-*UX<165_=`NTl44`qz5a^0{Gx(0v_AWXkSm+B)ivi$-196a^L; zUo+*9z@3hD(oc(tY7i8EzTk&n%-PpfS6ZpHYKiANn2h+forI(r5+uLeTwDLab{Vee zL^x1s&2@^sd!oMj?0=Q!xx#2gen(WbK-JzYpamJlvY}$h+oAp|Lf)9xw@lo@jmF0r zEC~YMBWr~s-RQ4<1qdj0nN!By=i#8n6_aXJSxY0A;g ziMZ%kY$tb~PM1{TLd7jK1q{v0HZC@-tA;CUPffluxrZJ~_rd_BE^xJmE;}J~6;P`o z#?jdK5cAQIR4%Qv!f*%u+yEDU=a(=bXLI+#jE$sy)6gBy7bK4nX6MCs&!8N|)t6NG zyYpG8?j8pef~PM)Z-#h=qVY&m+Sw!0JAIzf62W6}j(M%hn9v1jCdnNP3y>DhqcQ__ zIxarxDxg$HkQS52%Tg`AsNpDSr}F~f^)|9E7E1o4Z&%#w&7{ggI?UA1<0wwx!HTf@ z;~TPm?8ec?KYm%g8!HucvXIVbw6_dhPw zG|}P3(y}O#MFL-BD?Zf9d49TsGd)F#CD5YKR9WTKrzQXirgMPBr}PVSwJ{&4WiMy8 zoU6YM&c}AJ6)ed9(JRDeKotPtD%`tHxKFk~$if_s&0C<~p0vH!c)W5bGaLKVljl!v zL!Ivg8xhF__DP9VPN*0tS9!oW3GK6k$PwqzZV1LwQKl1dDoleH`Gm-9c#S(R zPKEWkDX3iS0<8IuZ#J6+`adafS@@YXkxS-kC11~wAoEU7z;kt93dsK?&f@JW7)-Q4 zuATw^Z*yY2yOPLBM_T4158u_%wP?AtC1HYFk+v^uG%FH_-$VGe#_w?FfkKorY9BUh zXt(ebnBd-CvwmBL{TRye0x7(eK2Lj#yHkklaswi;puoB9T0gB|o9e{L=2x$Z6ZEo} zv%a}0-(7x{SF@NRXU{}g9#nd0g_IwN($Rq4WwBix4u zO!teqYeLkQ6CaRwL8ygiKB&T)>(*z-a_2o^OkKa|9}XUJ%D?j8Dhbk}?BFBnHxMiP z_=q4!{3H+hZ}Mer3Nwzf@bwQkLa6-Z_g{DOUnWEtJo7qD7B~rCR38XPQaoMPG|YmH z+lCDAVoM{oXE0&nUxqOj`UCjlc2tGGk&hLpJL%sr1htHEz#np2+-^C;x50WV=0J%W z7fBuESAKVt1~MdN4h(Y9@Ep2@U&X1QuT|)=cN{VO7CXamR_x!7kOtGp49(h?q|;D1c_vs*}V}fDDgpJfOY@&z1YkN z!-_Mg@!?r6u@MwvxWZLHA9kZH2La@F3nkN}X@8hs&8!(%Y{3lAoolTB#>uoTf_FWd z${3}UR!)ppv$OGkH!(X$Yw{q^E=9aswh?anA#bhARXgNM?4uNev^a8W649I~8~x3& zNv%mVKS<@k^sDs>{e7;~U5~F9$zl~cS(b1fw!=C$a#=@T*^eM9yvmi7H8;@af##(~QD%3~OlUvk(SL`j zB?Kldbz$1rzk_Gc1p?axQ_Y6k<^CnEGJED4@o7?5*-(ZrZS(GXL%7NKdEdFUuY9{~ z#R9p8MnJu}FVDLX9$GDZd2iq01#N*af`m*K_=u6e3sqXnqh*JK1z=4b38k@z;-MR 
z-T4vAKygA+rw;rI?DSZLZ+h+L5rVY4+iCBPUYKWt%F8^3lPkQletD-9K#mtDhQ1BT zd@K)eyWKjr1&Q7pgxRE6uu*{U>H#>Kj`)08cONEk15o9k zP&CvKtSLMNw*P0y3DG(*XDhfRUW~4f`;%Y!iM%|hDO|%7bcb*h`bx^v;D!I@s)?u> zCl451t)i+PrI7HzuT1{H+RlH5y9#=3-=CL-R$f~B9mhb2l~f%>!$7I?2$|$MLkVDC ztwcBJhLNz6AIa}2zeba3dGH!d(PcDE0bTD$BWs2pCVrth?3#z!Oq^nyv9mag7g6`qD~@u^MMm|bXRD*bckapc zp*LQ>uwk`f`{Z0}DUyUs0n1CjJimucT(L{7Dx?&Jq6|Y7YoV!sl@|KMF#gx)QyU?E zj?~wb`2^XAjZYq-T4-eY(c9B!)Q|ME7dKnH7=rprR2#x#umYmdWb)9|YFEpI5AA=Tn5nf@O< zt5=F&SpyajbM$Ix=kc9M;SUI*jsohM=l?SOoOVIW;Th7(CO;zLo7*%Jh;B-o;5c1U zQhPw(^;s(yYXM>jr3RhGXVzgS;N`%6pgkkBFlFqyjwK?M4`!x9y1JR~1xdf%QD<39 zSa5q7yy#ll?#@b41xPB*64xgUQ-N=L>sW1IE(l?ILy$*aQESZKv9YDM7fnWL=}@1a zxr#a&j`W4&0h*M{e(=tY0ITEtzRk2CPB~Z$e`-)mj04cwW5&(>@{{0#)9RKeIe1NB^C?Z#bjf7qYCzsB<^f&DF#KTl(W#{y5WK}&o~k5- z9bILZhxg1uuMnlHKpJDLm45$%bOONGbcj^W9&Sp2C-G?ft7picZK!G%`JGE$OpV0Y z43A<8MNYB=*!?nSM!cT90A4$aCzT^V3esL)-gw%{8#&~8Z&$6URSIJgllmW`K#CaJ zztx_D#Mt~3keTe8hnY{Vg;pbPZj+mbq(@Jl+l@Ea*oC^WPVdP6hTl7Ez{_xmZws0C zzaa)y5+A;mVZ$y20(fb{;QWhO{mt9JgQ=W!Ertx%x~vFP9G9V{kG=B#CUN$L`bsNT zYEBVz^H(wqTBGf(D(~HDQ+nD z(Ky`bZBqTAB|NUmd~&1NKc8p6<`n=G@V{=v_TL-sZ59?ssU;_DDoOEQh= zb$4u0Q;%D8OEz>d55%uk0>wIIeflsfOh@IcTeF79kqmm`&{&U3c@IkPxb{N!xku}b zBB!6GRH=vMpR)PtDm$I>kCHw}CMnHN7xvHCx-a_b>E1ZGbN}}-;)J3S~1@>?dB3Jtb z!Gb0(;U>6icZ7I$7cY&NC;SB`v1)j{^9GgJ5g3wPCX@0G_N)GEoO(z4Whgwe>~}Ce zz020ykNaz1!{zNM)85;c_tmOk!}EoJ3q$pvL-4@B031813{>(9wff9QdZJTmBv#&y z2xgF^difA$ld<>dqINYz_V&PG7z+3+qISuZK;!kymP1VB5B!c(ZrFrDIyIRMk&5au zWjWCB#;~S0u|I!9PCQRo*(&Ee*ax0n+7!<11^AZ&%aE813JKKrd-G~)835hlhl+bHXJB!xRVDw0> zZvT?}3e8K76qKjYM>Qv*arpKLllGq0kSd|~2It?}@k{K7QM(CF^t~4!SnXW1&f@CBtQ1@+K~+g^ z_@02wC+)+lb2{d~Ki9kQ-c%K>yzYBKYcPC{;b*CFCJge%n7R9T`BAI$L)Lb)5AdxP(?ZnRXkX3BF24UR}gKy=QU;;mJ(jJy@$M0-ny=_HKxp zPS_A674 zu@B?pRfp>n?GI@pXq>GlGcrhE%<)hC^^{W3PKd1WaU=Dot*GrXveu|cO|9N&H$sC= z#MUPUVK(QP)e8jeFtk~dd1Q*=cul?K@rh-?1J97fus=F!x(WSUp|B4t-k|}i_^-WCv$fN2(W~EDkX_kmnH~6B zCr_c0)z`o*45!)}}S(%U8XcG0D4sI}8@HPSoDNRPEFq}LtN 
z@9gsIbh2?QT4_~!@4h`J2Ba`Y%-+*(7tFHADVAwonWd-wQ2`C+Fr2q=uO@F*WcYOVk_y>q%mh*jz>Nqeb`&=xn zMzRx254clG3*fzs*Pk`s%4Z%hBFyfibG6$w;?+(-szy%|%YR99Sw$o)C7c!z54xbd zAxbUBb@a0rgY~$uIaqY`bBB|*1F3Jx`R-P#*t@fSk9lIiryT=lP^9rZ@J8ISM|17?>^#|{69qC)$OwcIV!s-`joT$H!6$6&{3v9tSbnS=%!xZUN zEk>f2)~nie@xLs_`QZoQ29)}H>XrPF6l_3jYxIcmGv1TVY8pldSQiN+!B>(MGqQKS z0(JF5?7EK9vQn@%3`9o9s!VY0oFs64wDkBGxBTK{>LyxmUJgCc9|`Dqksar z>$JWi!XAN=s_WS;(LtiY+m*?EPox{*e`_bs9QgoR^^hv5pr8qdWd{H3i+z~lFstqN zX!3d#HhsZiSwjFw-3x|JQ5f;uFT`($6HHz1kkklZ;|5Fhg3fQl`32bb=-M_;JFB8) z>3Lig$io6~?SAECnzlh7C=168-8q9P*DL=%JogDoe3*RIgUw#SolwIjypbCfaN#@+ zh>=F9{D`uLu?EAv@X3A9{MywG8Y23I&TZPSkVklFl*UW*_d=DSFOH+|sqPL=JZ2B`W9-x0@@{?s8;6BOTLiSDL(K9P=aiW&X+W) zc^tao+%8=kQjhpa70^RcpJUp(OoeCA#} z?Li!g`y8~}5D){NqL@DTX)BOUH9{+poc+YVu^+#pN6E1DSeH6HsObMgW9gA}s|58) z&+r%Q9NVIvbx;4P*k#ZFoG0kD=H>AMQ$HHP=4ndbr{}Yfr8GyvxV_Q#@8{qlh1AEG zBavb{WU9!`7%+L7CBHh5md~2k*P^&sJ(?LV)l81wawTzfQl7r6ac5#XSa=IpaGXaRAr`xabJ4+JMT7$oGAEwA;*jwgJGLF_Un3{W5|_ zFGIBd<>01Fz)&G!zX@zuB@&rkbq@iX>JkzV5FM2y1L|*d^$OFYaNH4DSZndxi}(aZ zOer(iZ2#es-SM51p`s{K?FfhUhf%`*P70ktuZ)TDlaR}lLGhzpA|{C%lTKcQ*@&C3 z&*xr6I9s{m;6~%s2la1@ezL#mw(ogAn)6c`<*6^~blvV>eM3%zE)>b`1Zh$x8UC`! 
z&;xeWmE**yySV(}Np5rvtWUJrx6<8d?R1bTX=UI7!o$f{ra8tjoF=^19o5!&!x9+9 zH~wZ4A-s3$wkD<@7$EQ|+6HSuVCb}w2lU66e(|=6#+4edu8Pm`uvB$znmUN552-B1 zR4<6FJms+!3A(yhKL=RUot&=K)SRvbLIS8GHy^GCZ;Ah(NviB2g1b`%5q2`aM0$z~@Q@jBaDEnL^Sm zB^TcToYQ9ifS>9teRLz}?u=X}!MK_yi`>LRTO7i((t1DuqWNWY7kcCMsk~U?rERw1 zuJ1go`6S))B(F8O;Cn_pyT_SSS1k4)F2&gwQ*@+cC+}x6I=v(6P$YFWN)dW#wexL( zC3lk&!eTvPkjrggfL@3Td8a@4S#r7iTM7QT2Bf($c~6m;pF7e3W>dN2*;GGqgx9rU zt2ftoUR|Sfjcj(%g?NMT|7B+w^qV&sHyHz-sZv^9Wd0krZQpIJfO+>;GEcHR>dCm0 z&H13620IW(?;d01iIWWh-h{unj`0k)btzE5nmThkR<9opJ?$*`+SHNv?J=stZa4GR zu&%IHRKL&VVTgjivz*H`%pK86b~XYz5m-awQX+zxfaR-LKr#Olv;@YC)LqCF)sNN8 z+Ku-rIBPxYK)gg=FAE)*#L)Ev{i>r0JAT?5TbDC0O~&gPTs-p0X*$FE!C-dW z%FY*ZP(=9EA8u{}E9uX?_{Mv<@NnRAWE=NE0TV?Ae!WbMD%=MBSGEyPy?}fwck#dn zGemMPI9mB5lJUmyvGLw*;7Sc@m2`a`vD+NtSq3jpatqS)(2^tLm+oR!sUEYU^|Xjj z7X#3W{#|DX$w%!Jb8OE!Gf1Y0A`Ur(bn0}pHgU7H0di(CJ6G!P_w!}?=n3b>;y2LB9hW`dq*}3nBy&~NPyuQd$mpM|jf3A4tCM9V6R`Gev zOLXVLy`X#W@-l0kS!D}Ht}$q8;WtzJQF!L$mh`DSfpJ8Ad4QLHs}hUB-Y4f91q4!xjeSA^F&s;t^0NOi{ z$li33f+{xw9w2vCs*a>pLWh{T2fq9-*jG8;@n~CML%94=0q`{(@lGN+|3lMvz*GJI z-@mVGkBE#?sO%C^ly$F4$P7`0O0rkk>s~2}Y?3W|%ZjY4kYw+@$-c%luRH!PpYQMg zc=X85?LO~wpXWJJlKPf0;b1$Cs=4{}gvopvWTW>V;y(J;-+XFWaoD$d3_g)xF1WwP zkx*{E=5ZI_FU!ZA#@$X1c#u5oWa-II9y4`l7AD{bbi5<^nmNk$_3#bEnQx zfh^&_1--jVLa;j`;pmsGpgD{m82YSH&v!yC1O&e znRlzIMy0h43QKm?*Q4DNm!Xa^G071lMV_{YU*euAA+bj>OJ5OJK18_gOl2eIOZ|VF zhw$2;7Q=vsXN}OL`7_X*Rr@=CN-l+w0r{OO)yS$DTdcPHXp~3UZD0q&iUEMqpA4IS zQU758_ZgNG_>=)utnI?-<0u*@z8k*l1;0kvfco};PxAE_@jB!o^GO~Euz_&_GSEJ& zJL^Ba4&+36`0!45cV`2g`qgH$qhGiW=PAI8`O$Jh&xn-rC*v6X0rFezfoU+~As$x| zSljeMyk2#0#&29ba2JHVqq+5&ppi$rIw2EF`Aq_v34w=2+Xr>_dqLpgnqHM}{4Zpc z7xsQy34Nu3!Prp&jw{})&V;4H9s)UfiA)5};r&w&4GQhWpZ{LVdVD`pEqX{yu~&R- zt53E}WL`GprbZuoOGnf22@}LkrQ3MDGpu}Ze)BC}OKecuqtuVkwezM-$cKctxRwIM zsmJ-mkj(jEfTGe)eLUFWH<~pb6U4-w$KqUUcu45$dgq9zJ7KX#W90-GfISweLSeQcP;1qE>i8aF$%AmUEF@Do*Pz!&P0I2 zCc?JhZFW!$DV;dQyymyoW8uG3MtYVNPakiV3+|9qUsGTwD+^a-0W^fQi-yI!;`)xi 
zm-+fff$bKAlBbbh=F|uvsfCvBN0EjE&P~p1P@v@-1NH#@N93-yFN3B~i$lrB8!5sV z%(>_H;Ri#!cgaLe*GzE8&MUYhIs{$8xL^T+p}Wx`^=yBiwZ zoN*t^TNHoF4CmWJp|7-Pww|>;FkcacT*MT=|apLHyKm^jO$nsJZP|&{ygqlaDm-0Wa`d23u39Q(o#iT8h8;r2Lq|)cS*^a zXk@^~D@fQakRa9~9t;0iflMpUTDOQT`QUe32-%0J+m#X^Z2o)PW@&;a>tNKU zG4x=sfbJsxgWvM)`Fc3cl#F`bYPk{E8I(+ECD&m?4Zu!mi`BP>hpeL@nW1aegq0i! zd+oUH#IF>S{TxSMuTn}dW=z$64ClzLJ#1T_`0*yU!u=xXV&(qpVzZ`}lT;EPEFX^k z3#h!$gXdz+Z1ok>^mn+zzdIHn z3e!m6-gPqscGU~Z*yNF_XF2nUx3Jl%8Wm0PmSL*BY}}w zT3ac1l{w(^1eD95ged6cBtoS$Yk+zg;7wQ@q($7u zSMSH#lAAze#1>}(O=}2nehLj8`!8R7)oVS1bcBbLD&z!dEzp1gqecH#uB8ovLXd?o~#%% zh-j)Sb2?c-T#Uh|w!Z%eEUPWu0?CipVTN+`RVK?jl`B=4{OUaA&|XbJwd@)J&o6Ic zg+ZNj2{4fEPce1w{5Bc4&AXCy7$kwdqB2zR9q0@5%(O3v0-Q2sPH^0X*FalPU@#c# zz9P+zZ24#t`t+TNr2Z>MaUFrz$}H<|d3paZ#+aI=*3~}T28w%}lnI|QKpz_f4&#uS z6ntR)rY0}Uc$0@0Q_EAsby5jt2OJWaUYtFGfiq;R7pstAh4|`x^&IQ?ouV|K}*iV7Nu%RchQl(d32Mz(+ zEBT4i%AemN!3&Oo%#g4cYWsPqC*;yea)OGywOKtOe{dIjFEU)3AYS5=XNNop)2ne$ zx&&rxvw-XV{FPDYHX&8k)7RIn{o|;ZkRZ5qIMEobE5Eok`wmnlsrq#6Nm@jX>{ z-9Ui8(3#)<`g=YW!W5FIz_pM+05CAz6K%9yd-@lk>@dWL?HGEn*>Uv;%2K(rK9gL$ zt!xRoZ+VKr^kAxHq~7)GKfODhIn9nm&e~67L0WoIHT=#5sIEWN1^Rd5^vHJPJyA4(M>{L}O_%i1A=iIWi@pt?G+6`fJF`sokcS5Z1ffE3 z53L~HaQX+c*?d+a39BSnMPn4jJG@49OE#0UE4f+o!WPxuS5OKN^^gMbqcI^ z2w!yy^4z2#)?q{HapM}yAwjZ=#z_YL(y+21u0vfJ%t!TR$P1^tR}{c)?tC~6k-agl zR*wgnO`JJeau04K*Ijwf@CUMrhK8lD+DKar6|t*>6=H(M@ov{nH|)k2slA`SV)vG` z@(}JjO;iPIoyoK_+rM1*NXK7-$T{Cgm~fvh)b|^Qj+c1|W<=?8Amw2H3rou9LRaW! 
z)Sg$!h5*A{3R4E?!KA>uDw868UBzn3cM}&w9Y4$PL`edMTFIC2m%6hkIMgLz==!Z* zi6%}BkQwG^73!D4Akp!vi1)+MrAJ>(@6gfq4u;t#I?!g*L@zupmkwe z!lIlY_UEp=)9I%JRaxdSfJx+{ytGDK`!orpK&R-Bvx02q%+k#~R!L}(lP-k{=?<6F z{07sOb0)cp`_Ftqu2+p#ixiB05%u@r&O`ECrYhBAW;^NlGx8^_v!!y_3wfaf%PId> zF}XT8ky2wD2T?2-hFi?7(l5?_HRuOs00%4~Ni@O@-wH?EX`owR-zJS-j#ntDbMePz z366-L0w4}eiU_=+HdxA~?|tzsYSn*!N0pdk%ALm#^*#%muG4S)YaE4lJW7QZJnoR_ z*o2*+%pNJpLHh5=${27fK(qC-y_os_Der2Ff@DewNPeXB$83-OwT9WA-YRzos|1-Y zKw}FzY?x@*q=q5R-rzoSqzK`jm;Uc4MJ0q=3m>v-^8=k<@o4@Un)o zhKK%Dl=+=%E3m!Cg9I0F{Eo#8H&QA2oXwe%s1P72e6U02ZhM5M^3J&rhy2{%9=DC* z2UE(X?_JpeBgvKNEFTEyH?)Gl8o#l{?>@SQiX*ZS6uUQwo=QvO?L8}$ev)<$Z|3L^k@<$$@ zZNqh4O5K+Nzf-9OQ5*6IYvyGWsY>zEJznp%O(gKhK8(t~Jk-kHvUz_Wy0j%OkLD1I{8V3CsLs$}dk**o7blJ)XiT6gP2;?2u31yv$2NxXQ^pz`^Y7L`D@S z`nu#>-On%UH1D&t8NxyLC_m|uJM@JFrLfB$P?U$lJkiumF%(bdNnM&uL1BSFZnH%^ zjEeVE5;XHy(f5$IaWSJ{d^dqHE!5;Rf6&q57KU*Y{YK!*Vg!bj#3`=LW7bKi(wB7E7+3+qsqp$zN7R5trMPXWbfL^okgy+433KZHq0Zz z7Ag{A>TIZDiGv8}IsDHFI6Jbs{@USeW^?Dw9;#yt0QQ}5(IxDk`?Gdg7=P*~UpZ^jm0T+x1Kq!l{OgLKmb zk{ISE@P=?2)u$$>zh#hMx&&QTt+4Agca$B0t)wl!so|vdH8(*2615SvD>kOfQf1LEJc4_bSk(&@GEj%9N5dq;$l?D13;=<9UAnyR)gl>b+O++VhPj3E zU+~J`j6OA<2md9TbM#FfWu8kkmxHpfW2I;FwlIQaKDL{9WA8xSGFMT4P!t5S?=>Sio^Dcb`@cJ zorC2FjtPQjp5*Mk<(Ea>>o;ze_v`1=vaY7SCB7|m%L@XA`myW9ZlCv7UwS6nh$Y1g7%KZ z)n{rkVE{35lh<1MN&F&jZMpP>j}6#ON6^d#mN+)@W{HXcdy6#aX6js&hD~v``?~7B72uV4 z)RA)qa$|)+3PK#et}N#gP4kG46`LF1i1iT~|Qp(6wjU zGCUMIwi>AGR<%Q4{JZH7M6?fW+QUH11hWQ)&?Ff^t&g}MbMD2>l&P^S(2mL+6xk~F z=tk(D<_C+?@u|;&4KFR>P?DU{q~A0!Q%;mkOPCY^u`%#5)`Xv1$JvJp;I4;E;*+x} zSZa9PtT3%Y{mV9^@dG&JL(F<#LW;dmoXKOV7U~M1m*U5snT`@5XvaJ?i6%zwUi6rP6dx-iv*K8&Q6vs-HEYX52qzxaamF91PaCyl5(1{1puF`81J?oqK~rtC)EG=U??FjXmN*K8vg3@lw#2pNc3pCDuICOu0a+lz$L!>N zL6dZGVl|a^?EWuxVyk1x$7xeGo9piz=88jK&$|!NgP+`;UFZplR{oX>N@4U?ke-V( zUgXa6phWGj9(6lzK-dQiZu$(@hip=v8i?mLyHwiXJ#X$5<-lnxQ+P11h{?gusW0h- z0sjGza5>>CcJ{oT^iDA$rMm2G!)w{DKP$K3NG+90Ebn)kwD~z)dvLdSJJv|~XSgaw zOp1}dm(9ah-JZv#&`dw`%+T()r9) 
zU%^FNw`z&!Ea~+k)KFzGfOzxzo~r9-ikk%oBrj1ne(dhpKyBQ%6k_`I*T5>T^<%+M zli1#7ez#~tso;o>DEe?P)@c+dnC-*^DXQ(Yf@i~oKYi1s>rHLIrIgJ|(n%kItzO6& z{&eVOstw!_&VwZ3vm7Ey+O4H)Hn&oneXI9eAi!0tpwP(FP9FuvrH4cP^M;>;jAco$ zn7Dp%0mNRC^Ad~}{Eqg8QHuu`Beo#ZJ}}wYiX@HkLr~DJ3}IjRGBZF*$i#qg&#TNm z(>QqwGH+;tyC)Fm;nhlk_t*V+bzK0am~`X#-)alUR^YI~>IO~^I{56TB_o}5lSrCd zyhI1KZv)SMHj%4-9*WO*k;kSM5JA%`azuQ9AIz$1Q(EqcX zwJOF>;+hOQX}Y>(=!-lAg5eO4)N+BC9RFT;$#*-UJ?&giC})|U+v!azd(u!KnNAkf zrbPv4h}BgLt5z)3?mIhmr4WD;N4KD%8>5DtUR|A@lUL5nZkWIIuDw?aN&AA?>^_{G z#~cOeCq0R`mhhS5@7X>u#V61U*Lf6EZ z|07i|Us#v47?yD7;akxqkD6GSl(@=3>Zy15%?Jef z=|h{+B+4{609h>XLl=B*joPxo;f+Ouc_XNd_|z#g!tDz(|CxyWfU_j`V7M)uWGe;< z(Df`M80s3_0ov-q2^tQAcmRGXR45^A&Dr~|bNzOt%9=Uvr}ivdw>0)LGc?WxHnqh9X~BNK%OreUk$wqIicDGq0m61LGsDeW~wVRG$;)2^d@I)L^O!Jl0`dGWn;!{gq1 z+zn_k#}I@-x3d31Nse9%K)ie6rmw1!6Y8p2k!dab;(MueHKIMg@g6O4294Ja)t;I$ z0cuYGmIsr#0@D0&p_1CJr1P7~I2zT7rM7U7rV;{6i>5w@O=)$KoUh}K0zvy{;#DPF$zMvrs$ zgc08fO;UMHC$)~(i&E`vW&zmp7YZ_amS5!!(9W7ik%Yqi3@j$eCmE_Ek$Gm_6!Y0^ zZX%^y6>m0HhV(#7UtSUca=)Zbp-%sg+y$Jzw>(cV&Nr!*$AW9i-%@~ss@CPK0PI{R zVRve-o4c!wWV1?qgcIM`Mm>4-K9pqMoJ_7P+D0+f5;5)-N4)TBgl^)F1@j>rU)zssVPpJJq}k7UU=BLqWi*$Kw`P+uWB^HQz~SJhC*4WGS-L23KXSVx?%qwU^a&3hr&*SEJ~0ezpI!q>NX9 z4%vERNu{>Iwgg=K$m%zM&Rq5%5M5LbYXPayN>5_5j|0O*lc2pbUVr#6AqGGE8r8Pr zg-RwYI~KqOj@S4jDLn{gYutoZ;gZ@hBI@|CPNYeK?DZc8^-`70d0TnKH7uU%1ev`S zK6G~2xV)>_B3Hp}%aHpfki!fAZdWApnjh_LO#eKNhy>4j0^v~_F`jO+b`hm`y2~3V1q=<(P zbYD9Qpn)kut)@_QJ&=zmHIb|%%Bo^QmIa<7Q@1Bk0Dsr^7C zW?@AjislBcg5yM^PDLiiD^#Vsd>vvwf5o^b>%b>K zdcTYb67j3z<>T(T?&wzujGGFxGM3n!Gth3)R)b_6t~+<_RpvRCA2r`LU`11qZO{9o zWUb`i1YV~5w(!P&-K=K@aewmIERmboN(;G@55f${mpA*!6UK`&za%;^JSVMmevey6 zD<=b{u5#9+-&w)I@z#AI1lP3sRgbb+)s1c6)M9Z89q)C%12!JVNkJJ%c!2x%PA`Y6 zqt+a@i%Byu3>?Q!jm9Uafy3_YK8a7C;X-DUwfT>Dpr>O?X94JSd-*T0Jz!j7YKmBF zV7?I0ugxy5@@Ns6Bl_8WWm^P&O+k@8RzfnJq~hN!1G$T_TpU0^K}=S1gGA{vd@DJ% zzKSvYlIA$6+Y6x}_gxOo&oVK2@s`xB0J)3Uz*@E&)oktpO3^c2&-_`EASG7Xi}X^p zQsu|;(|Gc1e$6qq3_E%d&$PK+GW|2MsJKC0_J-V&=M$yn!J*3so~&ra&Zm_FU#)5?l}CFaD^% 
zJ`NE}bQ$tZ5D&Nt0~zO*<D%!PtHW909- zN_pEbCqAi(;mxrY4_dgg<+x)AWrR z9jB5~^`(!r7fG>l0oIt)kqY_95NW=X##aJRZFL&snDdIbje&=d-)$d!ZJs&gBcH^S zw@&jEa0l;g&%3b+#qx6IxY))LeQw2%KKsN5q&d9adYTer=!*{wLw;RRdmHgIB9ZX~ zn0%OOH9ed$W55~=6i7}q0 zdRy>g$aIDyx1$&{bZx7PVbQhYVd3)y2vQ`*WY!og_U-h@A zRdA!GKCI|U%B@v1pM(-4R-^$rBfOBLLX%HrBiTgW_-XYI<{?an90%%Mi00N1f0VfR z6IKXq62YyJo&RA#R_U5PmA;TFBhyG{@~gGc(R z^%zP9GTU3eiqEFH$`OiGRZ(LHY>(zM5=AAG-;+IM4|XY?HXSb2>obwz%Tt56sepS* zEH$osluidqmp}&(67qYAO-0Ss-d;9{SV!cKlyBUK6R98b%GGo_gO%5QZMR9sSH%~p zP4Q;{k)dtdZTx=DKDx(}BG^#3%2a62dfpeOEZ@wCCdkW7fB0+a^W;m~E~G((FBjxi zU392ZTOA)e{ufg1XYT_|2!Ap(WdL^@s<;&ZhJS6p@YZw?K5$;GZ zh@7XY=WTj2&U-3F8|6Lqx8D>N!RB!kjgM&kb%lezVUJmwo#OfzwrBDk+A$lRF{!Gc zcUVeEu`>?AnshP%ldTU8;;^`w+HO5Bw=D_8YkDY$bXGyuYsxSFR|{z|9A>`Ci#GS} zo-l0tQIbgmBr8#8f0RV)3tR@w624;|H~|jEcsxx{RpoH!C7d|<)kD@2(r5y=W7Of0 zu`c5S}*yoDa!+SSyK}qM?_fcg}&NoM+c>+;7>P?pMv+Y>(9T-LqchZXZAL; zg^7k)A?g2Ayqgh?c_`o9D}d%zv7{{B^obkQ4d=345=}EE5Xj=5^YB8cvec%pd3!}q zSjMynjZoE$Qq??C{zJANrh9#Huf&VFMXo#7VDT&$myaOVSqL7`&CpJ^qKDR#@dmG+ zQt=@VZ{zx^`hy;9&crq4V3K8n2g!ts|5?M2R(BE;)fau1>GuZ#q@#BBUEL$#Nu0^* z_R_y94}FaKXNw`z6wdb);FG3mb4}z=LkN#EPcbC&i80OF(m8REEPWRE|c$cX=j{pRc+Y5yf z5?$+lihGJ+1WzAOqj>*O-$uJLuPQ}g*M1h3W~SbqSi*YLnzd)Y_GZoVJoK4?Tr`6309W7?0x=8KOy_rY`u3f#oN%xUsarY$|~3N_Z~?A$x(69t_) z+>{qy?qv_j+0q?lzC!up5z?r$LG&TY8m2~$8-3UTuQ<<}ra=K&&GA!{rg=UMse$)r z@>q|DTd(!R=A*4xBC6kX{m-j3sC9m9-Xb!HjX4{QSbLmU>P!;^%y$1q>d;E(*H+CO zcag4>7#aRg7KVBGlrUt?g*KDaDxW+ZA_klq5Yzm zzP;ZkD*V)J7@qB&Z0?F;t4_^fjc*`J`r zQ$)}6dQ-lc21a5QC5?hgLuP4X=L4RdTZ`#$o**uG&wmUU;j!0Oh3y)&?`3 zoZI!X$CI9CE;|lTrENjL$O{Sq*DVgoH-hb5cITry1-x$sq`{rNnTkVKHm1g6ZZ2;6dqNy&2 zxFv5?FCVMO`K?x@k>^chDh$qXaS?*9Got;KG%tBma%p?szXo~dH0bY}%VeMif2yNH zS+(BDqlao^Tf+yEZw*rV+)5Gg4q+w(*&5GpvyUyI7NI`jI!np;giw}2C{btB?j1LJ z4wqMm%-Z9tt4pw}{FgJx7ycH=wu}`hTuPgH#j1u%_eFHSP5d~p1A1=*a(_#&`e!hU z8tq+sXiA~Fdj&v`m?3rUv<%YOtL#u?X?@O)qi_iy3~eq{P6Nf!^MT#=Km~57oHDwt z`L7+MbC?==e5t{mCe>JbKJD}j=Fyi%qi2rnKu@03@Q8~!P1_rM#r)W9tNsbguy!eR 
zW9Oags^YY7LDUCzVf?P;A9=6>^mg6PNc|E10@__!zH$t@iZuId(x|#RGavBzmd*Vp zu83VjrA4#TiIk#wDE3_X4@Uk^ZnKn-?Xozz-&J% z3KA2dtos{mBRk29biRKB2Wch|EYzTfc<)0wn)4Y@I=pCbpg zT9DNZR7Jx>89bVa(T3wcZ{w&)ok*WqwiO|ezkSrJsycfBqh%45bQtSg?iPPFi|V^R zEgO1xd!8TSAd!!@K5k|kb-;^c{W=`x_!SA=EYR^sy(?KONW6qaHyBR#VV2J;BuZBv zsk)Ehc9QC*J_WCX$=5`t6;gpCn^*F*X;+bT;m#cKU#cK*c z7((X@`Am!N<tybyIpDi#7kGFttn)CHfBZeYKcJZM951kWee0=}>VsEU5x zg1KY0e@bNq=D_j09DjcQrp_3&BUCSUyrdY6E!cttw+Rh04EJto_oXU{zu|4!EiI&~ zu{R)-CgS-vVwPZ7`A;iTS7xRzghZg2ThXB z-<9X;DoG{ovWq07zYj+C5k3+a)+BUjA=zff-Nk|MaeH>((&N`f`M zGq?nRIjW9jnBJb^_UNqZKaGMh?>`P5E41p3wG2}&it zmF|7eRq`C`dECrnH@wV0205&U1K`7HAG_)4wMdJ*67R{pQy=4s)+btP-vA=HHW%`9 z)Os+udAhzwiiO@h$JBW>2NMPT)cc(B|%m`Kv8OV!Co1!j0YtyhLSr|v8MpmKi8 z2dMmkMTbkH9kC%Ex&*MkJiyM}tx4^z4d^cV_0IS`9b|R)Ncd?@bI~u4{njL2k8|Ty z0piAQ$|X|nkScm>GaRw=TzGhzyUH|3a7Ql9MN7_ADXn|h_37Gj1bQl&=*ZqYCHhS` zyXr6(x!FZ}Yb4QNXo=w1q=`7@A1Vz(5Zk<2>^%jyo&F29;BpQp5jcR^sd(VEztvsK z*#v}_LF=Hlq@& z+7hXZIp$@Z-ykZi6ZgEHt#&w5=veOWuKNC~cs|(Bw8v?s=>M+Q@*(qGBKQ9+!Jkn zHa{dDFQlK-BP=2t*Jt-m4t+<2J?$LHvaT*_z~-9}`fdBi89kVw%km}_2%|5KNv0sb zR&dYkEqhU9UQdH+b|u9a4sJ2* zEm*PDulEkhZTeR~&X3Kx_6S#UWz+dQI>iLk`F>mg%pkLsqK4ubi$1>fU7Oocm&_k& z5&8NvJ^IY1;d?u3=t)FM+`RAj@s#L?MK~G4yX?S`K$9Gll~pOty1H*&FfE-f`!r7N zlI-Ak-B1PvRc)%2TgVH3d3>7%YUTMy6{cT39gAr8>He+`I0CL|mw@`(6H6}(z zMWIh8xsR#e{tkT1!>#(;MuoKUS?0uBt{DoUuXc0)R9nlKqt_#VEB1FgCg0zJ4LVv9 z4so+=vcvfb_AeH|#?6Xv12VX*gD} z49`2r-c(JR)i5yS0wpwou{0&m_))C*rCh7`+2Iw~@&-j6I#C zFyQ>Id&DwckcbkG+7#SfU7apSByXtnmffYKb?;a;D-Hm{Vp$aTc6jc&uU$4Ane?Vw zm=2|0h_9?Y>UGCu)b2Ack4@JYFAvFV`8f;_Cvvp{;BrjgtzQZWz{Wxu2Uv+^{Ah5d zZFkeuSAloWl3Rl)lZ-V>vL@MYsiCXR_A*TlVzdAc7jM)Yg1=>rhNMQsc|T?am&Dpl zN9O8osW#;v!sz{wpbFz#%PU>B^_+75uq?bziaw^c$+n>TAkavhyaYm(E;JsLKd9BR z;rCv%|4qCvKNtFD)|~w5{`^ze7me&dbCY^;|^|V<(o=I;tf;cN71IqK2%3X%?Nj%R#~^b3`A9G zlno~^c)RZz( z@IieIRS$1gm}LYF-w$zA_d8P)F)(6K>8;{bK5${oM0g%O44$Sue9b2bf3R2b;3T~s 
zU9a2S977-t4#ek4MXC_^=*F$B-bLD?z3}3De**pQR2@{!JbhU}x0C32^J0Yf@$2a!C;QlM@9n!cMc!d)DNnL1cLwF3#vyyp&0(x~fb@di=nb$AJWEFB!O3@y+^=_-JcA~!X@;6C39q09kwwJHJ ztW%~ew{>?s0{ystL`B_cf$1U;XQ9xhH78L zgD2g0=cY<6XOe3p)8=^b%Wt7C%O%-{GUV}t3~%>+p_OjlDX{Jl7w?l~Zp_l-xmSM5 zq(H#B?0TA!>La9o`Ytc);bGL&1P}PAViMbmgu1&P-_D#=U1@7zqgYvLFZ^S`M&Zh1 z4Imuf6SbIyyFNt;z+SQ|0S|UM5Yr5PWI&1$Y#H3c6@Jhf0QjT<496*)kk3f$BNXWn zk88zDCr^59JAKg*|88bgZbgf>lq=Z^5nuOl*n0=YswnCIo7JTp)$Q%>zmKOf7@~u8 z#x4G|R=k<+lBgn+UT<|&b!Ei1KRHYdl7}Gh228=G_;U_g`(Qe}-#7$ri@c2(vr4jk zs70Sxbvcnkoc5o=3~__IkZOr^|^s>7PD1iMwG6%1YYY?%4Z)BaiLb z_A#i`?ySIeb}}HBHBJw*;ELV$)yZS(sap27DwxYAdxUr=#i}G9OJmcTLFR2|vcb!o zi*ucTz*-P?sDzBjMD$`?(=077RXjjHmPYa%FEtdT3q+)lm4l~O?e)ZT?z&?R+O8Yf zK4wiTPR&^tdq5#fL?%Cx%0J*;C_v{QSPms3@_Y`i=e}6^?R6FV#@o9X+zx;dYxr}n zlR-jq)NWm1as6}-i83t=$7d>`#xtWb2*I+E2_@~)M6meK6&GWi+pufX_hQw*-R1e58(w_N%z4U z+so&)EQrk7vsujsck7`uPc^Kjr@eA{khT7%-Ngd(4bf1Vp13&VLuB4Qbi32qFct3j z+jVv6uJlY>(IL&z_Fo=IAo=)^;!k&|05h@_r7GGbKfqQU$2(pW$BV9Y2fn%fLYl4) zZ~09his>`E+%s8g+_*v0oBxa06|kJ500y0H;e9)hJ5Rze^f154)5BppWPLJ#tt9HT zfH_hcidqI^ZfJ>D!yT&vU+xv6ZJeB-E#To3N8}qLjG7$!GE>4}R;2>a=M1HI)kDby|v)dqwkJ|Y=C ziiVgbTO8Hzx=$apl`>8Gf&u@@a2RER2&aF)T=GdPU#dS#_9_$oYDdb$dh)aSqv*rr z|J*W}Ds6G@^!xGdO4F;n`p=#{+j{b!4HI(O7X+EZ8`#Zgp&pz2t#P_U=H8ylgX4km zT?f~(*s*JOM~pG&<#8LIc#pQo;ww1~Z+IBm;JK#yXdT~WtK=0uNdM*E@k4!2)Fmd! z5!?dn<_SP2g_FSTJicDXQ8==O7uh@dGP-SmprWlRou*m_qvR#U_1feb{#1?)Iu`YG z6Z<;JoF0bqxtSt=QZETdLCN?hl!tPswmh8_x;Vg^fxX8hn26KeHTAm+>5}c{vB_LJ zQ2uIa8MQ*q@#1s{EaHIg_roW$%+Q}&-pPy{VW!QAP}X-=e`_STotBhtpi^|$>ny|3 zaz_upX;4jg3Y*>KD0Vkw==D5SIs;{Q9WiQe^7s=h8{TN^5zC3dt$*-p_N4+#E;~3- zqApTqP1eaGDq_z5Mc9(vqQT6JN-TAJvl4x?4DrA zC=lRkqkb$;t3kI&8T>jbG1SR&5R|y6U7t6Ck2K791bJKe0ZG{*uMZxTIe-zIXQ8X3FQ$sM? 
zyt{Gy2uwOB{8+NCc5;qGH8c}lGL~@cM96Bd@kCecg}hA|ANXExvp zme&E_nnj%r{r!V|@lPs+fs1r}Iw=O`@ka0=OVm`779}ezSJ4zbNZ#s6vE*dq>m-*x zFm9uUw!>e~Qz<>$LsXnZ!7zBU{TLtjGlt~jBN!hy)NBP-69=2k(DGO-0wHsbo;xCy z7mfyd*sk^vYNRxpnb5x6D+Nv=X5Qi-JQhW|h60<^)kcOXILn#ory5z@hOi6D6p;}W zWDR1!+th}turAEg{W3^QoGzQM?)f>q>w_a7Q0=-fp2;FZd&0G%U?&)|{G`t45t7;A z9&B0S>hcOz=TAj>e&To-J;fHGqUEB!3YAC!`pd4NLZS^Tcr*f_fzTkX=_H&t?(VLY z7fkU)*Z7&1@k0y*oT?hs3rMJ!@?=miMzQmqoMhUW!F?$)&Ty+zt9!HiZ|sz+T8KMU zA!=x)5LuJ4?JxDNI}2^s&3`|x_XJ)>9weAyjsMo5_`=Og8WWfVPFG=0l*uw}rAzUKSr9p@;NAF(oJoaM4aO8!#p+e|b$KAgs|x5V{tjVm0! zGEhN*R^r}X2~mA@NC1N8mo~r%;u|!f$t%YhFTV@9|=H;|XogkJoVt$rE}nkHt>f zxy=_Rl|=%-ctQHoy`b9G_GOEu6)_%*hJRC>R19F>x;IJVquP)uZ){&R3*ueUaLt7K zHMaczFp6BM&hx*`XHj0(GQKvC{>|=~pRJ>r5kYwMJTH@i@JFQFSBI%Ny&mq6@~CJf zQlH#p8pUQM{kMF}<65{HEFRqTK#r!$|2^PUO6;*DgoPG*|*aHTz5*81~Vo zhYEL+QaP3PBqs5cQ=OrdJ^hk~P6rNW&{*A0jRQr0&mW>nUfHCTrAgz{3Kt%|FL9C) ze=BcH+E6XKrf^L8hCyRS7Z!03p?%<|^nf`cwSZ-N?0bY7s)$OG$20`U%@{6jXZ?}R z2rx~G3L>PYa>$`AkP-=N*GF@J-GVgoENSU^ z9&{nNi~lC~;?X~SiU>&UBdD;4ng;HEdCxfqqrF4H&kR{J=89FET_-NQmQN%%t)e~% z4bQ6%xDlq9qnJFrG-Hk>xuKca(B*4HBJb1(j(1$rr`Z1u_=Cnq4e%teFOLb$C;?k| zd-hIxe;fh!g#s^b}D?MimU4{ z(2gaUP*cG?ay`;HP)30ocs*9MBg9gGrlzLw@DM8_ZZ*EeNi)Hzkj#MeXiU&u=@Ak# zdhn3^IrGpp8p5xpYEz6z(w%{xv1VZ~?w%EYqs!^?De|S%7kLX~D4mK=saK^Uie4OC zJn_KzXy~8ZQ~A}-{eFEa%9*{eHzuF`$j{caV<`Fj#`d?4TTDngue*T-yPG5+YU-OB zQ`?fQB73+npR~B-R~3FfCt>lLosxQ$Ly0by{Pj%~&CCHI>GRz_UY1HdP*v9BXLV(lbz*$P9edLZ$!z5%t~iRDbdR@9Ww!%7~OvWM_mT+>30v_Rf}_%AWU< z5K0Kym+TQCD(jXcAv+-}*=5hmz4v?c`F+2?9{tthKKkQ*&UvlpcwH!t^&{igFyG`xP4FcVoT?S$c6g9vM92ctx?)xd zpbmb}S?k9y2Q&R2_TY(!BL}h%TX;d=+&`>!p>028%ji@PrexG%f^jR%m!EBrU0<0= zet$b8@X|SCI+0jkGK3_3o2F903zZKc?=xavg^?|d8BXFYbzk!>Pph5N4O&-c0f4L~ zAxpRTuJxbn9!Q4*obKFI74?}-0AO9&sE#Tk1EmIRXX$KD2n30;g6CO~bun$a%2uD) zON}c)T_gG59E~VgYYPxNUqJti?!y`(a@#vvHJP|`v@9if@FDeHD{bj2Zsut8q?RU3zJtgL-76?aw)u18;=Fvv4IvZXtdcIdmbKLh&6F?<%|)hX@(FX^Zhg3^Yo3+l#xI1Dm z#FmP%U*6jm%Vl<;1BdQCWuq`d+*^gG`?fXwof`3CjQaa=Nlgs37Pza?=LWihw)m4b 
ztQ25V^wWEGV9sw)td*7;^RRQ@S?jC?Cj?eMrVh~@$jPkUXnYKn8$C6*I0XqJf|)S2 z^vLLTmh{?7neD2v$$y--;D&1Y$;4m?9BtNP!btNSf`^X|UiVSF2Ki59tUL#+H0^LY{(_m)|dwZEnM(XXC1 z4%nMyj*1~d)ki>9^738h4qcBH#5e8IC0mk30t9#%G0_?zmaRh(`?HB-$_<#l>dfkQ zY_&@)d(U|xn%&cg4I955$T6~xkv7@&gB-diQY-++MYTJrjQg4xPOl@`}&_%~W>B zHH9LWlnGnf8wm<018pGu21zjHBPmhJHXw5#bFW!Dgc@69buB!jmZ-4>t@QmYo0nWM zGW-^BWiDK=TQWoU&Cec2IPrPtDWsS#Uaq4cBJmNaBb_4#lqvbhc@8Sx5l}^;<2<^O zSRhbJmPM0-&OHeb@6x*>@2E$nPe}ZZzw=Tth!e?T6I+7krZK9REf8^)Q<+-#NR?F$ z%e78;C`}d{Q-*#|%Tv$&n+`jpEn0L|@EEl*I2{_T3}bu+D!OrvP@wap8P}4UWF%d8Smw5lJfD zS>*m$1ZtA+jzsek@f709x}_(gL5spxLJucz4H`zwdmq)X(g zPT6uhQr+^SV)Vg0c^_`#1$InBZ+>15*Op^IGDWTby!`#aHO7rs^Ri{a^Ab=iljh@` zp`G%Cp4ykop!VNIDD0NJCF9}5qO|-UyrkiR;%bwj((h!FnTeahK2rII?BdW469lO5 z2X|$D#ynE6y{##cp!j5P*%L|CF+wUSyDKm|q+JzwzU6j7;OCs@vAl$3T?jtr7+GJ4N`HFyLZl#NBNO8433hc{(ZD z2;Q9Eoy-&<-8Xr9E#DzgZM43II2w?D&gN))%BxqBf`SZB9XFm^BW#5#&>h2FWd>sie3l72 z;o-7eVDfB|Ya8ozLpRjjFe_h4{K{v=7B(F&jKATDErixZHtWY;*FJG0n|O(dSJYI` z!OKyd$}rSTM(h;=%r`mM{04#PV8)&A?TNP|=$Zv-jmDRxB%T$K9pKtdO;Nhij}-z! 
z>e~Pj6{d^N4Q(=L?Jo4)rtBl{57OUwC^8GqCfVL-#+FHNW7^(a0>jY$FfW^NF$MZ6 z3uWL(!^mnN3>R5{u&3QDP1YF-K7y4lTlK+;D>znHIU#)uik+=MJ}tC~lL>N|6<2{a z*c3U&2oeZk-7m$|4Mw)N4KBle8=xsb?t)(6K@rpPAmH*Fm_DoiT*t4g>Qi?S9|T}= z0oiLcz8iB@>8~kr(uf=PR!6SrPzv{OaYJ&G1v8%e62YJIYF_RfuH;kYy0yT@Dq2rYZ!pgw^*$I5r+GV% z_ueBzBDxvMV@Mp_PYb{2tmxFz0!;-v%=;to>W80V8+RR{2!C|a#|HFQDG)zBjr^Xn= z6BWS4atfek${}3VG>g?2(>71hg*&Kc|zDk?_8*j1d}Wx^XJwG!SBjN*?1I9N{2%%DW*k(L{>S{J_l25~N<(R-_65f@39cHP^0I-%=v-bvQ{tk{L zQ&xmnzc@;x06Ss657LvU&jLuOC$P(z6WFDc15Y~WeR8#JP}YRRoH&$wm2u;G5%#4< zr-0TWlo^sq@)-Zww4Zg52i4%Y)o)kxvVu6r%|>(?-a5A}Ge}|$( zA}W%_R?y}Ld7a;aC^5A=($2yd0Fa+AewTWFu#!x;OZ(z73f$5s&*+&-9zCr$q@l-l zBu+s#*^jCE@6_zacW)Zw`vUJvJ}pFG&MrEPCqS5w+pj6iN@>5){*69_2Zr)yDqxhF z>HE*@CswQbKFifU+At)?T!D*4_A^$iK}(-fKlq+IzcW*-M!~S`Z9^E+8F3E1uXpnO zuRNkt2^?J)dm6UE{N^|1$$zHH6!i@(FddpcNkZrEsD5A!%NCjat1$$|Sp_KIG z@ET0zQX&K@a4p07hi&2*DT}d0=QG&cJXDZw_N3=#Uh1T1VmU!ebVd9!?9Mxr3!1NO zC0;%lRrcplilKQiViUFmEYZv*LqO=XzQo6?s$%}I0eY@2*i;Fq~_bsN578WMJ zSoCLFAH(kvQbNWX#^|ziEX;p{ATW29lwNG}d@>$~ypjoC1E*nUt)pdV<_{`)ISC@Y0US$XEVQU!|_#J)2!hK4E?3^6{)@2Wt*pno_BL`eqI>xWqd_$aY8y)rUR-=84jL!Vg*iI=uib9t=ne#} zY_$<)#9V-ZGDZQTKm!@CnR1YSTB`9BRMf>g_n zqq6(&<@_#r?)aSCZWjH^XluZd>lbdlyinJKk*9si`EyLYF$ix?q1UrlByMF(s&G2T z7;4uhd+tD>m;#%5U+iaVy2^i}!&PJlYC0mT27hU+kGlM_wN8Nl_EQr7)&aR*{a8?) 
z-Uz##qSXmOuGe}=K+l9e z3WlxVPgTX%v3K`^$mJxqejifm;7-|vb(hXd772Gxga3i@;V4#eaItQ$2RxBE6G z_wYeaE(ZcQ7IdKxBD$H8wZB$n7vDp zdD(R9g{FPm2P)=^vl1C#TCWuZQ;A8snwL#)RCPi~zUwQ2aVe<`NiC6Iv86Y_>o88w zyUARskq|H(qXgq8*=@^wVyXu`IC7`r}Pz}~+$n*E(l-}jP3rpKytE=4a6+h?c1*Jr8o63#c~IV!me{?uf-TQD8-h1rPW2$C~`Y zVoW~eme0#&j*z~T&MU4N9ZObRvzTynH!+L`l0T#gUOAZ<)zY2twf-SDL&Xjx@s*+I zx^4jcq@i)$Tyu4p>&mU4bggyS93Tf^gr)R_MU(6lq*$92CvF?;N86U1f&jfI{HXycMsMN^~mx)>x$J+&Qi`WxmO($AuK2!F|SzzapwFub(l`E;iV9 z3<~I=x@0k&r-0ETBpMP@ewppULDRZd*CMQA|;l7m&DK7$W zred5`wFs-W!Gi_Go&GE+>;8j-R{?ktK$ZJ{)mQtMIP}(FT*&)3(7kK(Zys!B9ZX0u zg6C3E=1uzbLx+L=>k2yX3*5Zr393s2fRoiE*oM%urdMLwGX=~O?yplF8K70bbDNwm zZc247ette(0ER0gB|B<8xg0`<%`2d1_sF7WU#FPy^KX7iHRGoA`KY%m5Q^lfQL+=W ze6o2BG^U>q5I2gHQs+T*8YCphUV__XKM4*NGJI2`L8*9erE3`sfAnW;8xJ!{Rg$7*;g;u7iyE{FRDO3l@>NF(!0q-eL z_YR!=laamlHu!0vwpih+5<)Coy_i^K7~PhLmB%)xHxwVGt2@YZ?;~0poE? zXs&l47O5>GAne0{Q4qT<2#f|SJ}uxZwxV>7zs9|d2ajSvc7n&8!1yw)@4!-N4#Ceo z(x&x*3@RUFbgwGYz2Un4>lALPV{EG9uzl8;;PWx6wugmEAb?6>KY31zzca-SB=0y^ zxE>!%o&?nm3pgeSrAG$9zgwLlElQju%Y9x*>@&Vu>9huI@u7L%f@2tMUng5`B87gS zQkpg?wf@{3FmiQmOM6Lx6pU8!CWFDgxFH<}jBTpR;w3Ghs=gt-)YU-^n7HL=^`q0u ziQ0r=pQ# zXbCja^{7VR?8os0dehdTsBc5T*3W?VUT3Vvn+wdkb)>vJ26VW5^L!z4-dazL7Ijk! 
z_{$y+L9^Cs$)T~|;ZFJ_nM*j45oj|wujK56d!%*T_UgOaT}t z(K__~d_fmqo>*r02no90c+0s!varO0^Pm1g3e3z&ZhE;2LK9c*f?HNI=HYj)TO-UV zky3CA}pU@&<2w%pqV_czBcol0n68ZQL`%@>T)GeF@lM%1u}b-e=G@RD0ps6hvt#Ul+ijDZaG4Xr&YP25 z1PXX>k&WczK2B3CUUZ!w8e(%MD6nXI-wif3GIy{(0OBqDkvz^gkGgAtucttxCr1QY*!Xv-#aB|HR+r?}^yA;hj4(~Q_ro=o0 zBD$v{_-TfgfsmlQsqUlScfV0c_y#S~WpNt-+ow+*y-&Tw!D^kZ%^YuD8F+u6q@y$m z-F1Rv;J%z-@(QCe<)jOWmO=qrvRJ`2wvIk>?Dai$BNB}D{#knt(9Y~~8jN~Ji5`=L zluFY20>drD@@VpspT@3h?;^C_L-dRmU?+Xe)JVtFNYNjC?Xi58ZJWQPd9LuM{ILFH zq5^$MpltT9t`f2xH>rV&uA9{-63I2Yqv2&FcyHLhC5s<5*RsrV={#=8zpXOP9`g6y7^<^J^;uS)Hp`{=xcaS_7vUS2 z_4-5TOk3}d49nYX6fW&nx2WA^lsR_I8tMB`(eRpbBKQ}+0g{x!mb0Eyom)h2u^YET zP}!))rOdbFvI9@>+p#i;@s+&AFo)rzn?5sn8ZtDv;kGkJV>Tv7%x+EY=mLe6w*De_ zv-gMdWQ$*KQ*nxB{j?i%GXIzitv5QqxqUdd-CtMV_BSr6Pd3k`JGg3TI%FOid);p8 zYua+nkOTsv%s<4zJy|)WweJ0b)<;L;{Llf_h0rF0IdROLaCqS3j|$MkkEG*V)TSKX z3{>cz(46C-JuQj3{&q&O1;eLyPfWY?H5)?u1#R9A@2QleKN9Ve_R%3-ufguSN*KoA zfpikEvY{^yVp}}6=Y+mYe#tW0?Cr$YvoB6KWUIlEbu_JyTpWEpaTP|AVx5^UiSk^? zRMR=KyqrYiMp#;aCKpKZe1r`A36+);$e`1@Z*oGI9`G8roPSdVG9uG(Y`Z8CD^$O& zn_Lyi^Q{G*PNm}UhtN^+fCOmfuxn#cpxsK>K#0j*PRn_V5n3IbPQDg%iL>deUnsNM zoVU^0`^Rl+y{JeveOn8Pn_7^`ay4xR%!W|0M>PG50(yG>W)2JueVSg{8xn8~R@@s~ z*nV0;`IDvvHuJ4`6?&lRJvo)_v3rR<6>;)W4al7A^tVst8m?|7p$gHIZ)KaWydf>{ zxA^Y3)ARHV!l9*^iN9`t#7635cDeP>hqYYXio@MF22R)aR-x%^&byC6vuk*S)&@Fo(a)Miba40kYU()U71xvZ& zg2odwcKzG2!EMetw)j_Ae%+(SwqdTrR53sSzU>Zpi?8m19>a8A( z$?|ll`%G4Q(hf7yaTM}sQ@DzkK}Z>UgqQA{EmPfGMs(64=Ud4M@6k5+2{GTY@r=)V z{W{}!Ydp1=+1^c@UH4j*gbMIxp{#jcO5Q*yRmG(#9#VB4Pz0jvMVbIoO_$FYb7lS1 z%*XEn%xMihRHS}zN z>8&|)bn$C2^X?K?hU_v0z+$aacrlO}q%*$IT-NHB&ize4(<><=i%|wKQDkpK$cGc! 
zh3e0p-H;NXMRwm`Fh<7H5Fr~Uw{grzBZFhRuO|<|!2W&F@8Iyuk-mtRyDzXGQ-=4> zgEd4?2b!{8cr8y((hDs`=y4W-g=)=v68b)`%M)`v5W{{DozQUa*i-}o5e>!{l zFgN+(b(8!+8o8gGWl&?W|11fcA?t-6?teR2g0(x`uDq8)6&Nmj z-tZO1RZ(_n?6CyYOdseke`;Yf6Z_Xij;#!{=XeJe=r64MNAC3ji*R17SpSuy*Vk9Z z$|RDBzW2JjK8Pnqdz|R?ujQR12MxU1k6$oA4N2Y+$dspGbH2I42m8UUU$FxAFuKo( z$!vYr*aV|hjMFQOd-$ibWar5UUoV=d5N}!-ClJoBe+>)1S{eJ5K;s`@iHHV>qlsIM z@77V3#zf`$sy8!1OJlheZEa1n0>R5~-ElcjMnjaRD0$`%_AV}}F)ZKsdZ(v4;9h`S zf(qc#W&X!KpcjY&v8wP%#M+Zr{4KxD8^FN+rG(Q_>QKr21Yk(D?mmioKzH;?a}+vE2A9 z?px+l!XDc?&|G@MmnmU%!($%m%PCz>Qco`2FkOs0=~q_&jTNKcg&r_ZwC3V-gCKPq zmHd||&RDJ)ioc=feA-&jjPqkB9g5ZrqxYr0kzLLYD`+Q8*_l&qDJ$q6amPCMqg&(~ zk_*q=dHXw~bQh>x=Nwh;IJ;;5<$?ejav{QIdTdTLIQgE$AtC8z=>lx{=^H`XTCo{T zE+o@0^Qf)f`*qeHN(NlT{G9r}q6@o^U$8lL1c3Hb3mrcposo&65mj)PWGI!m5m|_~ z_XjSNvB7?#1@nbGQUvB z(US}Q6?@mt>PFa02QWVqqQP2H7{5b&2u~8he7yj8n?J;StHy=EQ>w8JdzEfZQSbX; z`TNT%vnN77U2{!;-kC35%+hPT&ASd?dxt-2rbo!VIs+%QoBBO89zyu4ogH(q0S*Je zHU}H5b27c>`X`!bew&LA$A(!jxcf=(e zVscilfqGC_S?Qf3(+{*BqGY*aloYnHdP&$(e&T7KP&glii$Cu}(PkpJfm*R;>UXf* zs8F4?>;!|91s3v|Tk26G8;D&P+_eCI(?-V1&{yyb^3pX@GK*D-Y|@VRjF^A$a}dI` zFqBiUT45f|2=ub7v3vM|qO*adzfVi~o@Iu+8W?2leg7K1;}On&Vwh$lx6o0X63W`Y z-G3DdeswszhOel7xh4CR)EDFOvDVbW%4S7W0;nMR{?Rnf-8m@q{>FWj+7R=t)MwL9 zCd32lGUq|_=#R6_Pdbe@e`w2L4Q{uR=gQsBj8q=JJyIb}b3E(ydn9hDu9A>WM!Kms zobf*5<)^=2L^X>@<->bf2a*fdxwp)=Bzfx^`h3AF@W?n55-?ipbcGYE9CnsbU+r0Z z=h4nh5rO$;<%i3;ATc4!k4E!1jn@NW=7Kn}_xgwK_vsE?<=j% zL>?a6-<>T8l=p59%s0w6G|3?uPP_Xd#vHbReA)Xz??XWm0Ta2{&$#F&(w#-ve=PYN_fB+EvW7gRs_rPnr~>Nuy- zElC$7^#bB9$QYMo+`~mtdJ*WRcQ!k954@Y*35@n81S8;U@Nj+z!bJ`C#sCeXHOhpnB1Glz z2prsWS;bPpp`YwLh_733a`9%?fTGBpj-(FEa&M6;xvrXkUlsTGa}H#P^4C@nCP2sl zS~)rhi>+JsPSZkLvB3r7rFB3Z#rlkoFew~h?mIXk{$RcWLv29VHV?PeZanYyCRo%? 
z_~*Ua-Ezs=09#M_UhW@0yNm(XtpA>brCqZ}^dI|W*xtmpn+8xx*gJEM9F;I{uS^5D zN&O#5qW7x9SKB`4@bmI78M-n(y7(c=U|Gt0Vw;o5_2SWAHu{`U5;y@`FR=+IDzdbB zF9N7|Zk7i^1zP>s#?!$=h{vec7X4q~p$T#}6X-Ml)LD$oN}pZ6@7*T@o*5z6&bAU` zN_DmPmr*~OnoKX3Shk?FDWKBD6$Xqa)bz^a-1VT=Zix^~KXE80=&uaR1hQb*D=K zq+};6y6_h}RF(T&8yhQaxu(4+EVnr5;4UwPK>f@R+v7{VUGOg{&}9hwqs@eGw>oaQ zqj*U1`zcD@fKl5dK>kx^KQp-f>wQxX`~@bSa~)+qBI_}?O(QaqU^^f$GMwG+Yx9#Q zETn8S(vqf0>BGqZoY;SSaCyil8vSnER79o)90?-KPq?(fyqYE za?ghjdl&N~urVH{e>&*>-DlU$sA_Bx+9dO{Z8+> zp#e(W4XTZWSoXX$c^D710hEcNBk%WghcE^01|R`6WIIiMlJ=tw%{Wa1Oxi?H4ekpp zJHnUFBfGU8%o0~#Xb0MuEXT1PUX6@r$fOIeH&)aN0?WoulN;b~SXqhRUhCy8o6%4u zh^*_JI&KnylgKdYcVX*>1qVtq0cF|XWFi_79&o`YiDdfXUm6uInufipTo8~_*Vya^ z<~U~r0Zq^NudZ_h3B;Wr{tumpv}iDIAs7T{-BdQ#zQ*ld<0*^*wUbpfht*qRRA9oN zh#`^A<{!SbotYoX7f%Y>lrCxCs!J1Bo%|BxIk!?pZl}rUPHfq%Dm|f!;9wu7{+Fn& z-@dS+w|i01vQ-r^b*sALtjPWiO5ZQ>!Q~IGz_7J>IBpzJZESEMhgg2%iOFGH*Me~#=~Ef z@t8jQQ={ucPfP+x074t$7sE=U^^~AEsTcG$1;e0}6$(djs%g9g@P|)_Zt&#<%hKM{ zzw&W=-3gjjKz(P%G22W9${Xw9rSDEJZm}LlLTnuY48nSgbm8PsZ%*X2Qxs?!M2*6# zpGIdDwY|B)yFC3vj0xAo68=d5lDH6OAX_Y~SH$d#w`?}_j6;GO}1-jA_yiv1wAaFfLquq0s0X>vxAb+!{Gcmoy) zJQn&o-zJQ=>OsPeMz9}E?*JC^{26_zuC|y?V=}oQ>ej2p1&058j41VTi#Q&yGjytg z0I!kn7s!chX~RqBuI*pBs{V)n#d&C^H%XJ0R5AET2*Rx<2vz{!M`7Fv8s2LBq~9uB z;Lt#!7Sm?fW}zA6(X7Nb^;j%M+J`nc#XgOQuQ?J<<08Yd9i&%LTwg|suO1zS#XcUW zCJ8-BjHVxKn`*P&W=0K8f33Yx2A!*XVRO!XJ}+rzQWMS;!Bj zsfEo8brof731j!Edatr^e%lc4)vR2{n06vre<&>vE6H`>lhBM`bg|Y-0-cvIfiTcB{zp7Mi{IN#Vxsyo}y<%LvZO8l~(; z-|qZ0kIyZZc`pMykHyxxq@uijS2uk!167TSxcIAXPTJ1+)pauVw&JioJlUS9zia+> zoceM;zh~lTfjrHP@+bLVDw+rX$1^Y(-syNW{MCNV&+RB=z4+J)9+qVrALb*lMdJ*y zKaIGseKBvzdeQ3>s~c^9YVp;1wHCnE$dZAe(%9>2gSMd}fx2o3U5NvoFQ`Clmjcjr z>0ht*IkM0krO$WtyQ|f?aO7Aiq26!ZtPp{Z(~gx{KPlOIbn*hy?STU*EZ47|fv7=n zae1Nkdt%CGcm)8-P53!&VAko5efkQX z+hvb+W>)K|H80>SyQNPL?pzowI!r9O> zPZ97Oz`R>SAJ&72?%Fh;dB3*qjg4Z%z4phZ6`fGvJLT#>RDgGI`BzNzbsOt+fy%R= ztKIsk>m%fiNtyHxX(IbC3%PO{`Iq?6U+h2S#EY7bvwjcnpA+Qv!`17@UGG|G{0h{<|Y5ETY8IUjCgbQqGyx}`*HU+QiNKHFSAh$jn 
zM|0eAF&7+fqPGCdQ&LV9#^;q#80Q~WF0Vx5D8Md-l2Z4MGI8TJEI3St)O;18qbH;s#<`0GU0gFjIw$)Oz5|49t`7r*X4L+&w`Wo z1>U7lhaR$_Tjl#xxtDizYscm#u@i^A{LjFwSHj^pI_Jg_;yAZ115LXBxzUAzNT%mQ zzh4<(<4$tE(YEisX*6sEJ*`zh-dk+mxbX~ZL9EXkYvlP8H^NyFd!3?N0drWjk>cm_ z6zpHC%#AC(AU*GRld|wM5`X}klClrMGKKMQy;R9E2$VOT9KYQb|I%U=lsGW(Q3&hD zr$bTNI)NL>+K0FY&#W{({xF{W9B~IeRnigZ9$viqDY;|9b7_SUD`?U|DLCtenRhdu z@xm-fW#AEKUX^_ln_IfG><#B5aK57=6V&>yC!|^ZsE8fU+AL|9 z;t3VFF#Z?#^b;)*_`JRA+~)^Z2jAa-KF9cev2l@%;|ZDbNjJGQDBC(85G?ju=ZES1 zn;HhRMH@!kv4B_>6ml{`I7cce_686csYGHI4(#IeY~nHGUol&&SZ`QcE9I;kJJ1^xKS?iWz3hTe}ZP)7q$TBXcu_C9@BgkC}N@?g9LupG*w>5>hsXGXn z?X?TuGU1>yp6;z^%{uE0_XOc6BFh_oZ8{)Ba$PF5?}_2N-so3E`rCwf1`XCOZVIdl znAJZZ>yVWu%KFog+e!X1qNi=NDo5{bAbKIMKWwh1q;VNpwT#o=QXj!d0MWMP6QgW4 zgKV~eei3Z`C^y~lwT->>%iBxZ;SgZz`GUWS?{>~QiT~h+xE&l zSo7d6;E~C)+i3fzxY;&?AN!Y5q}WOfFFqMsI+D4$(`mggjs@|?6Ynw`%{=*?en`Gu zQD(P~oqI4M{7$vF-Lxw>5~uFw^dAH2AHTVWxBV+zXfFX%;f)|=(lR2?8m5IGdiC$y zPn^e%l$s&P>kB)Al2A@yrPpH5m4NmK6BPSk&Zz*7Bcy>;Tg!|o>8i~4W?Tw}n1w&0 zo>7%j-*8_e@6R9#6_I)7H%y;`QYGD(>R{hrZ~Irpp{g{sD1njN%DWJZ2L$Q&wHFwe zr_p2Iqk(USdj$LLqynOlueC20C_JcfM+w)*z{?GWE=ox{p)Oy`y5|*FehdX+Y*hPeQP2o;Jm*6fn@Q{yyz$5bymrT5Y-d(-tz5 zW4vESoNDmPrH$K$d{?0}IA%P8-hZPCC}LCZbsm1-e^;x{kG&T(d0{NKau;=2{c8K`<>joY z728SB4;BCC2edi2M3VPeLF;jo1Td0DtqMWE^T_VuXT%+?+iqmw>)vYO4x_ear*``~ z(bx;XGxvbEW`5+00D!fDBD?;|trJG1Y=+{Wzr>V5YHSLqZ^(qV-mLY!X0_K~)tGY} z9ssx9=|(TBa@r$gIp`nNO;m(ONU@p$CY_zEKnKwSUs9FDUX(jjwIs_TyZHvGVyWWx zAQT?p9Zv@_ zFn=Okc5wLPf`>{^r3j$Gy*7^O6rO;-UUKj{b*S33&>8;NYQBLbgWoO!Jz}(Jf-K6< zs8addW}RMD`}D}t{0V{4Qe6CUp)3S3fA{O5ea)^QVSkGX`c{!y>?OL80x5O>nQJfx zp$5+McBVS1iFmqh-wcXSwpCaE=LwtnK@cV-Q7ZW@8_!+U`1x%(l{s}X_~z)iuyz|F z3jrU4OM{jKE{Pu-^kv+3&GKTRux}jiUF%$TLzEy^@kMy<`5fsC#mbAvPHG2Qo8i)Y zR{&Gd;&kC7^V2L@$|oRWVDyVqgM@|M@T-t9e<4`GcfKsMSeQ2z#Cmz9SCKsW0O;aJ zvUV+GLy0ZvoiX)MjBXCiX;KjV&la7Oxv@^-vt;1+i+Na*{;&i@AYYM0(bDzh?JJ54 zx8DnzPZxFS!!cWYK<3ODd;kpQg1dX6k$1Y6_Aw{VAtT9jJ=$cB#aaD@h|h2H(%FK9 zSaq(5l@;6kWk5dxB$RW(6z(G%(p%KQS?9EIO+}nhFd$#U@ha%hQbK<#v(&J#O^>6y9n3YdELB?hmDsj 
zSE=oe$DfAoqW7Z77Dswrv!2jw-7em8QvIk8%1)Wsq<;?D*TXi>66a4Ih|R92oP_vs z5yv<`xH*L=_6gRZ9eY>Di5@ZQJHKN^kG#3#P`jMeMs~DMZWum*|Y0#<;>G#bxJ_TynxG=n%$97j9><-M((-bv$1LgkxZ4SS|Y- zT51h{`{!ubYqI8u_pIy0M;h}hDRQt2fTKe*QSPbG4^k@v^G`#0>9gL^%BTG4v2Mw= zH+q+!56LcPo^GZ8rq*Wa50RLLSIR%af6|96U2hV>G!q(quYlSF4N)B3b$#_r+C=Su z26EtiCcAs*i3q|@kr7wQtWYHX6EKPYDMn0An{^g_-}Ebs(<&Nt8o{-lnWBVvmTfb$ zA*80(@iQjdX&oY_CU^?OnNF)l*r{3=FhZOvQNQN9`!EylBE7L8aw|?q>j_!^ml3>C z1ZN;sb9`ddFtGwxOa%jak(mqn=c*^4i@E{rg+-FsN*iC73SM>dg#g(J>|SmHWMS;FnXU%GLGstIhtfTwxGi-X~5fY{+Vw)z48?|`p94-XF*>p{5k$piMXV_W01YO>&Sri) z?Welg%G?lyIS+APdRik3g=hUOV^*L@rou>*ePMH_x^ALoaR)A4v+Ywgw@QY+Xi6Bf zXYK0Z!^`PHf5LtuKJk;4t}k>+K_{N{e4pT@>q6^i7Q7%TC0Q;e>Yz1Nm+h)HmLsQvHv3cu^BhR~-?Lq`Fl(#P({ysEFxX#AdPizvYWHk`Z%&StgDke|eD4^~&7pVg z@%y>ZD6xX#MBVhQM`*;J3Q#$==z$#w{Q-Nb!G(195x8+@@yKQXXd7})r55n`J?@!+ z)WcBgjE^@}lJUkX9V{y*u@@!ZuY5H+jD3V75qqo3>kMy&%C0%Q2x- zqjb0+!}-KhopO_~-+!A*(B9trw;@21uO`j9$p>7={Av-N#A&bea+$RvG%P3zwR19z zfa7>g&bYGP$my~gURDV3y&%torxQm(&pTq{ra!dD8^WCLZ$jrodnQyuBOm55VuD+z zAZi2vu2qraBM`(ZbUuhcM7L^;tnpRE)3nZX zrrm_49!7E=lntf zT)zQTN$o%nSpwkyOXt!lC7KOM`G*BWZFefU6T_jh1&#GH#48f{@|UZmcsVdF=`WZ{ zbv27i+s=d_0IlHSyr_#Udc3DDj5~;AAXX*f#IxY{t86Yu1Qrw0Y(obW?N{ zekJkDg-Ef%>F~6Gd05$uOY8WtV8ql`<#^7Q5{gn0ocjkx4>)kCJ-U_AOy&Ca0siP# zxeS7N2Q?M^HwpyVCU16znW>d+%kH&D+1&N7NvS5+jN3_own15jChGX#BXEvoURNU| zf-CD1`A-_2n>|OOw>$pb=EP0#ZtyAS9_>s)EOQ6St`NT9)LkywlsCXmWlk=9B~pQv znTBx8pW9nd{jF5Q%@0#xEep)|#kHB`T)J9#{C-Z!4+Lr`=u+JNZ)7HUIouoWWE{iI zJ0RYE_xtm5IL9+}$l{SFt}`}a<%hHg7e-M@6i0z{e*TEQKPDULtZNt)s-oyEZn7-i z6c>gWjVZkj0ijhydZ~6%jCtArhE}Ov+e{|0_zy;$S5k=2KKX~CNrOwJAfri{StheA z+gZ^wiW9t|3el5xwwWbvsf-5N%hg-`4qVAZe;);y@n4<#dEr#5U-|q5Fp0JeDD&tr z4f>EsFdY3_Ri8arepe5B`u5)%l#?Q40Pxqf%nOQ$^uCs!UNcMbpvVGKsReGNcN4 z6Pj_th5Q5)_!lmYOE|9H>FG)lcr(?jba2B2Cd9FR`^Jr8795Dd$AIb&g})e6_oohm zsyyVr{bk_suC|fWj|qb3uD!EqURQudRCv_E_l8*3yI!xrpzA#xW)0#c^2gMmGcze~ zfBK_0lk5Zu(BG02et5HugBcoT@GoaTJ#-gF8h#m(bSosg; zj5p~%o+fWg{;7#yZ1J@si<5^aPVAi@5i@xkiBZ#oZub9(8)rBLYg_N6ed7 
zso^wA6Sg^ioC=!`J`<(@p2`nq=Oa0T%JyPo<5Ck;sWQtd_C z5lu@R(6T2(k#e7dNoI*ZWs8evQ!M3TJ`d=9^Fx_#E8(5bg(hSF?l0y$bYdT(wV zf9k?shchRI30Ml31T|e2H4%lswymiv$G^orqc}7N+Hq-6P_{*oskKd~jm_r~0Ryj| zX$9B)hQ4f8=}>$RcWn9mNjqL&{#LJ<)HoUFDFC+$Gfd{1_wivts9Ml4&H?*_KnCh? zg1pmqYhOg*2%QX-xWbD)cYZpV-HC&SDfeMJ9Jh#dNJYOiAfQwQ1M8~#!VMwoqX&2sR*ogQy9Xujv5vyF zd>aho`!Sk&o0^0?Sf?~M6Z=;EAz0(w)1jS!DS%c`UVnNzyXJ0W^Vb=-<}M=^3blO0 zO9I4A6>26|$;L4S6x?BZoNF%@Mttt{@gLsYKVF9STzIoW$gPR({`(rO<#7^kuIY8U z^mL)UEV=6Bz%FC9I|AZ-Ssc^^EIvusF!~Gxv`tF~wH3>f0uHafc*M%>tUA~E@Xq=s zTLCM-{?OP>b#9Eph+{eSy}_T1iQPP5Nvef2(5HJIy0k z86|ERM&j}-pf@7~Amzo!@$$XFln*$4xt}8=SG3@wF!{dt;Cz>CHC@#B7{9abN#Wug z>-ZT3lur;}u~XnLh0LL^unl<0-t%wV4gFWABeYxbZkRLVdI<-$$n4Yx?8oV(jXNvq zS*Z5cFfE4{D7K|axNDSSi)jUA$CPi$SR*U6vYaHdL<|W^3SB<7j(Nz83SHV$hS-_q zS31sbZr9-wluM=S0)O;v3R;|yQ?dtb54_9FUi6c0-5Sn!?RQuDldETim84@7`+}cBxfYA+W8XV5#MUTcL*5<(%-!kQ zQWiSw(NXEl)$M$LzptNckI`G|bGsrH`(ub6nk@YNn*5|22uBM7) z&ihhN5Q`am1$ZHSXKrQ&;G%97$tnx`I$q5)Rd9kpxE{K37c;K`(%HdY#g^=c^9J$! z_x#hY+kXmQu9P?1OkN2od&14`zv+Mu$J|_(9blasIj|D{(r=6_8krJ{Qx~JUaSM>6 zZlB|DUzZ8yV1Ck@;?abU-b%N8dKI>^7}pU2jukjK1uRBkg~5$Hw$yAWI89!i`!OUl z#-RZl43vg4RZ}HFA7MJtoap0TxKBKsB2Lv3EThaG29-@QGNn;Vl`-yE}60lzgJ4yQ@~;HyJy!@!L5f!=e0*;@;)QsDpT`< z8W`zJ*s9xXQEqD~=05tH8jp$ytOn<@DS+Dc!U~!L%Ul#soNE1>1KWnw!@yUXEeTKQ z7fU1yM4_`wsb3R_P0e4jph_R&9Jmg&m?sLh_GExb2`ro>m5+VdQ*cPZ{N(L$u#g|! 
z&MTRv)gKku1025h{|~+tzuWPKUWfSbkMYvjkc?;OzuUI$CY-Q$w?A|Bz(tgZ#AR7< zTU`O}%%}@%n8iIvx`}Zga`hea1Pc^O=oYxFq+od`D2fnS=oMOLg#an}1MVPi68Sx# z&l-w4y#<{yi?xtD>I|qeLQ$6aV)8N;{Zo)hvZ%b;hTY33ws&xg5X#b&Rm%CfQrH_X zzCL8>3(n;wpP1}VZ!>Vwh`_n~@g222HtS*IdWeiF$bGP{ika#?dZQPeQ4y$w@?AKAsbWTB4%HR*SwS2sAbsNYLEV zMX6xBRj6qzV1DI%qvd3G{nmxu8U35Q+gdhf&iC08|0rPGX_yo` z9vtxd#}6{}tcJ(qWA{ZC?IRFPHUi_1ihXq}tgD=;<0p!LeMM-V^_ipBa; zen)eWj)z=3I$Ig&h}Fdu0TPjwWgOPFU@G)JWr^Hbv(|~IgD&tOWO+?g$4~6yQJlPI{5Kb2N`1zqvRac z+6(cJ#c0gUH*_d&uG~UqF>_Nv+!~RsoajzWKoK!{W1?h7wq+bj#4p81Iho>;FD_ z5X@GwZaOi0br$|*vo{TFn2GlQJ!3|25*mdS0$cu%0T&EGS=F%{^?=-%Q~5VKiS>fB z^cA_`r4*I3(#Cetz9TqJl}LAhEUfDVwNH_BzZC|v^w)rFZyO`Om(~J>R6950S_trO zjE~U*((&~6Q~kSnQV9pdsaA->QUvx#*R>4ARqFM%lx*UO-h=cQecxI)WY`We%frlU zRT!j?t(N#IFbrINWRi(&$`TqTQ6EJA&Ci2oouma!MxTyUMuf%rfq@5U;t()u3k&x= zS}Kv8dNS0p#jHGS;`z~yppZ>o8B0Amo^o~Ocwh_q;j|WAu`63ZK#+rr0?VhZl&W2+ ziR8MF=IoNAX0xDaagpQ3fA!Sv9{BvBSkYDC_K#R`3j<+g-GHQx!?3ol@%+C5UX9X8 zD=yyB^=oqtTS+n}BHqxlF5ok6zmf@$@FTpjHs5;>8@=(WPZec23d2TuMglF(Gan zyitm>F^0z0JH$>`Tx}LT?ayp%zf-6F+dEN6k|B&d$E|5OC&xVlBB51=rRo|f^bx{Re621#HRU7R~+H>-et0AkhmD!cD=ckY49zlRi(&6yzm|Rk*xAE)3|iQN9bC z%a511GPl!=2)~(v6joRM(o$XVDclggol$}2mNYD<{NW- z0VdXK=a+O_jP)GEUv=s3e5{7b$B=}hcLny&LnAL=m7L%Gu-?Vl0w&uZZ@u8*6$`Sc z4f-QMZMu#!6|2@X(a&v+b|j~vZNRl|=+AA=JuNgw1 ze*MSA-TPOFi66;ccrm?#{*!oX9hbp`ClDDqq41HI*h{!hhL2r1F!YfaIV}l(QoxM~ zVV!^BF_J0o3$=4nvk#eN*ohOlKJ>-)&!21xZ}*L%m;XEcqfQ}r`hiF@P&oQul<(zy z4EZjj<(23lizIY_2JV-`mSs1z0a`(G@LExOVBtk8g6RjO4AfP-s&g26S1ToGu6LWJWb~UhIPlYUo^5w$XB4x zHG9t|F5TZ?uO&T4+!8?z_!Kn*8TNMRh2$O1%{=SyYJ>oGll1;#wO#=$3K#)g!>$W@ zW}jP78xzSrd`cd!zuG?u0XXgUObUAK>NYHwGPR*{c{bmDp4snCA)_uHq)U2Fj7xeh zQjBTvH7PuEFefT3pl0X$1Lp$&*y4~cq$}kyf3p+~X74O(uE%8eAlV7ssd-EMxS;75 z!=ii;MqDJ9_CIC%SA5CmxA>~N@yGn&7u!tqXUd-r{o1b7ye}qC@sMbG)~1bk|MBo% zXR(8p*F-p_(Y{O;fpz1AXYt#3uBUab--Bf&*z}UGIqKw}g5rHAR_+N+BM)zW3%&ec4quF~T!k)~aazdxXcljJDN=TVmAQ|x*8E_hiKi50#6!J76- z7xuYc*puf47!E&4Jb{)D9@=1d5&wGMzTIFmtYnhe1>G?d@VTge`PzXCdi<>+KA(S+ 
zp|1QZUSCQ#-z)pBK!t2tN{*j{@o#-yKsCBC@gmpycUrZBzJN1xSf^YgX6zo#8{dLH zN~)^eW2$%pPF?(i!wK>sw0*h>_TRDe&# z`gv*6H{X^VbNfXKmS-cHOE3WsS5TKeq^Ca5YH;T}Vk5GU^DXa7(%XAYNpzb!AuwmcLw@0G1>6CDU-=!Uo>fU#PPpN%t9x`v=Wm@=G5;UbkiLY%A4Y zmC49=^OnxMCl5T77Mk5^-jawI{vH=oQ!f0LvJJbJI~m2BMcP<3j9fI&vosvUR?BN6 zr#$620Z{Z_txb$ssi8dMdoG$6%MvXSQR`EKzru<2Z16 z;A`@YUykO z6UFvnUqd0aQIu>5i+c^8Fb^AZ2Ri3o>6>p#kJ)aKou6w*}3BaSl@ zu~Z*~anc0F>|6S7nTZHo=k*P>iEvCXnV0(v04X&X4zo(Hcap`XINAtm0~M^jw+?tL z8ze^8cC+f)H|cGQ_^NqnWIKR%AG1n}Drrz_UcTD3f4-hh5-Yh0?yF!tVd6Bp=g|uY zYQM&xM{V>Y58Ec0g^%%Ol+VW*-mmF>Ss-*wcD3JXHv3~x^_p&x z{GN;4TrgkAgM!kXPv5t}_81$Uv~OL^v}i(YoQs^PAmgO_-ik*iE@mFa}h) zFK{%_kdg_%A85J_%FE$#pA zq}4!ny=@H`=++aKbRx;*50dpx6@?%^Z_W>P;Sw;#q0*Uac^_5W2u7W|2-bRScxdmW z7+gir{t3``w9S1dF;JavHb_`g3+@w*7D^IC#} zv8|^qu5tH!$?hUtPcLo5Ed8B^jP9XZLuT%OPnX{>I6a^q!EIG z0l<{dq_O=s!?41x@V6{Nn||>Pt-$Vt-Mb;3K}CZc#QV@=*R6c&e*3MrwZbkr_rDUo zFx*AN3AoGhN^P@GE*&rTYgaG7z`mt}`HkPOg>Rfkf_lH}3V6YL)^C0;jY)Gt4=~Bk zC<4OK#-j?D{n~p+=q`_iXQkZx<9wDI`VDlOOD~O^ZkGME_&$QiagwHfr4(eo-$fXcqZ{zsbhVQ+7~B@ zj!d*5;BcUw1yF(EbZftmRBLcz_I#woF5*fEW=~(HYk}>s_#=jmZBoX!O$M|_rV1uG z#k_=>?1HWFi44*tU^4A1Bx^=vZ}K_%qYB=+>11Y&!d zXS4F`+m!>k^WNqk9&r8R^1H9NPP*p1@mUcxcVqWYFMKNT=yMfl1X)(^B7Fs=QK8)p z36HuE( z2QkW>Ky0D(sr>}i^M|1`rU|TU-BS>x-w?66Wb*mjVTh^4{lz}fvJiQ;NV0SRO~Or#7pv4QmUYKB*M&R_#^cl182G&3 zE$SQ?_8imrguAGh8s*%rbTi>w4Pe#5XC>T*iJEdCWj!XMFF?J9T7DJ3YS)5wzNsPk zRlJX-ADpd$Y1ivgV$^B(At>P3csAGHvQ;W3dAmEp_a%tl;SI0nBkwTuoy422C6~_G zxG+(k`|ZE`cW@NBIJ@awVK>c!l>nX`v~?zq4#d{>B_p!hasKh!b3Gx1P%Q1v<~>Sk z5l^qC)=5^2jWb(NSNK2|h+Q-z8irD>SJJ7#f$DGd&m=4VLIwg&uk_?!UAxUbi}db@ zR}iZ+ofEVUjnW=PP~Jt9uUlsj*-~nkuX7dv+X9cDPXVN2(JMQQxcj%Y@3+1$)jfNB z)rZz=H*d>p&QnjKUT4;=_Y4)ik8y6;y14wosWa7!Aj*b;tz-+J_YavZBovj=oC;9R zZ~kgys_9DU(SS0$#lr`#LV?byeronjHpS(WpX$4(`Kvj{%xrO&II-)(G2I^itt+S>{5kS80K@9>kOOP=VEfu{?*Zq*^zhltq>^mmw_$&GAb( zlH}k&;rlVyXQ(}~gIbV+*b#~AvlGfiT7fecAnp1(j0Rah)Z@Ht$6%)bEcb>QJw2_r zx8`=9|LStFMyNz~R=)T zxjnIY>Yo;?Uj~a7q?xV|KKSCIRZ=tl_@!WQ7KjFoY#n4;)S++Kjpp;t{=CmK?$uVk 
z4~o7QMbU+kG#$2XTFLk|w43iO&Sm*wlO^M3t`QiRV)B76K-G1!hxuUrGw3(tmwfe>AN?!l?{yTkF@18nP=|1tJ7q?hog!cV zQ4RZn06vWWyk`6uL=H;6cXpl`L3Yc+U+^bAp}12IJo*N;^u_FClDmy)zsW?O;KNG_ zR|qq$r@z@sNTu9ggR+SsE(J#K##$Mz^}d%F!OfTM>odwz$Uspd(w2-(Ic;FlSCa*Y zP0#WDA5bgQIF$#uHJK+1+7KXih-29&P=z1Nf;p=2##vtnZ&cm`LL&JlMT7j;Yl0w` zxmhs7P5BCR`nDdvoi*iUw4_(fMUSedJiflisEUyia8JL+6y(=T>2PT_`r1tEg~f(J z{=YU8OP4Iw_@FCibFPc~5n&7ZZ^GiT(nu`d)A*D+s@3mG1DZo;ejX4T(Z>X`i>zv~ z!CTf3+Kmh22ti}10V`L;)F8yG+5ZqmVWE|(=e#JX0->&G3FoWPHEYria0?>2>Rcpd z-oQRU%$sL*ySh%}gHkVb7(_lj%BL(D29u-B75o}JLtw(SnoTd(?-r|8O z?y*qU`I<3>>>vJk^UG^gF3e)#atPIYXei*#pl-ccTEptw@6SaodsAT;_g9LS~p`)6m0}vAmk%E;B9rzYn0@+2sq#uMCNR+vY8{JfR`Gl%FFNB>% zen>wOcF9G0^1;neM95%oT>Ad<(=yw^^@D}P+-L^@pYId<{eA07GUO6Y-CX4NH6dQo zPL5R|EP@oV3a9Ye%KKUK!R^tsDGv^JeVwmpBgcB3aw*web+3;QLFoqQl$v5Olne)h zaSAS)u>`Yz7>0*>=5o2z1i0zKF*)Min|P;t1L6(W0%n+PK3_xKBmhaV|Af%@dT)Du zkKyTiYz=XLJXtvIM1DSg9*U*>l@VL%cU&GR)hF?Uvu`j2( z47Kv*?kaayN*qva4hfJ1#**{a*DBm5Yq?yV`TMQY%kZFAYXdHM8DaGIpn4&eV~h2w zt#0D!DK}P~(-PUun0J#QiO>iZ*qeXaZHZP>YMSKMMCG9^uA!(KKI(QLC;+1VfegAL zm|$AIm|D8Wj;g1y;~kCdsQ9K^s#YXC| zmLdr994HO*?`-71#dmYr4ZaxByOP=3R}A<#`lD)N_Ko&y4Pn9K`&yF|lGdib>aRNA zgepyM<3=#Q1x4O}^sB;MMn%W&`?TJKfRV4CY9%P=zQ4n#3WO#yaarf5w?n0 zXTCune&jlJr-y*g5!WyUi-dLAS5XY)RSEKLVkb!!{<{!0HdqAFy>!Y-) zhK`kP#+})sJA&vpobdo4F_x&Zj?}OXVtNfkK=7xvgR9u=P^E%I>k#YU@O|KHs$noS zpeD$IyT?azOATq{O#!S;N`Ag{OH7P*;BJbxMRg4$mpwCB-zMsvFOCuGl}%z)O%-bd zbHwtxs0XVhz2UuBVZoYrmX>K_2*+Wg4|`~FB1c8&0!Q8}h0kX^w1z|d6{m8FKB ztRK0yRcehJ%eRYN-0OXHSK$bR*bDu`+?N1?%M|?4*aOPR!FNwAQebtTy@X=o=CT1c zFia@$1k>mP-STG48zrFq&3K38+||NTP11b)@Xyj{gDo}tJov$hvAJM0x`&B&2+xLY zR^FdF-(Vc71rzX&V@Mn!PM#h6snsK?a%)u-_*3OC|FdeptElwe^dK=Q*vumtiB}PV zB(N_o-MKYJKU=!K7kBy<|KQJtUNGic!Y`el?v-1n-_Q(stvbGkt)k#-#$6~50_T{ri3QQXE`#M&LN*6p(61c?&8 z>%>l zayq{kb!VMq2Vb06LmGzagpaPqlcmlstbQS+saE4Uux|xHKP@v=-0CYaj*@z1f*Sfq zbhn=OD{Y9It8P}$do~&zr(_~4Mj|;el#RIvw4)tE;UJjI@oHkjT6tJ7K+n3ZB5-sy9#YL4vYS5vB|V;v{G|H~Of>lq zihSf;n*0WMP%qc*x5-Xwt*bEZ&Cx<5zP!J_#m#M8IHhmddWV4Z9NtNJ(`>9neIAMQ 
zA0F^#8_tUVZTL6`Nh0!mKc-zPen>(F?&@2FWRk97fC|_~2fWuO4WpFHW?tk9t>rJs#}Tdx8g# zYm{Tp)5^*BDY!g6JhtDhA`M z^B#LNd-u;_eh64JYrrB@@s3XTLzU(VmjfFE$#0moZ<)L?UG=;p(;5wJCtXMuIV`3o_&@9?H!V=yS0Q318!frQeVTgU;c^-OErO-h^M6tUxp*b$q}7 zSDJr+Np05AduZh$(&ktWjF&Znpfyy%kh~T9U}UMir;s7HdVa4r4-4~jd_WV!7E^q( zK6(IpnM`~+Ky)S?M=?{?*k;FHV;k!|k|;o#lr=e5X6)33`zsCqZP&uOFRTcEO1T&b z>GJr+Js(e`aZSc4`eZ9Eeo8oHgmfYn7eVgmQxk3-Q#*eWc70#mry)O*^ES~U4>Gx2 zR@ho6ILub)3@>)6lX88j1?|(JRtO!#3uU4yJLcS)mdkP8HKv5(r0*0K1we3vafZ(a zFMcJ07YzBf@XG#wZFy&OMp$%Qux3{Tv|-w7ph-AjHF7+r;~yfZnR;UXx2K0}{*wwa z9@DRxj`?H8B=BUwt@pd3X&ElzK)J_ZwEI(L2lg}!s0o2_Sd*94ugTxg8_#bAp6yWO zg}6-Pv+J^v4x3LQtufrI@?t~gokretY+W`Bid|vqnItTaq{y{qKGr!tWQH`#D2mxMgEH=ao2m(Q=F1s%~ZZ~n? z{r7hD3|cT5+k1$$%1rqN4DkM2!hJl)TX1X^Yj1?9?cTGp%j_YFZ#S!Joa7WFG;-YK zI`kGtgt)O&<4Yh;#ez&y==_QwwDiaM_NbzIWEKG(GUtSfsiC{vyYbf)^hMohP}*UN zE;a{}Ny|k?&~{4#CZGicJm#pe`eduAXbk3okd(LM0xG`!$bU+h0GqF)YMG}@Nzs%6 zjf-wEi?3OoW#Ord7o!4c^f!T=V?>z9qUs{0BNmLF9=fh5)QagDsFv((Tq~KjIupz zwk1a`apdc3rSH2i-`YB7D+FFS7 zOq;+<63P8n{8{k>2xfH@2J4l6x{0goKRB77_*q5HTe*Z16_U_3dsz z=z7`i(gPPr#)j=0f%g<(%)rOl&s_cL!N}jCbn~%QXo-#Fhv=u^Wkyhb!Y^SPlv~DB z&uft0Gcssitihm*HR?Y5k5D2i67R-W!!Y$Wxm(-<&wxo3Dgu-t;B(2zyt9vqZt!D9 zT~&qSOB+d01qq7Gt}g2btlj%P=PjVp*U^y8m^EStiL4o$6L#=rwMf9Q9d*|5J_e^w z*nnePkvq>+;T1Uer5H+8kP2&Dc!hfY;Kzn{nsSRizTmnd)+9KT|JToVSQLARIzW!T6~v@`E2e< zl7`^5P-PA~Xn#h8R307n+T~$Vb5~%BEHlY+as7kecvvXuCVYPSgK2NVm(&~bw^qRs z2P~Im9lfz3h{WaRTvKZyM)85Ozh)TSGi6a@=NYxu09qPhvXiM%Ph`wXloK1MX}X5yj!x6 z5_TpL?UwY{X6-Ao>pKxxiRR(9WgyVgopU#P>x#6hA|Powp>k485pVy_6&-2`h&|=8n>julh8uejpPwFYb*m zgUk!Xr%>R4d%pTs1+vW&qbN>lsF+G{5?p^L{d)h1uTzVg-=R0%VGZ2zE7EX=$^XP; zSk%uMyct{nr2Xqrj%Nm89?hH*`Pj12N&qu=H0WSNEPfx28OEjPkGcG|C~BvA#C(2* zRWR^Twny@V$ib!J_Gr0MAGJSwMGR*xLGf(BqkP@$TwXUNcGhZrtH|`mTEL;k%>s~{ zRS17!-hw!KkoPVC_-9^#?ug_*|5w%}0298YlPpISk!=@$l#_{1_IaeDiHm7yP!tS5 zzsK+Se&6eaKQ;R-jdHa6v2`l~_x<1Xjk0kv)FOBn3t22gEVb9jz6DE-Khx1U;6R~s z>48P??P8}owfdC5t-Gv-;>*h}byRuSF}|jlkhaS`WW2(eiZ%3d)712emg_cRVedJa 
zp*}LBx6cj^9hB$``_r9(i$dDtOS?SgP8|ysu+mD{?$EQAYuHLyu5=#dBkdmB)shdD zjW!+M4c9-4ncQYN@A&`*7@v~i9>7#G#{UKh&cR>r!*pyVR@9OR*?-2PU4 z2xfViV^t2E@0yFl^s}w}?k{vA;Fwz8_4@Y-qVGsomTYjx?dQZqR(r_UGHoxX1waKv za?sOPsXHhOhot8y;c% z|J+2d$lM^_rMAJkPrea;HuY>UhN=Al_52c8Scn6dixtv4`7O*wy+*s^sWa@7M-q_D zZNcPR;gGd~|5Da)L5HrCuaAOdum=4~l(Zu>Ya_8XQv&r{EQbI}oY>u;jBbE?ehB0w zE?S|Qf0!Ffd90v0f4F4wsm64x(xY_b-~l-30b+Loy}c(<;2ulQcx-v*@~TGlLT}WI zYY^8TnlBML-YbXkjMpYF6>!@Z8%Pp59pII8c0h`=!=gARR-Y}Yz^#^04xdAJBw&8> zNONn4YW;^a<;``RoQH&pE=n{@PEYnq0#@!;{okNq@O~Raxb|zQi0clG(FDD7%5N_gBEr(?nw^ z!aHUa#NnEDMhW0vpiVdJWQdtGNW-Ps2*}3 zp@z1c&){NE1c)5m0a-bt{QCRPgTsGH$RVhB7_*ER3t>3Uq8$WRrkA2&0m;9OiQ@sg(w*F+`NRk&>w5UomGY ztFIt5eU3)+`715ca71UdS)bf)o_jbYmS=T;wxRyciJDZLxbP?r!|3ZLNJoHF!4}+q zg$GBRjStHQ}kZj-F_bld5&~1_9(0`kQ zWuiP}K0fZ&VZn8tVS4G8&_rM6cjU_##=`_JUm`!#;7c-d=cEb#=r<e+r-Hpq+12$+-5;Ki$cNTbQ1W*K2r?c;%^1C-EiInM7eQpty1E&xX@m zaU5g~FgwZ>fJNP`UkE^&`ad*_#JPzt%TD_dnUa+111(JPKIml&f@P*%(@x{ljz&YG zwU!7UP?sv&)#^ugcChfu1?!V7U6{VRGns#G zYzh|(K$=~|LID@Df(=RS45q8&VRvh!858$w$W@V_dW&H4@`L4!AS3L~;rX&Avz(3P zKr1&N@3fAi0-5%NVUTg%p9GDm0JE|`%;3d4={$TSL{W3odr1?P%&z@WBzKRwc#kc z@HXUF(`)p{IXsyT90CT2FM5iiDhn>?kciOZJHF)XT`RlRu}1Q5C68z}W*}N@l_ug% z(Z}U}lX!JnBR;3&19f-DZl~kBWFcoq%#mZuNKQiES6h&3a(boA`fVZ#Kzaj~y(>BE z`qC(m?(9AeIazb$VOtO#G+8)$HTN3{wxN)Y{;)OtDv+?h$wb%6)36pxJdst;TgsAA z|Ju#HVsh`w)!`>t*D#{^5+JlQ$wGsHoK0+>#MSeFGUTXoh8#@Y9_ZO#?YL9$rm?Hy zI};kNf*kd|;piL*P>^}CFj5s$%(Z86sd{*Zq~;G(wq|H`Ot*c&Q)*_Y-eYF2I_NB^ zslj@4uDS?&8WE0mGC4Y;FKDKk;iaa06NIaF>l}l`oK7hprRB8$LsbJbLQ_qfCo{%U?mcF&8TxBvE+4 zzBg`_>zt7JAIL#rtp&HZjv^g}M4ZkHgUbbHFA{#WzLk|}a%Ojj1BX~+Wj{=LPrJ*B zI(FU}`}+&cwXLg4K*Q?pz_p3}(p=&A<9_dw#W$NWh4^BUyY*F1cXj^GWB_>$V{wi+ zBbX43vgtkO9+^TDdUSIO8SawE_hOSWeNo|f~gRfyLhZvG)@E?}b#6Bg>>AtO4G{-w>`B#PIH zZT*u-GC;?<2@Qjl_}70-$s3XFex856Z|nZZ5B;kux7)CH={b76vgFIXJHki8bPM`` z0_Ats96GcEe%c1G@VrEUGR%AE@_gxJj%z~zp6UXmXYyBw^WhR;49F2<3|+W{zBt(O z#GsPIb@vlsdoU`n{Rx^~t7!Z*YadD00*()E_&#fPOw2rs5;VAFG))Fb>ewyE+C$e? 
zBG(GkpBSBu=cfFcT!Z-s+gHCmbUqeWpBbZSs5!h}a-gtPNL%n{BndSdR^W+=Up)Q6 zQDfNory;7(qFL3@^VI_Jcf8FWa^+rQb*l${J`DTx_ijG3-!bCA4Jt0GC(b?{*)~`N zB3s@2QVK1f7e5uwvTDF2t2bUXT-qJOhsDD2_3J7a;b}d)Y8cVN@6z}LD`e`ECc081rU8jpGB>BpDQjlMSlOeReE%GJrvtYX zW-qF_2U?^&^JRfCi7i^KHCI0v5I(O`;j%!we)p-DD_$)`d@>Lfmw?4ky1J{rjvwpx z*1|@;S53e_?Yoo5H6VAX9!*96dveEYe6&U>VE*QrmI<`^{>)${pyDJxQL~;+zIEJi zG>D%4YY!K;33yKCXbTH59jYl8ykXlU6cj)CTu-*Ty)<~~n9ZK{I5-~{wh8D9Y*xF& zr8wAgx(X$Ya@b`V318Qemgl$2zgPG-kf0O7 za#Z~A5ywGEH^DV(n(1NT+C0R4W1*K&U2DYROFa^SmBF8kE$La@Vjz5UM7s|aB$rJOQ!af=HUPrp@e!0X;jUG2it5a7n=kN;hDz2;IJ0Gv zTO9da%@!a9N7pM9r#U~fBkuM6?D*dIbra`Vfce-KX7hEu|5Ag6`RuniDn0@+o6au@k3vvk0`VRN>!@~yk7$BCf>;7PN{P(- z7;^*amY#rKS;3W39v2A02d|Fezg&>wrk#dAC15Xe$CRH6z$9LoY0o`6o@+?LCY2~7Yo*+}r>@H# zn^TrKB%*qW^^f2$eE*!zDLe9qW8^MZvF7iK`_7&fTV|fZ{Al*JMt4{w52)v{wMQ`C zUr0f#G5n_|v+3Z(o2zbY`0`V;f5V%!Mlt_T9b{wBF$I>lskQk~#C__7!=FG~O{AiN ztnGhQeq#_lmRmj)Y%Wr4xPMvSd9;*5Mkxv|;t-|4EcwF^cZ7`N$EqTQ1%UB%zwA+x_z=ufn4M+DcM8^%Vx@C|eMNHq zyt-WCw5+!}qu0R?OhQNRV1Yq5RPS*E>U*W_u=c;dEO|td$7ld=2{NK%o>#Jsq=ZU6 z|7^`%uYtdixsPsd969j};xec|wc1!_odK7FjgpD#0A*&;7DJZ1l|$(4X4 zy;(cO2eE3M#4CK=1$R9YEkP`(Hi&;6>)k7C2}e1lO?5yW*WY20mu9wEIMu->_(LA0 zg+}dE<;~4EW5GB=eaz5C;O~6yAXYsFSkEjQ2R7!yY1{6#$zZjhfCzoQ?X8%|QX8M00PTF;3NYl^?MBsxrP z!6G?xMN~cV(gO*(t!Gy;gKM?FHCp*>zHbFr6Bu#gW2vr9K_NoXewPmTP%g^6&lb5g zTL|0;<|y&FoL?VW{GDo}GDmwlRy~^8_E=Hd*Tm{pZQmre9MPJX)u-%a;@X-jSL#bG zO~$>%;qThzf*REONXoYBxmRu;3vHf07Cxken7;)8?iow@zIYRa9TPbmsj~FFlG}hQ zp}>;KyP8IQAGm%W+mJSryVkk(1MfDB@L}P$6J*NfJnL9PA3b&2{Y1H=iJ=Y^yu;C> z`va+GP^;KSed$2U_pBk{jR$?pn_P&SyXTJ&BIsp{1-wP%h}P=%KsKd6@0r$#oEvRv zAN)w$_V!e2J`L#co%#l@GuwxW zVSH7P~CrMrD!cnzb)=41IoBta9iE_U*hBUyhK-6r)U3s`rs>mIPlim=-TDs6p= z|AO`0ZYdk8_@RlWw3xnNg6=0kV;b+G^7bmw8<#|URmB$Z0vaBKclIdL5abeR~N4g*aV)X5CfW)Wctu6*Q7!!1k@P9j1z+Z3|xM?no$?ZkXQh53!P zv$Zs;)=2w6D-nU5;PyUTV>4zB7saR-q!sU?{`?G5=iq?|seP&B7kKHIEx3R_80vT@ zQ+C4(-@XiEM0dz)$jX$AA5$#-pYu~>~-v|WRnruD|>`xht+8C;$K{53^Inna!!%y6L zeDZ#X2Fs$XMYrxViG(mUR&S4aR*;fDj{gA|qXLtv09$WlU9N`dQ@NAkJ;*zZ9M 
zGr1EwKqp#@SW!az;xn%h4y85sHIPMlb~TQKcFFxFgwGJG_iaz|SymI8Q!_0zpn)Y=YC+@Z5c8K2q-DH7x|0H!c(@+E*_3dUQb>u-PBJL>36;S7A3xxi0}LlGRZeix{12m3ld=A)Cz2`7Mv37q-rQso&$ZHqdafW ziWcvmv{63;j0m7!x3sVyy&asQKRAb>hutND9-txMoD2A5T~IL#RJR#({s9M@+H+mP zc5ANSG4^qZqA@_)JJNsqy`tfk!!`lk0=62%u%+;t9OzlHL3$FCW%>q z%f-xIe*dm`6RGxQ!@>*FOkV#k(dCYdQJdnw!12~rDk`QP()2*=RYnQiMVgdnrEm}C zn*)q{*uKHHuTy@26QI(^@d(%KejjG}zQYVbjulHFGt|C?#!QiPpM6C6Ek0xo52>sl zpR4Yl?m}bkq1W8VQanoB_FiBCDA%{W`g6F-bP(`En+I@DT5^bQgj_r?_TgB=%MY{K zfViikw&xOqN%h^6=1;>mBqlBbPOfTMC79rcJ>ne!v_>82Cv+1bV--K2#MC^K1w!dC zEZdltf)1wMF9(}GknM0_#LlfahGkst&;>6IQ)4f`>aG0ItFRqE#Q;U>w+kqvUiub* zwGrnJciu`axK-!gwosCNAxpbJe(W<$FGM{l0HEB-i_OiFFMjkh%f;NjbeVOsObU*% z!MkS(3YffM8~y{sPy%EUXI~{bmWGK`ZB$7p)QZK)2+d zSUcR_r&^5u+?v`XqDo)1`|F(`-}+d}V!DajR(aENpdm|d$QuZO(AyS0b|_R|BIm!8 z*VS(*{V&!6m{sGuydT2zGl1O(hlwiyMpv-sd})2s@$O4W!N$`Mf%F8nc2>ugClTiEFsloj z-;l*m^>w=bubUt5fIl|Z>3R!TzyI;MhHU{JHT2d2Y}{qQH^fKQ;#t1}Te^X5=K$Cv z0tY8mV2`u(RN^d-7Z-qnCHK+ogWPDj7F8?qaal-HRSG$L2e0!7HyiLF<_i(~gEK(B zn$E)BehG4Kzi%J(=+3ILp#ol~X+1F=#GlkRt1B5GfcoI91QGo$+(|YPY_|5tiA}ls z%%tjGRy0sA*%PE8P;jMIn8ceSzD+^BuRn=sn15rlGcJ7KALdoj6a6?-jB1jN;8bWz zpw=-2IhJVWO1jK;`}nqdEQPG6e_ZOf@t)!<@?-Is0PIuI;aUDqCkgdK?M^Ry-{w+< zG-Pg&Buv5{Fa0XzBHcbbIFd+68jg!?#-(@Zzl2a0m7-&5|MrObG=p34QX{Rr!zuyR zNuI~e3ArD|e-3rMX=-}PC6z1R(I)fwLXktf&O6esYR(%&(X*6X53$X2p|0*k5gp-=D&P_D_Kea5es6c>o?vy1W3Fo|1Rp zs+osZu%2HB;2~Mx4PNa|g8`17kBMylf?@}F^6rIO*KIL3 zS+?GDqkv#-MlR=0OL*h-_^?Gr1ehO4YYg1$K^K>~yAjwhJ;HUz_JV!+m{{ z%qK3g)Y3EddVJS}peynO99~4Va!v~Tjv3#)PGHV+Xj+IqBWIQu*Lj$RqNCs$8NeBD z()n5aeF121ZV%jZ#{|zv$32oxO?Z9#Dl%6C#T>Wx=u&H z`m?#Sv#24Kc$0bFQ^ZTU^kz0~6dgW5gW>s#T$6P&S?P3po_n32aenMz1|fKj^~mxGX=?jWsDTn5dfB|426KM)Oa+-- zd^H1+3%V6z?T(oe{InidpU!xCIM56lo*1jDr5=zShpx&1@hY&WyJ&f9uq*T`z5Dyx zEEq^V=Bfg>nP2rg#~9<++ZI2=vUNK)O;8LYD{A?6reSrp5HVWEv8Ng0ZVfd$OdSi4 zq7q4Oy1$zuCr5wRVS_;Y)OR&z5ct+}rgXNc#R#s}u_Tm*U&iZBchN0}L5AP&R}#$r 
zb$Db0G?i`Mpof)a<~Y>u)kza>R*fT`Ka*Lv-nexGHqi|_X~MqBMNjzRQXArtK;o@H(7O(9JKQMm$OPpf*yk^E1%fjZ|)c7x1JGnW?~+U3ZU9H50Lq9x$GFcaPi1_8GG;Q{+3jZ@-Ur&s!-Pq!)T9W|W2@_;zn2 z5nH1QbM}KieLg#m?3ze5r!oitxHPh=uCtsqjLH?^RNf&qiatjA8$Y4I#IHubN96lV zQq_6HNvlx$3r&72%+@%qV&YAHx|Un-FAt@{9|>N)oyGu6l5 z?G+faw!ErJ#r)Amg9VNAlZ?CO+eW_(R+Mjww0klq8low=+9J~<46o9#m=6cG3$E^M zdu5d`!0mR&JVnMH5Ghs}Vh_fwnNet5UtlragOGk@V@oQyZ*0#dtwW*aS(;A7EJ)l}-r%aQ;Xf5Vi3;&lOYj8Wqr^ zO&QLV4|5tDE=qt+`2$MHb$dPcTUi?QZGBZG2O|gpmb1D_}!G3UNf_?JSxuf%k@6Cp-#Uq6dc-2tDoWHJJS zIBYmxg6Ulp+D_g}{w})Xd-?Wr3BUL-Po;SVNmigs*#(>T(Wg6ETx^C3eJg=)JKW+V zLo)jspd4#Az#8GXgB)us+9y|B!OaZOWnkF2(HkL;LwM>}L!g_JJ5yC7bxN8uNv-ucQj_)JGNx7h#>DZ(b2vnN`z4T6aym8kiz^`k&4^P znAree=yrx@GfqD1kKYd-W>VRNZxMoSw>ypGFCswP9@nc}=9!M6Pb-1W;Y9Wk?Z#n?CPa|jt8f-F^HhsX0E)udO% z#aD@Cyd_lczC#MHGb^bUV}lT7c6Lv&1B?E|9Oy)}9>Lo)^q#^I18~eW+OaLDqSi&K z^5{G+%pxq&^HIrum?kP&fS8H<^=b;(z`m#cqJQxwemKc5`_C`#okw6gP}RhOAqnCM zJoZCuk*R{a^|cZ75IqQoZx|#BvH^zRnhz-M;@_Vlj z!XY3P@NNzgi$BC{M5sG#nTBh-a(mwWll1X%`7N^;JKU|{dWe|Q#>mI@{5_bVX22Qp zP|@vL&-E`7?>W=Yd$}=kpM!>5YE|;gq;}+rzrY`g8Ql3ZXg~+P7108DoC|$2Tvr29 z`M!4QAEb1`Cpk9+;m;#uFe->3MhPSL#}Ccc@zB2>(MwCp#N$7A&m zW{Oet-sBCOdwB4R`d`IY_{$m!Vv5(*V4~nK*Fv2q1-ph^%vg(98<(rR!`%MW6V66s zIT*wuy(9D_#Ez@6Tb+PW;~g&6^92{$MZC)h&BPCfB!JOb?)3BG{fDv~eW$#G2*6Qb z_+VuPhB;yeNKK>`KOH%ojtNsx-jp+p+9;0eY_VBLsgDyNjn!~eYb>(jAF_hS>T?7v z;D*i)+@n)Cbk7r8Re~%^AM1S`0@H0|J6#IjmyoN`taZ2rp1}CPC@cDHJPFTF*6#!f z9WCGXU%{t{vpbE!dk^+^DOW+oV89M(Snbi9IJzV>_QkIdx3Rn)?y5u@R`etr#Mjby zp`|#u?oYU}^Q6#v6G5At4Tv8v2gP~DqCs z--l1F>GHqMZzCP>rozqnW5F=?TP9#A_vKtN@@392pRKHFx7a@`!G`HdAGMw8uC zHDT~qWdMZ8WsNRxM-Xv%JDNC<~N!l)$aOgh9Ye zSSG;egVV&lIm`spd5&eGT0f2d?>l5zWD(dM0_PvHqI&;3!AN0Mi5hcr#pF18>mx^l zPz{MZ?QKhGj_V+cwlw7_xn8+Ln_BdpcAM?>3JG|)=VmE# zBos-zn!yxJH{aemCm>#}JBrIbtq7tp>6@_qkE{H%*Aolte-wnGb1PPInK)w**m$SU zadz)-(xnays0SVgJITU7$Ftl;JB|3CTr4TPv<59uL`=eSq)fu{UT4qyGHuL<=!_9( zdxwg?*AFz+x)y(n_e%h-KiM3aoGXiXv4Jc{cycoqJClLvQ+Xq%W!&9rn9pl z`|r;5q=NQ>S#%E6XIccPseXs={(|un-90A2R&9d=Oq**lmPyTm68U<{B}$9m@4x(5 
zOXN$8Hrj6U&{YE@vh`k%y-fwvFId7r2`FZsu4dF*ES49mM<1X%fsU>!CtrNgiPBuT z?z@K`Q5R0)^@G(R%iJY&lj}7l#Fl3b(=Z=vRj5Bf=p{I9jWRCJ6nu`7Q!rf7zg5Xw zhj$|Gqrd-+ooBY9O({8H%f|02$M|1ip~l$S%%ZQS8Q3Yoc`voX4g>*N&k{(4fqgOK zGC2fq1A=VR(~%7ZNU+&^c#Inef&cD4od28{hIv8=6p_REw^yHh9m`22Y$2MDvktp< z)C~Z}L54MF^ur!>p48yf9_%crjZJQpj1Qh-%*9KS2 zp0;J0yk;eT^U@rN5fmB2HKPeHh3>GW9biMBOScc^{2`6r{%}*rx<}9AK2-}zl4FOm zv2E#?#dLoI*b#6tesOE}c6jwPBHNF>+YigOxzu0|bff5!bBndEKqqdS>0fWLhx^5t zvJulknBwkFMX&dwK^?aD1oNwat!uXE&tuPS>KXk=HtKH4CcgMNJ~SSvdcy2Mrq+eZ zg+^XO1av0dUX6O}fm9CiqA;EXq;PA?97nxRuDit@$Aqqd;ha>`R*8I5HbqeN04O+3 z8zAXETkjtbWq~8WT{$9ifFHwAaR>(mPXNo&w{8)HKhx30D}zD({lNfekzm{gTa}WZ zKDz?SNV9n01N?rht~v)3Fhl%J<0CL0^17pg#;`ltj2tO7>8}R1Xna1xddx@aXr3q* zf^ojmP6xKu$Z8_vPwQ_eWEsj|^%>1BEb9fUCqu+{%yy1Ee%^u(Y z&dc2(chl_RVskGTex`&OO7}_V03N&v^dp&&4DhtK=^R||?Xm(bUIIQq$_xVf3F7(K zy{mc2Je}onAig+fhD#kL01)f^1bI zKja-VWt=eOQVVOJ-CIKh#CaR{1u&bhY=3L)HzZ!`Q?uXA z%Jg$GpCL1=z#13yZK#7QeT@QeeJ=83h?cSD!%g+itmi-1ii~w^tu;tMJNCyiMI3

B4XcsA3-N(tahbs2#NZOq3-DKR8MH52Ld_;)}Z_7yzw-2rFx0^~U-#%OdiNOw`w1wY6TC~7no`)2mMoLX)z3E3OS^&pCaRdpfY$?_#kqF(i742s zP=Z`mrcY_Yfe8D_Bg$lG9XVr+tQtF{#* zw*9_y!beISuT&Q`68cjW@$DCrgG|a{zpiX8AbTOR?pU_+Ns!O|-lsno1Zq6%9B;8C zGRk&Mk!Xm3 z-mrP+pb{TcbuIt{YzdBvoB}oh{$Z=}uWVVqd_BPxwhs|}&SYCE=fLz+M6-4tu9YPH z%Ts0WQY30x5kS$wU*MReiySRHcIfL3_Qnsu0otQ8t?7L~zx+0JtdVmkkx}d?pJCbV zAE%tI!nNNWiM*}sJEjwY*%A68cXIRv5-(bQX&ztQ@ng7L>i#2UrLKUGI@%ljlU}P- zJPkuZ{i6`d=T%`RT)P&Us3D6#O<6w2-9l}}igM0j6P#XGQ})ld&8ad;OELGBSDn87 znzpyt@IvX&a_LeaNNmT6{3iyYl_ruClRLFoyR`S?(6FbItA(v*c(Gt7kYE%(QCL`g z_(QxK(zWe(i-Mv)?P6LX6ukLs(K&1Y7uYyTZ|5Z;4r;I-g*ZSi3fozPT`KudC8X0I zg^G!7!ZQ3X1IVQm7yq6z6_PI&1!O#b41N_|XprM8`*I&8i+3fYGaUo~;BjYie?Y#e z-M{`V_=)UZ_`-~Wn^MwMeV#k+66wK8*mdcnp4=42jkHOZ4@c)PflD^z+#EwjST{)ntWzr-T%O~x%+-JFql<0oDCpg?S8EeQ= zIE0IoYJByUFr{uPY@@06eYqpF4QEdOQsT$;AKxQ(lO31=vhFb&7ky=(kdxu1SvZ$m z=w(I|zz9uFEvZ4ZnPQC~azg8`3a@G$y+mHkW5Kfj1&YKxt2{_%`dIT9AoxZLRYoV% zrxu9;rNoLy{He(r-?qpCvya8PWwuw1(C1fjF9w$cLNqTG^K>L{(QP6g&mgjO#K?SC zyFp2qRgO_I*w23pfy|+1EQUx6G}lAwS&Bm;*?< z)`gkiLK^Shvv_O0!~EfX@QnbPXIPwyslWBf?KgJ-W?OdsZ-v1eX(cA=fKBRJ1fY&~ zOCPCb>A&dDPVqx5AU|j*pKGWt7HI_4kN3W2n~I3hiGXnxUeAF*DH}_qiQZ*Q+(D?g zDUx8RPnqP}!zDE6HV%zxexs@$Yy=5CUx&h6`ZUD%im<7R;YQS4=?XXVX z-f~VT>|~HMfjexGBKESNMne+&-q$C87Lq-ArK1Q;efb3L5`hzfilZCcN`R=%8>gwy z;Y|X&mFxH`kvTQ0ddW}`U2|$mecQGEA*&u8%e*x^Oh12~hU-~wPSuBqsGhMv228?$ z^zHV}g0Z5Zd8095$s-z2hof?m)#!L2SMS5%IivXI^xj$-5Gws-a6m+Q=s7jpf1UNd zcFiBT^4xaIrrhchHe!bg`UAcP=O~Zg;38*Iwkgz>wh7PNKnN801 zd#QQ24)p5IDV7*weJ+JIv3_KKEg27Mm-3Fma;_9{bC#N+<6VKf?Q$g8sr8ptv_-tp4Zg>B=!WkMFA2hOkjJTx)O#i zeKe9QG5`_30-cTjt?0Rq?|GZc6B3Rx`{r7Ztl@1XpuT9B4Va5$R!r#du(K-)OQLo% z4IYiM*1A7&@x%Fe7)NQtevh%A3 z#RGD6rYDACoOLAIUI7j|r;0kumu0K5PYesg5Bc%0MhHYi@W2L?EaASwvu_@`s6a)+ zG>l5oQpq_oHoXwj>!de>aOsBy>7K>D*lg!oQb)g4s6;+0ya=sqXGHXZsRqklI{ zT~07r=T6sMG@-hAzg_%H?^yioQ@TBR;dg`Xj@upoOrmkR2uoyyPvoYOz`( zvx}?*!!cGsK?L=w-1)KIdN52Lw+j3S`0=)O! 
zNB@u>!eOqSd0!nz3+nRB zD0#sMm6(8dFu?g_9_(AtKF)=kI%)TP>CVtwcPiKKvfNNAd}yvEW(3~#qsiY~0_R+u z6n`9=OC$A@#$r2V6xTbKq~zC0;0&n}X_%FAtoa&U!9nMOq{aQpPtRPxc`Y_2{6boS zWQbxR-f0$1FfNcYm=+@$39de&G+(NWd`2|4h8KBq9&O;-uPE2e%GGz}0Z>x8urchX zG{M!?W%GB!`J=0(6?^18j<__?_S`EH(O8F5p(9v105;`_y*)+C4q~%1i9sPSIH&$7 zN?qkWV}1C~j;M|1{w_Lo*dGseekS*Re{$KJD}B2fcUMDpTfb`q;uq=Xx{pvj1=@S@ zhkQW}lyCulIphPHc#}Ob;349I*6@$ZXs6M!_X@Ift`NAiK7jl&rQlzQ3-uCP7UH@? zVp(cTzb)UT5LQ?;23$S#aHYHhbWN?uPCIS2RZAB=)rY3vdVZXdeK!ZXH}ZMHRIQ_y zou0L<5Ea3|ReSy>B-A2*GTw}gWvmw)DQ~xK)5S$sm9Z7`6CzL%gl6Ws?zgH zjMfQt^LY|zIu>}dTIitO*2~D(zPip_rBteFoyin+A}OyXEL?f{kI@KpI_NwiQDfiI^7H3Lt5iZIJ}uPK`up*uE?!zVw`2%?-~y(^AWq1IvZ zG?g82BY{sPM-)~OZ1<+xWe;qKK;DSwA3|3Vz^NYV#Dz~IF6?G!aOpvjL-mWF#3=yC zL|q7(eZ=*bW#`HZJjrWecbdh?<|WP&c|;s_h7tgvbz|2M-Mn1fBJ%P3%U|X9Z`}Ej zPdJ3d1AA2oi)BEcr?iJI`3GrmMlH}E2MB@fi%L)36Z^S2@;&zJT5b1KMH&O|IJ+vF zi#IeYVj(rNz!5_)53kM2QpSAI_~67#d@9GUOguc6)Q0EsizVy6yU;Xtm`LrM;_)2> zc^0Mrlpo{CD1Dunxb_4(k`woPRBvLxkEG-qVlGdWw-^;JQp!U@c2MWwLARKmgM@of zXBC~?y=y4=yDJ(tdA-$$X~bWeB2*^ZC)YI(5r)8h5@>4U%0xysidIVl`X(uLV|pcl-0rFrOh`1&Z+*0%rc^KoS>7~z1SCrPr}^c_t8blXh=fr@t+@SBexW_AGPkVf$sQ! zS^m)^LW2%#zGH$g0zeDzkxs*NW6#GrdBqu_H>e|#QPGw8eGiuGnEhX?5ws(DI-oh^ zwb)5t{FMD~3pwaCwKXqPDZ7x3x=0_F1YbTm&fEmKh4H_^8raV|zK)|~kbV4zLNB*M zeWPZs#IP<+J^&4P#k3G#!wpNf&USzE%;Nd!ca03fhlq8DbEO4IpRm+tw&ipo_ZS*# zU(*_j{d9DLiQ?+>l=dziuo;9-5}77-xD&{6lS`Mlovpzux;*;6JEoaeNzs$&LrZJT%2dy&S^&ILA&MkB8RQIl_Ssjz# z_y<*CS|(Sy?t;V%UQ+$a8sQVcl((qXLHt)K7#jlp5Bu%k?i!fDNeKQ@nhde8&fIC1opG+5;4>+tkyOP7Y5zf3R=vc3Rc5 z1;ZWQ+!6&?UhNMh+=I;i#f)-jU)If3JD+#GKlZJjR-CvJgMn2+Zn8C#Mc>q#($5V? z-}tucrf>;?(%`N8kCWuzM|4*cmuGHEPQN7=zh}Z#A-^)CGgp9UmSP|7YNnb46Hr#- zPe!|)KdWVlLb9Sg3t)`u@J?sQ5Js8#T;aaW?UACz! 
zMLN5Zx9prh(C?>pt90)Hz29V(@(RKEpP;PPI5v3a$+e@`nIe}lH-QBIUm(po zZQweO4?9_fC>T+&e-sY^mbWv|b)=V5PvE?`DI84oQVLE7lgyQnaU}`c(XkJ648Bw(d2t_fTn~t}5~}0jWC^dg*G3x3ew$uH3~fUn zKAMULRVC++xOPjs$42?&^pc!zcnjQ~f5|@4z@G0RL&Cz@Prq=J)<9w8>r9ugkKZK7p#H zj7-dhki;>dn)wN10;=|Oxz_`@&<8+$;rVY2P&zssF-l-o{_JK(sZ@7l|97b-^o#<1 zjVv&u=H;~^HxhiffW(s{IGu^a^gQ+YUTl`hz>91Ilq8^jS@h}R9OCK3&6<~PO zmywD~h3X6LurNFAj4f88$Z49;Skn!E?k~ft#^*4Wimwq83C23^YVo(MB0GPNjSI`R zB*b~s?MZ&sEx77Ye#_Q5FI5*VSB@j)Sj_bV4V4!Z=;y$8=w3i5#r~d+fBl}w0kb=4 z{(<|IEQfdVnlgRs`RN`bV7sL}(otiG{jVnUcKUHaG!{}ldQAsUbbC0NcN0lJ|3wnP zj>O*|Iq&&FyUs%|bKTJ3_&Q$*XiOyvsMt@c0jEHids7sHfvR2b8S{3QlvE)Z{uTsp zEP0kp4+!AdC+af82FWxK)@OXKsqhtIvrEDq$DRzn&xSnrtu=3*y;S{;SDG|RFdi7Y z&7gF_#*kFM)YW z`bW$xUUc795ZPr<{A;!#x(ha2vg@%?91s|>yTTk^S@H#AwfHXeKF|&gKUP(Y!mfJ# zZ1u3J{VNdtj8)(UVd5s3;mzl4qqIxa;LRDbBbKx}I-k?Ca@dvzdD`BN#eG`}_%F`K zV|9_Ds}WDHekzd=xVdoNq54ieu23+hKo~@%>WJA>Td)ZrPVwkE36}n{yn!Efpcv+< zpEriqqXiY>5(2(V$@Pc9ymCDm-7*ma)IL`fN4UZ+x}Ofr$BYS|Bu7~ANh_V=rmL?+ zf4H<4h5v9Piz6#e$S3A%BGkZ3NF1F!z<;g9)yyE}%2sd{h2jC5&o3x8#i#G(w6Gjh zlR8b$bem25LJBYrhf0Mozv`m=9awYavPujePjBM=E(=O682@)KRCLNoGD_yfw|Rl{ zojh7Rxr0Br;(xqQG&KbDC5qKEu*|anApRZyVNc;5a5|gM8oHC$i2ZJ^jP|_%F}d^2 ziK!?rbBFIvzEuMZaG|WwlT^?jdRBve$AgRvW>MKIt-t~@t$#uB;TtZ1oScPC3^KO| zQ_cXCHUZu)NkTGLW9B)>4w>-nfwi;5y17}F z>^uPIr4exnJ;<1dV?)YLyE}BHOVbMvvvqwlkd9Q|2YUOEk^fl>kRo4(vM?b1WDq8q zM7@c_v|*~hQp%x3%a4KzYWgS;^5Jb&Z)vbusnU$wUJ@DEv_;6)4{#7!x5NArRFL6^ zZ&E8`mhrCqsVN44`r*I<^ZeFn>hy@ z1B}i?7mI{jn*Tc+(SQ9)q9+yn37k>l+&p`0@UlP+0BRyL{NbC#pPPxszqYJS;t@`3 z1Fyhw*G^uHWG*ZQ z=|m}Z+?dq%U`u}l^g5=bjk0G{mUE(MB_na^>Gl~VjFtn$M#0oPkl|5ls|X)22$(0K z-J@$)v@1XQCF#?=Io6q#TyYR6wm|jKHT3O3+wz9S!&^uy=TMSLRHkJ=G0`c)`E#TM z)`evwJtc!e{95RuxnWGQ>X`59U*Z|RF;=nO-;rqX8hrsc&oA*jxQ7THvI8ud?{j~h z2G@{`ax&!~13JH0{*|;q&tDX_e=Bp;_P@CoYn|M0-pB~4DgY(U+CP%hQ`T3Wuh50pfqn^G ze&r97=2}P!uO++xd~S;=hW+zMPR=%znzF8`MbvfabyF;9!wfKPMp&yKKj;MKLc!(w z+fpLZ2$v>1hV4M$x#QAhbHgMiTm|9bCVd9hE{dDl6`>)3(>c|4v6b+{KYMYmR2mEz 
z2=pybF%Srz%)1YvJRE!o{}hTS)HhCm;~fqtO3Ose>ltEQ_-w)JRPF%Cw0}CaX%lrC zU1u&Y>Ef1xzbqE`-fq`KNFFn@xZU0sDo26Q5*n)r&l17En22M2iyLpmmTg3=lJKXnEAtQc zTFk9#(9-J(T6&*@?ZxM)MywVZbU{udo?cyz?1VnSdd*T?KMSM%SJ(d;OrOE!x<+RN zjX{kl(c+@G;w2=Nkdj6DRagQzDz1Dm8KluinW)VU!s)-2%p_P+B*y|^*xo`42s?R8 z_(^nC&^a}d{~=lcuTe%PJB^(l_@V|4)TZhQM9dc|0cRHzb_o@176+_3hr2EsKHE?< zB(szRGm-9t0~QZ>Mr1`KMny)4Z)Zs;8()ha9CoQ)DwZ~28@R-U?mrx6rQ4<7k|k%Ss_y=JrORg}MbS|=Mn8J@7C zcz2#hVdCwdspj`-=XvkF%fhMiPTyayRtLxr?SRRZ`zC?(5R?F6)592#c6&Y7@FH3T zwvmM^3L<$o!sIQE@KO-Oqy-(**JzS7en@l2b+%zwlVJE}m@EisqIGzbT}3w2 zH}wj-DwJ^x--T8JGSHjY$3t8rt_xdw$qwwwFGp%iFS?=>a9dY*xgm#_9%u z9EQJX@}Fj1Xzy$3+a6^+1#E)9L{f6(0Pr{IK0FD&F5I8Vx^!~%$uNrwp4BP=_&1pX zybmo*ia%Grn8~?u?#lZY;lM>)vtVZB8I#nzyv?Y&65MSrH0$o0k{IQazC@pt10l-7 zYg1%6b+ajWAF2_JZ(OVWyOW77lRxfAFRzkJjll8=Fk0yNKZu1=?4daoD3dFZ?P2B|VHl#@LA=by z&4-zv@Z>`G((bQA=hSem z9A_&M1@6!2k5;&)BL;VqB=w=0EJjENNV%i!&tq(!Pq)UK`{lR zw64S&G;sfl`8>8ABMT}bs2+mL4%zP@3Xt{jyR{uIl&XShNWYut8Q_wP|5dc8Y=wK< zOZ6Q_CP5!=R<4ICi!2>^+lfs$kv+{uRgd)()+>l%Z{og+#Y>#`t6Q$9+F}0$A|U+qXZpolxNJ6>%?DD6bp(+`2$Xoehew zp3|_L;tLIioj{LXO$61ixc{8Ne5iDj0-k{bq2KiTkF3#CrFkDx5T_;bv=_IJ3>#5- z!n>b%fh;MVh2MQas0924nv1!6=yY(V<`?yGb_*}}0XPJjDfF`r-cd^~ zig1tCrzVlZ-^|V_J@A}X90)T%;otlu@hxFgq!KokrI!G{&k|!FDzsF*nqVC6Zr8hU zQ!7#@^|4!%t0!^RG#7&l3c@-U5kI;Vn|EwBuqo8S##kE6m}bX6CYMd3!3!+qjsF1n zKaklIJ%Q`4?VCbK`M`)6+V>zbzU1zT`P-D$(iV!l;6U?jIFfR`RX`zd;6C9r4SR$) z9s3ik=ms8b&ER{4Ty`2v*TB6?8GLIKx#Ot>On&H^epO8j?GU)B5ulpDPH^I#D{5fP z{6`s;%?V~o5o%@?!Uc_Fkp}B1?bEqOBlg}k*&QJRYgmJ5hu8HSm^apJ@O-Jz-U1I8FMsRh`s5?B z=okQyP#Q@3AScCm!;J=$r0=npyEwne2yD*UR`~#aF24{j9L@@jx$jr&Fk!(Aano=V z_~Niq=|crbgFbV!pXWTCcYr8oMZB3}{>9^xRzaZk5-%9mVPl76@8Is;|CU9ZB;r_W zTW)dBJ*tGiffn^@{8@lBz{d1DL}oa9^ddll>C7#|S zLLWWnwbi5`&*plO7u50W@8O)!5k-&O@D2AGL)k#|N4tn8;Q93#cpefZ%eUV^wew8A zx}_cSlEzwC;M1kcgOLs|aaB(Ncff3y$X2C>8=gKIcLg=2bI?00>}7s79EFn2wwP$L z1d7EYaHJzDe0@;iRDkUf+1v@qGXjFMnWui&ch#bkvXStBWcG8ZOCmqyJk z8b+T|G()`88%c_9oCvdZQg8#q&L=&0(OTdEX~HBy+q|X6k;;Q**&Wzz@WpN{CvNPc 
zVXu`&GmE&}hh=+M)W~!vjPEjL8dmd&ZS{P_|HVx+`s5#8A`XUsaSO+V@&g$-Ct9S~ z#0qKfI2c=wT0r%;U@`haKn#qNceR7!%S4)Y@dq6KlTm##-s!NA(){&KULaXk1^6t< zFT9Y-wI@C}_%^gJtX>Uw6>rDsza0WPUtf2NnI+$&Xrdk{k3o#e$6nY zRK&ri!Iu&$VW4z+Mv5mKq79jb6)0mO>c9Ha3EU3HQ$~&1RAM8jB4EKR$vNPhG|00U zUH!=aJl5VE>O7+eZ)KGK9s&{t%{zJQr;qw}r;c|en!HTB81#1#Cg{HN8if9X8|giU{wBW( ztFY#4M~c2QvAJQc%<(h_E{jXBy%{+(TEU5tp-hCAwfciaY9Oy-?3V7w4tS`9f&)m< zK8f%QD!hHyuEMtqR*M9&?{GHp3tqV4aEdQI)L?#rBj15P{50Q&mKP8G(z#U9`-A8- z7|y~*I7lbtySgjP*A$BR)V+;*~h4SdshtR~?qeuzUV%FzFo% z_6M8#Jzzg)XvJONX(;;+Kc`ct zqqxue{k)&~dR`Cfm0<%0?{F9Kx8<;a58@SnwFm3xlnm9$hWf>=T8vjC(n4Xa9!A8>BG`_ zn+CAOMh{X<iMbL@8-uvADIXO0}X--j&Q8wwLt$p+AW&H=p(_}Q0ltL8yA`YG{E9foN$ zUE*%L#A#_io`cbki`v)!%Dyy#EeaI*+&~|1(wYECp%?ADoa_7NzNRKttKf%ix0)N> zG5*gIGS4y={)-_GQ3CYs8=dtkpgeIPC);ZMw(wh4t#pBR@of1PTCNb2B? z5osBv=yOa1wU;}@0y0V{mP*F|MQ=5!V@K{gF-&Bw9N$Hlm{VCw+m`$OVBmHn5h+bq zwA)D>-70lleQPP91i4j5X^L2G@o6$%jc&FHJ{=9~wK<{QmmN1cFFh=Fo{6IcvBoU# z5~Ao8>q%_k;!>;euk7IG@bJst%g;}L8z*a(x1JD2Y1w9W*Dh#TE%t zY*9RH4E;&MPH}kdhQF-26a@&-qbs0KX)iYJD{FReSc4{)_%h&~${LV=t*qJO7LpuM zyq1zB6`qzNsV}(K8B{&Ig8kkQPWi38+<01?_eEl8^)^58;%hsRas5Gq$pH|hKVuxv{fKk4S-Ms(9& zFRFhm|L`UGtJy9#xn$ijP{i#^`Y6#MNyawwgVfbAY3IFxqAxq0W%|n6HS&9f@WW?2 zAWZRk?p8GZ6g%7Ct|=n@#w2b+v<%(a?xHCF_y{-1;vc>E;*!dVH{J278|F4>Z+C@v z*?dIQGAkuDA#KYq9Fp}oZZJ%bS6;~j`y^|IRewGk?3Al5;cM*mD zg%SdFe6#-*YNiKBU7%`uKezU_m}I4jW+bBMLWpLdZ}i0-H{il_eHvXPGtAg`f1+TowwnyE>!*eDgGTH5B1~}`x#qw^{t6aB*RpP+vI~-xq7X0)+hMFBRCVp zq*nzE9?|f@KN&?2Q?h1GZscIt=NQALP@nuu9^agK9PLDpTe)4KG-=lRK&4Pq`AeyW zMl236-5l&Rn&gJ8Awv(_YmOYH{)>4@WpVmxTqlQUG{x`tiWcdv$RR%9t=S^n^9QHN z=(ix^e)&G(yo{pBG6R?7u~t@Gqn)mVpu6(E2p{pZ{-C-oJx=DJljm&UFmpC<(m zA^aA+nmRuAFsp?&z|;k+DsXXYXFrPZNu?NC-IOR(zzQf`sx#wcyN#1H3(0CfeYV-H z@b(aCy66udijU5*-`O4$8*&-?(?lP%MJK!NSVGXOvmIAf4WilE=H%IyAnySh%;m|=Oyh743k^BT~1)(>9b z!jma*Br2iZfwd%XGSf9}5_d@m%re}=(>^GVkp8bRn)_~xZz`(qkr^`pJa=6L_KxCd z>%A4K{>8dVvzZRmCqJV^j+G6GFgyOA&qVfI+T*(JgD=5<5Sg*t$HHNciP?9dcObTo zdK84=ls?49g>x`yz~=FPbpy%D 
z3>kNGZ0xYvNA(%!)4{LiBOfv72e-n9?PEd1`TK)-KvVG<){}(lUzQjK-aes<&X>tK<&|*xvz6g3Hjl|DKNhQ_ElP607I&4ljzv zG}Eu`E52=Wh@P^RZoYaK|E;mN?k(c5C7(D;lQd}ghisjY=Br4XA}i94(|&};fsP(^ z_+Z1k?KpE+snoNxM&FqHa+(f`iD4$kC8|S0e!J<5q>BqGL4;zaPGwcp*(z(y}t>uO-MmRP2~T&GQ{!96p!S%kw};5=fr@v{dNvhmG=L21%h;Y-`Dp z5yO>LPW?lTkl4kY=4&VvM-;tiUt`#2d2Ef8^kFzTR}O`+S!~@4gLB!ZoO(bf;+yZv z_LVoakqFVAHybT+30MaclBUm}k|A@O)48d^VZh?JLOc#nzgw{^ zboS;i{Co3eEu<;tND8PJ(xEvIpyrv2Kr5jA020WDjS1rV6>(vpsLJWN4~PDwh^Quw zNuxliFFW>}1AYwr(=p2X}|w=_n;{*Tw8qf zCxIxgRjkNyck(nd)6DRI_Z~JTAP*^13!r?}Lu#6c2=da%hs1a@5~#yGW<3myG$_7) zCbjJM{D)Yu%@H;Sci#v0{i0uXIz7(}-}lR?xd(V&MSfL}q{2_H+=^w+R$A2up)Z)gENn-RG4%EgwNOvO#szDOF}d9o<_m*TV}aY&?|%po zZBq=FK3=N)<`E0tc=yKq&&-L-#JEbHAMRh)W!wWDm>2jJg9O-xO^P$L- z1u!=nNp3Q!T@=zSZcQlq)XfLnzs&Y$Dk5LuW8`E@=9W<&l7@&GFCvrA?%;gBX=^mN zxtlJBw4P;U5Qzf$0qiV4SONJ#|Ee}dhZuCdP}|7nv?FqGk)M32YC#)z#ILDJ+2;j< zL@*%u;zq)|!AU-HVMNZ-Wpux)6g=0A9SdRua18qjl2pKFst<}m;chy3RK3`=f@BLA zYhf1ZCKTO=P>CPQ-F&TuS^~Jqmlz&Z*M!wq;5}>{IR1Jl}&=`Vd9- zRKK-D=m=9&R{IHJ4t~rxCW#MUx(kl{HWw!AG|48zeOWke>aXo0$*(msB;HDipaml- zEI0cUTlmCP5sDSOGMmwHAS=qfxjUSXgi0m6E;dbd*a67QsYoFbbaLp<`dXI(TFCju zz84fquTpnnvWqsiBMf|tDcr0UQpIDP40hQ0uTsJB7R3DzGS_HTrf>684Y|DfX=xkr zY2;qz{Sa)Q=WjZu0cs(2XD~a!BCDtJ-X3K!Rle4Jw{Nn!+)Wj;H? 
zE>aKy8+yid9YLfM9Jm}6uyp5{L0YHv zb%!~D$e0Y}NER|}I}%9tUEVllzLtee!2K?wi#ERcds^Pf%!()7V z5XLbZLv`G~O_*!GtIU^?heX>&f^3FnUwejlm=q6{$*=R~5I#P$+AmF}0FU(9+5F3| zo1S~n-sU%^;SKm^wE(|2l*g*HOf3Vs(UNr1SHS{?9DHU3n7&oQT7PV-MyJJwDXe^rY~NBG_m?wqDebyi9t}V;tu1GW)nt zIl-ZLw$b;$wQ*%lVlb`my~{Vg;1pMvbWeBVnC&O653U)FD#srjJZ!%@r9up9zXHe+ zObzx*B}^9r1n|W)J`b-i`x$kGYsz$KbPdk~isJMUfM_1%)!t~WKTwP212tp1S^BfP za(T7_gY+GCI<##T^Mg`QPspGxzrv_h#(h>LXf8X$y4bh>;!aZafrDcz(}NZ%R2!sG z$J!n=^Sd05J8wiOM%sRnNxkO_mmfgvUsC;i?#CMtp@S}h6dga@gZkk+Z?|&^Rd>cK z>pd4Tg@zp{!Q=|_#KvIp5imr53IbVaJ62zzMMS%6(jB>%HY0n2V>+&vh61(Z)ao!x zy1weS8L3BnDblv_ocqu{;skM%95-Iux$9qmGiqLB)jPF{Vg$q9iJ(~?;Is4`3@L)Q zJ9paiqfj%ZLE+Ha+`HykO_rTV*~!bxf8@_3-tL&`*fmI&dwr+h;3AhCBn--v0U;+p z2$gK0KxjKPCatMSLjq?fYpuQ*?Z{X3gnaCJ%fQ8u0a_7t@{j{=zxLVJj{>~%| zse$Y5jDRv5_4(X2tCrkPL;TI@_Nb1DoKewi$Sb>teg1IeVEDx;3l;aC`!WvUMw3u+ z!!*6mGBr@q$>y^GA-o6l=|^o)zXh+D7a;Tc5Et-TMH)Th!?Csvr70`dUmn5Ljv|L) zd&>l^hbO~!^BC^Yc>^~Y#s8p!6YPC|;`{l}C4=ffK`&oF6Rwv_gd~QL-8Ub`*Cu@v zX*-T9YXyGi7Jhtk4t{yt7j0NX~%z_RdDv zFT5M8lUNQ766%`g_%xUNM4URxoNE>+n=WluDJ&z?K7$J-FZRj zXk$RIwAwT2+}x)OhJjC90<_FLKcT*ht{E$Y1zX*SSE~KYG^-hZfMM~{Y;q%1l4hL; z^)aFFZimerR?!yUax%fFfzHV=b=Z*XU( znnpZxHIIpclN$^C5i~w#ieD2|y+DHwGCpxq=>j!-j8&I*KRRQoI{D0A2BAQ5D9YOc z#L9O^zoUe7s+Pr}b1!ciz<`Vc*+4@c*b)utnVOc<_srz#JkiOJgyqeQ z$8LK$nBs258&cFukgC0OpowrMf#P^f3@526vQ+M&ELL;WZ2YQln{tH);x45K zc?hZ4k(ASe0Wr7lLu3Q+qGaF8eUU>WIZS-v09Z!2}(yqCTk?Q$vV zgmUXAS|o+AAd!!Ro%x5Nvzpc?9$5yR+9`GP5Jx8%nW;=9Pxzg2yS*> zw1gsrjJuE=Hpp<)QuXtXs%}1fbw~lVKvBrSEnX(3R<&OZJ@}Qfiumt>@3@LFo;eD6h!9p`lP}b}q!h)QZ*(FT z8JYCy;JQ(&u8Q14uq2A(7dPhHLlN-4^m_zZFfE4B`Uqn2DW(-f{O(y_Da#eqU{Hp5 zMf~I6g4i^stgmO*@_+szcnx;T>CU<7fib;u$~!48pXE?@f;yUp=GY;H2pUqz9(0o6 zavFMt@APJ0o94C3{3PwU?@Ho0L}pSb&v;2d{T+_C>W^|}7K^B~r8qjITgw?;`_POa8JSXoRULJ};gs!b(- zhB+S&+_F0XJkLA#3*VqUYNrZ@qYT(;z%^xsW!I;|>x~y0TqX=Ycmba+sJhL^m&kW` zKlux5 z`*gODfo_&H3&fDN>!tJThR~UOVvy(yGHQ0NIFM*w)+cE?X-@t_+j~}ExxsD`{WO$Z^zB1lM}($fG5Kw&^HF_05y{6ATvA 
z{~Yp5utA%;mzEhUv}==g`;S;_c{owm{|QeHp@&y<6hV$nH+%J;H>a@#FHO%bypUu1 z%GrwpKJLP`>r233R^OOB63rtUMG31)YqF5Bc`eP?h@0%fR>S_vo z)gfNnOuy4W*z4ysB6GH4d`S?c)3f+@iNb&0LJ)dLz!*NyZfs4)PfH@4K4e7(10poX zWhZk|#AcAgQURWiYXoWbT#MAGxUA+NY8(vUpJNX&Wl`7o&X{dN;^kst@H{Pe!bSWY zJgQC^?*M^kee9C^9`i&h~%yNeMFGq<74~` zx=!=^j#zsaTasu+21Jl=@%*4VV0;pI=HbZCT7079O%g15duS0e!onTYP8c1lQfKA< z58-r*vb*zbKs=q-{!}s|E!Gv1qOVDu9XOIi1PKepzH`h*`GEK)z!XPTeegwyOORhE z1#I<8&9LBoZ;H@mjsLHzIJS5}ZAzwSq)#0g54_mjaPL4qB3 zO{^ zbfcs(d8Ydd3%?*Wqbd92fw=6#2rzAP zC$+2g$C<)DjZYxuzn}saz9tf~Wo-;%>gMLVKZ+Yd2^8dZ?={sr4s{-%JmLb7t4q{1 zJNKV^)&H`tU>(m#dRH8P34*J19`dHlPJJrthJh@VW}wd1ysR&jEGoakqvTQt$$9i1vSBp3uD=x6P1^xt%gSy!B?SLUL@Sm&U{Lqj z+UYEQw4CtoKF`M1Mah*tQOU^+@j%iY=}^k=r=8n+c7DRxJQBR1SO8od#=n?xi{!l1 zHy;`REXs5&(-qY4vq&}mBHF8?manbu+*Br%abJj5g5f}QV1bHK*|cU=aP=x(jZ9f05G`HfK+ zeX$>yt+-5Z;6F*9ambfOI2~k{Z1Y)mb+7D-NZdreo#c-^zW)4d7sC4*^|J@3cliZ? zjl_`aFZ8#ehpuEXTS;=^jX*zLr(qO?B{3Nb4E#o|Q2ppmKBsO8TXeu2Altt@Oa@8N zeJj;;1HYpabB5V%O6b?oJPUlPrm_^|EVMMY;$jjwnbu~l#_DSX%UZZh^1rda_LnP{ zo9b1VW4OgjppC>{D;&I(TIBIFG0MJ15`8X`7+oZvlRr#L?plWN6U3doc!UpV##y~% z1S_XJP9#BGX&dbtEFQ~-h$8&g49tJ*|04Vc3k+v2ZsQ^Vu322p?YX=PgJO;=#ZcG>pf4Q?C-vBMuu9U^iy#urf9LYQ19!kRSJiVq`0X|w1Me) zXwIg{;G^Tb70d+Iqj^X8Hl{}ZVv_a9iTo4!-?rpUmA6NPGE)*_M8d7tbh6JoJ$|n1 z7m;{VTdj+QqA7pj6nz8hf&rb_BBW)qsui@{>hXyJUQNOkg>-X8yoBbqh?K&dq-B!x zakpk3T>*S*T6iOeO#5}@o!x+x-TM^Nd$oEwiuq*IJIcO&|4jUAmMt2fY(bBV7@zKB z6+pLqUnj_crRHr^1wDB`XZ8^8Xd|bC4hDWUb}k&OU-E2uw-n#xwGZ002qBV?G~15| zlY5T-=1htHmDzWHUTRU3=jZ3N<|f`dFJv6q+Pf7A z6(uXbN0QfLm_{}zpZKjtzn;tkfr;uN`|UqC@_Q#ABNS)aj%{=ZK*mh`&L2h|g*=k! 
zg`w-2k!O#}gA`Q2o^8;Kl1>~Ms#4bOg+-C;=Qavn&%2jJ1D(qy(qz1?cv&Fry@?4qn%pp=3?j}qVLa7B{PJ8Cug`1g{VGR!i&fH+@0)tv zG9Fqc6!Y^cdf&ifUlDGgIQvVM(!ZHcL;vylWdECeV5+c(+8fmr6aAp7}B+ndj zjj6^Vh4As?Dt0VKZ6)DJ;z=y*w07aLK7-B|m!QD0L1>hsB?tyALnsF!!ZkJu*-V3) zzF%eav6dHdcoU)V%K`)BpbuoqkB`d}I;S(H{d)7@J!Q|+P(d_L-OmnL%ZNPRqu?Wd zgKe*E>Y&K&L(HDGSoTIst6XTK$F=KKO(y9oM#5lc>WOeHXW%}64X9?_?*7a4i~_7z z2fcc+nP9GEMB@xCdMJ$oEQOI-L^lT;I6C&KAkR{kg>aM|kmJ}AkSRlL1Em^T(|sqF zm)H!B+&fDzu^B;ItF~5>s1r^N#Z5Qa`KucbmCw^XBykVq7*BCqz2C}UuUcy$opRt!C=FDkpTNVD$-Eh`@N1lA+Lo4lBq&4;!5a+P~f$E>MkX==RkMRAW& z!@GQ#s2Qw9rMG^D-sA-KBtA+I5bjGrtI=kP)STb+{qnwLPsGtT4aJypR{CfR;T3bu z7*{yQwvbi4^XKxpkt%(hP5D)5{zdd2pNQF%=hNmhgdpAP&9elhgOkDw!UC7KKK4lX zC+T_6ri*}~R$v)psKDh#yF9;XlyPj?WwkpnK?Zn>7WDoSEeq6SbU<1reP!{<2 zNiTqVDW)@x3SPBgd=|yc+ih7gk zMR%sgs`u9!5Jg}4Dv=fsNrPes1)Lcl;imXZpXipMcj|h?370h3=IrA=1q7VXSw{sy z$5V*JO54uiJ)twheLOqO#P$9eLG&hzN*)pdkfK5uf!a(WxSp;@0d*c23 zj)=;+fLJDRkB{6RTJ%?koxTW1ls)rRGbark8vd&A@K|D)Z*jquo)dxJ--%iRgQ!<t7ZwQ9u64iCxZQiEMwg(Q;&1y+J%XnRUX&m4~#zY*3pKSb1(1T6F{ zCZ$raH8>rg8-GDP#GCk0E?mMsU*wNja#XzIHyQc)(}ee@4POkLZx=p9-HYa$B-2!o zGNZpKkvoIjRAp>Eu{Oad(c*EKTZ7IFu+=eA?h}~LNah`saDFwYa$2f~?z}FSoF7W= zOwX{)Izb}~rG?dnDaCK~{opog!N^T#Cr0jx=#}>6(CaUIUx9yMDAds=dK50M-MLBe za`~T}Xy`WGYwd`B{}&8gFUewy5v)xQ@9XOP=AGBJP7A1|;*x73RMJW&$B(Dy-V^{> zZiYDRllPX^U{Jf}tQ}7yVk39%W;hW!MM}-M~INq;~u=_F+W+=WD2-+pB)a@5MB6sSx=vQ$YOFW z@=8-lj&E?mL#oGn%YL$jcVkH!PackP)= zXEvGIRrY*75)^uK6iO(j4>f~tl_(m1^P4Z<7eqWoehP)Xqt6@3KK(!kY=Rr;5)x}A z{muA?-%)xN&PQX>Prd8r8384!Px?;s`9ozsuHDD&@Z*`WY$3=l=N)Gj>()W>tDFl( zXNY8W#|KriGMEpesbykS(s|eVqIWcp-h|66SVhYL1i4{;ur{147bhcK5T_CEAR0|4O`{$YQQvaSDU9 zq6j?=Vd==;~>kd|Yx)%{--!T-;C8zne0tRT<}o5=^HH}_p6fei%16WJNjiD&^S zenh?v`505)^XMI1zZUCXmO@yaZVbGro(*0lus0}?5CVkL8HfKK%FBA*=N>ub8Xg+10nrxni{kpsC)NyQ_o?lr^hx2TlO13l%Q&E#{Dlt7fKkE^luT#-x<_~Efhh8b=dp_(z{H&Q#~ zctl8U+L3g|1lQ@_f_D-xe1u5KVP(%=w(WbE$lb!hoQ}R*K`2fBQBHLM#-_0t`^!&Z zUwLJ;SqQ|eR~OdwxEL&wW?mz5@Jrdmu>rXR-b2z=-CYIS4UuY}1=5nJzjimd1F{~)Z^~Hu1*JPq%Qex!h=MI4n=i*UzZr+h8)!vG&@}& 
z%O|R{f4;AF^wqCd2oH?3`1IMkA<((0!y282+i-q}CY&Vfg_E8k>0CBfrhQ;M^z^Am z$^%RpEH>%pTRY^K4TS682S}%4>(Ey?h52jxzCCrr>zNVji|q)q{=pyk9WeA9wP{N%u;wbe37Vy?KZbGqWMzyp-XgxLI_{@Q6l# z>8*_1YPM!KNlQ&vbJ9t=)P?bN(kJOrxcmcp7ZP^etLYNM7;g6dE27hn@lINu%5fRynvrwY@U-FB*SWel3^Gn|xud1lV({WAXlK_P@ztM-S+SwP- zEB=h9f$>__0wyaD7Hj~Y^5%z*eq6su!qYwqs46LbGXBliH@jlco7|@twKp^dMMzpA zSVtmHaCO2=$r&fw55@w{m2_0spUOxH+CB+q1eXs1e$7 zNV2whxJ2+sip>a2U6!_5c4|Ac!QAh|S0%+!l!?riKgM%L1mFSK2W%l$J<)4HoW>FxR%FH*NX|e$9SHJ&?o6(dWBR3R%T(?wt{Jjdl2FrccUuQ|VEnD3yXGbS zQR=?WCbtEdS z2+0z_H9cAh0}Z(I>&d!K1(dwhir|L+JcO>NgH>PJQV6n}WPc(sHqW*3h6f!tqTzKL zZvEsidGi*p3xvaw_@6}CSZ$rzy3VJYL@)@kslre^lIRyGMtG>co|}X0`mRxnjlr{q znibvPCotO9BghC=zqL64?qn~RX`gRWoB01^90o>YmnB1xdds&?>KGQuMCP8H@mDdQ zX8PpMFX$Z%at89fWYd?$Y>=VUG|lfsdYLT31f;zPPZn5@1@7S)=U`76`bF-y*9oh& z3PL2QFs^UtNuSch1H)!1d+j;{^syy2XR_RredE%;iOtRnsT+#}PC3%ttI=2gDBDV> z5rVsWAz$R?Py3q$QzB5{r)GeBoB#tP^bfmt$7X{NC4phf@ag zi9j*=mJu8Mp)8Fu{4a44dm{UfFF_cjVC-=LwW8<){Exo$X9*Y*a{xJ55P_T1Z21;{ zYqf2nY;*iFEY0C}8eQ;$6kMT3Hx1vGMxYv`^&$;&26tDf4vS6xo<07xLtGKf4>L<; z%wWSBCB;r9`yIzxYkxernr{THsSJjwWO0iA$o0}k*C@C6#lKl4t-y6TYBu1%eDPIZ zaqtq<{ZY+7nx41L9#sLbj$~!Gc}&rhn;d9HFu_3y{VaKSS0hU?^r_ zJANm|j)%VJH&b2d71bLIM;W}RwPGCqsWu1v4awqgDiUQx6HhL6mn!nB6KSz2WX{{kjNwRq@}_;KaKL5;k*w%bMgVD{MyJ7! 
z@%hk`f96Q!UojlXdIROQ4EMT=Jn%(J0r3<`seqh`{;TYgv?Nl~)nUQ>;A9lVkHjR(yIHByGUU7 z!3F`n@@2o)cLudxpqeom$JYK;X8v0VdDF?B|Z4!iQb>i-r$3FXJ=#`g+z)H3 zC)hILeJ2iH5OVd1WulO`cqtoY>BN%p@yu7r#ZMHdb7}d+YpC5-WaJ?6c)u;YXO`2* zpK%x?e~psxh5neawh4p6ya7J7{a-9fcv}QY`bQ0V?C_1Qbk!3u!ap4>=|-YIf_iNi zD+wE=1_ONHj&8_3&e^Wm`E|XpdtFufLX6{W7O)%0v%;9)Y}0HRgXw>YuqYE>|5Nu~ z8M@o&NF{U5!n7@n!Q}1Z6bKqgaW(a@SRs4yEq5XYyLD(9=O&v9&?v`dE1HGNt#!`2 zC?al;-AQWDseJGLpUr!REa&uUh^|>L=Yk&%r$`_4@|;F&40F3Fs+|0)DpKlGoW`$= zjz0yN>0{)%)Bne`oB%etbpC{n8VPC}T@BFbRDNpR?n{h^t)98gP#md0=THV|C_WnI z`CoJa#zF3Mj?Vx}G4&>Es&VH7+%{=;OMPB1hY8p25h|IsT%#91zetOfKLeg|82gsk zT6Tv0Iak`arT0g2R{e;7R(iG##V)7Zi?uO#4GyA=c+`_mbjUxMWBsxZtM9Lmp&yAn zz};POaFIrz-trhqCHQ^(tMF>sD_zsSh_Gp0flQQ7#K1j2>9CNrW$Mj&_<;wdFSaD{ z5bt+k|In+-5MWVv68=>rk^d6gjk_A+0aRbPAe0=0DiWrgQ+NTXup!2sA4rq1LSO2; zi}3TuASF*R#ub*J%V2Dz8EIaic-DJK-${RAI!XPg(5tY97u1c`YkN*~5dQWML{;1f zZrZdnrXQn?TVG$$UQ;8^ipSHaRlT=5SKOdq6}j-|T=4MIfqdjEderSLq7R-1>M9}{ z7!pMX3I-fehI#cDCp)_7o)!awOBOZw;2PcI=(}^&cYYQ~qQahTj8i`o z>5B3C#h_p#1n7T%Fn46jpHi8$9H4+GjSvOBPcXeb{)6#$lD*O!?e(W>jQ8Al5E z5{B68do|K|7^eU3Vck{`cN5;H+VHAABGCooQMLz1C3~xX+VK%@I;|@byE-dHJ)9N> zlfw2zoQW%fl5ETD3`I4%#a{?->N#Y}m2225d+?xn!7#mdqStLV-v}SVYx_M-JXhi% z%X`*c^etyYj!nSNs`*Z^5tm$)$*DZxIMPjA$KikoE}H zy|Q-b=m45YWPVSsZ~bC3Ha*8gEIX;9rwse0V6Zt~$o@Hs^G=0f@0*uH1c7A#MfOHd z+jSAp$Kzs$rk)A) z>0_oDn9rME#W>sIZ7$+TvQ88$2u6U#KR;ZM^n2}cf)4Rr5B|4e-W%^#u&c!wkl%XO z@uDU+4*{PwuHNut4yar*oKGQX7VYp2Gmt$m1Cu@QuKs3;GdDAK-exHLI6H32w5;5R z@Pz(bq0kJVsugH z+p*Q-zu`oRl3zC3uVa_l)&nVhCvVrz!bS5E$di?zSoUZ4q2Peg)B$2RogYCYKjzmX zwYW0Fuew3Ik96q2ZOczklH6WfFAkn#1SQI*#j3I6vAQQzM|H8!5^(>p>T+QDQkjwO zZx_$fiCg0k~ko()1G*AB^#~COD4sPrn`K#?k_KCC&h`2ux|R#i$Q&-huH}C zf@<7b&D#ARX>`$buka}@Im(RpwLUXa8WMkyGsLpVy?#D<%(A+Q{e>7xzf_wJ02rVn zm@f4huvv?Im=$CXUmd{Q;hjW+`*4A;`)v+n2Tr7x2qClfs*UpJ@02r~C176X{}~ew zC{F;6Cg5!wqSU>Voa#xqHvcwc|m z3w^wPeHGh(`g6PQ{VaMYu{o-0u_jA5@a23k-%x7d#1^Yrx}{xrvSF2>-I{=(e1aEj zMZQJ_@BR8%o|QLWA!wop9sRWUPU50-PdlYYaGR;_L(7!~W$$bCP 
z-^KA&HoVsl5y~XEzU&`JT`AWB?e&#g&49sSSGqR5Q=Z@ey|Q4R%h34TR}Km z&AJ@VSaKR(Vri(_7p?SCsP)7M;dhZ^k3JA?;CBy*;AdEzRm(`p&#Gm+)6e`Cr^ia? z@L(Y&x_|j_q0p<~b{WQq76rt?c}v3Xe5-Snc}M@f>R9JPt-%Pco_$4se^fMJekEEAjOi59ee__7$#C;QJ$3yTUw%TyL-j|n^cq-WjM58P{%bxy; zmVS7z3KlbQkCvp-Y3PmpaIT%Y-}I1m^!j)!^~8kF_iS5gP{*O0?xP6%E(EHJzv=nf zh6-QBs$ReAzqn#!8v;wgFAUJ8w;8W}>&K>K)}OMl*UA!WH{5=|=M|*p8%~SO!EiXV z{}^O>)^aS9;B|rdiPeAlj(b=1+g*ItW%>gsq4}!g6A0kEck35^|JNano7uj)C`K7xzt$>kfM{16qC9 zJc98FNE!Z7ZzfP)>K`WuNG0r8X%7v2ia`kQ^B zUG0mBm%0LTb_q0puRD#%Lo%kw`a9mOA{YS^Y*aRCe~InYJJ%ozk(8%)8!*T!Wwwvr zG5qp|A2~e5?|EmUT38yTv+KICAmW)UfGgHL4JMz;L$XTS5Lu)IaC?Y}JcN2p<@SsG zT3gm?MvRk;NgvmtlVk4$g;pFcn{f5+xe(@1V7ZSMH>ddS5w0A8pqV>R4+Y5ar+Y^s z?<%7!!TRi5ZGtu42O%xho5@u{oGmydDN<0yL#DE2-d?OD-6`W)HFRf%3>p!{gBC{4 z20CGkulHVSZEUH7)Tr7L$98?~;e6sg+Km8Xwxa@<6ZC>^?oI!N6s<*CH}*-VqR=ly z&$K*TpHp(UqPYeWvorxrmQ^>p7#A z4hu~L56i5>k%TF*T|((!8pA+s=s|b>sBuSa%6#xL*qi00c)ErIdT4e_91zCAR({zR zb8>MI^Fn(`BVallBhd6%_20A;6-4^m{c*+|MME|b2RZYFXi$%ua{DQP#V5WkX}-K8 zzSwv?@3o$GSSAJiNhI|==x6EnwbaeAko~!v&Q`MD$?s*@d`!P^e3GMBkY`tay55Rj zVVjEX@?vckpHG>0q2O-H5lm)b&${bqhtqj zf_V?wndOcn)oa+~!9YO9Tj-DiuJvC$eG_ZNxLKPK>EARnjSP-;&KXaXXvp-ZJLHYc}MSsUEK$RJF>F5MPmdN})MyKuPi%dxfAZz2T|aeaQ3~Yju)FM? 
ztE?s&Dm)c`V8ekv6#df&?!UecC`6diJ4RHkP(=01(r&w|U1ZBWD)OIUk8GEbN7-_K~=|n%t!qKyxr;w%&{q#M2wgySBxr^0;?@~77;W5l@*_sRj-28U^hXe_`c7U6g81bxwd)p z!}90U4K;Ea&3uM^r9EtQf(REtyZ~&8&tUOOu4pbgl;gXoohopCmW8UU8D8;KgESgsB>*z<)1y&qJKpX&5U^U5U)z=sZ?>u+&xELU)bu}I_2^t z{_r^|P`ab9hxFA|V{e)VNH3!w#2mp{e^~zt6Mp2zzPNhO6Z3cg!?cF^x}Ue~Lo$Db zgAaP)p!nR!zvNJf=j9@Io#6&8X#}p}gYS-4u{A*(H`(F~fm7=j+R>kMDt+4S(lK|P zR2{%ILNEUNtgfCDqWXWTc|Z7>PWc6LdN#zJiXsmc(ueIxTCnTf*Hwc1m+4{-D|^`> z_UXzN-|Eco-%g>Xb%$U;5+}(Jh2v2y`Y%T8wWFhaf#UFoaq=2Bi&TNyY%V?0X*>#Q zu3OnP(X5dt^5Jo7ruU0C&GeLS>t zkgTEsK0zBl;aG8F*Y|#w@vh&*CQg|is!?1!^seBwZ^6x=;wuIpbUHxA9QA01oR?0D z*^JxO!cy0FkfkP{N10`RSFJKsyR&cJNiF;QUmcQ~1S>7^gHX@Oa8hr6-?0_zi*myZ zJbzy3V^()m8JpkM+bsJ75yy$+3o<+xWS-(0bNi9T44+^TT1|ts&a|L?uFOcU#KIV^4<~Ao6 z-DG{|R|Xq!zUIZ;9$nbiO^cG6w5Zecb2=vxAT=PBwK-S4GI7|+hWd|Y=grlv!8E(7 z(sK}|+O{PJY{Bxolc)73Uuz*_ImJw*bD1mxbC@Bp7t6SdRG1EXv{P8n*cyCzY>=PB zg1=<9kWwiZk68u`A}`LGalaB~8x2PW7~l=)qUY4F6ng@3K83?=cV8{j6P%wO@2#xu zAmyfU*7V$Xcl;$wXb$h(^yuKSR#QacYwhh~L-EZXzd;=t*c`{$rcssQ#^)8=H%yh;n=}Q48fEOH=augQXDlRcB;+R;}`{mAg8}gz&_{ePA zhINXfnB5Ki7790)*C|#3Pu17FbVj)fS8u`yC#2!s;D6&%5v7qB9IBNhNRE+ao#+Ek zDg$3@Bvp{C69#{xDWA3d2z^hsi&&YSiE&kCl|A9jTIFrd9Hw?0NHp<^1$TV)@y_XKvD(ihK$5xzN(RzdnC!G%lbOAD0}y)grc& z@ot}i#gIZb<6+v90?a#FM!HP_!`77iEFeR`h~ujEonr?*5^=`oi;2{0h%fEyXyz7)*bXr??z<#@L;z>oh;&tj+?t@g zqaWVOz1bKyNVm-t6Omwyrv7BG&q$U!(0%kD{WB#x{Y!9!qepN% zt*EEfrE^DZ-Qi$!>BN&okF)o%YehppZHFL`E-AIC^7~%c`hvFS<}U_3wmY$WSs*r} zBH2wVL|Z!>(0*0Id)q~Po7K)S#3-L%oZNMAt>V3KYCr$D>!zpQsJ(9Znc5}nTdAF+ z^VK&;vptAD84K>5?1#vp?;`6Rn=4)&);FO#lm4nx0-KScjm6*VS40kX}f%b zo4rrOr)9DX!xwB?Lf7Jr!DhWm%b~v{w*}fiMh*&xS*3Rbuu3F8xiHUxyNJOmh+J#X z$9;8j>Rs#T%P zXSGmUDI-@b;0jLFYt)+kR@#%_`pd}=T{qoKJ|xa>vTfBLR{zsdVaSR;J3%F{5=gni zdZsx<%dMa5oQqIi*H=7~AcbJ-^3s$Sn%My}hygjveHXg)45nQ6fI|`E%|>Hi_DlsdZBHABnR?|H_XXIeB>=0l(zd@aIUy!4PT>qhxqp6IisP|f9*H=3@Sytnk~I0h zO(Qo|S69oZS*zIQ^`;mG!o=cZV`pGyr1Pb|6CIgJI@i_ae;*ha5X=#UrlzK2i+JWI zSuZm4Xo9+ znZ6f7zDI36Fad_( 
z&JL$oj0*TX+5L9jlRt9dr@*P7`7k{Zixh(*&%QDge(7}b&!Vfg8SSN)+FnVcIca~~ z5gY#ej7JgjDD4@RjIKhDzOoxfWhRGD*78tWw;r8Vq+QPrOxu7eD;etx+f4w!%6pMv zj1pc3E1@9-qn~B7QJT(&!^0~RJhYmL%d3R9)$K@X*}8QrtHa7N*P&QlQ&Uq2S(u4TXK5+EH#v|F!6m-$fpR4 zuXg6aQQ}ojzp}}-hwU9I!?>t}l3prKi{*Uf1*E-LW|!}}dpB>Yu$2XCLwi14KjiUciA@onirHtAgN%ehtsr437X332 zM!1FVwmde2XcSyf)1T%Eb6P7QF#(qAv#dFh8a(p+>C>lsPQ*VDVO8B)*-FG{Wa(EB zJ;uMi5trQV&zb2z_GE8lYGh<&iop#Cv1c{?c3>$sgCH-ZXtbuPvy(kYz)*{>CX~dp zmB1cn$d^SRnbIK?L*~a`n-d;w(;C1E;Z4Zz-@i{a4*lRo3kT0ogaX5b9Q&e)Czfb^ z03n#c1Fm(BotBXi87~y{_xI21gkU!J@fgG9*B7LMBI&OJg^}dBFNWp>#flq~@w&G7 z!5H1!w{H&&4hAx)j%k+$>Dg3TWqD#p`jcs#mR_#WAaD(sU&!l-6*JGg zc^LMLIauEW_s}&yQO>YPvCp+H3Z&tR(+;&&2=Tux@5x8YRwP*t!Pde2h0cm(Conzu zOGpH;O7f~yonMk0N&Sdi+|-QLhnWZ+ zk7{t;Pd@+?|6jvAtFN&Ef0#q+70_eKLKd<(wlh6m6tEBg=jXv4nDAab0eG^YuyCJk z9vM>c_7_vKkV@$Zj0|fl7n(xq1Ne;&3otG&F8ccVw{G29DJ~WL-~k5==TMSRrOB3B zmS)j=-0Gq;ZAni2#f8Cv0Z43p&)yP(6+JR3|vcM{kjc?Y6 zDy=>qt&<{V49|j$=BuvPh+RoNdi zC$U2NQeSs_`xi-mLRK+_&kzbU#?6!v;%T}JH%RV)qaS2RuURt_J_9flq~8jkOR$@o zW7&J2C^CiW3i)$&E#`Cc?UwEFf80xrgfYt8<9ubz;&?omOr5+}5NS~P)C>RqkN4Ke zs*{&yn}EXV2`C|Vl(^2v@qTHbH&QpOceD8PP=*#Zcgg_mJJj${W?yRf;Pj$EMIjM+w;Qn0{5oM`EMucSfEFYSwDfh70Q#I z_wp^xcA@9X!)RGHxf3{nv`CwuV2SJTC0S0>`DU`qCr$Rse^}Tr%a)|LeY@K2qAe~h zO-(B`fluL>tnv0yl#mlK=2h5b5J+DFi2Nov&=GU+<#a%KZ|>P_8F-HAf)ELq^PJU@J>%9&T#k`w#@%B5DOeT*Cx07ONt># zj7{6Naexw^shzztP#bAtkzyPg@cF5#aVRTU8!khp7;2m^`3uBNchObK%*SMA)@KoEp^Kym(*I-T;TT?5* z2mlocYG<<`dEkWIpk0#n{oW=eFf4bK1S+kAiHCttfB|qqJ{!QRpzGRjQv!Z8B@kjo z+vVe`({|^_p7#!0n)RdSO6Dj z&+202LAL?ideSQS@Q=>5pF5k<9LobzK*y=5yoCWROm@OaX_m=)8)anne0ZV^nHK%c}NLm(IlEV~H!;8n>@*FF1;o}qR1M-Dce`v%5%!2W$hYbz#rb|`k& z5vOqOaBpw#iV8c%XsJhQs-kho`zzlk-Xu6>S!IA_1HTY4C^@-Oyf8fYum+82;>|d^ zR{$vn|HqymZExTEQo|v%dR#gr|FR{u$A!gA?_j77o^4N0d?dx+{>>tN7jV4gK(sTNZ>`y=FVd5 zmIhJ6L}1cN98WM22{(JJM=tAHX(GxxHZ~R%jj&a?)Cz8fBlE$yQ69<`$(PFHumD+r z=wm-SJBuMINaWeih=zky1p~f4%)aYNuIAUT`UUxB4oO4}nar)Ksw(to&9g5$ZYzE+ z?>usRS*M_C8chfiKa@>xX>Q&>b|6TO-OI24Z5ui)==ni+n5a8hXnQLNte9+(!MX1K 
z>r?$aNXMe9o%Bz#*m*V^6Uv$bN9%a@st5~aP!*vu^?^SJQSV=|WC_dndZs~W)I>;A zy!$@_N_kSr=YUWx(cIsRjUA^vPiLOl)3vHdo6L8WxR%i_eSWLz16^2xp+r^P%3okj?F6JfRDt68cnKeVOI!i zDj2GbhK4^ElS%*~2I~oI5PJZg5bgVJZdSG!`*P*yyMCx$?kh9d$aYN-5zWhC z4qmq9O~ahJ3fuviVzz4!SWZay!^Cd&c?tn&Af}rqgwpkc^&svDi2P*J`(SL!r4$$> zqmt*@oTvo=_gY?h4)GrEAKv5261-N`X+{@6RkcdL)>Xh{GJQ=i0C<&_mO@3UYy8H4 z9F}h`UI_^)yTm05xkQ9z(6|?hBg4avfLD>qsU-=UHSB}tw2bGP4*`W!#+x4Nh0MSG zd8FdZo8tsv7Krch@>o64I&2yq3vh)QHZxj6;%|&vQQkg&{+)9*QFHp${dz(sa1rRa z^CjLdllS?~4mBQd_z*TzEj}MB9P7KESvTm?|pq)R+}zx+kK_uEfbU-6rnSX%EXSo3KDoRv9owkHW4h{5;*5PIDpPzdjtn8d-?CZmV-Zz)L|P_iC7cI|?O;PrF7Td|zkcoL=$PyzQMe;X&-U4# z-ui8qh{)RG$4vjQeiRk;+5?Bf2^UEC#%j&o8X6=hLD(Szv%BTOL=mIKvDX!$>;Qul z+~PdqJ(LKr5hVO)NNmA-*;(&Frdt_~_kl>H0j^j&PTz>!vNr{;P@9`iqEJM@eMvpza~O_)vD<2+7rlb^e#N8LdosF+ZA4n`My;GfKpNCnjK^c3MQ7An1~0YW`}<^ z+t^OXgwcXb9v>ek;#lXry}hBO00?oEpqSfJ9m#E1sjaupvd#pk$vI#8IKZg6smaj$ z*WpW-#)N#D@CLn?OA6DH)~F+lLm{cEs;U6|-t%D^%Bu0nNe2)Bpyzmti_m)+vlzvM zu(yXtR4jL-?*Ue0)Ya*bS!*OED?sPx*VIGcwBiMRpor-8r9OjoP_`38CNhgblT(}D z|LL}|vo^L7SmwlreaW)Q=V<5U9A)zbuZSUL9HkGPW{ zi@8vt#K*<$l75zuP#ipWG5b~6i08Y-*TrGnS;)ipm)cyRNdl-47Z(R1-wgULXHz~0 ztX;?XCo~#2lVV&FL!m*l2dv|mW0$|{$o)j>ml34c2DGQ}Sq^ri; zDExj;^j=^0x&a*suc@hPA{`XK%`zzn{z`C+ak78gpeH&V1YFN_KzEht2vAo=5Od&2 zn0Pw|Lvsd*aRLxJ`Rz>eq1LeZ zHgi|uWBxwqQlP4 zzaZ{hU0nkhR2&K0b|g|4WmW1sTt`6R0oOtPUGVU*g}4Jj40Ir89P;}D{N-wG-2q++ znEL4hga+C)42BHMupLB?lanIHt?a5GmXHZl09_4P4W}hmj~8@$93eLMo_diCf>%xL zp^j!hVO*U#OHdSnhMK&*JVXQui|I>Wvt-V28e}T4IO4f$1fSgCx=C~~qJFbUl?QQZ z`uf3*kN5cRP*j8#858Pk`xpTE=TZs=APGKQEKEV;3{+K*Q8gI-JVT!H;BdWl!G%VE zd>|p zFN>;;Z&h$nf2v%2xDetI$XDJJ`R8mU>0Q7@TBTfQ{u|yZ?GBS8P zUP3~GAg5>kC}mQ|(a9;Vb22+SJ1Q!w<>DNN!KUu{1BOpO zF{zDAYg5yXiv{>=)Xct|G@a0&k_oGC*uAO~SQXy3ce0d;cFz_BmDEgUO81~i*CWn0 zClw#sm_IT3x+l~qujg8kajNzdAp(7*`a)lRO^K=B_Yd5lY2O8(i&V|-4E=Wf*_UWscbYGT^`1^8fW~Tp^n(68ojT zl|~C)ck)L2ozB3XHB0*z*`Em`OvYckce`q@w-sO4*rE#qjC}wfB9UwUExs`edge_node_connectivity`12 ×2 \ No newline at end of file diff --git 
a/docs/src/further_topics/ugrid/images/ugrid_element_centres.svg b/docs/src/further_topics/ugrid/images/ugrid_element_centres.svg new file mode 100644 index 00000000000..13b885d6005 --- /dev/null +++ b/docs/src/further_topics/ugrid/images/ugrid_element_centres.svg @@ -0,0 +1 @@ +`face_node_connectivity`xy`node_coordinates`xy`face_coordinates`151512 ×41212`face_coordinates``node_coordinates` \ No newline at end of file diff --git a/docs/src/further_topics/ugrid/images/ugrid_node_independence.svg b/docs/src/further_topics/ugrid/images/ugrid_node_independence.svg new file mode 100644 index 00000000000..ba72c42ffaf --- /dev/null +++ b/docs/src/further_topics/ugrid/images/ugrid_node_independence.svg @@ -0,0 +1 @@ +` \ No newline at end of file diff --git a/docs/src/further_topics/ugrid/images/ugrid_variable_faces.svg b/docs/src/further_topics/ugrid/images/ugrid_variable_faces.svg new file mode 100644 index 00000000000..378978abc39 --- /dev/null +++ b/docs/src/further_topics/ugrid/images/ugrid_variable_faces.svg @@ -0,0 +1 @@ +`face_node_connectivity`12 ×6 \ No newline at end of file diff --git a/docs/src/further_topics/ugrid/index.rst b/docs/src/further_topics/ugrid/index.rst new file mode 100644 index 00000000000..81ba24428a9 --- /dev/null +++ b/docs/src/further_topics/ugrid/index.rst @@ -0,0 +1,54 @@ +.. include:: ../../common_links.inc + +.. _ugrid: + +Mesh Support +************ + +Iris includes specialised handling of mesh-located data (as opposed to +grid-located data). Iris and its :ref:`partner packages ` are +designed to make working with mesh-located data as simple as possible, with new +capabilities being added all the time. More detail is in this section and in +the :mod:`iris.experimental.ugrid` API documentation. + +This mesh support is based on the `CF-UGRID Conventions`__; UGRID-conformant +meshes + data can be loaded from a file into Iris' data model, and meshes + +data represented in Iris' data model can be saved as a UGRID-conformant file. 
+ +---- + +Meshes are different + Mesh-located data is fundamentally different to grid-located data. + Many of Iris' existing operations need adapting before they can work with + mesh-located data, and in some cases entirely new concepts are needed. + **Read the detail in these pages before jumping into your own code.** +Iris' mesh support is experimental + This is a rapidly evolving part of the codebase at time of writing + (``Jan 2022``), as we continually expand the operations that work with mesh + data. **Be prepared for breaking changes even in minor releases.** +:ref:`Get involved! ` + We know meshes are an exciting new area for much of Earth science, so we hope + there are a lot of you with new files/ideas/wishlists, and we'd love to hear + more 🙂. + +---- + +Read on to find out more... + +* :doc:`data_model` - learn why the mesh experience is so different. +* :doc:`partner_packages` - meet some optional dependencies that provide powerful mesh operations. +* :doc:`operations` - experience how your workflows will look when written for mesh data. + +.. + Need an actual TOC to get Sphinx working properly, but have hidden it in + favour of the custom bullets above. + +.. toctree:: + :hidden: + :maxdepth: 1 + + data_model + partner_packages + operations + +__ CF-UGRID_ diff --git a/docs/src/further_topics/ugrid/operations.rst b/docs/src/further_topics/ugrid/operations.rst new file mode 100644 index 00000000000..84e817bf488 --- /dev/null +++ b/docs/src/further_topics/ugrid/operations.rst @@ -0,0 +1,995 @@ +.. _ugrid operations: + +Working with Mesh Data +********************** + +.. note:: Several of the operations below rely on the optional dependencies + mentioned in :doc:`partner_packages`. + +Operations Summary +------------------ +.. 
list-table:: + :align: left + :widths: 35, 75 + + * - `Making a Mesh`_ + - |tagline: making a mesh| + * - `Making a Cube`_ + - |tagline: making a cube| + * - `Save`_ + - |tagline: save| + * - `Load`_ + - |tagline: load| + * - `Plotting`_ + - |tagline: plotting| + * - `Region Extraction`_ + - |tagline: region extraction| + * - `Regridding`_ + - |tagline: regridding| + * - `Equality`_ + - |tagline: equality| + * - `Combining Cubes`_ + - |tagline: combining cubes| + * - `Arithmetic`_ + - |tagline: arithmetic| + +.. + Below: use demo code over prose wherever workable. Headings aren't an + exhaustive list (can you think of any other popular operations?). + +Making a Mesh +------------- +.. |tagline: making a mesh| replace:: |new| + +.. rubric:: |tagline: making a mesh| + +**Already have a file?** Consider skipping to `Load`_. + +Creating Iris objects from scratch is a highly useful skill for testing code +and improving understanding of how Iris works. This knowledge will likely prove +particularly useful when converting data into the Iris mesh data model from +structured formats and non-UGRID mesh formats. + +The objects created in this example will be used where possible in the +subsequent example operations on this page. + +.. dropdown:: :opticon:`code` + + .. doctest:: ugrid_operations + + >>> import numpy as np + + >>> from iris.coords import AuxCoord + >>> from iris.experimental.ugrid import Connectivity, Mesh + + # Going to create the following mesh + # (node indices are shown to aid understanding): + # + # 0----1 + # | |\ + # | + |+\ + # 2----3--4 + + >>> node_x = AuxCoord( + ... points=[0.0, 5.0, 0.0, 5.0, 8.0], + ... standard_name="longitude", + ... units="degrees_east", + ... long_name="node_x_coordinates", + ... ) + >>> node_y = AuxCoord(points=[3.0, 3.0, 0.0, 0.0, 0.0], standard_name="latitude") + + >>> face_x = AuxCoord([2.0, 6.0], "longitude") + >>> face_y = AuxCoord([1.0, 1.0], "latitude") + + >>> edge_node_c = Connectivity( + ... 
indices=[[0, 1], [0, 2], [1, 3], [1, 4], [2, 3], [3, 4]], + ... cf_role="edge_node_connectivity", + ... attributes={"demo": "Supports every standard CF property"}, + ... ) + + # Create some dead-centre edge coordinates. + >>> edge_x, edge_y = [ + ... AuxCoord( + ... node_coord.points[edge_node_c.indices_by_location()].mean(axis=1), + ... node_coord.standard_name, + ... ) + ... for node_coord in (node_x, node_y) + ... ] + + >>> face_indices = np.ma.masked_equal([[0, 1, 3, 2], [1, 4, 3, 999]], 999) + >>> face_node_c = Connectivity( + ... indices=face_indices, cf_role="face_node_connectivity" + ... ) + + >>> my_mesh = Mesh( + ... long_name="my_mesh", + ... topology_dimension=2, # Supports 2D (face) elements. + ... node_coords_and_axes=[(node_x, "x"), (node_y, "y")], + ... connectivities=[edge_node_c, face_node_c], + ... edge_coords_and_axes=[(edge_x, "x"), (edge_y, "y")], + ... face_coords_and_axes=[(face_x, "x"), (face_y, "y")], + ... ) + + >>> print(my_mesh) + Mesh : 'my_mesh' + topology_dimension: 2 + node + node_dimension: 'Mesh2d_node' + node coordinates + + + edge + edge_dimension: 'Mesh2d_edge' + edge_node_connectivity: + edge coordinates + + + face + face_dimension: 'Mesh2d_face' + face_node_connectivity: + face coordinates + + + long_name: 'my_mesh' + + +.. _making a cube: + +Making a Cube (with a Mesh) +--------------------------- +.. |tagline: making a cube| replace:: |unchanged| + +.. rubric:: |tagline: making a cube| + +Creating a :class:`~iris.cube.Cube` is unchanged; the +:class:`~iris.experimental.ugrid.Mesh` is linked via a +:class:`~iris.experimental.ugrid.MeshCoord` (see :ref:`ugrid MeshCoords`): + +.. dropdown:: :opticon:`code` + + .. doctest:: ugrid_operations + + >>> import numpy as np + + >>> from iris.coords import DimCoord + >>> from iris.cube import Cube, CubeList + + >>> vertical_levels = DimCoord([0, 1, 2], "height") + + >>> my_cubelist = CubeList() + >>> for conn in (edge_node_c, face_node_c): + ... location = conn.location + ... 
mesh_coord_x, mesh_coord_y = my_mesh.to_MeshCoords(location) + ... data_shape = (len(conn.indices_by_location()), len(vertical_levels.points)) + ... data_array = np.arange(np.prod(data_shape)).reshape(data_shape) + ... + ... my_cubelist.append( + ... Cube( + ... data=data_array, + ... long_name=f"{location}_data", + ... units="K", + ... dim_coords_and_dims=[(vertical_levels, 1)], + ... aux_coords_and_dims=[(mesh_coord_x, 0), (mesh_coord_y, 0)], + ... ) + ... ) + + >>> print(my_cubelist) + 0: edge_data / (K) (-- : 6; height: 3) + 1: face_data / (K) (-- : 2; height: 3) + + >>> for cube in my_cubelist: + ... print(f"{cube.name()}: {cube.mesh.name()}, {cube.location}") + edge_data: my_mesh, edge + face_data: my_mesh, face + + >>> print(my_cubelist.extract_cube("edge_data")) + edge_data / (K) (-- : 6; height: 3) + Dimension coordinates: + height - x + Mesh coordinates: + latitude x - + longitude x - + + +Save +---- +.. |tagline: save| replace:: |unchanged| + +.. rubric:: |tagline: save| + +.. note:: UGRID saving support is limited to the NetCDF file format. + +The Iris saving process automatically detects if the :class:`~iris.cube.Cube` +has an associated :class:`~iris.experimental.ugrid.Mesh` and automatically +saves the file in a UGRID-conformant format: + +.. dropdown:: :opticon:`code` + + .. 
doctest:: ugrid_operations + + >>> from subprocess import run + + >>> from iris import save + + >>> cubelist_path = "my_cubelist.nc" + >>> save(my_cubelist, cubelist_path) + + >>> ncdump_result = run(["ncdump", "-h", cubelist_path], capture_output=True) + >>> print(ncdump_result.stdout.decode().replace("\t", " ")) + netcdf my_cubelist { + dimensions: + Mesh2d_node = 5 ; + Mesh2d_edge = 6 ; + Mesh2d_face = 2 ; + height = 3 ; + my_mesh_face_N_nodes = 4 ; + my_mesh_edge_N_nodes = 2 ; + variables: + int my_mesh ; + my_mesh:cf_role = "mesh_topology" ; + my_mesh:topology_dimension = 2 ; + my_mesh:long_name = "my_mesh" ; + my_mesh:node_coordinates = "longitude latitude" ; + my_mesh:edge_coordinates = "longitude_0 latitude_0" ; + my_mesh:face_coordinates = "longitude_1 latitude_1" ; + my_mesh:face_node_connectivity = "mesh2d_face" ; + my_mesh:edge_node_connectivity = "mesh2d_edge" ; + double longitude(Mesh2d_node) ; + longitude:units = "degrees_east" ; + longitude:standard_name = "longitude" ; + longitude:long_name = "node_x_coordinates" ; + double latitude(Mesh2d_node) ; + latitude:standard_name = "latitude" ; + double longitude_0(Mesh2d_edge) ; + longitude_0:standard_name = "longitude" ; + double latitude_0(Mesh2d_edge) ; + latitude_0:standard_name = "latitude" ; + double longitude_1(Mesh2d_face) ; + longitude_1:standard_name = "longitude" ; + double latitude_1(Mesh2d_face) ; + latitude_1:standard_name = "latitude" ; + int64 mesh2d_face(Mesh2d_face, my_mesh_face_N_nodes) ; + mesh2d_face:_FillValue = -1LL ; + mesh2d_face:cf_role = "face_node_connectivity" ; + mesh2d_face:start_index = 0LL ; + int64 mesh2d_edge(Mesh2d_edge, my_mesh_edge_N_nodes) ; + mesh2d_edge:demo = "Supports every standard CF property" ; + mesh2d_edge:cf_role = "edge_node_connectivity" ; + mesh2d_edge:start_index = 0LL ; + int64 edge_data(Mesh2d_edge, height) ; + edge_data:long_name = "edge_data" ; + edge_data:units = "K" ; + edge_data:mesh = "my_mesh" ; + edge_data:location = "edge" ; + int64 
height(height) ; + height:standard_name = "height" ; + int64 face_data(Mesh2d_face, height) ; + face_data:long_name = "face_data" ; + face_data:units = "K" ; + face_data:mesh = "my_mesh" ; + face_data:location = "face" ; + + // global attributes: + :Conventions = "CF-1.7" ; + } + + +The :func:`iris.experimental.ugrid.save_mesh` function allows +:class:`~iris.experimental.ugrid.Mesh`\es to be saved to file without +associated :class:`~iris.cube.Cube`\s: + +.. dropdown:: :opticon:`code` + + .. doctest:: ugrid_operations + + >>> from subprocess import run + + >>> from iris.experimental.ugrid import save_mesh + + >>> mesh_path = "my_mesh.nc" + >>> save_mesh(my_mesh, mesh_path) + + >>> ncdump_result = run(["ncdump", "-h", mesh_path], capture_output=True) + >>> print(ncdump_result.stdout.decode().replace("\t", " ")) + netcdf my_mesh { + dimensions: + Mesh2d_node = 5 ; + Mesh2d_edge = 6 ; + Mesh2d_face = 2 ; + my_mesh_face_N_nodes = 4 ; + my_mesh_edge_N_nodes = 2 ; + variables: + int my_mesh ; + my_mesh:cf_role = "mesh_topology" ; + my_mesh:topology_dimension = 2 ; + my_mesh:long_name = "my_mesh" ; + my_mesh:node_coordinates = "longitude latitude" ; + my_mesh:edge_coordinates = "longitude_0 latitude_0" ; + my_mesh:face_coordinates = "longitude_1 latitude_1" ; + my_mesh:face_node_connectivity = "mesh2d_face" ; + my_mesh:edge_node_connectivity = "mesh2d_edge" ; + double longitude(Mesh2d_node) ; + longitude:units = "degrees_east" ; + longitude:standard_name = "longitude" ; + longitude:long_name = "node_x_coordinates" ; + double latitude(Mesh2d_node) ; + latitude:standard_name = "latitude" ; + double longitude_0(Mesh2d_edge) ; + longitude_0:standard_name = "longitude" ; + double latitude_0(Mesh2d_edge) ; + latitude_0:standard_name = "latitude" ; + double longitude_1(Mesh2d_face) ; + longitude_1:standard_name = "longitude" ; + double latitude_1(Mesh2d_face) ; + latitude_1:standard_name = "latitude" ; + int64 mesh2d_face(Mesh2d_face, my_mesh_face_N_nodes) ; + 
mesh2d_face:_FillValue = -1LL ; + mesh2d_face:cf_role = "face_node_connectivity" ; + mesh2d_face:start_index = 0LL ; + int64 mesh2d_edge(Mesh2d_edge, my_mesh_edge_N_nodes) ; + mesh2d_edge:demo = "Supports every standard CF property" ; + mesh2d_edge:cf_role = "edge_node_connectivity" ; + mesh2d_edge:start_index = 0LL ; + + // global attributes: + :Conventions = "CF-1.7" ; + } + + +Load +---- +.. |tagline: load| replace:: |different| - UGRID parsing is opt-in + +.. rubric:: |tagline: load| + +.. note:: UGRID loading support is limited to the NetCDF file format. + +While Iris' UGRID support remains :mod:`~iris.experimental`, parsing UGRID when +loading a file remains **optional**. To load UGRID data from a file into the +Iris mesh data model, use the +:const:`iris.experimental.ugrid.PARSE_UGRID_ON_LOAD` context manager: + +.. dropdown:: :opticon:`code` + + .. doctest:: ugrid_operations + + >>> from iris import load + >>> from iris.experimental.ugrid import PARSE_UGRID_ON_LOAD + + >>> with PARSE_UGRID_ON_LOAD.context(): + ... loaded_cubelist = load(cubelist_path) + + # Sort CubeList to ensure consistent result. + >>> loaded_cubelist.sort(key=lambda cube: cube.name()) + >>> print(loaded_cubelist) + 0: edge_data / (K) (-- : 6; height: 3) + 1: face_data / (K) (-- : 2; height: 3) + +All the existing loading functionality still operates on UGRID-compliant +data - :class:`~iris.Constraint`\s, callbacks, :func:`~iris.load_cube` +etcetera: + +.. dropdown:: :opticon:`code` + + .. doctest:: ugrid_operations + + >>> from iris import Constraint, load_cube + + >>> with PARSE_UGRID_ON_LOAD.context(): + ... ground_cubelist = load(cubelist_path, Constraint(height=0)) + ... face_cube = load_cube(cubelist_path, "face_data") + + # Sort CubeList to ensure consistent result. 
+ >>> ground_cubelist.sort(key=lambda cube: cube.name()) + >>> print(ground_cubelist) + 0: edge_data / (K) (-- : 6) + 1: face_data / (K) (-- : 2) + + >>> print(face_cube) + face_data / (K) (-- : 2; height: 3) + Dimension coordinates: + height - x + Mesh coordinates: + latitude x - + longitude x - + Attributes: + Conventions CF-1.7 + +.. note:: + + We recommend caution if constraining on coordinates associated with a + :class:`~iris.experimental.ugrid.Mesh`. An individual coordinate value + might not be shared by any other data points, and using a coordinate range + will demand notably higher performance given the size of the dimension + versus structured grids + (:ref:`see the data model detail `). + +The :func:`iris.experimental.ugrid.load_mesh` and +:func:`~iris.experimental.ugrid.load_meshes` functions allow only +:class:`~iris.experimental.ugrid.Mesh`\es to be loaded from a file without +creating any associated :class:`~iris.cube.Cube`\s: + +.. dropdown:: :opticon:`code` + + .. doctest:: ugrid_operations + + >>> from iris.experimental.ugrid import load_mesh + + >>> with PARSE_UGRID_ON_LOAD.context(): + ... loaded_mesh = load_mesh(cubelist_path) + + >>> print(loaded_mesh) + Mesh : 'my_mesh' + topology_dimension: 2 + node + node_dimension: 'Mesh2d_node' + node coordinates + shape(5,)> + shape(5,)> + edge + edge_dimension: 'Mesh2d_edge' + edge_node_connectivity: shape(6, 2)> + edge coordinates + shape(6,)> + shape(6,)> + face + face_dimension: 'Mesh2d_face' + face_node_connectivity: shape(2, 4)> + face coordinates + shape(2,)> + shape(2,)> + long_name: 'my_mesh' + var_name: 'my_mesh' + +Plotting +-------- +.. |tagline: plotting| replace:: |different| - plot with GeoVista + +.. rubric:: |tagline: plotting| + +The Cartopy-Matplotlib combination is not optimised for displaying the high +number of irregular shapes associated with meshes. Thankfully mesh +visualisation is already popular in many other fields (e.g. 
CGI, gaming,
+SEM microscopy), so there is a wealth of tooling available, which
+:ref:`ugrid geovista` harnesses for cartographic plotting.
+
+GeoVista's default behaviour is to convert lat-lon information into full XYZ
+coordinates so the data is visualised on the surface of a 3D globe. The plots
+are interactive by default, so it's easy to explore the data in detail.
+
+2D projections have also been demonstrated in proofs of concept, and will
+be added to the API in the near future.
+
+This first example uses GeoVista to plot the ``face_cube`` that we created
+earlier:
+
+.. dropdown:: :opticon:`code`
+
+ .. code-block:: python
+
+ >>> from geovista import GeoPlotter, Transform
+ >>> from geovista.common import to_xyz
+
+
+ # We'll re-use this to plot some real global data later.
+ >>> def cube_faces_to_polydata(cube):
+ ... lons, lats = cube.mesh.node_coords
+ ... face_node = cube.mesh.face_node_connectivity
+ ... indices = face_node.indices_by_location()
+ ...
+ ... mesh = Transform.from_unstructured(
+ ... lons.points,
+ ... lats.points,
+ ... indices,
+ ... data=cube.data,
+ ... name=f"{cube.name()} / {cube.units}",
+ ... start_index=face_node.start_index,
+ ... )
+ ... return mesh
+
+ >>> print(face_cube)
+ face_data / (K) (-- : 2; height: 3)
+ Dimension coordinates:
+ height - x
+ Mesh coordinates:
+ latitude x -
+ longitude x -
+ Attributes:
+ Conventions CF-1.7
+
+ # Convert our mesh+data to a PolyData object.
+ # Just plotting a single height level.
+ >>> face_polydata = cube_faces_to_polydata(face_cube[:, 0])
+ >>> print(face_polydata)
+ PolyData (0x7ff4861ff4c0)
+ N Cells: 2
+ N Points: 5
+ X Bounds: 9.903e-01, 1.000e+00
+ Y Bounds: 0.000e+00, 1.392e-01
+ Z Bounds: 6.123e-17, 5.234e-02
+ N Arrays: 2
+
+ # Create the GeoVista plotter and add our mesh+data to it. 
+ >>> my_plotter = GeoPlotter() + >>> my_plotter.add_coastlines(color="black") + >>> my_plotter.add_base_layer(color="grey") + >>> my_plotter.add_mesh(face_polydata) + + # Centre the camera on the data. + >>> camera_region = to_xyz( + ... face_cube.coord("longitude").points, + ... face_cube.coord("latitude").points, + ... radius=3, + ... ) + >>> camera_pos = camera_region.mean(axis=0) + >>> my_plotter.camera.position = camera_pos + + >>> my_plotter.show() + + .. image:: images/plotting_basic.png + :alt: A GeoVista plot of the basic example Mesh. + + This artificial data makes West Africa rather chilly! + +Here's another example using a global cubed-sphere data set: + +.. dropdown:: :opticon:`code` + + .. code-block:: python + + >>> from iris import load_cube + >>> from iris.experimental.ugrid import PARSE_UGRID_ON_LOAD + + # Demonstrating with a global data set. + # You could also download this file from github.com/SciTools/iris-test-data. + >>> from iris.tests import get_data_path + >>> file_path = get_data_path( + ... [ + ... "NetCDF", + ... "unstructured_grid", + ... "lfric_surface_mean.nc", + ... ] + ... ) + >>> with PARSE_UGRID_ON_LOAD.context(): + ... 
global_cube = load_cube(file_path, "tstar_sea") + >>> print(global_cube) + sea_surface_temperature / (K) (-- : 1; -- : 13824) + Mesh coordinates: + latitude - x + longitude - x + Auxiliary coordinates: + time x - + Cell methods: + mean time (300 s) + mean time_counter + Attributes: + Conventions UGRID + description Created by xios + interval_operation 300 s + interval_write 1 d + name lfric_surface + online_operation average + timeStamp 2020-Feb-07 16:23:14 GMT + title Created by xios + uuid 489bcef5-3d1c-4529-be42-4ab5f8c8497b + + >>> global_polydata = cube_faces_to_polydata(global_cube) + >>> print(global_polydata) + PolyData (0x7f761b536160) + N Cells: 13824 + N Points: 13826 + X Bounds: -1.000e+00, 1.000e+00 + Y Bounds: -1.000e+00, 1.000e+00 + Z Bounds: -1.000e+00, 1.000e+00 + N Arrays: 2 + + >>> my_plotter = GeoPlotter() + >>> my_plotter.add_coastlines() + >>> my_plotter.add_mesh(global_polydata, show_edges=True) + + >>> my_plotter.show() + + .. image:: images/plotting_global.png + :alt: A GeoVista plot of a global sea surface temperature Mesh. + +Region Extraction +----------------- +.. |tagline: region extraction| replace:: |different| - use GeoVista for mesh analysis + +.. rubric:: |tagline: region extraction| + +As described in :doc:`data_model`, indexing for a range along a +:class:`~iris.cube.Cube`\'s :meth:`~iris.cube.Cube.mesh_dim` will not provide +a contiguous region, since **position on the unstructured dimension is +unrelated to spatial position**. This means that subsetted +:class:`~iris.experimental.ugrid.MeshCoord`\s cannot be reliably interpreted +as intended, and subsetting a :class:`~iris.experimental.ugrid.MeshCoord` is +therefore set to return an :class:`~iris.coords.AuxCoord` instead - breaking +the link between :class:`~iris.cube.Cube` and +:class:`~iris.experimental.ugrid.Mesh`: + +.. dropdown:: :opticon:`code` + + .. 
doctest:: ugrid_operations + + >>> edge_cube = my_cubelist.extract_cube("edge_data") + >>> print(edge_cube) + edge_data / (K) (-- : 6; height: 3) + Dimension coordinates: + height - x + Mesh coordinates: + latitude x - + longitude x - + + # Sub-setted MeshCoords have become AuxCoords. + >>> print(edge_cube[:-1]) + edge_data / (K) (-- : 5; height: 3) + Dimension coordinates: + height - x + Auxiliary coordinates: + latitude x - + longitude x - + +Extracting a region therefore requires extra steps - to determine the spatial +position of the data points before they can be analysed as inside/outside the +selected region. The recommended way to do this is using tools provided by +:ref:`ugrid geovista`, which is optimised for performant mesh analysis. + +This approach centres around using :meth:`geovista.geodesic.BBox.enclosed` to +get the subset of the original mesh that is inside the +:class:`~geovista.geodesic.BBox`. This subset :class:`pyvista.PolyData` object +includes the original indices of each datapoint - the ``vtkOriginalCellIds`` +array, which can be used to index the original :class:`~iris.cube.Cube`. Since +we **know** that this subset :class:`~iris.cube.Cube` represents a regional +mesh, we then reconstruct a :class:`~iris.experimental.ugrid.Mesh` from the +:class:`~iris.cube.Cube`\'s :attr:`~iris.cube.Cube.aux_coords` using +:meth:`iris.experimental.ugrid.Mesh.from_coords`: + +.. + Not using doctest here as want to keep GeoVista as optional dependency. + +.. dropdown:: :opticon:`code` + + .. code-block:: python + + >>> from geovista import Transform + >>> from geovista.geodesic import BBox + >>> from iris import load_cube + >>> from iris.experimental.ugrid import Mesh, PARSE_UGRID_ON_LOAD + + # Need a larger dataset to demonstrate this operation. + # You could also download this file from github.com/SciTools/iris-test-data. + >>> from iris.tests import get_data_path + >>> file_path = get_data_path( + ... [ + ... "NetCDF", + ... "unstructured_grid", + ... 
"lfric_ngvat_2D_72t_face_half_levels_main_conv_rain.nc", + ... ] + ... ) + + >>> with PARSE_UGRID_ON_LOAD.context(): + ... global_cube = load_cube(file_path, "conv_rain") + >>> print(global_cube) + surface_convective_rainfall_rate / (kg m-2 s-1) (-- : 72; -- : 864) + Mesh coordinates: + latitude - x + longitude - x + Auxiliary coordinates: + time x - + Cell methods: + point time + Attributes: + Conventions UGRID + description Created by xios + interval_operation 300 s + interval_write 300 s + name lfric_ngvat_2D_72t_face_half_levels_main_conv_rain + online_operation instant + timeStamp 2020-Oct-18 21:18:35 GMT + title Created by xios + uuid b3dc0fb4-9828-4663-a5ac-2a5763280159 + + # Convert the Mesh to a GeoVista PolyData object. + >>> lons, lats = global_cube.mesh.node_coords + >>> face_node = global_cube.mesh.face_node_connectivity + >>> indices = face_node.indices_by_location() + >>> global_polydata = Transform.from_unstructured( + ... lons.points, lats.points, indices, start_index=face_node.start_index + ... ) + + # Define a region of 4 corners connected by great circles. + # Specialised sub-classes of BBox are also available e.g. panel/wedge. + >>> region = BBox(lons=[0, 70, 70, 0], lats=[-25, -25, 45, 45]) + # 'Apply' the region to the PolyData object. + >>> region_polydata = region.enclosed(global_polydata, preference="center") + # Get the remaining face indices, to use for indexing the Cube. + >>> indices = region_polydata["vtkOriginalCellIds"] + + >>> print(type(indices)) + + # 101 is smaller than the original 864. + >>> print(len(indices)) + 101 + >>> print(indices[:10]) + [ 6 7 8 9 10 11 18 19 20 21] + + # Use the face indices to subset the global cube. + >>> region_cube = global_cube[:, indices] + + # In this case we **know** the indices correspond to a contiguous + # region, so we will convert the sub-setted Cube back into a + # Cube-with-Mesh. 
+ >>> new_mesh = Mesh.from_coords(*region_cube.coords(dimensions=1)) + >>> new_mesh_coords = new_mesh.to_MeshCoords(global_cube.location) + >>> for coord in new_mesh_coords: + ... region_cube.remove_coord(coord.name()) + ... region_cube.add_aux_coord(coord, 1) + + # A Mesh-Cube with a subset (101) of the original 864 faces. + >>> print(region_cube) + surface_convective_rainfall_rate / (kg m-2 s-1) (-- : 72; -- : 101) + Mesh coordinates: + latitude - x + longitude - x + Auxiliary coordinates: + time x - + Cell methods: + point time + Attributes: + Conventions UGRID + description Created by xios + interval_operation 300 s + interval_write 300 s + name lfric_ngvat_2D_72t_face_half_levels_main_conv_rain + online_operation instant + timeStamp 2020-Oct-18 21:18:35 GMT + title Created by xios + uuid b3dc0fb4-9828-4663-a5ac-2a5763280159 + +Regridding +---------- +.. |tagline: regridding| replace:: |different| - use iris-esmf-regrid for mesh regridders + +.. rubric:: |tagline: regridding| + +Regridding to or from a mesh requires different logic than Iris' existing +regridders, which are designed for structured grids. For this we recommend +ESMF's powerful regridding tools, which integrate with Iris' mesh data model +via the :ref:`ugrid iris-esmf-regrid` package. + +.. todo: inter-sphinx links when available. + +Regridding is achieved via the +:class:`esmf_regrid.experimental.unstructured_scheme.MeshToGridESMFRegridder` +and +:class:`~esmf_regrid.experimental.unstructured_scheme.GridToMeshESMFRegridder` +classes. Regridding from a source :class:`~iris.cube.Cube` to a target +:class:`~iris.cube.Cube` involves initialising and then calling one of these +classes. Initialising is done by passing in the source and target +:class:`~iris.cube.Cube` as arguments. The regridder is then called by passing +the source :class:`~iris.cube.Cube` as an argument. We can demonstrate this +with the +:class:`~esmf_regrid.experimental.unstructured_scheme.MeshToGridESMFRegridder`: + +.. 
+ Not using doctest here as want to keep iris-esmf-regrid as optional dependency.
+
+.. dropdown:: :opticon:`code`
+
+ .. code-block:: python
+
+ >>> from esmf_regrid.experimental.unstructured_scheme import MeshToGridESMFRegridder
+ >>> from iris import load, load_cube
+ >>> from iris.experimental.ugrid import PARSE_UGRID_ON_LOAD
+
+ # You could also download these files from github.com/SciTools/iris-test-data.
+ >>> from iris.tests import get_data_path
+ >>> mesh_file = get_data_path(
+ ... ["NetCDF", "unstructured_grid", "lfric_surface_mean.nc"]
+ ... )
+ >>> grid_file = get_data_path(
+ ... ["NetCDF", "regrid", "regrid_template_global_latlon.nc"]
+ ... )
+
+ # Load a list of cubes defined on the same Mesh.
+ >>> with PARSE_UGRID_ON_LOAD.context():
+ ... mesh_cubes = load(mesh_file)
+
+ # Extract a specific cube.
+ >>> mesh_cube1 = mesh_cubes.extract_cube("sea_surface_temperature")
+ >>> print(mesh_cube1)
+ sea_surface_temperature / (K) (-- : 1; -- : 13824)
+ Mesh coordinates:
+ latitude - x
+ longitude - x
+ Auxiliary coordinates:
+ time x -
+ Cell methods:
+ mean time (300 s)
+ mean time_counter
+ Attributes:
+ Conventions UGRID
+ description Created by xios
+ interval_operation 300 s
+ interval_write 1 d
+ name lfric_surface
+ online_operation average
+ timeStamp 2020-Feb-07 16:23:14 GMT
+ title Created by xios
+ uuid 489bcef5-3d1c-4529-be42-4ab5f8c8497b
+
+ # Load the target grid.
+ >>> sample_grid = load_cube(grid_file)
+ >>> print(sample_grid)
+ sample_grid / (unknown) (latitude: 180; longitude: 360)
+ Dimension coordinates:
+ latitude x -
+ longitude - x
+ Attributes:
+ Conventions CF-1.7
+
+ # Initialise the regridder.
+ >>> rg = MeshToGridESMFRegridder(mesh_cube1, sample_grid)
+
+ # Regrid the mesh cube. 
+ >>> result1 = rg(mesh_cube1)
+ >>> print(result1)
+ sea_surface_temperature / (K) (-- : 1; latitude: 180; longitude: 360)
+ Dimension coordinates:
+ latitude - x -
+ longitude - - x
+ Auxiliary coordinates:
+ time x - -
+ Cell methods:
+ mean time (300 s)
+ mean time_counter
+ Attributes:
+ Conventions UGRID
+ description Created by xios
+ interval_operation 300 s
+ interval_write 1 d
+ name lfric_surface
+ online_operation average
+ timeStamp 2020-Feb-07 16:23:14 GMT
+ title Created by xios
+ uuid 489bcef5-3d1c-4529-be42-4ab5f8c8497b
+
+.. note::
+
+ **All** :class:`~iris.cube.Cube` :attr:`~iris.cube.Cube.attributes` are
+ retained when regridding, so watch out for any attributes that reference
+ the format (there are several in these examples) - you may want to manually
+ remove them to avoid later confusion.
+
+The initialisation process is computationally expensive so we use caching to
+improve performance. Once a regridder has been initialised, it can be used on
+any :class:`~iris.cube.Cube` which has been defined on the same
+:class:`~iris.experimental.ugrid.Mesh` (or on the same **grid** in the case of
+:class:`~esmf_regrid.experimental.unstructured_scheme.GridToMeshESMFRegridder`).
+Since calling a regridder is usually a lot faster than initialising, reusing
+regridders can save a lot of time. We can demonstrate the reuse of the
+previously initialised regridder:
+
+.. dropdown:: :opticon:`code`
+
+ .. code-block:: python
+
+ # Extract a different cube defined on the same Mesh. 
+ >>> mesh_cube2 = mesh_cubes.extract_cube("precipitation_flux") + >>> print(mesh_cube2) + precipitation_flux / (kg m-2 s-1) (-- : 1; -- : 13824) + Mesh coordinates: + latitude - x + longitude - x + Auxiliary coordinates: + time x - + Cell methods: + mean time (300 s) + mean time_counter + Attributes: + Conventions UGRID + description Created by xios + interval_operation 300 s + interval_write 1 d + name lfric_surface + online_operation average + timeStamp 2020-Feb-07 16:23:14 GMT + title Created by xios + uuid 489bcef5-3d1c-4529-be42-4ab5f8c8497b + + # Regrid the new mesh cube using the same regridder. + >>> result2 = rg(mesh_cube2) + >>> print(result2) + precipitation_flux / (kg m-2 s-1) (-- : 1; latitude: 180; longitude: 360) + Dimension coordinates: + latitude - x - + longitude - - x + Auxiliary coordinates: + time x - - + Cell methods: + mean time (300 s) + mean time_counter + Attributes: + Conventions UGRID + description Created by xios + interval_operation 300 s + interval_write 1 d + name lfric_surface + online_operation average + timeStamp 2020-Feb-07 16:23:14 GMT + title Created by xios + uuid 489bcef5-3d1c-4529-be42-4ab5f8c8497b + +Support also exists for saving and loading previously initialised regridders - +:func:`esmf_regrid.experimental.io.save_regridder` and +:func:`~esmf_regrid.experimental.io.load_regridder` - so that they can be +re-used by future scripts. + +Equality +-------- +.. |tagline: equality| replace:: |unchanged| + +.. rubric:: |tagline: equality| + +:class:`~iris.experimental.ugrid.Mesh` comparison is supported, and comparing +two ':class:`~iris.experimental.ugrid.Mesh`-:class:`~iris.cube.Cube`\s' will +include a comparison of the respective +:class:`~iris.experimental.ugrid.Mesh`\es, with no extra action needed by the +user. + +.. 
note:: + + Keep an eye on memory demand when comparing large + :class:`~iris.experimental.ugrid.Mesh`\es, but note that + :class:`~iris.experimental.ugrid.Mesh`\ equality is enabled for lazy + processing (:doc:`/userguide/real_and_lazy_data`), so if the + :class:`~iris.experimental.ugrid.Mesh`\es being compared are lazy the + process will use less memory than their total size. + +Combining Cubes +--------------- +.. |tagline: combining cubes| replace:: |pending| + +.. rubric:: |tagline: combining cubes| + +Merging or concatenating :class:`~iris.cube.Cube`\s (described in +:doc:`/userguide/merge_and_concat`) with two different +:class:`~iris.experimental.ugrid.Mesh`\es is not possible - a +:class:`~iris.cube.Cube` must be associated with just a single +:class:`~iris.experimental.ugrid.Mesh`, and merge/concatenate are not yet +capable of combining multiple :class:`~iris.experimental.ugrid.Mesh`\es into +one. + +:class:`~iris.cube.Cube`\s that include +:class:`~iris.experimental.ugrid.MeshCoord`\s can still be merged/concatenated +on dimensions other than the :meth:`~iris.cube.Cube.mesh_dim`, since such +:class:`~iris.cube.Cube`\s will by definition share the same +:class:`~iris.experimental.ugrid.Mesh`. + +.. seealso:: + + You may wish to investigate + :func:`iris.experimental.ugrid.recombine_submeshes`, which can be used + for a very specific type of :class:`~iris.experimental.ugrid.Mesh` + combination not detailed here. + +Arithmetic +---------- +.. |tagline: arithmetic| replace:: |pending| + +.. rubric:: |tagline: arithmetic| + +:class:`~iris.cube.Cube` Arithmetic (described in :doc:`/userguide/cube_maths`) +has not yet been adapted to handle :class:`~iris.cube.Cube`\s that include +:class:`~iris.experimental.ugrid.MeshCoord`\s. + + +.. todo: + Enumerate other popular operations that aren't yet possible + (and are they planned soon?) + +.. |new| replace:: ✨ New +.. |unchanged| replace:: ♻️ Unchanged +.. |different| replace:: ⚠️ Different +.. 
|pending| replace:: 🚧 Support Pending \ No newline at end of file diff --git a/docs/src/further_topics/ugrid/partner_packages.rst b/docs/src/further_topics/ugrid/partner_packages.rst new file mode 100644 index 00000000000..8e36f4ffc2d --- /dev/null +++ b/docs/src/further_topics/ugrid/partner_packages.rst @@ -0,0 +1,100 @@ +.. _ugrid partners: + +Iris' Mesh Partner Packages +**************************** +Python is an easy to use language and has formed a very strong collaborative +scientific community, which is why Iris is written in Python. *Performant* +Python relies on calls down to low level languages like C, which is ideal for +structured grid work since +they can be directly represented as NumPy arrays. This is more difficult when +working with unstructured meshes where extra steps are needed to determine data +position (:ref:`see the data model detail `), and we need +to find ways of again passing the operations down to more optimised languages. + +The Iris team are therefore developing 'wrapper' packages, which make it quick +and easy to analyse Iris mesh data via some popular Python packages that use +powerful tools under the hood, working in C and other languages. + +These solutions have been placed in their own 'partner packages' for several +reasons: + +* Can be useful to others who are not using Iris. + + * Everyone working with multi-dimensional geographic datasets shares common + problems that need solving. + * Wider user base = stronger community = better solutions. + +* Only some Iris users will need them - they are **optional** Iris dependencies. + + * They introduce a lot of new API. + * They introduce new large dependencies that take time to install and need + disk space. + +Below you can learn more about the partner packages and how they are useful. +Specifics of what operations would require their installation can be found in: +:doc:`operations`. + +.. 
important:: **Experimental** + + As with Iris' mesh support, these packages are still in the + experimental stages. They would love your feedback, but as immature + packages their API, documentation, test coverage and CI are still + 'under construction'. + + +.. _`ugrid geovista`: + +`GeoVista`_ +=========== +.. image:: images/geovistalogo.svg + :width: 300 + :class: no-scaled-link + +.. rubric:: "Cartographic rendering and mesh analytics powered by `PyVista`_" + +PyVista is described as "VTK for humans" - VTK is a very powerful toolkit for +working with meshes, and PyVista brings that power into the Python ecosystem. +GeoVista in turn makes it easy to use PyVista specifically for cartographic +work, designed from the start with the Iris +:class:`~iris.experimental.ugrid.Mesh` in mind. + +Applications +------------ +* Interactively plot mesh data: + + * On a 3D globe. + * On your favourite projection. + +* Extract a specific region from a mesh. +* Combine multiple meshes into one. + +.. _`ugrid iris-esmf-regrid`: + +`iris-esmf-regrid`_ +=================== +.. image:: images/iris-esmf-regrid.svg + :width: 300 + :class: no-scaled-link + +.. rubric:: "A collection of structured and unstructured ESMF regridding schemes for Iris" + +ESMF provide a sophisticated, performant regridding utility that supports a +variety of regridding types with both structured grids and unstructured meshes, +and this also has a flexible Python interface - ESMPy. iris-esmf-regrid takes +advantage of having a specific use-case - regridding Iris +:class:`~iris.cube.Cube`\s - to provide ESMPy-Iris wrappers that make the +process as easy as possible, with highly optimised performance. + +Applications +------------ +* Regrid structured to unstructured. +* Regrid unstructured to structured. +* Regrid with dask integration, computing in parallel and maintaining data + laziness. +* | Save a prepared regridder for re-use in subsequent runs. 
+ | Regridders can even be re-used on sources with different masks - a
+ significant efficiency gain.
+
+.. _GeoVista: https://github.com/bjlittle/geovista
+.. _PyVista: https://docs.pyvista.org/index.html
+.. _iris-esmf-regrid: https://github.com/SciTools-incubator/iris-esmf-regrid
diff --git a/docs/src/index.rst b/docs/src/index.rst
index e59a6f0527e..d6fc5f2f7e0 100644
--- a/docs/src/index.rst
+++ b/docs/src/index.rst
@@ -131,6 +131,7 @@ For **Iris 2.4** and earlier documentation please see the
 further_topics/metadata
 further_topics/lenient_metadata
 further_topics/lenient_maths
+ further_topics/ugrid/index
 .. toctree::
diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst
index 31b22f499a0..b71182b77a6 100644
--- a/docs/src/whatsnew/latest.rst
+++ b/docs/src/whatsnew/latest.rst
@@ -16,8 +16,10 @@ This document explains the changes made to Iris for this release
 The highlights for this minor release of Iris include:
- * We've added experimental support for `UGRID`_ meshes which can now be loaded
- and attached to a cube.
+ * We've added experimental support for
+ :ref:`Meshes `, which can now be loaded and
+ attached to a cube. Mesh support is based on the `CF-UGRID`_
+ model.
+ * We've also dropped support for ``Python 3.7``.
 And finally, get in touch with us on :issue:`GitHub` if you have
@@ -37,7 +39,7 @@ This document explains the changes made to Iris for this release
 ===========
 #. `@bjlittle`_, `@pp-mo`_, `@trexfeathers`_ and `@stephenworsley`_ added
- support for unstructured meshes, as described by `UGRID`_. This involved
+ support for :ref:`unstructured meshes `. This involved
 adding a data model (:pull:`3968`, :pull:`4014`, :pull:`4027`, :pull:`4036`,
 :pull:`4053`, :pull:`4439`) and API (:pull:`4063`, :pull:`4064`), and
 supporting representation (:pull:`4033`, :pull:`4054`) of data on meshes. 
@@ -54,14 +56,14 @@ This document explains the changes made to Iris for this release :class:`~iris.cube.Cube` via a :class:`~iris.experimental.ugrid.mesh.MeshCoord`. #. `@trexfeathers`_ added support for loading unstructured mesh data from netcdf data, - for files using the `UGRID`_ conventions. + for files using the `CF-UGRID`_ conventions. The context manager :obj:`~iris.experimental.ugrid.load.PARSE_UGRID_ON_LOAD` provides a way to load UGRID files so that :class:`~iris.cube.Cube`\ s can be returned with a :class:`~iris.experimental.ugrid.mesh.Mesh` attached. (:pull:`4058`). -#. `@pp-mo`_ added support to save cubes with meshes to netcdf files, using the - `UGRID`_ conventions. +#. `@pp-mo`_ added support to save cubes with :ref:`meshes ` to netcdf + files, using the `CF-UGRID`_ conventions. The existing :meth:`iris.save` function now does this, when saving cubes with meshes. A routine :meth:`iris.experimental.ugrid.save.save_mesh` allows saving :class:`~iris.experimental.ugrid.mesh.Mesh` objects to netcdf *without* any associated data @@ -82,7 +84,7 @@ This document explains the changes made to Iris for this release :class:`~iris.coords.AuxCoord` :attr:`~iris.coords.AuxCoord.points` and :class:`~iris.experimental.ugrid.mesh.Connectivity` :attr:`~iris.experimental.ugrid.mesh.Connectivity.indices` under the - `UGRID`_ model. (:pull:`4375`) + :ref:`mesh model `. (:pull:`4375`) #. `@bsherratt`_ added a `threshold` parameter to :meth:`~iris.cube.Cube.intersection` (:pull:`4363`) @@ -318,7 +320,7 @@ This document explains the changes made to Iris for this release :func:`~iris.analysis.cartography.wrap_lons` and updated affected tests using assertArrayAllClose following :issue:`3993`. (:pull:`4421`) - + #. `@rcomer`_ updated some tests to work with Matplotlib v3.5. (:pull:`4428`) #. `@rcomer`_ applied minor fixes to some regridding tests. 
(:pull:`4432`) @@ -351,7 +353,6 @@ This document explains the changes made to Iris for this release Whatsnew resources in alphabetical order: .. _NEP-29: https://numpy.org/neps/nep-0029-deprecation_policy.html -.. _UGRID: http://ugrid-conventions.github.io/ugrid-conventions/ .. _sort-all: https://github.com/aio-libs/sort-all .. _faster documentation building: https://docs.readthedocs.io/en/stable/guides/conda.html#making-builds-faster-with-mamba .. _Metarelate: http://www.metarelate.net/ From 6bdc6d5da0014278cad810f5bfa1326b5a391b11 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Fri, 28 Jan 2022 14:07:13 +0000 Subject: [PATCH 33/69] Deprecate regrid conservative (#4551) * Deprecated experimental regrid_conservative. * Fix link. --- lib/iris/experimental/regrid_conservative.py | 44 ++++++++++++++++++++ 1 file changed, 44 insertions(+) diff --git a/lib/iris/experimental/regrid_conservative.py b/lib/iris/experimental/regrid_conservative.py index 421bd86c935..bfa048ddf0d 100644 --- a/lib/iris/experimental/regrid_conservative.py +++ b/lib/iris/experimental/regrid_conservative.py @@ -6,16 +6,35 @@ """ Support for conservative regridding via ESMPy. +.. note:: + + .. deprecated:: 3.2.0 + + This package will be removed in a future release. + Please use + `iris-esmf-regrid `_ + instead. + """ import cartopy.crs as ccrs import numpy as np import iris +from iris._deprecation import warn_deprecated from iris.analysis._interpolation import get_xy_dim_coords from iris.analysis._regrid import RectilinearRegridder from iris.util import _meshgrid +wmsg = ( + "The 'iris.experimental.regrid_conservative' package is deprecated since " + "version 3.2, and will be removed in a future release. Please use " + "iris-esmf-regrid instead. " + "See https://github.com/SciTools-incubator/iris-esmf-regrid." +) +warn_deprecated(wmsg) + + #: A static Cartopy Geodetic() instance for transforming to true-lat-lons.
_CRS_TRUELATLON = ccrs.Geodetic() @@ -131,6 +150,22 @@ def regrid_conservative_via_esmpy(source_cube, grid_cube): """ Perform a conservative regridding with ESMPy. + .. note :: + + .. deprecated:: 3.2.0 + + This function is scheduled to be removed in a future release. + Please use + `iris-esmf-regrid `_ + instead. + + For example : + + .. code:: + + from esmf_regrid.schemes import ESMFAreaWeighted + result = src_cube.regrid(grid_cube, ESMFAreaWeighted()) + Regrids the data of a source cube onto a new grid defined by a destination cube. @@ -169,6 +204,15 @@ def regrid_conservative_via_esmpy(source_cube, grid_cube): To alter this, make a prior call to ESMF.Manager(). """ + wmsg = ( + "The function " + "'iris.experimental.regrid_conservative." + "regrid_conservative_via_esmpy' " + "has been deprecated, and will be removed in a future release. " + "Please consult the docstring for details." + ) + warn_deprecated(wmsg) + # Lazy import so we can build the docs with no ESMF. import ESMF From 53c93cb29229fd0c60c84388565a1f80e351bae7 Mon Sep 17 00:00:00 2001 From: Bill Little Date: Fri, 28 Jan 2022 14:15:24 +0000 Subject: [PATCH 34/69] minor optimisations (#4549) * minor optimisations * avoid unncecessary dict update * whatsnew entry --- docs/src/whatsnew/latest.rst | 4 +++ lib/iris/_merge.py | 7 +++-- lib/iris/cube.py | 60 ++++++++++++++++++++++-------------- 3 files changed, 45 insertions(+), 26 deletions(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index b71182b77a6..62d4e7a0edf 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -193,6 +193,10 @@ This document explains the changes made to Iris for this release :class:`benchmarks.HorizontalChunkedRegridding` shows a time decrease from >10s to 625ms. (:issue:`4280`, :pull:`4400`) +#.
`@bjlittle`_ included an optimisation to :class:`~iris.cube.Cube.coord_dims` + to avoid unnecessary processing whenever a coordinate instance that already + exists within the cube is provided. (:pull:`4549`) + 🔥 Deprecations =============== diff --git a/lib/iris/_merge.py b/lib/iris/_merge.py index 6758e9f55d1..bc12080523a 100644 --- a/lib/iris/_merge.py +++ b/lib/iris/_merge.py @@ -1809,7 +1809,8 @@ def key_func(coord): # Order the coordinates by hints, axis, and definition. for coord in sorted(coords, key=key_func): - if not cube.coord_dims(coord) and coord.shape == (1,): + dims = tuple(cube.coord_dims(coord)) + if not dims and coord.shape == (1,): # Extract the scalar coordinate data and metadata. scalar_defns.append(coord.metadata) # Because we know there's a single Cell in the @@ -1834,11 +1835,11 @@ def key_func(coord): # Extract the vector coordinate and metadata. if id(coord) in cube_aux_coord_ids: vector_aux_coords_and_dims.append( - _CoordAndDims(coord, tuple(cube.coord_dims(coord))) + _CoordAndDims(coord, dims) ) else: vector_dim_coords_and_dims.append( - _CoordAndDims(coord, tuple(cube.coord_dims(coord))) + _CoordAndDims(coord, dims) ) factory_defns = [] diff --git a/lib/iris/cube.py b/lib/iris/cube.py index 8183dd385c6..b456bd9663a 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -1451,39 +1451,53 @@ def coord_dims(self, coord): The (name of the) coord to look for. """ - - coord = self.coord(coord) - - # Search for existing coordinate (object) on the cube, faster lookup - # than equality - makes no functional difference. - matches = [ - (dim,) - for coord_, dim in self._dim_coords_and_dims - if coord_ is coord - ] - if not matches: - matches = [ - dims - for coord_, dims in self._aux_coords_and_dims - if coord_ is coord - ] - - # Search derived aux coords - if not matches: + name_provided = False + if isinstance(coord, str): + # Forced to look-up the coordinate if we only have the name. 
+ coord = self.coord(coord) + name_provided = True + + coord_id = id(coord) + + # Dimension of dimension coordinate by object id + dims_by_id = {id(c): (d,) for c, d in self._dim_coords_and_dims} + # Check for id match - faster than equality check + match = dims_by_id.get(coord_id) + + if match is None: + # Dimension/s of auxiliary coordinate by object id + aux_dims_by_id = {id(c): d for c, d in self._aux_coords_and_dims} + # Check for id match - faster than equality + match = aux_dims_by_id.get(coord_id) + if match is None: + dims_by_id.update(aux_dims_by_id) + + if match is None and not name_provided: + # We may have an equivalent coordinate but not the actual + # cube coordinate instance - so forced to perform coordinate + # lookup to attempt to retrieve it + coord = self.coord(coord) + # Check for id match - faster than equality + match = dims_by_id.get(id(coord)) + + # Search derived aux coordinates + if match is None: target_metadata = coord.metadata - def match(factory): + def matcher(factory): return factory.metadata == target_metadata - factories = filter(match, self._aux_factories) + factories = filter(matcher, self._aux_factories) matches = [ factory.derived_dims(self.coord_dims) for factory in factories ] + if matches: + match = matches[0] - if not matches: + if match is None: raise iris.exceptions.CoordinateNotFoundError(coord.name()) - return matches[0] + return match def cell_measure_dims(self, cell_measure): """ From 77798309a8bb3f570c37e8c4fe2cd8a43327db98 Mon Sep 17 00:00:00 2001 From: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> Date: Fri, 28 Jan 2022 15:16:36 +0000 Subject: [PATCH 35/69] Update #4505 doctests for #4528. 
(#4554) --- docs/src/further_topics/ugrid/operations.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/src/further_topics/ugrid/operations.rst b/docs/src/further_topics/ugrid/operations.rst index 84e817bf488..f96e3e406c9 100644 --- a/docs/src/further_topics/ugrid/operations.rst +++ b/docs/src/further_topics/ugrid/operations.rst @@ -393,7 +393,7 @@ etcetera: latitude x - longitude x - Attributes: - Conventions CF-1.7 + Conventions 'CF-1.7' .. note:: @@ -495,7 +495,7 @@ earlier: latitude x - longitude x - Attributes: - Conventions CF-1.7 + Conventions 'CF-1.7' # Convert our mesh+data to a PolyData object. # Just plotting a single height level. @@ -827,7 +827,7 @@ with the latitude x - longitude - x Attributes: - Conventions CF-1.7 + Conventions 'CF-1.7' # Initialise the regridder. >>> rg = MeshToGridESMFRegridder(mesh_cube1, sample_grid) From 3f360414346fa9e8dbd2df32a75eb31d91a43258 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Fri, 28 Jan 2022 15:38:55 +0000 Subject: [PATCH 36/69] Added whatsnew for #4551. (#4553) --- docs/src/whatsnew/latest.rst | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 62d4e7a0edf..5bccbe71c59 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -231,6 +231,9 @@ This document explains the changes made to Iris for this release :func:`~iris.analysis.maths.intersection_of_cubes` function will be removed in a future release of Iris. (:pull:`4541`) +#. `@pp-mo`_ deprecated :mod:`iris.experimental.regrid_conservative`. This is + now replaced by `iris-emsf-regrid`_ + 🔗 Dependencies =============== @@ -357,6 +360,8 @@ This document explains the changes made to Iris for this release Whatsnew resources in alphabetical order: .. _NEP-29: https://numpy.org/neps/nep-0029-deprecation_policy.html -.. _sort-all: https://github.com/aio-libs/sort-all -.. 
_faster documentation building: https://docs.readthedocs.io/en/stable/guides/conda.html#making-builds-faster-with-mamba .. _Metarelate: http://www.metarelate.net/ +.. _UGRID: http://ugrid-conventions.github.io/ugrid-conventions/ +.. _iris-emsf-regrid: https://github.com/SciTools-incubator/iris-esmf-regrid +.. _faster documentation building: https://docs.readthedocs.io/en/stable/guides/conda.html#making-builds-faster-with-mamba +.. _sort-all: https://github.com/aio-libs/sort-all From 39082a0707c3d7a2f7f3dcbfa7454b7167f4780e Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sat, 29 Jan 2022 09:50:38 +0000 Subject: [PATCH 37/69] Updated environment lockfiles (#4555) Co-authored-by: Lockfile bot --- requirements/ci/nox.lock/py38-linux-64.lock | 40 ++++++++++----------- 1 file changed, 20 insertions(+), 20 deletions(-) diff --git a/requirements/ci/nox.lock/py38-linux-64.lock b/requirements/ci/nox.lock/py38-linux-64.lock index 364b656f5db..368554bb259 100644 --- a/requirements/ci/nox.lock/py38-linux-64.lock +++ b/requirements/ci/nox.lock/py38-linux-64.lock @@ -9,16 +9,16 @@ https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed3 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.36.1-hea4e1c9_2.tar.bz2#bd4f2e711b39af170e7ff15163fe87ee -https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-11.2.0-h5c6108e_11.tar.bz2#2dcb18a9a0fa31f4f29e5a9b3eade394 -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-11.2.0-he4da1e4_11.tar.bz2#0bf83958e788f1e75ba26154cb702afe +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-11.2.0-h5c6108e_12.tar.bz2#f547bf125ab234cec9c89491b262fc2f 
+https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-11.2.0-he4da1e4_12.tar.bz2#7ff3b832ba5e6918c0d026976359d065 https://conda.anaconda.org/conda-forge/linux-64/mpi-1.0-mpich.tar.bz2#c1fcff3417b5a22bbc4cf6e8c23648cf https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.28-ha770c72_0.tar.bz2#56594fdd5a80774a80d546fbbccf2c03 https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-11.2.0-h69a702a_11.tar.bz2#4ea2f9f83b617a7682e8aa05dcb37c6a -https://conda.anaconda.org/conda-forge/linux-64/libgomp-11.2.0-h1d223b6_11.tar.bz2#1d16527c76842bf9c41e9399d39d8097 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-11.2.0-h69a702a_12.tar.bz2#33c165be455015cc74e8d857182f3f58 +https://conda.anaconda.org/conda-forge/linux-64/libgomp-11.2.0-h1d223b6_12.tar.bz2#763c5ec8116d984b4a33342236d7da36 https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-1_gnu.tar.bz2#561e277319a41d4f24f5c05a9ef63c04 https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab -https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-11.2.0-h1d223b6_11.tar.bz2#e3495f4f93cfd6b68021cbe2b5844cd5 +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-11.2.0-h1d223b6_12.tar.bz2#d34efbb8d7d6312c816b4bb647b818b1 https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.3-h516909a_0.tar.bz2#1378b88874f42ac31b2f8e4f6975cb7b https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.18.1-h7f98852_0.tar.bz2#f26ef8098fab1f719c91eb760d63381a @@ -29,7 +29,7 @@ https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.1-h36c2ea0_2.tar.bz2# https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h58526e2_1001.tar.bz2#8c54672728e8ec6aa6db90cf2806d220 
https://conda.anaconda.org/conda-forge/linux-64/icu-69.1-h9c3ff4c_0.tar.bz2#e0773c9556d588b062a4e1424a6a02fa https://conda.anaconda.org/conda-forge/linux-64/jbig-2.1-h7f98852_2003.tar.bz2#1aa0cee79792fa97b7ff4545110b60bf -https://conda.anaconda.org/conda-forge/linux-64/jpeg-9d-h36c2ea0_0.tar.bz2#ea02ce6037dbe81803ae6123e5ba1568 +https://conda.anaconda.org/conda-forge/linux-64/jpeg-9e-h7f98852_0.tar.bz2#5c214edc675a7fb7cbb34b1d854e5141 https://conda.anaconda.org/conda-forge/linux-64/lerc-3.0-h9c3ff4c_0.tar.bz2#7fcefde484980d23f0ec24c11e314d2e https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.0.9-h7f98852_6.tar.bz2#b0f44f63f7d771d7670747a1dd5d5ac1 https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.8-h7f98852_0.tar.bz2#91d22aefa665265e8e31988b15145c8a @@ -44,7 +44,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.18-pthreads_h8fe https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.6-h9c3ff4c_1008.tar.bz2#16e143a1ed4b4fd169536373957f6fee https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.32.1-h7f98852_1000.tar.bz2#772d69f030955d9646d3d0eaf21d859d -https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.2.1-h7f98852_0.tar.bz2#90607c4c0247f04ec98b48997de71c1a +https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.2.2-h7f98852_1.tar.bz2#46cf26ecc8775a0aab300ea1821aaa3c https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.11-h36c2ea0_1013.tar.bz2#dcddf696ff5dfcab567100d691678e18 https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.3-h9c3ff4c_1.tar.bz2#fbe97e8fa6f275d7c76a09e795adc3e6 https://conda.anaconda.org/conda-forge/linux-64/mpich-3.4.3-h846660c_100.tar.bz2#1bb747e2de717cb9a6501d72539d6556 @@ -83,7 +83,7 @@ https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h10796ff_3.tar.bz2#2 
https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-13_linux64_openblas.tar.bz2#b17676dbd6688396c3a3076259fb7907 https://conda.anaconda.org/conda-forge/linux-64/libglib-2.70.2-h174f98d_1.tar.bz2#d03a54631298fd1ab732ff65f6ed3a07 https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-13_linux64_openblas.tar.bz2#018b80e8f21d8560ae4961567e3e00c9 -https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.43.0-h812cca2_1.tar.bz2#d0a7846b7b3b8fb0d8b36904a53b8155 +https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.46.0-h812cca2_0.tar.bz2#507fa47e9075f889af8e8b72925379be https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.37-h21135ba_2.tar.bz2#b6acf807307d033d4b7e758b4f44b036 https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.10.0-ha56f1ee_2.tar.bz2#6ab4eaa11ff01801cffca0a27489dc04 https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.3.0-h6f004c6_2.tar.bz2#34fda41ca84e67232888c9a885903055 @@ -101,14 +101,13 @@ https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.6-h04a7f16_0.tar https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.18.5-h9f60fe5_3.tar.bz2#511aa83cdfcc0132380db5daf2f15f27 https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h64030ff_2.tar.bz2#112eb9b5b93f0c02e59aea4fd1967363 https://conda.anaconda.org/conda-forge/linux-64/krb5-1.19.2-hcc1bbae_3.tar.bz2#e29650992ae593bc05fc93722483e5c3 -https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.2.1-h3452ae3_0.tar.bz2#6d4bf6265d998b6c975c26a6a24062a2 +https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.2.2-h3452ae3_0.tar.bz2#c363665b4aabe56aae4f8981cff5b153 https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.0.3-he3ba5ed_0.tar.bz2#f9dbabc7e01c459ed7a1d1d64b206e9b https://conda.anaconda.org/conda-forge/linux-64/nss-3.74-hb5efdd6_0.tar.bz2#136876ca50177058594f6c2944e95c40 https://conda.anaconda.org/conda-forge/linux-64/python-3.8.12-hb7a2778_2_cpython.tar.bz2#148ea076514259c7f562fbfba956a693 
https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h7f98852_1.tar.bz2#536cc5db4d0a3ba0630541aec064b5e4 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.10-h7f98852_1003.tar.bz2#f59c1242cc1dd93e72c2ee2b360979eb https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.12-py_0.tar.bz2#2489a97287f90176ecdc3ca982b4b0a0 -https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2#5f095bc6454094e96f146491fd03633b https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-2.0.10-pyhd8ed1ab_0.tar.bz2#ea77236c8031cfa821720b21b4cb0ceb https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.0.0-pyhd8ed1ab_0.tar.bz2#3a8fc8b627d5fb6af827e126a10a86c6 @@ -116,7 +115,7 @@ https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.4-pyh9f0ad1d_0.tar.bz https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2#a50559fad0affdbb33729a68669ca1cb https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.4-pyhd8ed1ab_0.tar.bz2#7b50d840543d9cdae100e91582c33035 https://conda.anaconda.org/conda-forge/noarch/filelock-3.4.2-pyhd8ed1ab_1.tar.bz2#d3f5797d3f9625c64860c93fc4359e64 -https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.13.1-hba837de_1005.tar.bz2#fd3611672eb91bc9d24fd6fb970037eb +https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.13.94-ha180cfb_0.tar.bz2#c534c5248da4913002473919d76d0161 https://conda.anaconda.org/conda-forge/noarch/fsspec-2022.1.0-pyhd8ed1ab_0.tar.bz2#188e095f4dc38887bb48b065734b9e8d https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.18.5-hf529b03_3.tar.bz2#524a9f1718bac53a6cf4906bcc51d044 https://conda.anaconda.org/conda-forge/noarch/idna-3.3-pyhd8ed1ab_0.tar.bz2#40b50b8b030f5f2f22085c062ed013dd @@ -128,6 +127,7 @@ https://conda.anaconda.org/conda-forge/noarch/locket-0.2.0-py_2.tar.bz2#709e8671 
https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 https://conda.anaconda.org/conda-forge/noarch/nose-1.3.7-py_1006.tar.bz2#382019d5f8e9362ef6f60a8d4e7bce8f https://conda.anaconda.org/conda-forge/noarch/olefile-0.46-pyh9f0ad1d_1.tar.bz2#0b2e68acc8c78c8cc392b90983481f58 +https://conda.anaconda.org/conda-forge/noarch/platformdirs-2.3.0-pyhd8ed1ab_0.tar.bz2#7bc119135be2a43e1701432399d8c28a https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.7-pyhd8ed1ab_0.tar.bz2#727e2216d9c47455d8ddc060eb2caad9 https://conda.anaconda.org/conda-forge/noarch/pyshp-2.1.3-pyh44b312d_0.tar.bz2#2d1867b980785eb44b8122184d8b42a6 @@ -157,7 +157,7 @@ https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.3.2-py38h1fd1430_1. https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h3cfcdeb_1.tar.bz2#37d7568c595f0cfcd0c493f5ca0344ab https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.0.1-py38h497a2fe_1.tar.bz2#1ef7b5f4826ca48a15e2cd98a5c3436d https://conda.anaconda.org/conda-forge/linux-64/mpi4py-3.1.3-py38he865349_0.tar.bz2#b1b3d6847a68251a1465206ab466b475 -https://conda.anaconda.org/conda-forge/linux-64/numpy-1.22.0-py38h6ae9a64_1.tar.bz2#e06061f06c024fab614ace4bbb3e34df +https://conda.anaconda.org/conda-forge/linux-64/numpy-1.22.1-py38h6ae9a64_0.tar.bz2#9ec24c7acb2252816f1f6b6687317432 https://conda.anaconda.org/conda-forge/noarch/packaging-21.3-pyhd8ed1ab_0.tar.bz2#71f1ab2de48613876becddd496371c85 https://conda.anaconda.org/conda-forge/noarch/partd-1.2.0-pyhd8ed1ab_0.tar.bz2#0c32f563d7f22e3a34c95cad8cc95651 https://conda.anaconda.org/conda-forge/linux-64/pillow-6.2.1-py38hd70f55b_1.tar.bz2#80d719bee2b77a106b199150c0829107 @@ -169,22 +169,21 @@ https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0 
https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-2.0.2-py38h497a2fe_1.tar.bz2#977d03222271270ea8fe35388bf13752 https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py38h497a2fe_3.tar.bz2#131de7d638aa59fb8afbce59f1a8aa98 https://conda.anaconda.org/conda-forge/linux-64/qt-5.12.9-ha98a1a1_5.tar.bz2#9b27fa0b1044a2119fb1b290617fe06f -https://conda.anaconda.org/conda-forge/linux-64/setuptools-59.8.0-py38h578d9bd_0.tar.bz2#5c6cd89b15e0059af9dd07cc60a6afbd +https://conda.anaconda.org/conda-forge/linux-64/setuptools-60.5.0-py38h578d9bd_0.tar.bz2#9807c89f3ce846015dbad3c1d04348a5 https://conda.anaconda.org/conda-forge/linux-64/tornado-6.1-py38h497a2fe_2.tar.bz2#63b3b55c98b4239134e0be080f448944 https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-14.0.0-py38h497a2fe_0.tar.bz2#8da7787169411910df2a62dc8ef533e0 -https://conda.anaconda.org/conda-forge/linux-64/virtualenv-20.4.7-py38h578d9bd_1.tar.bz2#37717ce393db8536ae2b613839af4274 +https://conda.anaconda.org/conda-forge/linux-64/virtualenv-20.13.0-py38h578d9bd_0.tar.bz2#561081f4a30990533541979c9ee84732 https://conda.anaconda.org/conda-forge/linux-64/brotlipy-0.7.0-py38h497a2fe_1003.tar.bz2#9189b42c42b9c87b2b2068cbe31901a8 -https://conda.anaconda.org/conda-forge/linux-64/cftime-1.5.1.1-py38h6c62de6_1.tar.bz2#d4a47fd2bbc8292a322d462734b0ada5 +https://conda.anaconda.org/conda-forge/linux-64/cftime-1.5.2-py38h6c62de6_0.tar.bz2#73892e60ccea826c7f7a2215e48d22cf https://conda.anaconda.org/conda-forge/linux-64/cryptography-36.0.1-py38h3e25421_0.tar.bz2#acc14d0d71dbf74f6a15f2456951b6cf -https://conda.anaconda.org/conda-forge/noarch/dask-core-2022.1.0-pyhd8ed1ab_0.tar.bz2#e7d934ff2c617f0bfc62ab77c160f093 -https://conda.anaconda.org/conda-forge/linux-64/editdistance-s-1.0.0-py38h1fd1430_2.tar.bz2#482431310c7b3320a31c8c6ce82a7a15 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.28.5-py38h497a2fe_0.tar.bz2#f611d0be8205d5b0566f9c97e7d66ae3 
+https://conda.anaconda.org/conda-forge/noarch/dask-core-2022.1.1-pyhd8ed1ab_0.tar.bz2#7968db84df10b74d9792d66d7da216df +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.29.0-py38h497a2fe_0.tar.bz2#3d96473ac57b7260a3fc3bdb13d2db79 https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-3.2.0-hb4a5f5f_0.tar.bz2#d03d53e6bcb97e6a97a1659fb38aa76e https://conda.anaconda.org/conda-forge/noarch/jinja2-3.0.3-pyhd8ed1ab_0.tar.bz2#036d872c653780cb26e797e2e2f61b4c https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.8.1-mpi_mpich_h319fa22_1.tar.bz2#7583fbaea3648f692c0c019254bc196c https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py38h6c62de6_1006.tar.bz2#829b1209dfadd431a11048d6eeaf5bef https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.6.0-pyhd8ed1ab_0.tar.bz2#0941325bf48969e2b3b19d0951740950 -https://conda.anaconda.org/conda-forge/linux-64/pandas-1.3.5-py38h43a58ef_0.tar.bz2#171cc96da3b1a0ebd4bf2b5586b7cda3 +https://conda.anaconda.org/conda-forge/linux-64/pandas-1.4.0-py38h43a58ef_0.tar.bz2#23427f52c81076594a95c006ebf7552e https://conda.anaconda.org/conda-forge/noarch/pip-21.3.1-pyhd8ed1ab_0.tar.bz2#e4fe2a9af78ff11f1aced7e62128c6a8 https://conda.anaconda.org/conda-forge/noarch/pygments-2.11.2-pyhd8ed1ab_0.tar.bz2#caef60540e2239e27bf62569a5015e3b https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.3.0-py38h5383654_1.tar.bz2#5b600e019fa7c33be73bdb626236936b @@ -194,8 +193,9 @@ https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.2.0-py38h6c62de6_1. 
https://conda.anaconda.org/conda-forge/linux-64/scipy-1.7.3-py38h56a6a73_0.tar.bz2#2d318049369bb52d2687b0ac2be82751 https://conda.anaconda.org/conda-forge/linux-64/shapely-1.8.0-py38h596eeab_5.tar.bz2#ec3b783081e14a9dc0eb5ce609649728 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-napoleon-0.7-py_0.tar.bz2#0bc25ff6f2e34af63ded59692df5f749 +https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py38h1fd1430_1.tar.bz2#c494f75082f9c052944fda1b22c83336 https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.0.1-py38h6c62de6_2.tar.bz2#350322b046c129e5802b79358a1343f7 -https://conda.anaconda.org/conda-forge/noarch/identify-2.3.7-pyhd8ed1ab_0.tar.bz2#ae1a5e834fbca62ee88ab55fb276be63 +https://conda.anaconda.org/conda-forge/noarch/identify-2.4.6-pyhd8ed1ab_0.tar.bz2#d4030c75256440b8375b2f32c4ed35cd https://conda.anaconda.org/conda-forge/noarch/imagehash-4.2.1-pyhd8ed1ab_0.tar.bz2#01cc8698b6e1a124dc4f585516c27643 https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.5.1-py38hf4fb855_0.tar.bz2#47cf0cab2ae368e1062e75cfbc4277af https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.5.4-mpi_mpich_h1364a43_0.tar.bz2#b6ba4f487ef9fd5d353ff277df06d133 From 25d3b35edc38f4d567e02ab84cc7e6ffae5d1df7 Mon Sep 17 00:00:00 2001 From: Ruth Comer <10599679+rcomer@users.noreply.github.com> Date: Sat, 29 Jan 2022 15:23:49 +0000 Subject: [PATCH 38/69] Update latest.rst (#4556) --- docs/src/whatsnew/latest.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 5bccbe71c59..87832f30253 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -47,7 +47,7 @@ This document explains the changes made to Iris for this release objects introduced are :class:`iris.experimental.ugrid.mesh.Mesh`, :class:`iris.experimental.ugrid.mesh.MeshCoord` and :obj:`iris.experimental.ugrid.load.PARSE_UGRID_ON_LOAD`. 
- A :class:`iris.experimental.ugrid.mesh.Mesh` contains a full description of a UGRID + A :class:`~iris.experimental.ugrid.mesh.Mesh` contains a full description of a UGRID type mesh. :class:`~iris.experimental.ugrid.mesh.MeshCoord`\ s are coordinates that reference and represent a :class:`~iris.experimental.ugrid.mesh.Mesh` for use on a :class:`~iris.cube.Cube`. :class:`~iris.cube.Cube`\ s are also given the @@ -86,7 +86,7 @@ This document explains the changes made to Iris for this release :attr:`~iris.experimental.ugrid.mesh.Connectivity.indices` under the :ref:`mesh model `. (:pull:`4375`) -#. `@bsherratt`_ added a `threshold` parameter to +#. `@bsherratt`_ added a ``threshold`` parameter to :meth:`~iris.cube.Cube.intersection` (:pull:`4363`) #. `@wjbenfold`_ added test data to ci benchmarks so that it is accessible to @@ -232,7 +232,7 @@ This document explains the changes made to Iris for this release a future release of Iris. (:pull:`4541`) #. `@pp-mo`_ deprecated :mod:`iris.experimental.regrid_conservative`. This is - now replaced by `iris-emsf-regrid`_ + now replaced by `iris-emsf-regrid`_. (:pull:`4551`) 🔗 Dependencies @@ -325,7 +325,7 @@ This document explains the changes made to Iris for this release #. `@akuhnregnier`_ removed addition of period from :func:`~iris.analysis.cartography.wrap_lons` and updated affected tests - using assertArrayAllClose following :issue:`3993`. + using ``assertArrayAllClose`` following :issue:`3993`. (:pull:`4421`) #. `@rcomer`_ updated some tests to work with Matplotlib v3.5. (:pull:`4428`) From 2fa7c598d4bcbe7e861b12b28a874360af974421 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Mon, 31 Jan 2022 11:15:33 +0000 Subject: [PATCH 39/69] Deprecate regrids (#4548) * Deprecations. * Correct and clarify dosctrings for PointInCell. * Remove reference from UnstructuredNearest to ProjectedUnstructuredNearest. * Added whatsnew content. * Tiny fix to deprecation docs. * Fixes to whatsnew text. 
* Move still-used code from experimental.regrid to analysis._area_weighted. * Fix minor test problem. * No iris.cube import in iris.analysis._area_weighted, to avoid circular import. --- docs/src/whatsnew/latest.rst | 15 + lib/iris/analysis/__init__.py | 19 +- lib/iris/analysis/_area_weighted.py | 1022 ++++++++++++++- lib/iris/experimental/regrid.py | 1102 ++--------------- .../test_AreaWeightedRegridder.py | 17 +- 5 files changed, 1148 insertions(+), 1027 deletions(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 87832f30253..e2d4c2bc0bb 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -234,6 +234,12 @@ This document explains the changes made to Iris for this release #. `@pp-mo`_ deprecated :mod:`iris.experimental.regrid_conservative`. This is now replaced by `iris-emsf-regrid`_. (:pull:`4551`) +#. `@pp-mo`_ deprecated everything in :mod:`iris.experimental.regrid`. + Most features have a preferred exact alternative, as suggested, *except* + :class:`iris.experimental.regrid.ProjectedUnstructuredLinear` : that has no + identical equivalent, but :class:`iris.analysis.UnstructuredNearest` is + suggested as being quite close (though possibly slower). (:pull:`4548`) + 🔗 Dependencies =============== @@ -289,6 +295,15 @@ This document explains the changes made to Iris for this release contributing to the docs ` to the docs. (:pull:`4461`) +#. `@pp-mo`_ improved and corrected docstrings of + :class:`iris.analysis.PointInCell`, making it clear what is the actual + calculation performed. (:pull:`4548`) + +#. `@pp-mo`_ removed reference in docstring of + :class:`iris.analysis.UnstructuredNearest` to the obsolete (deprecated) + :class:`iris.experimental.regrid.ProjectedUnstructuredNearest`. 
+ (:pull:`4548`) + 💼 Internal =========== diff --git a/lib/iris/analysis/__init__.py b/lib/iris/analysis/__init__.py index 01c12c748a7..465a5210657 100644 --- a/lib/iris/analysis/__init__.py +++ b/lib/iris/analysis/__init__.py @@ -2691,14 +2691,6 @@ class UnstructuredNearest: .. Note:: Currently only supports regridding, not interpolation. - .. Note:: - This scheme performs essentially the same job as - :class:`iris.experimental.regrid.ProjectedUnstructuredNearest`. - That scheme is faster, but only works well on data in a limited - region of the globe, covered by a specified projection. - This approach is more rigorously correct and can be applied to global - datasets. - """ # Note: the argument requirements are simply those of the underlying @@ -2769,6 +2761,9 @@ class PointInCell: This class describes the point-in-cell regridding scheme for use typically with :meth:`iris.cube.Cube.regrid()`. + Each result datapoint is an average over all source points that fall inside + that (bounded) target cell. + The PointInCell regridder can regrid data from a source grid of any dimensionality and in any coordinate system. The location of each source point is specified by X and Y coordinates @@ -2786,8 +2781,12 @@ class PointInCell: def __init__(self, weights=None): """ - Point-in-cell regridding scheme suitable for regridding over one - or more orthogonal coordinates. + Point-in-cell regridding scheme suitable for regridding from a source + cube with X and Y coordinates all on the same dimensions, to a target + cube with bounded X and Y coordinates on separate X and Y dimensions. + + Each result datapoint is an average over all source points that fall + inside that (bounded) target cell. 
Optional Args: diff --git a/lib/iris/analysis/_area_weighted.py b/lib/iris/analysis/_area_weighted.py index ae162f6c538..8381185e58e 100644 --- a/lib/iris/analysis/_area_weighted.py +++ b/lib/iris/analysis/_area_weighted.py @@ -3,8 +3,18 @@ # This file is part of Iris and is released under the LGPL license. # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. +import functools +import cf_units +import numpy as np +import numpy.ma as ma + +from iris._lazy_data import map_complete_blocks from iris.analysis._interpolation import get_xy_dim_coords, snapshot_grid +from iris.analysis._regrid import RectilinearRegridder +import iris.analysis.cartography +import iris.coord_systems +from iris.util import _meshgrid class AreaWeightedRegridder: @@ -43,10 +53,6 @@ def __init__(self, src_grid_cube, target_grid_cube, mdtol=1): the same coordinate system. """ - from iris.experimental.regrid import ( - _regrid_area_weighted_rectilinear_src_and_grid__prepare, - ) - # Snapshot the state of the source cube to ensure that the regridder is # impervious to external changes to the original cubes. self._src_grid = snapshot_grid(src_grid_cube) @@ -103,10 +109,6 @@ def __call__(self, cube): in the horizontal dimensions will be combined before regridding. """ - from iris.experimental.regrid import ( - _regrid_area_weighted_rectilinear_src_and_grid__perform, - ) - src_x, src_y = get_xy_dim_coords(cube) if (src_x, src_y) != self._src_grid: raise ValueError( @@ -130,3 +132,1007 @@ def __call__(self, cube): return _regrid_area_weighted_rectilinear_src_and_grid__perform( cube, _regrid_info, mdtol=self._mdtol ) + + +# +# Support routines, all originally in iris.experimental.regrid +# + + +def _get_xy_coords(cube): + """ + Return the x and y coordinates from a cube. + + This function will preferentially return a pair of dimension + coordinates (if there are more than one potential x or y dimension + coordinates a ValueError will be raised). 
If the cube does not have + a pair of x and y dimension coordinates it will return 1D auxiliary + coordinates (including scalars). If there is not one and only one set + of x and y auxiliary coordinates a ValueError will be raised. + + Having identified the x and y coordinates, the function checks that they + have equal coordinate systems and that they do not occupy the same + dimension on the cube. + + Args: + + * cube: + An instance of :class:`iris.cube.Cube`. + + Returns: + A tuple containing the cube's x and y coordinates. + + """ + # Look for a suitable dimension coords first. + x_coords = cube.coords(axis="x", dim_coords=True) + if not x_coords: + # If there is no x coord in dim_coords look for scalars or + # monotonic coords in aux_coords. + x_coords = [ + coord + for coord in cube.coords(axis="x", dim_coords=False) + if coord.ndim == 1 and coord.is_monotonic() + ] + if len(x_coords) != 1: + raise ValueError( + "Cube {!r} must contain a single 1D x " + "coordinate.".format(cube.name()) + ) + x_coord = x_coords[0] + + # Look for a suitable dimension coords first. + y_coords = cube.coords(axis="y", dim_coords=True) + if not y_coords: + # If there is no y coord in dim_coords look for scalars or + # monotonic coords in aux_coords. + y_coords = [ + coord + for coord in cube.coords(axis="y", dim_coords=False) + if coord.ndim == 1 and coord.is_monotonic() + ] + if len(y_coords) != 1: + raise ValueError( + "Cube {!r} must contain a single 1D y " + "coordinate.".format(cube.name()) + ) + y_coord = y_coords[0] + + if x_coord.coord_system != y_coord.coord_system: + raise ValueError( + "The cube's x ({!r}) and y ({!r}) " + "coordinates must have the same coordinate " + "system.".format(x_coord.name(), y_coord.name()) + ) + + # The x and y coordinates must describe different dimensions + # or be scalar coords. 
+ x_dims = cube.coord_dims(x_coord) + x_dim = None + if x_dims: + x_dim = x_dims[0] + + y_dims = cube.coord_dims(y_coord) + y_dim = None + if y_dims: + y_dim = y_dims[0] + + if x_dim is not None and y_dim == x_dim: + raise ValueError( + "The cube's x and y coords must not describe the " + "same data dimension." + ) + + return x_coord, y_coord + + +def _within_bounds(src_bounds, tgt_bounds, orderswap=False): + """ + Determine which target bounds lie within the extremes of the source bounds. + + Args: + + * src_bounds (ndarray): + An (n, 2) shaped array of monotonic contiguous source bounds. + * tgt_bounds (ndarray): + An (n, 2) shaped array corresponding to the target bounds. + + Kwargs: + + * orderswap (bool): + A Boolean indicating whether the target bounds are in descending order + (True). Defaults to False. + + Returns: + Boolean ndarray, indicating whether each target bound is within the + extremes of the source bounds. + + """ + min_bound = np.min(src_bounds) - 1e-14 + max_bound = np.max(src_bounds) + 1e-14 + + # Swap upper-lower is necessary. + if orderswap is True: + upper, lower = tgt_bounds.T + else: + lower, upper = tgt_bounds.T + + return ((lower <= max_bound) * (lower >= min_bound)) * ( + (upper <= max_bound) * (upper >= min_bound) + ) + + +def _cropped_bounds(bounds, lower, upper): + """ + Return a new bounds array and corresponding slice object (or indices) of + the original data array, resulting from cropping the provided bounds + between the specified lower and upper values. The bounds at the + extremities will be truncated so that they start and end with lower and + upper. + + This function will return an empty NumPy array and slice if there is no + overlap between the region covered by bounds and the region from lower to + upper. + + If lower > upper the resulting bounds may not be contiguous and the + indices object will be a tuple of indices rather than a slice object. 
+ + Args: + + * bounds: + An (n, 2) shaped array of monotonic contiguous bounds. + * lower: + Lower bound at which to crop the bounds array. + * upper: + Upper bound at which to crop the bounds array. + + Returns: + A tuple of the new bounds array and the corresponding slice object or + indices from the zeroth axis of the original array. + + """ + reversed_flag = False + # Ensure order is increasing. + if bounds[0, 0] > bounds[-1, 0]: + # Reverse bounds + bounds = bounds[::-1, ::-1] + reversed_flag = True + + # Number of bounds. + n = bounds.shape[0] + + if lower <= upper: + if lower > bounds[-1, 1] or upper < bounds[0, 0]: + new_bounds = bounds[0:0] + indices = slice(0, 0) + else: + # A single region lower->upper. + if lower < bounds[0, 0]: + # Region extends below bounds so use first lower bound. + lindex = 0 + lower = bounds[0, 0] + else: + # Index of last lower bound less than or equal to lower. + lindex = np.nonzero(bounds[:, 0] <= lower)[0][-1] + if upper > bounds[-1, 1]: + # Region extends above bounds so use last upper bound. + uindex = n - 1 + upper = bounds[-1, 1] + else: + # Index of first upper bound greater than or equal to + # upper. + uindex = np.nonzero(bounds[:, 1] >= upper)[0][0] + # Extract the bounds in our region defined by lower->upper. + new_bounds = np.copy(bounds[lindex : (uindex + 1), :]) + # Replace first and last values with specified bounds. + new_bounds[0, 0] = lower + new_bounds[-1, 1] = upper + if reversed_flag: + indices = slice(n - (uindex + 1), n - lindex) + else: + indices = slice(lindex, uindex + 1) + else: + # Two regions [0]->upper, lower->[-1] + # [0]->upper + if upper < bounds[0, 0]: + # Region outside src bounds. + new_bounds_left = bounds[0:0] + indices_left = tuple() + slice_left = slice(0, 0) + else: + if upper > bounds[-1, 1]: + # Whole of bounds. + uindex = n - 1 + upper = bounds[-1, 1] + else: + # Index of first upper bound greater than or equal to upper. 
+ uindex = np.nonzero(bounds[:, 1] >= upper)[0][0] + # Extract the bounds in our region defined by [0]->upper. + new_bounds_left = np.copy(bounds[0 : (uindex + 1), :]) + # Replace last value with specified bound. + new_bounds_left[-1, 1] = upper + if reversed_flag: + indices_left = tuple(range(n - (uindex + 1), n)) + slice_left = slice(n - (uindex + 1), n) + else: + indices_left = tuple(range(0, uindex + 1)) + slice_left = slice(0, uindex + 1) + # lower->[-1] + if lower > bounds[-1, 1]: + # Region is outside src bounds. + new_bounds_right = bounds[0:0] + indices_right = tuple() + slice_right = slice(0, 0) + else: + if lower < bounds[0, 0]: + # Whole of bounds. + lindex = 0 + lower = bounds[0, 0] + else: + # Index of last lower bound less than or equal to lower. + lindex = np.nonzero(bounds[:, 0] <= lower)[0][-1] + # Extract the bounds in our region defined by lower->[-1]. + new_bounds_right = np.copy(bounds[lindex:, :]) + # Replace first value with specified bound. + new_bounds_right[0, 0] = lower + if reversed_flag: + indices_right = tuple(range(0, n - lindex)) + slice_right = slice(0, n - lindex) + else: + indices_right = tuple(range(lindex, n)) + slice_right = slice(lindex, None) + + if reversed_flag: + # Flip everything around. + indices_left, indices_right = indices_right, indices_left + slice_left, slice_right = slice_right, slice_left + + # Combine regions. + new_bounds = np.concatenate((new_bounds_left, new_bounds_right)) + # Use slices if possible, but if we have two regions use indices. + if indices_left and indices_right: + indices = indices_left + indices_right + elif indices_left: + indices = slice_left + elif indices_right: + indices = slice_right + else: + indices = slice(0, 0) + + if reversed_flag: + new_bounds = new_bounds[::-1, ::-1] + + return new_bounds, indices + + +def _cartesian_area(y_bounds, x_bounds): + """ + Return an array of the areas of each cell given two arrays + of cartesian bounds. 
+ + Args: + + * y_bounds: + An (n, 2) shaped NumPy array. + * x_bounds: + An (m, 2) shaped NumPy array. + + Returns: + An (n, m) shaped Numpy array of areas. + + """ + heights = y_bounds[:, 1] - y_bounds[:, 0] + widths = x_bounds[:, 1] - x_bounds[:, 0] + return np.abs(np.outer(heights, widths)) + + +def _spherical_area(y_bounds, x_bounds, radius=1.0): + """ + Return an array of the areas of each cell on a sphere + given two arrays of latitude and longitude bounds in radians. + + Args: + + * y_bounds: + An (n, 2) shaped NumPy array of latitide bounds in radians. + * x_bounds: + An (m, 2) shaped NumPy array of longitude bounds in radians. + * radius: + Radius of the sphere. Default is 1.0. + + Returns: + An (n, m) shaped Numpy array of areas. + + """ + return iris.analysis.cartography._quadrant_area(y_bounds, x_bounds, radius) + + +def _get_bounds_in_units(coord, units, dtype): + """Return a copy of coord's bounds in the specified units and dtype.""" + # The bounds are cast to dtype before conversion to prevent issues when + # mixing float32 and float64 types. + return coord.units.convert(coord.bounds.astype(dtype), units).astype(dtype) + + +def _weighted_mean_with_mdtol(data, weights, axis=None, mdtol=0): + """ + Return the weighted mean of an array over the specified axis + using the provided weights (if any) and a permitted fraction of + masked data. + + Args: + + * data (array-like): + Data to be averaged. + + * weights (array-like): + An array of the same shape as the data that specifies the contribution + of each corresponding data element to the calculated mean. + + Kwargs: + + * axis (int or tuple of ints): + Axis along which the mean is computed. The default is to compute + the mean of the flattened array. + + * mdtol (float): + Tolerance of missing data. The value returned in each element of the + returned array will be masked if the fraction of masked data exceeds + mdtol. This fraction is weighted by the `weights` array if one is + provided. 
mdtol=0 means no missing data is tolerated + while mdtol=1 will mean the resulting element will be masked if and + only if all the contributing elements of data are masked. + Defaults to 0. + + Returns: + Numpy array (possibly masked) or scalar. + + """ + if ma.is_masked(data): + res, unmasked_weights_sum = ma.average( + data, weights=weights, axis=axis, returned=True + ) + if mdtol < 1: + weights_sum = weights.sum(axis=axis) + frac_masked = 1 - np.true_divide(unmasked_weights_sum, weights_sum) + mask_pt = frac_masked > mdtol + if np.any(mask_pt) and not isinstance(res, ma.core.MaskedConstant): + if np.isscalar(res): + res = ma.masked + elif ma.isMaskedArray(res): + res.mask |= mask_pt + else: + res = ma.masked_array(res, mask=mask_pt) + else: + res = np.average(data, weights=weights, axis=axis) + return res + + +def _regrid_area_weighted_array( + src_data, x_dim, y_dim, weights_info, index_info, mdtol=0 +): + """ + Regrid the given data from its source grid to a new grid using + an area weighted mean to determine the resulting data values. + + .. note:: + + Elements in the returned array that lie either partially + or entirely outside of the extent of the source grid will + be masked irrespective of the value of mdtol. + + Args: + + * src_data: + An N-dimensional NumPy array. + * x_dim: + The X dimension within `src_data`. + * y_dim: + The Y dimension within `src_data`. + * weights_info: + The area weights information to be used for area-weighted + regridding. + + Kwargs: + + * mdtol: + Tolerance of missing data. The value returned in each element of the + returned array will be masked if the fraction of missing data exceeds + mdtol. This fraction is calculated based on the area of masked cells + within each target cell. mdtol=0 means no missing data is tolerated + while mdtol=1 will mean the resulting element will be masked if and + only if all the overlapping elements of the source grid are masked. + Defaults to 0. 
+ + Returns: + The regridded data as an N-dimensional NumPy array. The lengths + of the X and Y dimensions will now match those of the target + grid. + + """ + ( + blank_weights, + src_area_weights, + new_data_mask_basis, + ) = weights_info + + ( + result_x_extent, + result_y_extent, + square_data_indices_y, + square_data_indices_x, + src_area_datas_required, + ) = index_info + + # Ensure we have x_dim and y_dim. + x_dim_orig = x_dim + y_dim_orig = y_dim + if y_dim is None: + src_data = np.expand_dims(src_data, axis=src_data.ndim) + y_dim = src_data.ndim - 1 + if x_dim is None: + src_data = np.expand_dims(src_data, axis=src_data.ndim) + x_dim = src_data.ndim - 1 + # Move y_dim and x_dim to last dimensions + if not x_dim == src_data.ndim - 1: + src_data = np.moveaxis(src_data, x_dim, -1) + if not y_dim == src_data.ndim - 2: + if x_dim < y_dim: + # note: y_dim was shifted along by one position when + # x_dim was moved to the last dimension + src_data = np.moveaxis(src_data, y_dim - 1, -2) + elif x_dim > y_dim: + src_data = np.moveaxis(src_data, y_dim, -2) + x_dim = src_data.ndim - 1 + y_dim = src_data.ndim - 2 + + # Create empty "pre-averaging" data array that will enable the + # src_data data coresponding to a given target grid point, + # to be stacked per point. + # Note that dtype is not preserved and that the array mask + # allows for regions that do not overlap. + new_shape = list(src_data.shape) + new_shape[x_dim] = result_x_extent + new_shape[y_dim] = result_y_extent + + # Use input cube dtype or convert values to the smallest possible float + # dtype when necessary. + dtype = np.promote_types(src_data.dtype, np.float16) + + # Axes of data over which the weighted mean is calculated. 
+ axis = (y_dim, x_dim) + + # Use previously established indices + + src_area_datas_square = src_data[ + ..., square_data_indices_y, square_data_indices_x + ] + + _, src_area_datas_required = np.broadcast_arrays( + src_area_datas_square, src_area_datas_required + ) + + src_area_datas = np.where( + src_area_datas_required, src_area_datas_square, 0 + ) + + # Flag to indicate whether the original data was a masked array. + src_masked = src_data.mask.any() if ma.isMaskedArray(src_data) else False + if src_masked: + src_area_masks_square = src_data.mask[ + ..., square_data_indices_y, square_data_indices_x + ] + src_area_masks = np.where( + src_area_datas_required, src_area_masks_square, True + ) + + else: + # If the weights were originally blank, set the weights to all 1 to + # avoid divide by 0 error and set the new data mask for making the + # values 0 + src_area_weights = np.where(blank_weights, 1, src_area_weights) + + new_data_mask = np.broadcast_to(new_data_mask_basis, new_shape) + + # Broadcast the weights array to allow numpy's ma.average + # to be called. + # Assign new shape to raise error on copy. + src_area_weights.shape = src_area_datas.shape[-3:] + # Broadcast weights to match shape of data. + _, src_area_weights = np.broadcast_arrays(src_area_datas, src_area_weights) + + # Mask the data points + if src_masked: + src_area_datas = np.ma.array(src_area_datas, mask=src_area_masks) + + # Calculate weighted mean taking into account missing data. 
+ new_data = _weighted_mean_with_mdtol( + src_area_datas, weights=src_area_weights, axis=axis, mdtol=mdtol + ) + new_data = new_data.reshape(new_shape) + if src_masked: + new_data_mask = new_data.mask + + # Mask the data if originally masked or if the result has masked points + if ma.isMaskedArray(src_data): + new_data = ma.array( + new_data, + mask=new_data_mask, + fill_value=src_data.fill_value, + dtype=dtype, + ) + elif new_data_mask.any(): + new_data = ma.array(new_data, mask=new_data_mask, dtype=dtype) + else: + new_data = new_data.astype(dtype) + + # Restore data to original form + if x_dim_orig is None and y_dim_orig is None: + new_data = np.squeeze(new_data, axis=x_dim) + new_data = np.squeeze(new_data, axis=y_dim) + elif y_dim_orig is None: + new_data = np.squeeze(new_data, axis=y_dim) + new_data = np.moveaxis(new_data, -1, x_dim_orig) + elif x_dim_orig is None: + new_data = np.squeeze(new_data, axis=x_dim) + new_data = np.moveaxis(new_data, -1, y_dim_orig) + elif x_dim_orig < y_dim_orig: + # move the x_dim back first, so that the y_dim will + # then be moved to its original position + new_data = np.moveaxis(new_data, -1, x_dim_orig) + new_data = np.moveaxis(new_data, -1, y_dim_orig) + else: + # move the y_dim back first, so that the x_dim will + # then be moved to its original position + new_data = np.moveaxis(new_data, -2, y_dim_orig) + new_data = np.moveaxis(new_data, -1, x_dim_orig) + + return new_data + + +def _regrid_area_weighted_rectilinear_src_and_grid__prepare( + src_cube, grid_cube +): + """ + First (setup) part of 'regrid_area_weighted_rectilinear_src_and_grid'. + + Check inputs and calculate related info. The 'regrid info' returned + can be re-used over many 2d slices. + + """ + # Get the 1d monotonic (or scalar) src and grid coordinates. + src_x, src_y = _get_xy_coords(src_cube) + grid_x, grid_y = _get_xy_coords(grid_cube) + + # Condition 1: All x and y coordinates must have contiguous bounds to + # define areas. 
+ if ( + not src_x.is_contiguous() + or not src_y.is_contiguous() + or not grid_x.is_contiguous() + or not grid_y.is_contiguous() + ): + raise ValueError( + "The horizontal grid coordinates of both the source " + "and grid cubes must have contiguous bounds." + ) + + # Condition 2: Everything must have the same coordinate system. + src_cs = src_x.coord_system + grid_cs = grid_x.coord_system + if src_cs != grid_cs: + raise ValueError( + "The horizontal grid coordinates of both the source " + "and grid cubes must have the same coordinate " + "system." + ) + + # Condition 3: cannot create vector coords from scalars. + src_x_dims = src_cube.coord_dims(src_x) + src_x_dim = None + if src_x_dims: + src_x_dim = src_x_dims[0] + src_y_dims = src_cube.coord_dims(src_y) + src_y_dim = None + if src_y_dims: + src_y_dim = src_y_dims[0] + if ( + src_x_dim is None + and grid_x.shape[0] != 1 + or src_y_dim is None + and grid_y.shape[0] != 1 + ): + raise ValueError( + "The horizontal grid coordinates of source cube " + "includes scalar coordinates, but the new grid does " + "not. The new grid must not require additional data " + "dimensions to be created." + ) + + # Determine whether to calculate flat or spherical areas. + # Don't only rely on coord system as it may be None. + spherical = ( + isinstance( + src_cs, + (iris.coord_systems.GeogCS, iris.coord_systems.RotatedGeogCS), + ) + or src_x.units == "degrees" + or src_x.units == "radians" + ) + + # Get src and grid bounds in the same units. + x_units = cf_units.Unit("radians") if spherical else src_x.units + y_units = cf_units.Unit("radians") if spherical else src_y.units + + # Operate in highest precision. 
+ src_dtype = np.promote_types(src_x.bounds.dtype, src_y.bounds.dtype) + grid_dtype = np.promote_types(grid_x.bounds.dtype, grid_y.bounds.dtype) + dtype = np.promote_types(src_dtype, grid_dtype) + + src_x_bounds = _get_bounds_in_units(src_x, x_units, dtype) + src_y_bounds = _get_bounds_in_units(src_y, y_units, dtype) + grid_x_bounds = _get_bounds_in_units(grid_x, x_units, dtype) + grid_y_bounds = _get_bounds_in_units(grid_y, y_units, dtype) + + # Create 2d meshgrids as required by _create_cube func. + meshgrid_x, meshgrid_y = _meshgrid(grid_x.points, grid_y.points) + + # Determine whether target grid bounds are decreasing. This must + # be determined prior to wrap_lons being called. + grid_x_decreasing = grid_x_bounds[-1, 0] < grid_x_bounds[0, 0] + grid_y_decreasing = grid_y_bounds[-1, 0] < grid_y_bounds[0, 0] + + # Wrapping of longitudes. + if spherical: + base = np.min(src_x_bounds) + modulus = x_units.modulus + # Only wrap if necessary to avoid introducing floating + # point errors. + if np.min(grid_x_bounds) < base or np.max(grid_x_bounds) > ( + base + modulus + ): + grid_x_bounds = iris.analysis.cartography.wrap_lons( + grid_x_bounds, base, modulus + ) + + # Determine whether the src_x coord has periodic boundary conditions. + circular = getattr(src_x, "circular", False) + + # Use simple cartesian area function or one that takes into + # account the curved surface if coord system is spherical. + if spherical: + area_func = _spherical_area + else: + area_func = _cartesian_area + + def _calculate_regrid_area_weighted_weights( + src_x_bounds, + src_y_bounds, + grid_x_bounds, + grid_y_bounds, + grid_x_decreasing, + grid_y_decreasing, + area_func, + circular=False, + ): + """ + Compute the area weights used for area-weighted regridding. + Args: + * src_x_bounds: + A NumPy array of bounds along the X axis defining the source grid. + * src_y_bounds: + A NumPy array of bounds along the Y axis defining the source grid. 
+ * grid_x_bounds: + A NumPy array of bounds along the X axis defining the new grid. + * grid_y_bounds: + A NumPy array of bounds along the Y axis defining the new grid. + * grid_x_decreasing: + Boolean indicating whether the X coordinate of the new grid is + in descending order. + * grid_y_decreasing: + Boolean indicating whether the Y coordinate of the new grid is + in descending order. + * area_func: + A function that returns an (p, q) array of weights given an (p, 2) + shaped array of Y bounds and an (q, 2) shaped array of X bounds. + Kwargs: + * circular: + A boolean indicating whether the `src_x_bounds` are periodic. + Default is False. + Returns: + The area weights to be used for area-weighted regridding. + """ + # Determine which grid bounds are within src extent. + y_within_bounds = _within_bounds( + src_y_bounds, grid_y_bounds, grid_y_decreasing + ) + x_within_bounds = _within_bounds( + src_x_bounds, grid_x_bounds, grid_x_decreasing + ) + + # Cache which src_bounds are within grid bounds + cached_x_bounds = [] + cached_x_indices = [] + max_x_indices = 0 + for (x_0, x_1) in grid_x_bounds: + if grid_x_decreasing: + x_0, x_1 = x_1, x_0 + x_bounds, x_indices = _cropped_bounds(src_x_bounds, x_0, x_1) + cached_x_bounds.append(x_bounds) + cached_x_indices.append(x_indices) + # Keep record of the largest slice + if isinstance(x_indices, slice): + x_indices_size = np.sum(x_indices.stop - x_indices.start) + else: # is tuple of indices + x_indices_size = len(x_indices) + if x_indices_size > max_x_indices: + max_x_indices = x_indices_size + + # Cache which y src_bounds areas and weights are within grid bounds + cached_y_indices = [] + cached_weights = [] + max_y_indices = 0 + for j, (y_0, y_1) in enumerate(grid_y_bounds): + # Reverse lower and upper if dest grid is decreasing. 
+ if grid_y_decreasing: + y_0, y_1 = y_1, y_0 + y_bounds, y_indices = _cropped_bounds(src_y_bounds, y_0, y_1) + cached_y_indices.append(y_indices) + # Keep record of the largest slice + if isinstance(y_indices, slice): + y_indices_size = np.sum(y_indices.stop - y_indices.start) + else: # is tuple of indices + y_indices_size = len(y_indices) + if y_indices_size > max_y_indices: + max_y_indices = y_indices_size + + weights_i = [] + for i, (x_0, x_1) in enumerate(grid_x_bounds): + # Reverse lower and upper if dest grid is decreasing. + if grid_x_decreasing: + x_0, x_1 = x_1, x_0 + x_bounds = cached_x_bounds[i] + x_indices = cached_x_indices[i] + + # Determine whether element i, j overlaps with src and hence + # an area weight should be computed. + # If x_0 > x_1 then we want [0]->x_1 and x_0->[0] + mod in the case + # of wrapped longitudes. However if the src grid is not global + # (i.e. circular) this new cell would include a region outside of + # the extent of the src grid and thus the weight is therefore + # invalid. + outside_extent = x_0 > x_1 and not circular + if ( + outside_extent + or not y_within_bounds[j] + or not x_within_bounds[i] + ): + weights = False + else: + # Calculate weights based on areas of cropped bounds. + if isinstance(x_indices, tuple) and isinstance( + y_indices, tuple + ): + raise RuntimeError( + "Cannot handle split bounds " "in both x and y." 
+ ) + weights = area_func(y_bounds, x_bounds) + weights_i.append(weights) + cached_weights.append(weights_i) + return ( + tuple(cached_x_indices), + tuple(cached_y_indices), + max_x_indices, + max_y_indices, + tuple(cached_weights), + ) + + ( + cached_x_indices, + cached_y_indices, + max_x_indices, + max_y_indices, + cached_weights, + ) = _calculate_regrid_area_weighted_weights( + src_x_bounds, + src_y_bounds, + grid_x_bounds, + grid_y_bounds, + grid_x_decreasing, + grid_y_decreasing, + area_func, + circular, + ) + + # Go further, calculating the full weights array that we'll need in the + # perform step and the indices we'll need to extract from the cube we're + # regridding (src_data) + + result_y_extent = len(grid_y_bounds) + result_x_extent = len(grid_x_bounds) + + # Total number of points + num_target_pts = result_y_extent * result_x_extent + + # Create empty array to hold weights + src_area_weights = np.zeros( + list((max_y_indices, max_x_indices, num_target_pts)) + ) + + # Built for the case where the source cube isn't masked + blank_weights = np.zeros((num_target_pts,)) + new_data_mask_basis = np.full( + (len(cached_y_indices), len(cached_x_indices)), False, dtype=np.bool_ + ) + + # To permit fancy indexing, we need to store our data in an array whose + # first two dimensions represent the indices needed for the target cell. + # Since target cells can require a different number of indices, the size of + # these dimensions should be the maximum of this number. 
+ # This means we need to track whether the data in + # that array is actually required and build those squared-off arrays + # TODO: Consider if a proper mask would be better + src_area_datas_required = np.full( + (max_y_indices, max_x_indices, num_target_pts), False + ) + square_data_indices_y = np.zeros( + (max_y_indices, max_x_indices, num_target_pts), dtype=int + ) + square_data_indices_x = np.zeros( + (max_y_indices, max_x_indices, num_target_pts), dtype=int + ) + + # Stack the weights for each target point and build the indices we'll need + # to extract the src_area_data + target_pt_ji = -1 + for j, y_indices in enumerate(cached_y_indices): + for i, x_indices in enumerate(cached_x_indices): + target_pt_ji += 1 + # Determine whether to mask element i, j based on whether + # there are valid weights. + weights = cached_weights[j][i] + if weights is False: + # Prepare for the src_data not being masked by storing the + # information that will let us fill the data with zeros and + # weights as one. The weighted average result will be the same, + # but we avoid dividing by zero. 
+ blank_weights[target_pt_ji] = True + new_data_mask_basis[j, i] = True + else: + # Establish which indices are actually in y_indices and x_indices + if isinstance(y_indices, slice): + y_indices = list( + range( + y_indices.start, + y_indices.stop, + y_indices.step or 1, + ) + ) + else: + y_indices = list(y_indices) + + if isinstance(x_indices, slice): + x_indices = list( + range( + x_indices.start, + x_indices.stop, + x_indices.step or 1, + ) + ) + else: + x_indices = list(x_indices) + + # For the weights, we just need the lengths of these as we're + # dropping them into a pre-made array + + len_y = len(y_indices) + len_x = len(x_indices) + + src_area_weights[0:len_y, 0:len_x, target_pt_ji] = weights + + # To build the indices for the source cube, we need equal + # shaped array so we pad with 0s and record the need to mask + # them in src_area_datas_required + padded_y_indices = y_indices + [0] * (max_y_indices - len_y) + padded_x_indices = x_indices + [0] * (max_x_indices - len_x) + + square_data_indices_y[..., target_pt_ji] = np.array( + padded_y_indices + )[:, np.newaxis] + square_data_indices_x[..., target_pt_ji] = padded_x_indices + + src_area_datas_required[0:len_y, 0:len_x, target_pt_ji] = True + + # Package up the return data + + weights_info = ( + blank_weights, + src_area_weights, + new_data_mask_basis, + ) + + index_info = ( + result_x_extent, + result_y_extent, + square_data_indices_y, + square_data_indices_x, + src_area_datas_required, + ) + + # Now return it + + return ( + src_x, + src_y, + src_x_dim, + src_y_dim, + grid_x, + grid_y, + meshgrid_x, + meshgrid_y, + weights_info, + index_info, + ) + + +def _regrid_area_weighted_rectilinear_src_and_grid__perform( + src_cube, regrid_info, mdtol +): + """ + Second (regrid) part of 'regrid_area_weighted_rectilinear_src_and_grid'. + + Perform the prepared regrid calculation on a single 2d cube. 
+ + """ + ( + src_x, + src_y, + src_x_dim, + src_y_dim, + grid_x, + grid_y, + meshgrid_x, + meshgrid_y, + weights_info, + index_info, + ) = regrid_info + + # Calculate new data array for regridded cube. + regrid = functools.partial( + _regrid_area_weighted_array, + x_dim=src_x_dim, + y_dim=src_y_dim, + weights_info=weights_info, + index_info=index_info, + mdtol=mdtol, + ) + + new_data = map_complete_blocks( + src_cube, regrid, (src_y_dim, src_x_dim), meshgrid_x.shape + ) + + # Wrap up the data as a Cube. + regrid_callback = RectilinearRegridder._regrid + new_cube = RectilinearRegridder._create_cube( + new_data, + src_cube, + src_x_dim, + src_y_dim, + src_x, + src_y, + grid_x, + grid_y, + meshgrid_x, + meshgrid_y, + regrid_callback, + ) + + # Slice out any length 1 dimensions. + indices = [slice(None, None)] * new_data.ndim + if src_x_dim is not None and new_cube.shape[src_x_dim] == 1: + indices[src_x_dim] = 0 + if src_y_dim is not None and new_cube.shape[src_y_dim] == 1: + indices[src_y_dim] = 0 + if 0 in indices: + new_cube = new_cube[tuple(indices)] + + return new_cube diff --git a/lib/iris/experimental/regrid.py b/lib/iris/experimental/regrid.py index 9a02fbd3b16..7c5d8e99cc7 100644 --- a/lib/iris/experimental/regrid.py +++ b/lib/iris/experimental/regrid.py @@ -6,26 +6,37 @@ """ Regridding functions. -""" +.. note:: + + .. deprecated:: 3.2.0 + + This package will be removed in a future release. + The PointInCell class has now moved to :class:`iris.analysis.PointInCell`. + All the other content will be withdrawn. + + If you still use any of this, please contact the Iris Developers to + discuss how to replace it or to retain it. 
+""" import copy import functools import warnings import cartopy.crs as ccrs -import cf_units import numpy as np -import numpy.ma as ma import scipy.interpolate -from iris._lazy_data import map_complete_blocks +from iris._deprecation import warn_deprecated +from iris.analysis._area_weighted import ( + _regrid_area_weighted_rectilinear_src_and_grid__perform, + _regrid_area_weighted_rectilinear_src_and_grid__prepare, +) from iris.analysis._interpolation import ( get_xy_coords, get_xy_dim_coords, snapshot_grid, ) from iris.analysis._regrid import ( - RectilinearRegridder, _regrid_weighted_curvilinear_to_rectilinear__perform, _regrid_weighted_curvilinear_to_rectilinear__prepare, ) @@ -34,564 +45,38 @@ import iris.cube from iris.util import _meshgrid +wmsg = ( + "The 'iris.experimental.regrid' package is deprecated since version 3.2, " + "and will be removed in a future release. The PointInCell class has now " + "moved into iris.analysis. All its other content will be withdrawn. " + "If you still use any of this, please contact the Iris Developers to " + "discuss how to replace it or to retain it (reverse the deprecation)." +) +warn_deprecated(wmsg) -def _get_xy_coords(cube): - """ - Return the x and y coordinates from a cube. - - This function will preferentially return a pair of dimension - coordinates (if there are more than one potential x or y dimension - coordinates a ValueError will be raised). If the cube does not have - a pair of x and y dimension coordinates it will return 1D auxiliary - coordinates (including scalars). If there is not one and only one set - of x and y auxiliary coordinates a ValueError will be raised. - - Having identified the x and y coordinates, the function checks that they - have equal coordinate systems and that they do not occupy the same - dimension on the cube. - - Args: - - * cube: - An instance of :class:`iris.cube.Cube`. - - Returns: - A tuple containing the cube's x and y coordinates. 
- - """ - # Look for a suitable dimension coords first. - x_coords = cube.coords(axis="x", dim_coords=True) - if not x_coords: - # If there is no x coord in dim_coords look for scalars or - # monotonic coords in aux_coords. - x_coords = [ - coord - for coord in cube.coords(axis="x", dim_coords=False) - if coord.ndim == 1 and coord.is_monotonic() - ] - if len(x_coords) != 1: - raise ValueError( - "Cube {!r} must contain a single 1D x " - "coordinate.".format(cube.name()) - ) - x_coord = x_coords[0] - - # Look for a suitable dimension coords first. - y_coords = cube.coords(axis="y", dim_coords=True) - if not y_coords: - # If there is no y coord in dim_coords look for scalars or - # monotonic coords in aux_coords. - y_coords = [ - coord - for coord in cube.coords(axis="y", dim_coords=False) - if coord.ndim == 1 and coord.is_monotonic() - ] - if len(y_coords) != 1: - raise ValueError( - "Cube {!r} must contain a single 1D y " - "coordinate.".format(cube.name()) - ) - y_coord = y_coords[0] - - if x_coord.coord_system != y_coord.coord_system: - raise ValueError( - "The cube's x ({!r}) and y ({!r}) " - "coordinates must have the same coordinate " - "system.".format(x_coord.name(), y_coord.name()) - ) - - # The x and y coordinates must describe different dimensions - # or be scalar coords. - x_dims = cube.coord_dims(x_coord) - x_dim = None - if x_dims: - x_dim = x_dims[0] - - y_dims = cube.coord_dims(y_coord) - y_dim = None - if y_dims: - y_dim = y_dims[0] - - if x_dim is not None and y_dim == x_dim: - raise ValueError( - "The cube's x and y coords must not describe the " - "same data dimension." - ) - - return x_coord, y_coord - - -def _within_bounds(src_bounds, tgt_bounds, orderswap=False): - """ - Determine which target bounds lie within the extremes of the source bounds. - - Args: - - * src_bounds (ndarray): - An (n, 2) shaped array of monotonic contiguous source bounds. - * tgt_bounds (ndarray): - An (n, 2) shaped array corresponding to the target bounds. 
- - Kwargs: - - * orderswap (bool): - A Boolean indicating whether the target bounds are in descending order - (True). Defaults to False. - - Returns: - Boolean ndarray, indicating whether each target bound is within the - extremes of the source bounds. - - """ - min_bound = np.min(src_bounds) - 1e-14 - max_bound = np.max(src_bounds) + 1e-14 - - # Swap upper-lower is necessary. - if orderswap is True: - upper, lower = tgt_bounds.T - else: - lower, upper = tgt_bounds.T - - return ((lower <= max_bound) * (lower >= min_bound)) * ( - (upper <= max_bound) * (upper >= min_bound) - ) - - -def _cropped_bounds(bounds, lower, upper): - """ - Return a new bounds array and corresponding slice object (or indices) of - the original data array, resulting from cropping the provided bounds - between the specified lower and upper values. The bounds at the - extremities will be truncated so that they start and end with lower and - upper. - - This function will return an empty NumPy array and slice if there is no - overlap between the region covered by bounds and the region from lower to - upper. - - If lower > upper the resulting bounds may not be contiguous and the - indices object will be a tuple of indices rather than a slice object. - - Args: - - * bounds: - An (n, 2) shaped array of monotonic contiguous bounds. - * lower: - Lower bound at which to crop the bounds array. - * upper: - Upper bound at which to crop the bounds array. - - Returns: - A tuple of the new bounds array and the corresponding slice object or - indices from the zeroth axis of the original array. - - """ - reversed_flag = False - # Ensure order is increasing. - if bounds[0, 0] > bounds[-1, 0]: - # Reverse bounds - bounds = bounds[::-1, ::-1] - reversed_flag = True - - # Number of bounds. - n = bounds.shape[0] - - if lower <= upper: - if lower > bounds[-1, 1] or upper < bounds[0, 0]: - new_bounds = bounds[0:0] - indices = slice(0, 0) - else: - # A single region lower->upper. 
- if lower < bounds[0, 0]: - # Region extends below bounds so use first lower bound. - lindex = 0 - lower = bounds[0, 0] - else: - # Index of last lower bound less than or equal to lower. - lindex = np.nonzero(bounds[:, 0] <= lower)[0][-1] - if upper > bounds[-1, 1]: - # Region extends above bounds so use last upper bound. - uindex = n - 1 - upper = bounds[-1, 1] - else: - # Index of first upper bound greater than or equal to - # upper. - uindex = np.nonzero(bounds[:, 1] >= upper)[0][0] - # Extract the bounds in our region defined by lower->upper. - new_bounds = np.copy(bounds[lindex : (uindex + 1), :]) - # Replace first and last values with specified bounds. - new_bounds[0, 0] = lower - new_bounds[-1, 1] = upper - if reversed_flag: - indices = slice(n - (uindex + 1), n - lindex) - else: - indices = slice(lindex, uindex + 1) - else: - # Two regions [0]->upper, lower->[-1] - # [0]->upper - if upper < bounds[0, 0]: - # Region outside src bounds. - new_bounds_left = bounds[0:0] - indices_left = tuple() - slice_left = slice(0, 0) - else: - if upper > bounds[-1, 1]: - # Whole of bounds. - uindex = n - 1 - upper = bounds[-1, 1] - else: - # Index of first upper bound greater than or equal to upper. - uindex = np.nonzero(bounds[:, 1] >= upper)[0][0] - # Extract the bounds in our region defined by [0]->upper. - new_bounds_left = np.copy(bounds[0 : (uindex + 1), :]) - # Replace last value with specified bound. - new_bounds_left[-1, 1] = upper - if reversed_flag: - indices_left = tuple(range(n - (uindex + 1), n)) - slice_left = slice(n - (uindex + 1), n) - else: - indices_left = tuple(range(0, uindex + 1)) - slice_left = slice(0, uindex + 1) - # lower->[-1] - if lower > bounds[-1, 1]: - # Region is outside src bounds. - new_bounds_right = bounds[0:0] - indices_right = tuple() - slice_right = slice(0, 0) - else: - if lower < bounds[0, 0]: - # Whole of bounds. - lindex = 0 - lower = bounds[0, 0] - else: - # Index of last lower bound less than or equal to lower. 
- lindex = np.nonzero(bounds[:, 0] <= lower)[0][-1] - # Extract the bounds in our region defined by lower->[-1]. - new_bounds_right = np.copy(bounds[lindex:, :]) - # Replace first value with specified bound. - new_bounds_right[0, 0] = lower - if reversed_flag: - indices_right = tuple(range(0, n - lindex)) - slice_right = slice(0, n - lindex) - else: - indices_right = tuple(range(lindex, n)) - slice_right = slice(lindex, None) - - if reversed_flag: - # Flip everything around. - indices_left, indices_right = indices_right, indices_left - slice_left, slice_right = slice_right, slice_left - - # Combine regions. - new_bounds = np.concatenate((new_bounds_left, new_bounds_right)) - # Use slices if possible, but if we have two regions use indices. - if indices_left and indices_right: - indices = indices_left + indices_right - elif indices_left: - indices = slice_left - elif indices_right: - indices = slice_right - else: - indices = slice(0, 0) - - if reversed_flag: - new_bounds = new_bounds[::-1, ::-1] - - return new_bounds, indices - - -def _cartesian_area(y_bounds, x_bounds): - """ - Return an array of the areas of each cell given two arrays - of cartesian bounds. - - Args: - - * y_bounds: - An (n, 2) shaped NumPy array. - * x_bounds: - An (m, 2) shaped NumPy array. - - Returns: - An (n, m) shaped Numpy array of areas. - - """ - heights = y_bounds[:, 1] - y_bounds[:, 0] - widths = x_bounds[:, 1] - x_bounds[:, 0] - return np.abs(np.outer(heights, widths)) - - -def _spherical_area(y_bounds, x_bounds, radius=1.0): - """ - Return an array of the areas of each cell on a sphere - given two arrays of latitude and longitude bounds in radians. - - Args: - - * y_bounds: - An (n, 2) shaped NumPy array of latitide bounds in radians. - * x_bounds: - An (m, 2) shaped NumPy array of longitude bounds in radians. - * radius: - Radius of the sphere. Default is 1.0. - - Returns: - An (n, m) shaped Numpy array of areas. 
- - """ - return iris.analysis.cartography._quadrant_area(y_bounds, x_bounds, radius) - - -def _get_bounds_in_units(coord, units, dtype): - """Return a copy of coord's bounds in the specified units and dtype.""" - # The bounds are cast to dtype before conversion to prevent issues when - # mixing float32 and float64 types. - return coord.units.convert(coord.bounds.astype(dtype), units).astype(dtype) - - -def _weighted_mean_with_mdtol(data, weights, axis=None, mdtol=0): - """ - Return the weighted mean of an array over the specified axis - using the provided weights (if any) and a permitted fraction of - masked data. - - Args: - - * data (array-like): - Data to be averaged. - - * weights (array-like): - An array of the same shape as the data that specifies the contribution - of each corresponding data element to the calculated mean. - - Kwargs: - - * axis (int or tuple of ints): - Axis along which the mean is computed. The default is to compute - the mean of the flattened array. - - * mdtol (float): - Tolerance of missing data. The value returned in each element of the - returned array will be masked if the fraction of masked data exceeds - mdtol. This fraction is weighted by the `weights` array if one is - provided. mdtol=0 means no missing data is tolerated - while mdtol=1 will mean the resulting element will be masked if and - only if all the contributing elements of data are masked. - Defaults to 0. - - Returns: - Numpy array (possibly masked) or scalar. 
- """ - if ma.is_masked(data): - res, unmasked_weights_sum = ma.average( - data, weights=weights, axis=axis, returned=True - ) - if mdtol < 1: - weights_sum = weights.sum(axis=axis) - frac_masked = 1 - np.true_divide(unmasked_weights_sum, weights_sum) - mask_pt = frac_masked > mdtol - if np.any(mask_pt) and not isinstance(res, ma.core.MaskedConstant): - if np.isscalar(res): - res = ma.masked - elif ma.isMaskedArray(res): - res.mask |= mask_pt - else: - res = ma.masked_array(res, mask=mask_pt) - else: - res = np.average(data, weights=weights, axis=axis) - return res - - -def _regrid_area_weighted_array( - src_data, x_dim, y_dim, weights_info, index_info, mdtol=0 +def regrid_area_weighted_rectilinear_src_and_grid( + src_cube, grid_cube, mdtol=0 ): """ - Regrid the given data from its source grid to a new grid using - an area weighted mean to determine the resulting data values. + Return a new cube with data values calculated using the area weighted + mean of data values from src_grid regridded onto the horizontal grid of + grid_cube. .. note:: - Elements in the returned array that lie either partially - or entirely outside of the extent of the source grid will - be masked irrespective of the value of mdtol. + .. deprecated:: 3.2.0 - Args: + This function is scheduled to be removed in a future release. + Please use :meth:`iris.cube.Cube.regrid` with the + :class:`iris.analysis.AreaWeighted` scheme instead : this is an exact + replacement. - * src_data: - An N-dimensional NumPy array. - * x_dim: - The X dimension within `src_data`. - * y_dim: - The Y dimension within `src_data`. - * weights_info: - The area weights information to be used for area-weighted - regridding. + For example : - Kwargs: + .. code:: - * mdtol: - Tolerance of missing data. The value returned in each element of the - returned array will be masked if the fraction of missing data exceeds - mdtol. This fraction is calculated based on the area of masked cells - within each target cell. 
mdtol=0 means no missing data is tolerated - while mdtol=1 will mean the resulting element will be masked if and - only if all the overlapping elements of the source grid are masked. - Defaults to 0. - - Returns: - The regridded data as an N-dimensional NumPy array. The lengths - of the X and Y dimensions will now match those of the target - grid. - - """ - ( - blank_weights, - src_area_weights, - new_data_mask_basis, - ) = weights_info - - ( - result_x_extent, - result_y_extent, - square_data_indices_y, - square_data_indices_x, - src_area_datas_required, - ) = index_info - - # Ensure we have x_dim and y_dim. - x_dim_orig = x_dim - y_dim_orig = y_dim - if y_dim is None: - src_data = np.expand_dims(src_data, axis=src_data.ndim) - y_dim = src_data.ndim - 1 - if x_dim is None: - src_data = np.expand_dims(src_data, axis=src_data.ndim) - x_dim = src_data.ndim - 1 - # Move y_dim and x_dim to last dimensions - if not x_dim == src_data.ndim - 1: - src_data = np.moveaxis(src_data, x_dim, -1) - if not y_dim == src_data.ndim - 2: - if x_dim < y_dim: - # note: y_dim was shifted along by one position when - # x_dim was moved to the last dimension - src_data = np.moveaxis(src_data, y_dim - 1, -2) - elif x_dim > y_dim: - src_data = np.moveaxis(src_data, y_dim, -2) - x_dim = src_data.ndim - 1 - y_dim = src_data.ndim - 2 - - # Create empty "pre-averaging" data array that will enable the - # src_data data coresponding to a given target grid point, - # to be stacked per point. - # Note that dtype is not preserved and that the array mask - # allows for regions that do not overlap. - new_shape = list(src_data.shape) - new_shape[x_dim] = result_x_extent - new_shape[y_dim] = result_y_extent - - # Use input cube dtype or convert values to the smallest possible float - # dtype when necessary. - dtype = np.promote_types(src_data.dtype, np.float16) - - # Axes of data over which the weighted mean is calculated. 
- axis = (y_dim, x_dim) - - # Use previously established indices - - src_area_datas_square = src_data[ - ..., square_data_indices_y, square_data_indices_x - ] - - _, src_area_datas_required = np.broadcast_arrays( - src_area_datas_square, src_area_datas_required - ) - - src_area_datas = np.where( - src_area_datas_required, src_area_datas_square, 0 - ) - - # Flag to indicate whether the original data was a masked array. - src_masked = src_data.mask.any() if ma.isMaskedArray(src_data) else False - if src_masked: - src_area_masks_square = src_data.mask[ - ..., square_data_indices_y, square_data_indices_x - ] - src_area_masks = np.where( - src_area_datas_required, src_area_masks_square, True - ) - - else: - # If the weights were originally blank, set the weights to all 1 to - # avoid divide by 0 error and set the new data mask for making the - # values 0 - src_area_weights = np.where(blank_weights, 1, src_area_weights) - - new_data_mask = np.broadcast_to(new_data_mask_basis, new_shape) - - # Broadcast the weights array to allow numpy's ma.average - # to be called. - # Assign new shape to raise error on copy. - src_area_weights.shape = src_area_datas.shape[-3:] - # Broadcast weights to match shape of data. - _, src_area_weights = np.broadcast_arrays(src_area_datas, src_area_weights) - - # Mask the data points - if src_masked: - src_area_datas = np.ma.array(src_area_datas, mask=src_area_masks) - - # Calculate weighted mean taking into account missing data. 
- new_data = _weighted_mean_with_mdtol( - src_area_datas, weights=src_area_weights, axis=axis, mdtol=mdtol - ) - new_data = new_data.reshape(new_shape) - if src_masked: - new_data_mask = new_data.mask - - # Mask the data if originally masked or if the result has masked points - if ma.isMaskedArray(src_data): - new_data = ma.array( - new_data, - mask=new_data_mask, - fill_value=src_data.fill_value, - dtype=dtype, - ) - elif new_data_mask.any(): - new_data = ma.array(new_data, mask=new_data_mask, dtype=dtype) - else: - new_data = new_data.astype(dtype) - - # Restore data to original form - if x_dim_orig is None and y_dim_orig is None: - new_data = np.squeeze(new_data, axis=x_dim) - new_data = np.squeeze(new_data, axis=y_dim) - elif y_dim_orig is None: - new_data = np.squeeze(new_data, axis=y_dim) - new_data = np.moveaxis(new_data, -1, x_dim_orig) - elif x_dim_orig is None: - new_data = np.squeeze(new_data, axis=x_dim) - new_data = np.moveaxis(new_data, -1, y_dim_orig) - elif x_dim_orig < y_dim_orig: - # move the x_dim back first, so that the y_dim will - # then be moved to its original position - new_data = np.moveaxis(new_data, -1, x_dim_orig) - new_data = np.moveaxis(new_data, -1, y_dim_orig) - else: - # move the y_dim back first, so that the x_dim will - # then be moved to its original position - new_data = np.moveaxis(new_data, -2, y_dim_orig) - new_data = np.moveaxis(new_data, -1, x_dim_orig) - - return new_data - - -def regrid_area_weighted_rectilinear_src_and_grid( - src_cube, grid_cube, mdtol=0 -): - """ - Return a new cube with data values calculated using the area weighted - mean of data values from src_grid regridded onto the horizontal grid of - grid_cube. + result = src_cube.regrid(grid_cube, AreaWeighted()) This function requires that the horizontal grids of both cubes are rectilinear (i.e. expressed in terms of two orthogonal 1D coordinates) @@ -629,6 +114,15 @@ def regrid_area_weighted_rectilinear_src_and_grid( A new :class:`iris.cube.Cube` instance. 
""" + wmsg = ( + "The function " + "'iris.experimental.regrid." + "regrid_area_weighted_rectilinear_src_and_grid' " + "has been deprecated, and will be removed in a future release. " + "Please consult the docstring for details." + ) + warn_deprecated(wmsg) + regrid_info = _regrid_area_weighted_rectilinear_src_and_grid__prepare( src_cube, grid_cube ) @@ -638,460 +132,26 @@ def regrid_area_weighted_rectilinear_src_and_grid( return result -def _regrid_area_weighted_rectilinear_src_and_grid__prepare( - src_cube, grid_cube -): - """ - First (setup) part of 'regrid_area_weighted_rectilinear_src_and_grid'. - - Check inputs and calculate related info. The 'regrid info' returned - can be re-used over many 2d slices. - - """ - # Get the 1d monotonic (or scalar) src and grid coordinates. - src_x, src_y = _get_xy_coords(src_cube) - grid_x, grid_y = _get_xy_coords(grid_cube) - - # Condition 1: All x and y coordinates must have contiguous bounds to - # define areas. - if ( - not src_x.is_contiguous() - or not src_y.is_contiguous() - or not grid_x.is_contiguous() - or not grid_y.is_contiguous() - ): - raise ValueError( - "The horizontal grid coordinates of both the source " - "and grid cubes must have contiguous bounds." - ) - - # Condition 2: Everything must have the same coordinate system. - src_cs = src_x.coord_system - grid_cs = grid_x.coord_system - if src_cs != grid_cs: - raise ValueError( - "The horizontal grid coordinates of both the source " - "and grid cubes must have the same coordinate " - "system." - ) - - # Condition 3: cannot create vector coords from scalars. 
- src_x_dims = src_cube.coord_dims(src_x) - src_x_dim = None - if src_x_dims: - src_x_dim = src_x_dims[0] - src_y_dims = src_cube.coord_dims(src_y) - src_y_dim = None - if src_y_dims: - src_y_dim = src_y_dims[0] - if ( - src_x_dim is None - and grid_x.shape[0] != 1 - or src_y_dim is None - and grid_y.shape[0] != 1 - ): - raise ValueError( - "The horizontal grid coordinates of source cube " - "includes scalar coordinates, but the new grid does " - "not. The new grid must not require additional data " - "dimensions to be created." - ) - - # Determine whether to calculate flat or spherical areas. - # Don't only rely on coord system as it may be None. - spherical = ( - isinstance( - src_cs, - (iris.coord_systems.GeogCS, iris.coord_systems.RotatedGeogCS), - ) - or src_x.units == "degrees" - or src_x.units == "radians" - ) - - # Get src and grid bounds in the same units. - x_units = cf_units.Unit("radians") if spherical else src_x.units - y_units = cf_units.Unit("radians") if spherical else src_y.units - - # Operate in highest precision. - src_dtype = np.promote_types(src_x.bounds.dtype, src_y.bounds.dtype) - grid_dtype = np.promote_types(grid_x.bounds.dtype, grid_y.bounds.dtype) - dtype = np.promote_types(src_dtype, grid_dtype) - - src_x_bounds = _get_bounds_in_units(src_x, x_units, dtype) - src_y_bounds = _get_bounds_in_units(src_y, y_units, dtype) - grid_x_bounds = _get_bounds_in_units(grid_x, x_units, dtype) - grid_y_bounds = _get_bounds_in_units(grid_y, y_units, dtype) - - # Create 2d meshgrids as required by _create_cube func. - meshgrid_x, meshgrid_y = _meshgrid(grid_x.points, grid_y.points) - - # Determine whether target grid bounds are decreasing. This must - # be determined prior to wrap_lons being called. - grid_x_decreasing = grid_x_bounds[-1, 0] < grid_x_bounds[0, 0] - grid_y_decreasing = grid_y_bounds[-1, 0] < grid_y_bounds[0, 0] - - # Wrapping of longitudes. 
- if spherical: - base = np.min(src_x_bounds) - modulus = x_units.modulus - # Only wrap if necessary to avoid introducing floating - # point errors. - if np.min(grid_x_bounds) < base or np.max(grid_x_bounds) > ( - base + modulus - ): - grid_x_bounds = iris.analysis.cartography.wrap_lons( - grid_x_bounds, base, modulus - ) - - # Determine whether the src_x coord has periodic boundary conditions. - circular = getattr(src_x, "circular", False) - - # Use simple cartesian area function or one that takes into - # account the curved surface if coord system is spherical. - if spherical: - area_func = _spherical_area - else: - area_func = _cartesian_area - - def _calculate_regrid_area_weighted_weights( - src_x_bounds, - src_y_bounds, - grid_x_bounds, - grid_y_bounds, - grid_x_decreasing, - grid_y_decreasing, - area_func, - circular=False, - ): - """ - Compute the area weights used for area-weighted regridding. - Args: - * src_x_bounds: - A NumPy array of bounds along the X axis defining the source grid. - * src_y_bounds: - A NumPy array of bounds along the Y axis defining the source grid. - * grid_x_bounds: - A NumPy array of bounds along the X axis defining the new grid. - * grid_y_bounds: - A NumPy array of bounds along the Y axis defining the new grid. - * grid_x_decreasing: - Boolean indicating whether the X coordinate of the new grid is - in descending order. - * grid_y_decreasing: - Boolean indicating whether the Y coordinate of the new grid is - in descending order. - * area_func: - A function that returns an (p, q) array of weights given an (p, 2) - shaped array of Y bounds and an (q, 2) shaped array of X bounds. - Kwargs: - * circular: - A boolean indicating whether the `src_x_bounds` are periodic. - Default is False. - Returns: - The area weights to be used for area-weighted regridding. - """ - # Determine which grid bounds are within src extent. 
- y_within_bounds = _within_bounds( - src_y_bounds, grid_y_bounds, grid_y_decreasing - ) - x_within_bounds = _within_bounds( - src_x_bounds, grid_x_bounds, grid_x_decreasing - ) - - # Cache which src_bounds are within grid bounds - cached_x_bounds = [] - cached_x_indices = [] - max_x_indices = 0 - for (x_0, x_1) in grid_x_bounds: - if grid_x_decreasing: - x_0, x_1 = x_1, x_0 - x_bounds, x_indices = _cropped_bounds(src_x_bounds, x_0, x_1) - cached_x_bounds.append(x_bounds) - cached_x_indices.append(x_indices) - # Keep record of the largest slice - if isinstance(x_indices, slice): - x_indices_size = np.sum(x_indices.stop - x_indices.start) - else: # is tuple of indices - x_indices_size = len(x_indices) - if x_indices_size > max_x_indices: - max_x_indices = x_indices_size - - # Cache which y src_bounds areas and weights are within grid bounds - cached_y_indices = [] - cached_weights = [] - max_y_indices = 0 - for j, (y_0, y_1) in enumerate(grid_y_bounds): - # Reverse lower and upper if dest grid is decreasing. - if grid_y_decreasing: - y_0, y_1 = y_1, y_0 - y_bounds, y_indices = _cropped_bounds(src_y_bounds, y_0, y_1) - cached_y_indices.append(y_indices) - # Keep record of the largest slice - if isinstance(y_indices, slice): - y_indices_size = np.sum(y_indices.stop - y_indices.start) - else: # is tuple of indices - y_indices_size = len(y_indices) - if y_indices_size > max_y_indices: - max_y_indices = y_indices_size - - weights_i = [] - for i, (x_0, x_1) in enumerate(grid_x_bounds): - # Reverse lower and upper if dest grid is decreasing. - if grid_x_decreasing: - x_0, x_1 = x_1, x_0 - x_bounds = cached_x_bounds[i] - x_indices = cached_x_indices[i] - - # Determine whether element i, j overlaps with src and hence - # an area weight should be computed. - # If x_0 > x_1 then we want [0]->x_1 and x_0->[0] + mod in the case - # of wrapped longitudes. However if the src grid is not global - # (i.e. 
circular) this new cell would include a region outside of - # the extent of the src grid and thus the weight is therefore - # invalid. - outside_extent = x_0 > x_1 and not circular - if ( - outside_extent - or not y_within_bounds[j] - or not x_within_bounds[i] - ): - weights = False - else: - # Calculate weights based on areas of cropped bounds. - if isinstance(x_indices, tuple) and isinstance( - y_indices, tuple - ): - raise RuntimeError( - "Cannot handle split bounds " "in both x and y." - ) - weights = area_func(y_bounds, x_bounds) - weights_i.append(weights) - cached_weights.append(weights_i) - return ( - tuple(cached_x_indices), - tuple(cached_y_indices), - max_x_indices, - max_y_indices, - tuple(cached_weights), - ) - - ( - cached_x_indices, - cached_y_indices, - max_x_indices, - max_y_indices, - cached_weights, - ) = _calculate_regrid_area_weighted_weights( - src_x_bounds, - src_y_bounds, - grid_x_bounds, - grid_y_bounds, - grid_x_decreasing, - grid_y_decreasing, - area_func, - circular, - ) - - # Go further, calculating the full weights array that we'll need in the - # perform step and the indices we'll need to extract from the cube we're - # regridding (src_data) - - result_y_extent = len(grid_y_bounds) - result_x_extent = len(grid_x_bounds) - - # Total number of points - num_target_pts = result_y_extent * result_x_extent - - # Create empty array to hold weights - src_area_weights = np.zeros( - list((max_y_indices, max_x_indices, num_target_pts)) - ) - - # Built for the case where the source cube isn't masked - blank_weights = np.zeros((num_target_pts,)) - new_data_mask_basis = np.full( - (len(cached_y_indices), len(cached_x_indices)), False, dtype=np.bool_ - ) - - # To permit fancy indexing, we need to store our data in an array whose - # first two dimensions represent the indices needed for the target cell. - # Since target cells can require a different number of indices, the size of - # these dimensions should be the maximum of this number. 
- # This means we need to track whether the data in - # that array is actually required and build those squared-off arrays - # TODO: Consider if a proper mask would be better - src_area_datas_required = np.full( - (max_y_indices, max_x_indices, num_target_pts), False - ) - square_data_indices_y = np.zeros( - (max_y_indices, max_x_indices, num_target_pts), dtype=int - ) - square_data_indices_x = np.zeros( - (max_y_indices, max_x_indices, num_target_pts), dtype=int - ) - - # Stack the weights for each target point and build the indices we'll need - # to extract the src_area_data - target_pt_ji = -1 - for j, y_indices in enumerate(cached_y_indices): - for i, x_indices in enumerate(cached_x_indices): - target_pt_ji += 1 - # Determine whether to mask element i, j based on whether - # there are valid weights. - weights = cached_weights[j][i] - if weights is False: - # Prepare for the src_data not being masked by storing the - # information that will let us fill the data with zeros and - # weights as one. The weighted average result will be the same, - # but we avoid dividing by zero. 
- blank_weights[target_pt_ji] = True - new_data_mask_basis[j, i] = True - else: - # Establish which indices are actually in y_indices and x_indices - if isinstance(y_indices, slice): - y_indices = list( - range( - y_indices.start, - y_indices.stop, - y_indices.step or 1, - ) - ) - else: - y_indices = list(y_indices) - - if isinstance(x_indices, slice): - x_indices = list( - range( - x_indices.start, - x_indices.stop, - x_indices.step or 1, - ) - ) - else: - x_indices = list(x_indices) - - # For the weights, we just need the lengths of these as we're - # dropping them into a pre-made array - - len_y = len(y_indices) - len_x = len(x_indices) - - src_area_weights[0:len_y, 0:len_x, target_pt_ji] = weights - - # To build the indices for the source cube, we need equal - # shaped array so we pad with 0s and record the need to mask - # them in src_area_datas_required - padded_y_indices = y_indices + [0] * (max_y_indices - len_y) - padded_x_indices = x_indices + [0] * (max_x_indices - len_x) - - square_data_indices_y[..., target_pt_ji] = np.array( - padded_y_indices - )[:, np.newaxis] - square_data_indices_x[..., target_pt_ji] = padded_x_indices - - src_area_datas_required[0:len_y, 0:len_x, target_pt_ji] = True - - # Package up the return data - - weights_info = ( - blank_weights, - src_area_weights, - new_data_mask_basis, - ) - - index_info = ( - result_x_extent, - result_y_extent, - square_data_indices_y, - square_data_indices_x, - src_area_datas_required, - ) - - # Now return it - - return ( - src_x, - src_y, - src_x_dim, - src_y_dim, - grid_x, - grid_y, - meshgrid_x, - meshgrid_y, - weights_info, - index_info, - ) - - -def _regrid_area_weighted_rectilinear_src_and_grid__perform( - src_cube, regrid_info, mdtol -): - """ - Second (regrid) part of 'regrid_area_weighted_rectilinear_src_and_grid'. - - Perform the prepared regrid calculation on a single 2d cube. 
+def regrid_weighted_curvilinear_to_rectilinear(src_cube, weights, grid_cube): + r""" + Return a new cube with the data values calculated using the weighted + mean of data values from :data:`src_cube` and the weights from + :data:`weights` regridded onto the horizontal grid of :data:`grid_cube`. - """ - ( - src_x, - src_y, - src_x_dim, - src_y_dim, - grid_x, - grid_y, - meshgrid_x, - meshgrid_y, - weights_info, - index_info, - ) = regrid_info - - # Calculate new data array for regridded cube. - regrid = functools.partial( - _regrid_area_weighted_array, - x_dim=src_x_dim, - y_dim=src_y_dim, - weights_info=weights_info, - index_info=index_info, - mdtol=mdtol, - ) + .. note :: - new_data = map_complete_blocks( - src_cube, regrid, (src_y_dim, src_x_dim), meshgrid_x.shape - ) + .. deprecated:: 3.2.0 - # Wrap up the data as a Cube. - regrid_callback = RectilinearRegridder._regrid - new_cube = RectilinearRegridder._create_cube( - new_data, - src_cube, - src_x_dim, - src_y_dim, - src_x, - src_y, - grid_x, - grid_y, - meshgrid_x, - meshgrid_y, - regrid_callback, - ) + This function is scheduled to be removed in a future release. + Please use :meth:`iris.cube.Cube.regrid` with the + :class:`iris.analysis.PointInCell` scheme instead : this is an exact + replacement. - # Slice out any length 1 dimensions. - indices = [slice(None, None)] * new_data.ndim - if src_x_dim is not None and new_cube.shape[src_x_dim] == 1: - indices[src_x_dim] = 0 - if src_y_dim is not None and new_cube.shape[src_y_dim] == 1: - indices[src_y_dim] = 0 - if 0 in indices: - new_cube = new_cube[tuple(indices)] + For example : - return new_cube + .. code:: - -def regrid_weighted_curvilinear_to_rectilinear(src_cube, weights, grid_cube): - r""" - Return a new cube with the data values calculated using the weighted - mean of data values from :data:`src_cube` and the weights from - :data:`weights` regridded onto the horizontal grid of :data:`grid_cube`. 
+ result = src_cube.regrid(grid_cube, PointInCell()) This function requires that the :data:`src_cube` has a horizontal grid defined by a pair of X- and Y-axis coordinates which are mapped over the @@ -1134,6 +194,14 @@ def regrid_weighted_curvilinear_to_rectilinear(src_cube, weights, grid_cube): A :class:`iris.cube.Cube` instance. """ + wmsg = ( + "The function " + "'iris.experimental.regrid." + "regrid_weighted_curvilinear_to_rectilinear' " + "has been deprecated, and will be removed in a future release. " + "Please consult the docstring for details." + ) + warn_deprecated(wmsg) regrid_info = _regrid_weighted_curvilinear_to_rectilinear__prepare( src_cube, weights, grid_cube ) @@ -1572,6 +640,16 @@ def __init__(self, projection=None): Linear regridding scheme that uses scipy.interpolate.griddata on projected unstructured data. + .. note:: + + .. deprecated:: 3.2.0 + + This class is scheduled to be removed in a future release, and no + replacement is currently planned. + If you make use of this functionality, please contact the Iris + Developers to discuss how to retain it (which could include + reversing the deprecation). + Optional Args: * projection: `cartopy.crs instance` @@ -1581,6 +659,12 @@ def __init__(self, projection=None): """ self.projection = projection + wmsg = ( + "The class iris.experimental.regrid.ProjectedUnstructuredLinear " + "has been deprecated, and will be removed in a future release. " + "Please consult the docstring for details." + ) + warn_deprecated(wmsg) def regridder(self, src_cube, target_grid): """ @@ -1639,6 +723,17 @@ def __init__(self, projection=None): Nearest regridding scheme that uses scipy.interpolate.griddata on projected unstructured data. + .. note:: + + .. deprecated:: 3.2.0 + + This class is scheduled to be removed in a future release, and no + exact replacement is currently planned. + Please use :class:`iris.analysis.UnstructuredNearest` instead, if + possible. 
If you have a need for this exact functionality, please + contact the Iris Developers to discuss how to retain it (which + could include reversing the deprecation). + Optional Args: * projection: `cartopy.crs instance` @@ -1648,6 +743,13 @@ def __init__(self, projection=None): """ self.projection = projection + wmsg = ( + "iris.experimental.regrid.ProjectedUnstructuredNearest has been " + "deprecated, and will be removed in a future release. " + "Please use 'iris.analysis.UnstructuredNearest' instead, where " + "possible. Consult the docstring for details." + ) + warn_deprecated(wmsg) def regridder(self, src_cube, target_grid): """ diff --git a/lib/iris/tests/unit/analysis/area_weighted/test_AreaWeightedRegridder.py b/lib/iris/tests/unit/analysis/area_weighted/test_AreaWeightedRegridder.py index a44ccb32bda..ecaa028ab3d 100644 --- a/lib/iris/tests/unit/analysis/area_weighted/test_AreaWeightedRegridder.py +++ b/lib/iris/tests/unit/analysis/area_weighted/test_AreaWeightedRegridder.py @@ -16,11 +16,13 @@ import numpy as np from iris import load_cube -from iris.analysis._area_weighted import AreaWeightedRegridder +from iris.analysis._area_weighted import ( + AreaWeightedRegridder, + _regrid_area_weighted_rectilinear_src_and_grid__prepare, +) from iris.coord_systems import GeogCS from iris.coords import DimCoord from iris.cube import Cube -import iris.experimental.regrid as eregrid class Test(tests.IrisTest): @@ -46,19 +48,17 @@ def extract_grid(self, cube): def check_mdtol(self, mdtol=None): src_grid, target_grid = self.grids() # Get _regrid_info result - _regrid_info = ( - eregrid._regrid_area_weighted_rectilinear_src_and_grid__prepare( - src_grid, target_grid - ) + _regrid_info = _regrid_area_weighted_rectilinear_src_and_grid__prepare( + src_grid, target_grid ) self.assertEqual(len(_regrid_info), 10) with mock.patch( - "iris.experimental.regrid." + "iris.analysis._area_weighted." 
"_regrid_area_weighted_rectilinear_src_and_grid__prepare", return_value=_regrid_info, ) as prepare: with mock.patch( - "iris.experimental.regrid." + "iris.analysis._area_weighted." "_regrid_area_weighted_rectilinear_src_and_grid__perform", return_value=mock.sentinel.result, ) as perform: @@ -253,7 +253,6 @@ class TestLazy(tests.IrisTest): # Setup def setUp(self) -> None: # Prepare a cube and a template - cube_file_path = tests.get_data_path( ["NetCDF", "regrid", "regrid_xyt.nc"] ) From 3cbb687d80025febf184f9d31b75b15ac0701d13 Mon Sep 17 00:00:00 2001 From: Bill Little Date: Mon, 31 Jan 2022 11:49:10 +0000 Subject: [PATCH 40/69] docs: move whatsnew (#4540) * docs: move whatsnew * rebase changes * rebase latest changes --- .../documenting/whats_new_contributions.rst | 21 +- docs/src/developers_guide/release.rst | 23 +- docs/src/index.rst | 10 +- docs/src/whatsnew/dev.rst | 382 +++++++++++++++++ .../{latest.rst.template => dev.rst.template} | 0 docs/src/whatsnew/index.rst | 2 +- docs/src/whatsnew/latest.rst | 383 +----------------- 7 files changed, 415 insertions(+), 406 deletions(-) create mode 100644 docs/src/whatsnew/dev.rst rename docs/src/whatsnew/{latest.rst.template => dev.rst.template} (100%) mode change 100644 => 120000 docs/src/whatsnew/latest.rst diff --git a/docs/src/developers_guide/documenting/whats_new_contributions.rst b/docs/src/developers_guide/documenting/whats_new_contributions.rst index ebb553024bc..576fc5f6a68 100644 --- a/docs/src/developers_guide/documenting/whats_new_contributions.rst +++ b/docs/src/developers_guide/documenting/whats_new_contributions.rst @@ -4,16 +4,21 @@ Contributing a "What's New" Entry ================================= -Iris uses a file named ``latest.rst`` to keep a draft of upcoming changes -that will form the next release. Contributions to the :ref:`iris_whatsnew` +Iris uses a file named ``dev.rst`` to keep a draft of upcoming development changes +that will form the next stable release. 
Contributions to the :ref:`iris_whatsnew` document are written by the developer most familiar with the change made. The contribution should be included as part of the Iris Pull Request that introduces the change. -The ``latest.rst`` and the past release notes are kept in -``docs/src/whatsnew/``. If you are writing the first contribution after -an Iris release: **create the new** ``latest.rst`` by copying the content from -``latest.rst.template`` in the same directory. +The ``dev.rst`` and the past release notes are kept in the +``docs/src/whatsnew/`` directory. If you are writing the first contribution after +an Iris release: **create the new** ``dev.rst`` by copying the content from +``dev.rst.template`` in the same directory. + +.. note:: + + Ensure that the symbolic link ``latest.rst`` references the ``dev.rst`` file + within the ``docs/src/whatsnew`` directory. Since the `Contribution categories`_ include Internal changes, **all** Iris Pull Requests should be accompanied by a "What's New" contribution. @@ -22,7 +27,7 @@ Pull Requests should be accompanied by a "What's New" contribution. Git Conflicts ============= -If changes to ``latest.rst`` are being suggested in several simultaneous +If changes to ``dev.rst`` are being suggested in several simultaneous Iris Pull Requests, Git will likely encounter merge conflicts. If this situation is thought likely (large PR, high repo activity etc.): @@ -43,7 +48,7 @@ situation is thought likely (large PR, high repo activity etc.): * PR reviewer: review the "What's New" PR, merge once acceptable -These measures should mean the suggested ``latest.rst`` changes are outstanding +These measures should mean the suggested ``dev.rst`` changes are outstanding for the minimum time, minimising conflicts and minimising the need to rebase or merge from trunk. 
diff --git a/docs/src/developers_guide/release.rst b/docs/src/developers_guide/release.rst index 09b884302b8..f4d44781fc6 100644 --- a/docs/src/developers_guide/release.rst +++ b/docs/src/developers_guide/release.rst @@ -183,9 +183,9 @@ back onto the ``SciTools/iris`` ``main`` branch. To achieve this, first cut a local branch from the latest ``main`` branch, and `git merge` the :literal:`.x` release branch into it. Ensure that the -``iris.__version__``, ``docs/src/whatsnew/index.rst`` and ``docs/src/whatsnew/latest.rst`` -are correct, before committing these changes and then proposing a pull-request -on the ``main`` branch of ``SciTools/iris``. +``iris.__version__``, ``docs/src/whatsnew/index.rst``, ``docs/src/whatsnew/dev.rst``, +and ``docs/src/whatsnew/latest.rst`` are correct, before committing these changes +and then proposing a pull-request on the ``main`` branch of ``SciTools/iris``. Point Releases @@ -218,9 +218,11 @@ Release Steps #. Update the ``iris.__init__.py`` version string e.g., to ``1.9.0`` #. Update the ``whatsnew`` for the release: - * Use ``git`` to rename ``docs/src/whatsnew/latest.rst`` to the release + * Use ``git`` to rename ``docs/src/whatsnew/dev.rst`` to the release version file ``v1.9.rst`` - * Use ``git`` to delete the ``docs/src/whatsnew/latest.rst.template`` file + * Update the symbolic link ``latest.rst`` to reference the latest + whatsnew ``v1.9.rst`` + * Use ``git`` to delete the ``docs/src/whatsnew/dev.rst.template`` file * In ``v1.9.rst`` remove the ``[unreleased]`` caption from the page title. Note that, the Iris version and release date are updated automatically when the documentation is built @@ -229,11 +231,11 @@ Release Steps dropdown at the top of the file, which provides extra detail on notable changes * Use ``git`` to add and commit all changes, including removal of - ``latest.rst.template`` + ``dev.rst.template`` and update to the ``latest.rst`` symbolic link. #. 
Update the ``whatsnew`` index ``docs/src/whatsnew/index.rst`` - * Remove the reference to ``latest.rst`` + * Remove the reference to ``dev.rst`` * Add a reference to ``v1.9.rst`` to the top of the list #. Check your changes by building the documentation and reviewing @@ -254,13 +256,6 @@ Post Release Steps `Read The Docs`_ to ensure that the appropriate versions are ``Active`` and/or ``Hidden``. To do this ``Edit`` the appropriate version e.g., see `Editing v3.0.0rc0`_ (must be logged into Read the Docs). -#. Copy ``docs/src/whatsnew/latest.rst.template`` to - ``docs/src/whatsnew/latest.rst``. This will reset - the file with the ``unreleased`` heading and placeholders for the - ``whatsnew`` headings -#. Add back in the reference to ``latest.rst`` to the ``whatsnew`` index - ``docs/src/whatsnew/index.rst`` -#. Update ``iris.__init__.py`` version string to show as ``1.10.dev0`` #. Merge back to ``main`` diff --git a/docs/src/index.rst b/docs/src/index.rst index d6fc5f2f7e0..e6a787a2200 100644 --- a/docs/src/index.rst +++ b/docs/src/index.rst @@ -98,6 +98,15 @@ For **Iris 2.4** and earlier documentation please see the generated/gallery/index +.. toctree:: + :maxdepth: 1 + :caption: What's New in Iris + :hidden: + + whatsnew/latest + Archive + + .. toctree:: :maxdepth: 1 :caption: User Guide @@ -154,6 +163,5 @@ For **Iris 2.4** and earlier documentation please see the :hidden: generated/api/iris - whatsnew/index techpapers/index copyright diff --git a/docs/src/whatsnew/dev.rst b/docs/src/whatsnew/dev.rst new file mode 100644 index 00000000000..e2d4c2bc0bb --- /dev/null +++ b/docs/src/whatsnew/dev.rst @@ -0,0 +1,382 @@ +.. include:: ../common_links.inc + +|iris_version| |build_date| [unreleased] +**************************************** + +This document explains the changes made to Iris for this release +(:doc:`View all changes `.) + + +.. 
dropdown:: :opticon:`report` |iris_version| Release Highlights + :container: + shadow + :title: text-primary text-center font-weight-bold + :body: bg-light + :animate: fade-in + :open: + + The highlights for this minor release of Iris include: + + * We've added experimental support for + :ref:`Meshes `, which can now be loaded and + attached to a cube. Mesh support is based on the based on `CF-UGRID`_ + model. + * We've also dropped support for ``Python 3.7``. + + And finally, get in touch with us on :issue:`GitHub` if you have + any issues or feature requests for improving Iris. Enjoy! + + +📢 Announcements +================ + +#. Welcome to `@wjbenfold`_, `@tinyendian`_, `@larsbarring`_, `@bsherratt`_ and + `@aaronspring`_ who made their first contributions to Iris. The first of + many we hope! +#. Congratulations to `@wjbenfold`_ who has become a core developer for Iris! 🎉 + + +✨ Features +=========== + +#. `@bjlittle`_, `@pp-mo`_, `@trexfeathers`_ and `@stephenworsley`_ added + support for :ref:`unstructured meshes `. This involved + adding a data model (:pull:`3968`, :pull:`4014`, :pull:`4027`, :pull:`4036`, + :pull:`4053`, :pull:`4439`) and API (:pull:`4063`, :pull:`4064`), and + supporting representation (:pull:`4033`, :pull:`4054`) of data on meshes. + Most of this new API can be found in :mod:`iris.experimental.ugrid`. The key + objects introduced are :class:`iris.experimental.ugrid.mesh.Mesh`, + :class:`iris.experimental.ugrid.mesh.MeshCoord` and + :obj:`iris.experimental.ugrid.load.PARSE_UGRID_ON_LOAD`. + A :class:`~iris.experimental.ugrid.mesh.Mesh` contains a full description of a UGRID + type mesh. :class:`~iris.experimental.ugrid.mesh.MeshCoord`\ s are coordinates that + reference and represent a :class:`~iris.experimental.ugrid.mesh.Mesh` for use + on a :class:`~iris.cube.Cube`. 
:class:`~iris.cube.Cube`\ s are also given the + property :attr:`~iris.cube.Cube.mesh` which returns a + :class:`~iris.experimental.ugrid.mesh.Mesh` if one is attached to the + :class:`~iris.cube.Cube` via a :class:`~iris.experimental.ugrid.mesh.MeshCoord`. + +#. `@trexfeathers`_ added support for loading unstructured mesh data from netcdf data, + for files using the `CF-UGRID`_ conventions. + The context manager :obj:`~iris.experimental.ugrid.load.PARSE_UGRID_ON_LOAD` + provides a way to load UGRID files so that :class:`~iris.cube.Cube`\ s can be + returned with a :class:`~iris.experimental.ugrid.mesh.Mesh` attached. + (:pull:`4058`). + +#. `@pp-mo`_ added support to save cubes with :ref:`meshes ` to netcdf + files, using the `CF-UGRID`_ conventions. + The existing :meth:`iris.save` function now does this, when saving cubes with meshes. + A routine :meth:`iris.experimental.ugrid.save.save_mesh` allows saving + :class:`~iris.experimental.ugrid.mesh.Mesh` objects to netcdf *without* any associated data + (i.e. not attached to cubes). + (:pull:`4318` and :pull:`4339`). + +#. `@trexfeathers`_ added :meth:`iris.experimental.ugrid.mesh.Mesh.from_coords` + for inferring a :class:`~iris.experimental.ugrid.mesh.Mesh` from an + appropriate collection of :class:`iris.coords.Coord`\ s. + +#. `@larsbarring`_ updated :func:`~iris.util.equalise_attributes` to return a list of dictionaries + containing the attributes removed from each :class:`~iris.cube.Cube`. (:pull:`4357`) + +#. `@trexfeathers`_ enabled streaming of **all** lazy arrays when saving to + NetCDF files (was previously just :class:`~iris.cube.Cube` + :attr:`~iris.cube.Cube.data`). This is + important given the much greater size of + :class:`~iris.coords.AuxCoord` :attr:`~iris.coords.AuxCoord.points` and + :class:`~iris.experimental.ugrid.mesh.Connectivity` + :attr:`~iris.experimental.ugrid.mesh.Connectivity.indices` under the + :ref:`mesh model `. (:pull:`4375`) + +#. 
`@bsherratt`_ added a ``threshold`` parameter to
+   :meth:`~iris.cube.Cube.intersection` (:pull:`4363`)
+
+#. `@wjbenfold`_ added test data to ci benchmarks so that it is accessible to
+   benchmark scripts. Also added a regridding benchmark that uses this data
+   (:pull:`4402`)
+
+#. `@pp-mo`_ updated to the latest CF Standard Names Table ``v78`` (21 Sept 2021).
+   (:issue:`4479`, :pull:`4483`)
+
+#. `@SimonPeatman`_ added support for filenames in the form of a :class:`~pathlib.PurePath`
+   in :func:`~iris.load`, :func:`~iris.load_cube`, :func:`~iris.load_cubes`,
+   :func:`~iris.load_raw` and :func:`~iris.save` (:issue:`3411`, :pull:`3917`).
+   Support for :class:`~pathlib.PurePath` is yet to be implemented across the rest
+   of Iris (:issue:`4523`).
+
+#. `@pp-mo`_ removed broken tooling for deriving Iris metadata translations
+   from `Metarelate`_. From now we intend to manage phenomenon translation
+   in Iris itself. (:pull:`4484`)
+
+#. `@pp-mo`_ improved printout of various cube data component objects :
+   :class:`~iris.coords.Coord`, :class:`~iris.coords.CellMeasure`,
+   :class:`~iris.coords.AncillaryVariable`,
+   :class:`~iris.experimental.ugrid.mesh.MeshCoord` and
+   :class:`~iris.experimental.ugrid.mesh.Mesh`.
+   These now all provide a more controllable ``summary()`` method, and
+   more convenient and readable ``str()`` and ``repr()`` output in the style of
+   the :class:`iris.cube.Cube`.
+   They also no longer realise lazy data. (:pull:`4499`).
+
+
+🐛 Bugs Fixed
+=============
+
+#. `@rcomer`_ fixed :meth:`~iris.cube.Cube.intersection` for special cases where
+   one cell's bounds align with the requested maximum and negative minimum, fixing
+   :issue:`4221`. (:pull:`4278`)
+
+#. `@bsherratt`_ fixed further edge cases in
+   :meth:`~iris.cube.Cube.intersection`, including :issue:`3698` (:pull:`4363`)
+
+#. 
`@tinyendian`_ fixed the error message produced by :meth:`~iris.cube.CubeList.concatenate_cube`
+   when a cube list contains cubes with different names, which will no longer report
+   "Cube names differ: var1 != var1" if var1 appears multiple times in the list
+   (:issue:`4342`, :pull:`4345`)
+
+#. `@larsbarring`_ fixed :class:`~iris.coord_systems.GeogCS` to handle spherical ellipsoid
+   parameter inverse_flattening = 0 (:issue:`4146`, :pull:`4348`)
+
+#. `@pdearnshaw`_ fixed an error in the call to :class:`cftime.datetime` in
+   :mod:`~iris.fileformats.pp_save_rules` that prevented the saving to PP of climate
+   means for DJF (:pull:`4391`)
+
+#. `@wjbenfold`_ improved the error message for failure of :meth:`~iris.cube.CubeList.concatenate`
+   to indicate that the value of a scalar coordinate may be mismatched, rather than the metadata
+   (:issue:`4096`, :pull:`4387`)
+
+#. `@bsherratt`_ fixed a regression to the NAME file loader introduced in 3.0.4,
+   as well as some long-standing bugs with vertical coordinates and number
+   formats. (:pull:`4411`)
+
+#. `@rcomer`_ fixed :meth:`~iris.cube.Cube.subset` to always return ``None`` if
+   no value match is found. (:pull:`4417`)
+
+#. `@wjbenfold`_ changed :meth:`iris.util.points_step` to stop it from warning
+   when applied to a single point (:issue:`4250`, :pull:`4367`)
+
+#. `@trexfeathers`_ changed :class:`~iris.coords._DimensionalMetadata` and
+   :class:`~iris.experimental.ugrid.Connectivity` equality methods to preserve
+   array laziness, allowing efficient comparisons even with larger-than-memory
+   objects. (:pull:`4439`)
+
+#. `@rcomer`_ modified :meth:`~iris.cube.Cube.aggregated_by` to calculate new
+   coordinate bounds using minimum and maximum for unordered coordinates,
+   fixing :issue:`1528`. (:pull:`4315`)
+
+#. `@wjbenfold`_ changed how a delayed unit conversion is performed on a cube
+   so that a cube with lazy data awaiting a unit conversion can be pickled.
+   (:issue:`4354`, :pull:`4377`)
+
+#. 
`@pp-mo`_ fixed a bug in netcdf loading, whereby *any* rotated latlon coordinate + was mistakenly interpreted as a latitude, usually resulting in two 'latitude's + instead of one latitude and one longitude. + (:issue:`4460`, :pull:`4470`) + +#. `@wjbenfold`_ stopped :meth:`iris.coord_systems.GeogCS.as_cartopy_projection` + from assuming the globe to be the Earth (:issue:`4408`, :pull:`4497`) + +#. `@rcomer`_ corrected the ``long_name`` mapping from UM stash code ``m01s09i215`` + to indicate cloud fraction greater than 7.9 oktas, rather than 7.5 + (:issue:`3305`, :pull:`4535`) + + +💣 Incompatible Changes +======================= + +#. N/A + + +🚀 Performance Enhancements +=========================== + +#. `@wjbenfold`_ resolved an issue that previously caused regridding with lazy + data to take significantly longer than with real data. Benchmark + :class:`benchmarks.HorizontalChunkedRegridding` shows a time decrease + from >10s to 625ms. (:issue:`4280`, :pull:`4400`) + +#. `@bjlittle`_ included an optimisation to :class:`~iris.cube.Cube.coord_dims` + to avoid unnecessary processing whenever a coordinate instance that already + exists within the cube is provided. (:pull:`4549`) + + +🔥 Deprecations +=============== + +#. `@wjbenfold`_ removed :mod:`iris.experimental.equalise_cubes`. In ``v3.0`` + the experimental ``equalise_attributes`` functionality was moved to the + :mod:`iris.util.equalise_attributes` function. Since then, calling the + :func:`iris.experimental.equalise_cubes.equalise_attributes` function raised + an exception. (:issue:`3528`, :pull:`4496`) + +#. `@wjbenfold`_ deprecated :func:`iris.util.approx_equal` in preference for + :func:`math.isclose`. The :func:`~iris.util.approx_equal` function will be + removed in a future release of Iris. (:pull:`4514`) + +#. `@wjbenfold`_ deprecated :mod:`iris.experimental.raster` as it is not + believed to still be in use. The deprecation warnings invite users to contact + the Iris Developers if this isn't the case. 
(:pull:`4525`) + +#. `@wjbenfold`_ deprecated :mod:`iris.fileformats.abf` and + :mod:`iris.fileformats.dot` as they are not believed to still be in use. The + deprecation warnings invite users to contact the Iris Developers if this + isn't the case. (:pull:`4515`) + +#. `@wjbenfold`_ removed the :func:`iris.util.as_compatible_shape` function, + which was deprecated in ``v3.0``. Instead use + :class:`iris.common.resolve.Resolve`. For example, rather than calling + ``as_compatible_shape(src_cube, target_cube)`` replace with + ``Resolve(src_cube, target_cube)(target_cube.core_data())``. (:pull:`4513`) + +#. `@wjbenfold`_ deprecated :func:`iris.analysis.maths.intersection_of_cubes` in + preference for :meth:`iris.cube.CubeList.extract_overlapping`. The + :func:`~iris.analysis.maths.intersection_of_cubes` function will be removed in + a future release of Iris. (:pull:`4541`) + +#. `@pp-mo`_ deprecated :mod:`iris.experimental.regrid_conservative`. This is + now replaced by `iris-emsf-regrid`_. (:pull:`4551`) + +#. `@pp-mo`_ deprecated everything in :mod:`iris.experimental.regrid`. + Most features have a preferred exact alternative, as suggested, *except* + :class:`iris.experimental.regrid.ProjectedUnstructuredLinear` : that has no + identical equivalent, but :class:`iris.analysis.UnstructuredNearest` is + suggested as being quite close (though possibly slower). (:pull:`4548`) + + +🔗 Dependencies +=============== + +#. `@bjlittle`_ introduced the ``cartopy >=0.20`` minimum pin. + (:pull:`4331`) + +#. `@trexfeathers`_ introduced the ``cf-units >=3`` and ``nc-time-axis >=1.3`` + minimum pins. (:pull:`4356`) + +#. `@bjlittle`_ introduced the ``numpy >=1.19`` minimum pin, in + accordance with `NEP-29`_ deprecation policy. (:pull:`4386`) + +#. `@bjlittle`_ dropped support for ``Python 3.7``, as per the `NEP-29`_ + backwards compatibility and deprecation policy schedule. (:pull:`4481`) + + +📚 Documentation +================ + +#. 
`@rcomer`_ updated the "Plotting Wind Direction Using Quiver" Gallery + example. (:pull:`4120`) + +#. `@trexfeathers`_ included `Iris GitHub Discussions`_ in + :ref:`get involved `. (:pull:`4307`) + +#. `@wjbenfold`_ improved readability in :ref:`userguide interpolation + section `. (:pull:`4314`) + +#. `@wjbenfold`_ added explanation about the absence of | operator for + :class:`iris.Constraint` to :ref:`userguide loading section + ` and to api reference documentation. (:pull:`4321`) + +#. `@trexfeathers`_ added more detail on making `iris-test-data`_ available + during :ref:`developer_running_tests`. (:pull:`4359`) + +#. `@lbdreyer`_ added a section to the release documentation outlining the role + of the :ref:`release_manager`. (:pull:`4413`) + +#. `@trexfeathers`_ encouraged contributors to include type hinting in code + they are working on - :ref:`code_formatting`. (:pull:`4390`) + +#. `@wjbenfold`_ updated Cartopy documentation links to point to the renamed + :class:`cartopy.mpl.geoaxes.GeoAxes`. (:pull:`4464`) + +#. `@wjbenfold`_ clarified behaviour of :func:`iris.load` in :ref:`userguide + loading section `. (:pull:`4462`) + +#. `@bjlittle`_ migrated readthedocs to use mambaforge for `faster documentation building`_. + (:pull:`4476`) + +#. `@wjbenfold`_ contributed `@alastair-gemmell`_'s :ref:`step-by-step guide to + contributing to the docs ` to the docs. + (:pull:`4461`) + +#. `@pp-mo`_ improved and corrected docstrings of + :class:`iris.analysis.PointInCell`, making it clear what is the actual + calculation performed. (:pull:`4548`) + +#. `@pp-mo`_ removed reference in docstring of + :class:`iris.analysis.UnstructuredNearest` to the obsolete (deprecated) + :class:`iris.experimental.regrid.ProjectedUnstructuredNearest`. + (:pull:`4548`) + + +💼 Internal +=========== + +#. `@trexfeathers`_ set the linkcheck to ignore + http://www.nationalarchives.gov.uk/doc/open-government-licence since this + always works locally, but never within CI. (:pull:`4307`) + +#. 
`@wjbenfold`_ netCDF integration tests now skip ``TestConstrainedLoad`` if + test data is missing (:pull:`4319`) + +#. `@wjbenfold`_ excluded ``Good First Issue`` labelled issues from being + marked stale. (:pull:`4317`) + +#. `@tkknight`_ added additional make targets for reducing the time of the + documentation build including ``html-noapi`` and ``html-quick``. + Useful for development purposes only. For more information see + :ref:`contributing.documentation.building` the documentation. (:pull:`4333`) + +#. `@rcomer`_ modified the ``animation`` test to prevent it throwing a warning + that sometimes interferes with unrelated tests. (:pull:`4330`) + +#. `@rcomer`_ removed a now redundant workaround in :func:`~iris.plot.contourf`. + (:pull:`4349`) + +#. `@trexfeathers`_ refactored :mod:`iris.experimental.ugrid` into sub-modules. + (:pull:`4347`). + +#. `@bjlittle`_ enabled the `sort-all`_ `pre-commit`_ hook to automatically + sort ``__all__`` entries into alphabetical order. (:pull:`4353`) + +#. `@rcomer`_ modified a NetCDF saver test to prevent it triggering a numpy + deprecation warning. (:issue:`4374`, :pull:`4376`) + +#. `@akuhnregnier`_ removed addition of period from + :func:`~iris.analysis.cartography.wrap_lons` and updated affected tests + using ``assertArrayAllClose`` following :issue:`3993`. + (:pull:`4421`) + +#. `@rcomer`_ updated some tests to work with Matplotlib v3.5. (:pull:`4428`) + +#. `@rcomer`_ applied minor fixes to some regridding tests. (:pull:`4432`) + +#. `@lbdreyer`_ corrected the license PyPI classifier. (:pull:`4435`) + +#. `@aaronspring `_ exchanged ``dask`` with + ``dask-core`` in testing environments reducing the number of dependencies + installed for testing. (:pull:`4434`) + +#. `@wjbenfold`_ prevented github action runs in forks (:issue:`4441`, + :pull:`4444`) + +#. `@wjbenfold`_ fixed tests for hybrid formulae that weren't being found by + nose (:issue:`4431`, :pull:`4450`) + +.. 
comment + Whatsnew author names (@github name) in alphabetical order. Note that, + core dev names are automatically included by the common_links.inc: + +.. _@aaronspring: https://github.com/aaronspring +.. _@akuhnregnier: https://github.com/akuhnregnier +.. _@bsherratt: https://github.com/bsherratt +.. _@larsbarring: https://github.com/larsbarring +.. _@pdearnshaw: https://github.com/pdearnshaw +.. _@SimonPeatman: https://github.com/SimonPeatman +.. _@tinyendian: https://github.com/tinyendian + +.. comment + Whatsnew resources in alphabetical order: + +.. _NEP-29: https://numpy.org/neps/nep-0029-deprecation_policy.html +.. _Metarelate: http://www.metarelate.net/ +.. _UGRID: http://ugrid-conventions.github.io/ugrid-conventions/ +.. _iris-emsf-regrid: https://github.com/SciTools-incubator/iris-esmf-regrid +.. _faster documentation building: https://docs.readthedocs.io/en/stable/guides/conda.html#making-builds-faster-with-mamba +.. _sort-all: https://github.com/aio-libs/sort-all diff --git a/docs/src/whatsnew/latest.rst.template b/docs/src/whatsnew/dev.rst.template similarity index 100% rename from docs/src/whatsnew/latest.rst.template rename to docs/src/whatsnew/dev.rst.template diff --git a/docs/src/whatsnew/index.rst b/docs/src/whatsnew/index.rst index fabb0564843..51f03e8d8ff 100644 --- a/docs/src/whatsnew/index.rst +++ b/docs/src/whatsnew/index.rst @@ -10,7 +10,7 @@ Iris versions. .. toctree:: :maxdepth: 1 - latest.rst + dev.rst 3.1.rst 3.0.rst 2.4.rst diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst deleted file mode 100644 index e2d4c2bc0bb..00000000000 --- a/docs/src/whatsnew/latest.rst +++ /dev/null @@ -1,382 +0,0 @@ -.. include:: ../common_links.inc - -|iris_version| |build_date| [unreleased] -**************************************** - -This document explains the changes made to Iris for this release -(:doc:`View all changes `.) - - -.. 
dropdown:: :opticon:`report` |iris_version| Release Highlights - :container: + shadow - :title: text-primary text-center font-weight-bold - :body: bg-light - :animate: fade-in - :open: - - The highlights for this minor release of Iris include: - - * We've added experimental support for - :ref:`Meshes `, which can now be loaded and - attached to a cube. Mesh support is based on the based on `CF-UGRID`_ - model. - * We've also dropped support for ``Python 3.7``. - - And finally, get in touch with us on :issue:`GitHub` if you have - any issues or feature requests for improving Iris. Enjoy! - - -📢 Announcements -================ - -#. Welcome to `@wjbenfold`_, `@tinyendian`_, `@larsbarring`_, `@bsherratt`_ and - `@aaronspring`_ who made their first contributions to Iris. The first of - many we hope! -#. Congratulations to `@wjbenfold`_ who has become a core developer for Iris! 🎉 - - -✨ Features -=========== - -#. `@bjlittle`_, `@pp-mo`_, `@trexfeathers`_ and `@stephenworsley`_ added - support for :ref:`unstructured meshes `. This involved - adding a data model (:pull:`3968`, :pull:`4014`, :pull:`4027`, :pull:`4036`, - :pull:`4053`, :pull:`4439`) and API (:pull:`4063`, :pull:`4064`), and - supporting representation (:pull:`4033`, :pull:`4054`) of data on meshes. - Most of this new API can be found in :mod:`iris.experimental.ugrid`. The key - objects introduced are :class:`iris.experimental.ugrid.mesh.Mesh`, - :class:`iris.experimental.ugrid.mesh.MeshCoord` and - :obj:`iris.experimental.ugrid.load.PARSE_UGRID_ON_LOAD`. - A :class:`~iris.experimental.ugrid.mesh.Mesh` contains a full description of a UGRID - type mesh. :class:`~iris.experimental.ugrid.mesh.MeshCoord`\ s are coordinates that - reference and represent a :class:`~iris.experimental.ugrid.mesh.Mesh` for use - on a :class:`~iris.cube.Cube`. 
:class:`~iris.cube.Cube`\ s are also given the - property :attr:`~iris.cube.Cube.mesh` which returns a - :class:`~iris.experimental.ugrid.mesh.Mesh` if one is attached to the - :class:`~iris.cube.Cube` via a :class:`~iris.experimental.ugrid.mesh.MeshCoord`. - -#. `@trexfeathers`_ added support for loading unstructured mesh data from netcdf data, - for files using the `CF-UGRID`_ conventions. - The context manager :obj:`~iris.experimental.ugrid.load.PARSE_UGRID_ON_LOAD` - provides a way to load UGRID files so that :class:`~iris.cube.Cube`\ s can be - returned with a :class:`~iris.experimental.ugrid.mesh.Mesh` attached. - (:pull:`4058`). - -#. `@pp-mo`_ added support to save cubes with :ref:`meshes ` to netcdf - files, using the `CF-UGRID`_ conventions. - The existing :meth:`iris.save` function now does this, when saving cubes with meshes. - A routine :meth:`iris.experimental.ugrid.save.save_mesh` allows saving - :class:`~iris.experimental.ugrid.mesh.Mesh` objects to netcdf *without* any associated data - (i.e. not attached to cubes). - (:pull:`4318` and :pull:`4339`). - -#. `@trexfeathers`_ added :meth:`iris.experimental.ugrid.mesh.Mesh.from_coords` - for inferring a :class:`~iris.experimental.ugrid.mesh.Mesh` from an - appropriate collection of :class:`iris.coords.Coord`\ s. - -#. `@larsbarring`_ updated :func:`~iris.util.equalise_attributes` to return a list of dictionaries - containing the attributes removed from each :class:`~iris.cube.Cube`. (:pull:`4357`) - -#. `@trexfeathers`_ enabled streaming of **all** lazy arrays when saving to - NetCDF files (was previously just :class:`~iris.cube.Cube` - :attr:`~iris.cube.Cube.data`). This is - important given the much greater size of - :class:`~iris.coords.AuxCoord` :attr:`~iris.coords.AuxCoord.points` and - :class:`~iris.experimental.ugrid.mesh.Connectivity` - :attr:`~iris.experimental.ugrid.mesh.Connectivity.indices` under the - :ref:`mesh model `. (:pull:`4375`) - -#. 
`@bsherratt`_ added a ``threshold`` parameter to - :meth:`~iris.cube.Cube.intersection` (:pull:`4363`) - -#. `@wjbenfold`_ added test data to ci benchmarks so that it is accessible to - benchmark scripts. Also added a regridding benchmark that uses this data - (:pull:`4402`) - -#. `@pp-mo`_ updated to the latest CF Standard Names Table ``v78`` (21 Sept 2021). - (:issue:`4479`, :pull:`4483`) - -#. `@SimonPeatman`_ added support for filenames in the form of a :class:`~pathlib.PurePath` - in :func:`~iris.load`, :func:`~iris.load_cube`, :func:`~iris.load_cubes`, - :func:`~iris.load_raw` and :func:`~iris.save` (:issue:`3411`, :pull:`3917`). - Support for :class:`~pathlib.PurePath` is yet to be implemented across the rest - of Iris (:issue:`4523`). - -#. `@pp-mo`_ removed broken tooling for deriving Iris metadata translations - from `Metarelate`_. From now we intend to manage phenomenon translation - in Iris itself. (:pull:`4484`) - -#. `@pp-mo`_ improved printout of various cube data component objects : - :class:`~iris.coords.Coord`, :class:`~iris.coords.CellMeasure`, - :class:`~iris.coords.AncillaryVariable`, - :class:`~iris.experimental.ugrid.mesh.MeshCoord` and - :class:`~iris.experimental.ugrid.mesh.Mesh`. - These now all provide a more controllable ``summary()`` method, and - more convenient and readable ``str()`` and ``repr()`` output in the style of - the :class:`iris.cube.Cube`. - They also no longer realise lazy data. (:pull:`4499`). - - -🐛 Bugs Fixed -============= - -#. `@rcomer`_ fixed :meth:`~iris.cube.Cube.intersection` for special cases where - one cell's bounds align with the requested maximum and negative minimum, fixing - :issue:`4221`. (:pull:`4278`) - -#. `@bsherratt`_ fixed further edge cases in - :meth:`~iris.cube.Cube.intersection`, including :issue:`3698` (:pull:`4363`) - -#. 
`@tinyendian`_ fixed the error message produced by :meth:`~iris.cube.CubeList.concatenate_cube` - when a cube list contains cubes with different names, which will no longer report - "Cube names differ: var1 != var1" if var1 appears multiple times in the list - (:issue:`4342`, :pull:`4345`) - -#. `@larsbarring`_ fixed :class:`~iris.coord_systems.GeogCS` to handle spherical ellipsoid - parameter inverse_flattening = 0 (:issue:`4146`, :pull:`4348`) - -#. `@pdearnshaw`_ fixed an error in the call to :class:`cftime.datetime` in - :mod:`~iris.fileformats.pp_save_rules` that prevented the saving to PP of climate - means for DJF (:pull:`4391`) - -#. `@wjbenfold`_ improved the error message for failure of :meth:`~iris.cube.CubeList.concatenate` - to indicate that the value of a scalar coordinate may be mismatched, rather than the metadata - (:issue:`4096`, :pull:`4387`) - -#. `@bsherratt`_ fixed a regression to the NAME file loader introduced in 3.0.4, - as well as some long-standing bugs with vertical coordinates and number - formats. (:pull:`4411`) - -#. `@rcomer`_ fixed :meth:`~iris.cube.Cube.subset` to always return ``None`` if - no value match is found. (:pull:`4417`) - -#. `@wjbenfold`_ changed :meth:`iris.util.points_step` to stop it from warning - when applied to a single point (:issue:`4250`, :pull:`4367`) - -#. `@trexfeathers`_ changed :class:`~iris.coords._DimensionalMetadata` and - :class:`~iris.experimental.ugrid.Connectivity` equality methods to preserve - array laziness, allowing efficient comparisons even with larger-than-memory - objects. (:pull:`4439`) - -#. `@rcomer`_ modified :meth:`~iris.cube.Cube.aggregated_by` to calculate new - coordinate bounds using minimum and maximum for unordered coordinates, - fixing :issue:`1528`. (:pull:`4315`) - -#. `@wjbenfold`_ changed how a delayed unit conversion is performed on a cube - so that a cube with lazy data awaiting a unit conversion can be pickled. - (:issue:`4354`, :pull:`4377`) - -#. 
`@pp-mo`_ fixed a bug in netcdf loading, whereby *any* rotated latlon coordinate - was mistakenly interpreted as a latitude, usually resulting in two 'latitude's - instead of one latitude and one longitude. - (:issue:`4460`, :pull:`4470`) - -#. `@wjbenfold`_ stopped :meth:`iris.coord_systems.GeogCS.as_cartopy_projection` - from assuming the globe to be the Earth (:issue:`4408`, :pull:`4497`) - -#. `@rcomer`_ corrected the ``long_name`` mapping from UM stash code ``m01s09i215`` - to indicate cloud fraction greater than 7.9 oktas, rather than 7.5 - (:issue:`3305`, :pull:`4535`) - - -💣 Incompatible Changes -======================= - -#. N/A - - -🚀 Performance Enhancements -=========================== - -#. `@wjbenfold`_ resolved an issue that previously caused regridding with lazy - data to take significantly longer than with real data. Benchmark - :class:`benchmarks.HorizontalChunkedRegridding` shows a time decrease - from >10s to 625ms. (:issue:`4280`, :pull:`4400`) - -#. `@bjlittle`_ included an optimisation to :class:`~iris.cube.Cube.coord_dims` - to avoid unnecessary processing whenever a coordinate instance that already - exists within the cube is provided. (:pull:`4549`) - - -🔥 Deprecations -=============== - -#. `@wjbenfold`_ removed :mod:`iris.experimental.equalise_cubes`. In ``v3.0`` - the experimental ``equalise_attributes`` functionality was moved to the - :mod:`iris.util.equalise_attributes` function. Since then, calling the - :func:`iris.experimental.equalise_cubes.equalise_attributes` function raised - an exception. (:issue:`3528`, :pull:`4496`) - -#. `@wjbenfold`_ deprecated :func:`iris.util.approx_equal` in preference for - :func:`math.isclose`. The :func:`~iris.util.approx_equal` function will be - removed in a future release of Iris. (:pull:`4514`) - -#. `@wjbenfold`_ deprecated :mod:`iris.experimental.raster` as it is not - believed to still be in use. The deprecation warnings invite users to contact - the Iris Developers if this isn't the case. 
(:pull:`4525`) - -#. `@wjbenfold`_ deprecated :mod:`iris.fileformats.abf` and - :mod:`iris.fileformats.dot` as they are not believed to still be in use. The - deprecation warnings invite users to contact the Iris Developers if this - isn't the case. (:pull:`4515`) - -#. `@wjbenfold`_ removed the :func:`iris.util.as_compatible_shape` function, - which was deprecated in ``v3.0``. Instead use - :class:`iris.common.resolve.Resolve`. For example, rather than calling - ``as_compatible_shape(src_cube, target_cube)`` replace with - ``Resolve(src_cube, target_cube)(target_cube.core_data())``. (:pull:`4513`) - -#. `@wjbenfold`_ deprecated :func:`iris.analysis.maths.intersection_of_cubes` in - preference for :meth:`iris.cube.CubeList.extract_overlapping`. The - :func:`~iris.analysis.maths.intersection_of_cubes` function will be removed in - a future release of Iris. (:pull:`4541`) - -#. `@pp-mo`_ deprecated :mod:`iris.experimental.regrid_conservative`. This is - now replaced by `iris-emsf-regrid`_. (:pull:`4551`) - -#. `@pp-mo`_ deprecated everything in :mod:`iris.experimental.regrid`. - Most features have a preferred exact alternative, as suggested, *except* - :class:`iris.experimental.regrid.ProjectedUnstructuredLinear` : that has no - identical equivalent, but :class:`iris.analysis.UnstructuredNearest` is - suggested as being quite close (though possibly slower). (:pull:`4548`) - - -🔗 Dependencies -=============== - -#. `@bjlittle`_ introduced the ``cartopy >=0.20`` minimum pin. - (:pull:`4331`) - -#. `@trexfeathers`_ introduced the ``cf-units >=3`` and ``nc-time-axis >=1.3`` - minimum pins. (:pull:`4356`) - -#. `@bjlittle`_ introduced the ``numpy >=1.19`` minimum pin, in - accordance with `NEP-29`_ deprecation policy. (:pull:`4386`) - -#. `@bjlittle`_ dropped support for ``Python 3.7``, as per the `NEP-29`_ - backwards compatibility and deprecation policy schedule. (:pull:`4481`) - - -📚 Documentation -================ - -#. 
`@rcomer`_ updated the "Plotting Wind Direction Using Quiver" Gallery - example. (:pull:`4120`) - -#. `@trexfeathers`_ included `Iris GitHub Discussions`_ in - :ref:`get involved `. (:pull:`4307`) - -#. `@wjbenfold`_ improved readability in :ref:`userguide interpolation - section `. (:pull:`4314`) - -#. `@wjbenfold`_ added explanation about the absence of | operator for - :class:`iris.Constraint` to :ref:`userguide loading section - ` and to api reference documentation. (:pull:`4321`) - -#. `@trexfeathers`_ added more detail on making `iris-test-data`_ available - during :ref:`developer_running_tests`. (:pull:`4359`) - -#. `@lbdreyer`_ added a section to the release documentation outlining the role - of the :ref:`release_manager`. (:pull:`4413`) - -#. `@trexfeathers`_ encouraged contributors to include type hinting in code - they are working on - :ref:`code_formatting`. (:pull:`4390`) - -#. `@wjbenfold`_ updated Cartopy documentation links to point to the renamed - :class:`cartopy.mpl.geoaxes.GeoAxes`. (:pull:`4464`) - -#. `@wjbenfold`_ clarified behaviour of :func:`iris.load` in :ref:`userguide - loading section `. (:pull:`4462`) - -#. `@bjlittle`_ migrated readthedocs to use mambaforge for `faster documentation building`_. - (:pull:`4476`) - -#. `@wjbenfold`_ contributed `@alastair-gemmell`_'s :ref:`step-by-step guide to - contributing to the docs ` to the docs. - (:pull:`4461`) - -#. `@pp-mo`_ improved and corrected docstrings of - :class:`iris.analysis.PointInCell`, making it clear what is the actual - calculation performed. (:pull:`4548`) - -#. `@pp-mo`_ removed reference in docstring of - :class:`iris.analysis.UnstructuredNearest` to the obsolete (deprecated) - :class:`iris.experimental.regrid.ProjectedUnstructuredNearest`. - (:pull:`4548`) - - -💼 Internal -=========== - -#. `@trexfeathers`_ set the linkcheck to ignore - http://www.nationalarchives.gov.uk/doc/open-government-licence since this - always works locally, but never within CI. (:pull:`4307`) - -#. 
`@wjbenfold`_ netCDF integration tests now skip ``TestConstrainedLoad`` if - test data is missing (:pull:`4319`) - -#. `@wjbenfold`_ excluded ``Good First Issue`` labelled issues from being - marked stale. (:pull:`4317`) - -#. `@tkknight`_ added additional make targets for reducing the time of the - documentation build including ``html-noapi`` and ``html-quick``. - Useful for development purposes only. For more information see - :ref:`contributing.documentation.building` the documentation. (:pull:`4333`) - -#. `@rcomer`_ modified the ``animation`` test to prevent it throwing a warning - that sometimes interferes with unrelated tests. (:pull:`4330`) - -#. `@rcomer`_ removed a now redundant workaround in :func:`~iris.plot.contourf`. - (:pull:`4349`) - -#. `@trexfeathers`_ refactored :mod:`iris.experimental.ugrid` into sub-modules. - (:pull:`4347`). - -#. `@bjlittle`_ enabled the `sort-all`_ `pre-commit`_ hook to automatically - sort ``__all__`` entries into alphabetical order. (:pull:`4353`) - -#. `@rcomer`_ modified a NetCDF saver test to prevent it triggering a numpy - deprecation warning. (:issue:`4374`, :pull:`4376`) - -#. `@akuhnregnier`_ removed addition of period from - :func:`~iris.analysis.cartography.wrap_lons` and updated affected tests - using ``assertArrayAllClose`` following :issue:`3993`. - (:pull:`4421`) - -#. `@rcomer`_ updated some tests to work with Matplotlib v3.5. (:pull:`4428`) - -#. `@rcomer`_ applied minor fixes to some regridding tests. (:pull:`4432`) - -#. `@lbdreyer`_ corrected the license PyPI classifier. (:pull:`4435`) - -#. `@aaronspring `_ exchanged ``dask`` with - ``dask-core`` in testing environments reducing the number of dependencies - installed for testing. (:pull:`4434`) - -#. `@wjbenfold`_ prevented github action runs in forks (:issue:`4441`, - :pull:`4444`) - -#. `@wjbenfold`_ fixed tests for hybrid formulae that weren't being found by - nose (:issue:`4431`, :pull:`4450`) - -.. 
comment - Whatsnew author names (@github name) in alphabetical order. Note that, - core dev names are automatically included by the common_links.inc: - -.. _@aaronspring: https://github.com/aaronspring -.. _@akuhnregnier: https://github.com/akuhnregnier -.. _@bsherratt: https://github.com/bsherratt -.. _@larsbarring: https://github.com/larsbarring -.. _@pdearnshaw: https://github.com/pdearnshaw -.. _@SimonPeatman: https://github.com/SimonPeatman -.. _@tinyendian: https://github.com/tinyendian - -.. comment - Whatsnew resources in alphabetical order: - -.. _NEP-29: https://numpy.org/neps/nep-0029-deprecation_policy.html -.. _Metarelate: http://www.metarelate.net/ -.. _UGRID: http://ugrid-conventions.github.io/ugrid-conventions/ -.. _iris-emsf-regrid: https://github.com/SciTools-incubator/iris-esmf-regrid -.. _faster documentation building: https://docs.readthedocs.io/en/stable/guides/conda.html#making-builds-faster-with-mamba -.. _sort-all: https://github.com/aio-libs/sort-all diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst new file mode 120000 index 00000000000..56aebe92dd4 --- /dev/null +++ b/docs/src/whatsnew/latest.rst @@ -0,0 +1 @@ +dev.rst \ No newline at end of file From 9d598f199806ad2f16cd826b4108cbba5f687952 Mon Sep 17 00:00:00 2001 From: lbdreyer Date: Mon, 31 Jan 2022 17:23:53 +0000 Subject: [PATCH 41/69] 3.2 version and whats new (#4559) --- docs/src/whatsnew/{dev.rst => 3.2.rst} | 9 +- docs/src/whatsnew/dev.rst.template | 112 ------------------------- docs/src/whatsnew/index.rst | 2 +- docs/src/whatsnew/latest.rst | 2 +- lib/iris/__init__.py | 2 +- 5 files changed, 7 insertions(+), 120 deletions(-) rename docs/src/whatsnew/{dev.rst => 3.2.rst} (98%) delete mode 100644 docs/src/whatsnew/dev.rst.template diff --git a/docs/src/whatsnew/dev.rst b/docs/src/whatsnew/3.2.rst similarity index 98% rename from docs/src/whatsnew/dev.rst rename to docs/src/whatsnew/3.2.rst index e2d4c2bc0bb..c78e1283d6c 100644 --- 
a/docs/src/whatsnew/dev.rst +++ b/docs/src/whatsnew/3.2.rst @@ -1,13 +1,13 @@ .. include:: ../common_links.inc -|iris_version| |build_date| [unreleased] -**************************************** +v3.2 (31 Jan 2022) [unreleased] +******************************* This document explains the changes made to Iris for this release (:doc:`View all changes `.) -.. dropdown:: :opticon:`report` |iris_version| Release Highlights +.. dropdown:: :opticon:`report` v3.2.0 Release Highlights :container: + shadow :title: text-primary text-center font-weight-bold :body: bg-light @@ -18,8 +18,7 @@ This document explains the changes made to Iris for this release * We've added experimental support for :ref:`Meshes `, which can now be loaded and - attached to a cube. Mesh support is based on the based on `CF-UGRID`_ - model. + attached to a cube. Mesh support is based on the `CF-UGRID`_ model. * We've also dropped support for ``Python 3.7``. And finally, get in touch with us on :issue:`GitHub` if you have diff --git a/docs/src/whatsnew/dev.rst.template b/docs/src/whatsnew/dev.rst.template deleted file mode 100644 index 79c578ca655..00000000000 --- a/docs/src/whatsnew/dev.rst.template +++ /dev/null @@ -1,112 +0,0 @@ -.. include:: ../common_links.inc - -|iris_version| |build_date| [unreleased] -**************************************** - -This document explains the changes made to Iris for this release -(:doc:`View all changes `.) - - -.. dropdown:: :opticon:`report` |iris_version| Release Highlights - :container: + shadow - :title: text-primary text-center font-weight-bold - :body: bg-light - :animate: fade-in - :open: - - The highlights for this major/minor release of Iris include: - - * N/A - - And finally, get in touch with us on :issue:`GitHub` if you have - any issues or feature requests for improving Iris. Enjoy! 
- - -NOTE: section below is a template for bugfix patches -==================================================== - (Please remove this section when creating an initial 'latest.rst') - -v3.X.X (DD MMM YYYY) -==================== - -.. dropdown:: :opticon:`alert` v3.X.X Patches - :container: + shadow - :title: text-primary text-center font-weight-bold - :body: bg-light - :animate: fade-in - - The patches in this release of Iris include: - - #. N/A - -NOTE: section above is a template for bugfix patches -==================================================== - (Please remove this section when creating an initial 'latest.rst') - - - -📢 Announcements -================ - -#. N/A - - -✨ Features -=========== - -#. N/A - - -🐛 Bugs Fixed -============= - -#. N/A - - -💣 Incompatible Changes -======================= - -#. N/A - - -🚀 Performance Enhancements -=========================== - -#. N/A - - -🔥 Deprecations -=============== - -#. N/A - - -🔗 Dependencies -=============== - -#. N/A - - -📚 Documentation -================ - -#. N/A - - -💼 Internal -=========== - -#. N/A - - -.. comment - Whatsnew author names (@github name) in alphabetical order. Note that, - core dev names are automatically included by the common_links.inc: - - - - -.. comment - Whatsnew resources in alphabetical order: - - diff --git a/docs/src/whatsnew/index.rst b/docs/src/whatsnew/index.rst index 51f03e8d8ff..f425e649b94 100644 --- a/docs/src/whatsnew/index.rst +++ b/docs/src/whatsnew/index.rst @@ -10,7 +10,7 @@ Iris versions. .. 
toctree:: :maxdepth: 1 - dev.rst + 3.2.rst 3.1.rst 3.0.rst 2.4.rst diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 56aebe92dd4..2bdbea5d853 120000 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -1 +1 @@ -dev.rst \ No newline at end of file +3.2.rst \ No newline at end of file diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py index 26f03c05663..aca4e77e888 100644 --- a/lib/iris/__init__.py +++ b/lib/iris/__init__.py @@ -104,7 +104,7 @@ def callback(cube, field, filename): # Iris revision. -__version__ = "3.2.dev0" +__version__ = "3.2.0rc0" # Restrict the names imported when using "from iris import *" __all__ = [ From 678f4b523ea41c1f77581b7910c6b3d11d598f72 Mon Sep 17 00:00:00 2001 From: lbdreyer Date: Fri, 4 Feb 2022 09:27:16 +0000 Subject: [PATCH 42/69] Reset whats new (#4563) * reset whats new * Specify minor in whats new --- docs/src/whatsnew/dev.rst | 317 ++--------------------------- docs/src/whatsnew/dev.rst.template | 4 +- 2 files changed, 14 insertions(+), 307 deletions(-) diff --git a/docs/src/whatsnew/dev.rst b/docs/src/whatsnew/dev.rst index e2d4c2bc0bb..27ed876a204 100644 --- a/docs/src/whatsnew/dev.rst +++ b/docs/src/whatsnew/dev.rst @@ -16,11 +16,7 @@ This document explains the changes made to Iris for this release The highlights for this minor release of Iris include: - * We've added experimental support for - :ref:`Meshes `, which can now be loaded and - attached to a cube. Mesh support is based on the based on `CF-UGRID`_ - model. - * We've also dropped support for ``Python 3.7``. + * N/A And finally, get in touch with us on :issue:`GitHub` if you have any issues or feature requests for improving Iris. Enjoy! @@ -29,154 +25,19 @@ This document explains the changes made to Iris for this release 📢 Announcements ================ -#. Welcome to `@wjbenfold`_, `@tinyendian`_, `@larsbarring`_, `@bsherratt`_ and - `@aaronspring`_ who made their first contributions to Iris. 
The first of - many we hope! -#. Congratulations to `@wjbenfold`_ who has become a core developer for Iris! 🎉 +#. N/A ✨ Features =========== -#. `@bjlittle`_, `@pp-mo`_, `@trexfeathers`_ and `@stephenworsley`_ added - support for :ref:`unstructured meshes `. This involved - adding a data model (:pull:`3968`, :pull:`4014`, :pull:`4027`, :pull:`4036`, - :pull:`4053`, :pull:`4439`) and API (:pull:`4063`, :pull:`4064`), and - supporting representation (:pull:`4033`, :pull:`4054`) of data on meshes. - Most of this new API can be found in :mod:`iris.experimental.ugrid`. The key - objects introduced are :class:`iris.experimental.ugrid.mesh.Mesh`, - :class:`iris.experimental.ugrid.mesh.MeshCoord` and - :obj:`iris.experimental.ugrid.load.PARSE_UGRID_ON_LOAD`. - A :class:`~iris.experimental.ugrid.mesh.Mesh` contains a full description of a UGRID - type mesh. :class:`~iris.experimental.ugrid.mesh.MeshCoord`\ s are coordinates that - reference and represent a :class:`~iris.experimental.ugrid.mesh.Mesh` for use - on a :class:`~iris.cube.Cube`. :class:`~iris.cube.Cube`\ s are also given the - property :attr:`~iris.cube.Cube.mesh` which returns a - :class:`~iris.experimental.ugrid.mesh.Mesh` if one is attached to the - :class:`~iris.cube.Cube` via a :class:`~iris.experimental.ugrid.mesh.MeshCoord`. - -#. `@trexfeathers`_ added support for loading unstructured mesh data from netcdf data, - for files using the `CF-UGRID`_ conventions. - The context manager :obj:`~iris.experimental.ugrid.load.PARSE_UGRID_ON_LOAD` - provides a way to load UGRID files so that :class:`~iris.cube.Cube`\ s can be - returned with a :class:`~iris.experimental.ugrid.mesh.Mesh` attached. - (:pull:`4058`). - -#. `@pp-mo`_ added support to save cubes with :ref:`meshes ` to netcdf - files, using the `CF-UGRID`_ conventions. - The existing :meth:`iris.save` function now does this, when saving cubes with meshes. 
- A routine :meth:`iris.experimental.ugrid.save.save_mesh` allows saving - :class:`~iris.experimental.ugrid.mesh.Mesh` objects to netcdf *without* any associated data - (i.e. not attached to cubes). - (:pull:`4318` and :pull:`4339`). - -#. `@trexfeathers`_ added :meth:`iris.experimental.ugrid.mesh.Mesh.from_coords` - for inferring a :class:`~iris.experimental.ugrid.mesh.Mesh` from an - appropriate collection of :class:`iris.coords.Coord`\ s. - -#. `@larsbarring`_ updated :func:`~iris.util.equalise_attributes` to return a list of dictionaries - containing the attributes removed from each :class:`~iris.cube.Cube`. (:pull:`4357`) - -#. `@trexfeathers`_ enabled streaming of **all** lazy arrays when saving to - NetCDF files (was previously just :class:`~iris.cube.Cube` - :attr:`~iris.cube.Cube.data`). This is - important given the much greater size of - :class:`~iris.coords.AuxCoord` :attr:`~iris.coords.AuxCoord.points` and - :class:`~iris.experimental.ugrid.mesh.Connectivity` - :attr:`~iris.experimental.ugrid.mesh.Connectivity.indices` under the - :ref:`mesh model `. (:pull:`4375`) - -#. `@bsherratt`_ added a ``threshold`` parameter to - :meth:`~iris.cube.Cube.intersection` (:pull:`4363`) - -#. `@wjbenfold`_ added test data to ci benchmarks so that it is accessible to - benchmark scripts. Also added a regridding benchmark that uses this data - (:pull:`4402`) - -#. `@pp-mo`_ updated to the latest CF Standard Names Table ``v78`` (21 Sept 2021). - (:issue:`4479`, :pull:`4483`) - -#. `@SimonPeatman`_ added support for filenames in the form of a :class:`~pathlib.PurePath` - in :func:`~iris.load`, :func:`~iris.load_cube`, :func:`~iris.load_cubes`, - :func:`~iris.load_raw` and :func:`~iris.save` (:issue:`3411`, :pull:`3917`). - Support for :class:`~pathlib.PurePath` is yet to be implemented across the rest - of Iris (:issue:`4523`). - -#. `@pp-mo`_ removed broken tooling for deriving Iris metadata translations - from `Metarelate`_. 
From now we intend to manage phenonemon translation - in Iris itself. (:pull:`4484`) - -#. `@pp-mo`_ improved printout of various cube data component objects : - :class:`~iris.coords.Coord`, :class:`~iris.coords.CellMeasure`, - :class:`~iris.coords.AncillaryVariable`, - :class:`~iris.experimental.ugrid.mesh.MeshCoord` and - :class:`~iris.experimental.ugrid.mesh.Mesh`. - These now all provide a more controllable ``summary()`` method, and - more convenient and readable ``str()`` and ``repr()`` output in the style of - the :class:`iris.cube.Cube`. - They also no longer realise lazy data. (:pull:`4499`). +#. N/A 🐛 Bugs Fixed ============= -#. `@rcomer`_ fixed :meth:`~iris.cube.Cube.intersection` for special cases where - one cell's bounds align with the requested maximum and negative minimum, fixing - :issue:`4221`. (:pull:`4278`) - -#. `@bsherratt`_ fixed further edge cases in - :meth:`~iris.cube.Cube.intersection`, including :issue:`3698` (:pull:`4363`) - -#. `@tinyendian`_ fixed the error message produced by :meth:`~iris.cube.CubeList.concatenate_cube` - when a cube list contains cubes with different names, which will no longer report - "Cube names differ: var1 != var1" if var1 appears multiple times in the list - (:issue:`4342`, :pull:`4345`) - -#. `@larsbarring`_ fixed :class:`~iris.coord_systems.GeoCS` to handle spherical ellipsoid - parameter inverse_flattening = 0 (:issue:`4146`, :pull:`4348`) - -#. `@pdearnshaw`_ fixed an error in the call to :class:`cftime.datetime` in - :mod:`~iris.fileformats.pp_save_rules` that prevented the saving to PP of climate - means for DJF (:pull:`4391`) - -#. `@wjbenfold`_ improved the error message for failure of :meth:`~iris.cube.CubeList.concatenate` - to indicate that the value of a scalar coordinate may be mismatched, rather than the metadata - (:issue:`4096`, :pull:`4387`) - -#. 
`@bsherratt`_ fixed a regression to the NAME file loader introduced in 3.0.4, - as well as some long-standing bugs with vertical coordinates and number - formats. (:pull:`4411`) - -#. `@rcomer`_ fixed :meth:`~iris.cube.Cube.subset` to alway return ``None`` if - no value match is found. (:pull:`4417`) - -#. `@wjbenfold`_ changed :meth:`iris.util.points_step` to stop it from warning - when applied to a single point (:issue:`4250`, :pull:`4367`) - -#. `@trexfeathers`_ changed :class:`~iris.coords._DimensionalMetadata` and - :class:`~iris.experimental.ugrid.Connectivity` equality methods to preserve - array laziness, allowing efficient comparisons even with larger-than-memory - objects. (:pull:`4439`) - -#. `@rcomer`_ modified :meth:`~iris.cube.Cube.aggregated_by` to calculate new - coordinate bounds using minimum and maximum for unordered coordinates, - fixing :issue:`1528`. (:pull:`4315`) - -#. `@wjbenfold`_ changed how a delayed unit conversion is performed on a cube - so that a cube with lazy data awaiting a unit conversion can be pickled. - (:issue:`4354`, :pull:`4377`) - -#. `@pp-mo`_ fixed a bug in netcdf loading, whereby *any* rotated latlon coordinate - was mistakenly interpreted as a latitude, usually resulting in two 'latitude's - instead of one latitude and one longitude. - (:issue:`4460`, :pull:`4470`) - -#. `@wjbenfold`_ stopped :meth:`iris.coord_systems.GeogCS.as_cartopy_projection` - from assuming the globe to be the Earth (:issue:`4408`, :pull:`4497`) - -#. `@rcomer`_ corrected the ``long_name`` mapping from UM stash code ``m01s09i215`` - to indicate cloud fraction greater than 7.9 oktas, rather than 7.5 - (:issue:`3305`, :pull:`4535`) +#. N/A 💣 Incompatible Changes @@ -188,195 +49,41 @@ This document explains the changes made to Iris for this release 🚀 Performance Enhancements =========================== -#. `@wjbenfold`_ resolved an issue that previously caused regridding with lazy - data to take significantly longer than with real data. 
Benchmark - :class:`benchmarks.HorizontalChunkedRegridding` shows a time decrease - from >10s to 625ms. (:issue:`4280`, :pull:`4400`) - -#. `@bjlittle`_ included an optimisation to :class:`~iris.cube.Cube.coord_dims` - to avoid unnecessary processing whenever a coordinate instance that already - exists within the cube is provided. (:pull:`4549`) +#. N/A 🔥 Deprecations =============== -#. `@wjbenfold`_ removed :mod:`iris.experimental.equalise_cubes`. In ``v3.0`` - the experimental ``equalise_attributes`` functionality was moved to the - :mod:`iris.util.equalise_attributes` function. Since then, calling the - :func:`iris.experimental.equalise_cubes.equalise_attributes` function raised - an exception. (:issue:`3528`, :pull:`4496`) - -#. `@wjbenfold`_ deprecated :func:`iris.util.approx_equal` in preference for - :func:`math.isclose`. The :func:`~iris.util.approx_equal` function will be - removed in a future release of Iris. (:pull:`4514`) - -#. `@wjbenfold`_ deprecated :mod:`iris.experimental.raster` as it is not - believed to still be in use. The deprecation warnings invite users to contact - the Iris Developers if this isn't the case. (:pull:`4525`) - -#. `@wjbenfold`_ deprecated :mod:`iris.fileformats.abf` and - :mod:`iris.fileformats.dot` as they are not believed to still be in use. The - deprecation warnings invite users to contact the Iris Developers if this - isn't the case. (:pull:`4515`) - -#. `@wjbenfold`_ removed the :func:`iris.util.as_compatible_shape` function, - which was deprecated in ``v3.0``. Instead use - :class:`iris.common.resolve.Resolve`. For example, rather than calling - ``as_compatible_shape(src_cube, target_cube)`` replace with - ``Resolve(src_cube, target_cube)(target_cube.core_data())``. (:pull:`4513`) - -#. `@wjbenfold`_ deprecated :func:`iris.analysis.maths.intersection_of_cubes` in - preference for :meth:`iris.cube.CubeList.extract_overlapping`. 
The - :func:`~iris.analysis.maths.intersection_of_cubes` function will be removed in - a future release of Iris. (:pull:`4541`) - -#. `@pp-mo`_ deprecated :mod:`iris.experimental.regrid_conservative`. This is - now replaced by `iris-emsf-regrid`_. (:pull:`4551`) - -#. `@pp-mo`_ deprecated everything in :mod:`iris.experimental.regrid`. - Most features have a preferred exact alternative, as suggested, *except* - :class:`iris.experimental.regrid.ProjectedUnstructuredLinear` : that has no - identical equivalent, but :class:`iris.analysis.UnstructuredNearest` is - suggested as being quite close (though possibly slower). (:pull:`4548`) +#. N/A 🔗 Dependencies =============== -#. `@bjlittle`_ introduced the ``cartopy >=0.20`` minimum pin. - (:pull:`4331`) - -#. `@trexfeathers`_ introduced the ``cf-units >=3`` and ``nc-time-axis >=1.3`` - minimum pins. (:pull:`4356`) - -#. `@bjlittle`_ introduced the ``numpy >=1.19`` minimum pin, in - accordance with `NEP-29`_ deprecation policy. (:pull:`4386`) - -#. `@bjlittle`_ dropped support for ``Python 3.7``, as per the `NEP-29`_ - backwards compatibility and deprecation policy schedule. (:pull:`4481`) +#. N/A 📚 Documentation ================ -#. `@rcomer`_ updated the "Plotting Wind Direction Using Quiver" Gallery - example. (:pull:`4120`) - -#. `@trexfeathers`_ included `Iris GitHub Discussions`_ in - :ref:`get involved `. (:pull:`4307`) - -#. `@wjbenfold`_ improved readability in :ref:`userguide interpolation - section `. (:pull:`4314`) - -#. `@wjbenfold`_ added explanation about the absence of | operator for - :class:`iris.Constraint` to :ref:`userguide loading section - ` and to api reference documentation. (:pull:`4321`) - -#. `@trexfeathers`_ added more detail on making `iris-test-data`_ available - during :ref:`developer_running_tests`. (:pull:`4359`) - -#. `@lbdreyer`_ added a section to the release documentation outlining the role - of the :ref:`release_manager`. (:pull:`4413`) - -#. 
`@trexfeathers`_ encouraged contributors to include type hinting in code - they are working on - :ref:`code_formatting`. (:pull:`4390`) - -#. `@wjbenfold`_ updated Cartopy documentation links to point to the renamed - :class:`cartopy.mpl.geoaxes.GeoAxes`. (:pull:`4464`) - -#. `@wjbenfold`_ clarified behaviour of :func:`iris.load` in :ref:`userguide - loading section `. (:pull:`4462`) - -#. `@bjlittle`_ migrated readthedocs to use mambaforge for `faster documentation building`_. - (:pull:`4476`) - -#. `@wjbenfold`_ contributed `@alastair-gemmell`_'s :ref:`step-by-step guide to - contributing to the docs ` to the docs. - (:pull:`4461`) - -#. `@pp-mo`_ improved and corrected docstrings of - :class:`iris.analysis.PointInCell`, making it clear what is the actual - calculation performed. (:pull:`4548`) - -#. `@pp-mo`_ removed reference in docstring of - :class:`iris.analysis.UnstructuredNearest` to the obsolete (deprecated) - :class:`iris.experimental.regrid.ProjectedUnstructuredNearest`. - (:pull:`4548`) +#. N/A 💼 Internal =========== -#. `@trexfeathers`_ set the linkcheck to ignore - http://www.nationalarchives.gov.uk/doc/open-government-licence since this - always works locally, but never within CI. (:pull:`4307`) - -#. `@wjbenfold`_ netCDF integration tests now skip ``TestConstrainedLoad`` if - test data is missing (:pull:`4319`) - -#. `@wjbenfold`_ excluded ``Good First Issue`` labelled issues from being - marked stale. (:pull:`4317`) - -#. `@tkknight`_ added additional make targets for reducing the time of the - documentation build including ``html-noapi`` and ``html-quick``. - Useful for development purposes only. For more information see - :ref:`contributing.documentation.building` the documentation. (:pull:`4333`) - -#. `@rcomer`_ modified the ``animation`` test to prevent it throwing a warning - that sometimes interferes with unrelated tests. (:pull:`4330`) - -#. `@rcomer`_ removed a now redundant workaround in :func:`~iris.plot.contourf`. 
- (:pull:`4349`) - -#. `@trexfeathers`_ refactored :mod:`iris.experimental.ugrid` into sub-modules. - (:pull:`4347`). - -#. `@bjlittle`_ enabled the `sort-all`_ `pre-commit`_ hook to automatically - sort ``__all__`` entries into alphabetical order. (:pull:`4353`) - -#. `@rcomer`_ modified a NetCDF saver test to prevent it triggering a numpy - deprecation warning. (:issue:`4374`, :pull:`4376`) - -#. `@akuhnregnier`_ removed addition of period from - :func:`~iris.analysis.cartography.wrap_lons` and updated affected tests - using ``assertArrayAllClose`` following :issue:`3993`. - (:pull:`4421`) - -#. `@rcomer`_ updated some tests to work with Matplotlib v3.5. (:pull:`4428`) - -#. `@rcomer`_ applied minor fixes to some regridding tests. (:pull:`4432`) - -#. `@lbdreyer`_ corrected the license PyPI classifier. (:pull:`4435`) - -#. `@aaronspring `_ exchanged ``dask`` with - ``dask-core`` in testing environments reducing the number of dependencies - installed for testing. (:pull:`4434`) - -#. `@wjbenfold`_ prevented github action runs in forks (:issue:`4441`, - :pull:`4444`) +#. N/A -#. `@wjbenfold`_ fixed tests for hybrid formulae that weren't being found by - nose (:issue:`4431`, :pull:`4450`) .. comment Whatsnew author names (@github name) in alphabetical order. Note that, core dev names are automatically included by the common_links.inc: -.. _@aaronspring: https://github.com/aaronspring -.. _@akuhnregnier: https://github.com/akuhnregnier -.. _@bsherratt: https://github.com/bsherratt -.. _@larsbarring: https://github.com/larsbarring -.. _@pdearnshaw: https://github.com/pdearnshaw -.. _@SimonPeatman: https://github.com/SimonPeatman -.. _@tinyendian: https://github.com/tinyendian + + .. comment Whatsnew resources in alphabetical order: -.. _NEP-29: https://numpy.org/neps/nep-0029-deprecation_policy.html -.. _Metarelate: http://www.metarelate.net/ -.. _UGRID: http://ugrid-conventions.github.io/ugrid-conventions/ -.. 
_iris-emsf-regrid: https://github.com/SciTools-incubator/iris-esmf-regrid -.. _faster documentation building: https://docs.readthedocs.io/en/stable/guides/conda.html#making-builds-faster-with-mamba -.. _sort-all: https://github.com/aio-libs/sort-all + diff --git a/docs/src/whatsnew/dev.rst.template b/docs/src/whatsnew/dev.rst.template index 79c578ca655..1b36d3f0b01 100644 --- a/docs/src/whatsnew/dev.rst.template +++ b/docs/src/whatsnew/dev.rst.template @@ -24,7 +24,7 @@ This document explains the changes made to Iris for this release NOTE: section below is a template for bugfix patches ==================================================== - (Please remove this section when creating an initial 'latest.rst') + (Please remove this section when creating an initial 'dev.rst') v3.X.X (DD MMM YYYY) ==================== @@ -41,7 +41,7 @@ v3.X.X (DD MMM YYYY) NOTE: section above is a template for bugfix patches ==================================================== - (Please remove this section when creating an initial 'latest.rst') + (Please remove this section when creating an initial 'dev.rst') From 8838e23f7461c575adc841fc5c3304c975805d6a Mon Sep 17 00:00:00 2001 From: Bill Little Date: Fri, 4 Feb 2022 10:24:11 +0000 Subject: [PATCH 43/69] update trove classifiers (#4564) --- setup.cfg | 1 - 1 file changed, 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index 1d3fb8b7c90..c2d31a5ddba 100644 --- a/setup.cfg +++ b/setup.cfg @@ -11,7 +11,6 @@ classifiers = Operating System :: Unix Programming Language :: Python Programming Language :: Python :: 3 :: Only - Programming Language :: Python :: 3.7 Programming Language :: Python :: 3.8 Programming Language :: Python :: Implementation :: CPython Topic :: Scientific/Engineering From 48d354f29f6f21700dbf3b9fdbbe5716150b1525 Mon Sep 17 00:00:00 2001 From: lbdreyer Date: Fri, 4 Feb 2022 23:22:56 +0000 Subject: [PATCH 44/69] Update version to 3.3.dev0 (#4565) --- lib/iris/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py index 26f03c05663..713c163debe 100644 --- a/lib/iris/__init__.py +++ b/lib/iris/__init__.py @@ -104,7 +104,7 @@ def callback(cube, field, filename): # Iris revision. -__version__ = "3.2.dev0" +__version__ = "3.3.dev0" # Restrict the names imported when using "from iris import *" __all__ = [ From 1d5eb3eaf9d183af4e1849e82798bedf876ab53e Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sat, 5 Feb 2022 23:25:38 +0000 Subject: [PATCH 45/69] Updated environment lockfiles (#4567) Co-authored-by: Lockfile bot --- requirements/ci/nox.lock/py38-linux-64.lock | 24 ++++++++++----------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/requirements/ci/nox.lock/py38-linux-64.lock b/requirements/ci/nox.lock/py38-linux-64.lock index 368554bb259..caf6a739b30 100644 --- a/requirements/ci/nox.lock/py38-linux-64.lock +++ b/requirements/ci/nox.lock/py38-linux-64.lock @@ -22,7 +22,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-11.2.0-h1d223b6_12.tar https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.3-h516909a_0.tar.bz2#1378b88874f42ac31b2f8e4f6975cb7b https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.18.1-h7f98852_0.tar.bz2#f26ef8098fab1f719c91eb760d63381a -https://conda.anaconda.org/conda-forge/linux-64/expat-2.4.3-h9c3ff4c_0.tar.bz2#bd783d12b65023e333bb7016de41570b +https://conda.anaconda.org/conda-forge/linux-64/expat-2.4.4-h9c3ff4c_0.tar.bz2#3cedab1fd76644efd516e1b271f2da95 https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 https://conda.anaconda.org/conda-forge/linux-64/geos-3.10.2-h9c3ff4c_0.tar.bz2#fe9a66a351bfa7a84c3108304c7bcba5 
https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.1-h36c2ea0_2.tar.bz2#626e68ae9cc5912d6adb79d318cf962d @@ -74,7 +74,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.10-h9b69904_4.tar.b https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.13-h7f98852_1004.tar.bz2#b3653fdc58d03face9724f602218a904 https://conda.anaconda.org/conda-forge/linux-64/readline-8.1-h46c0cb4_0.tar.bz2#5788de3c8d7a7d64ac56c784c4ef48e6 -https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.27.27-hc3e0081_3.tar.bz2#a47110f41fcbf88fcdf8549d7f69a6d8 +https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-hc3e0081_0.tar.bz2#d4c341e0379c31e9e781d4f204726867 https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.3-hd9c2040_1000.tar.bz2#9e856f78d5c80d5a78f61e72d1d473a3 https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.11-h36c2ea0_1013.tar.bz2#cf7190238072a41e9579e4476a6a60b8 https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.2-ha95c52a_0.tar.bz2#5222b231b1ef49a7f60d40b363469b70 @@ -104,12 +104,12 @@ https://conda.anaconda.org/conda-forge/linux-64/krb5-1.19.2-hcc1bbae_3.tar.bz2#e https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.2.2-h3452ae3_0.tar.bz2#c363665b4aabe56aae4f8981cff5b153 https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.0.3-he3ba5ed_0.tar.bz2#f9dbabc7e01c459ed7a1d1d64b206e9b https://conda.anaconda.org/conda-forge/linux-64/nss-3.74-hb5efdd6_0.tar.bz2#136876ca50177058594f6c2944e95c40 -https://conda.anaconda.org/conda-forge/linux-64/python-3.8.12-hb7a2778_2_cpython.tar.bz2#148ea076514259c7f562fbfba956a693 +https://conda.anaconda.org/conda-forge/linux-64/python-3.8.12-ha38a3c6_3_cpython.tar.bz2#bed445cebcd8f97dce76dc06201928ee https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h7f98852_1.tar.bz2#536cc5db4d0a3ba0630541aec064b5e4 
https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.10-h7f98852_1003.tar.bz2#f59c1242cc1dd93e72c2ee2b360979eb https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.12-py_0.tar.bz2#2489a97287f90176ecdc3ca982b4b0a0 https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c -https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-2.0.10-pyhd8ed1ab_0.tar.bz2#ea77236c8031cfa821720b21b4cb0ceb +https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-2.0.11-pyhd8ed1ab_0.tar.bz2#e51530e33440ea8044edb0076cb40a0f https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.0.0-pyhd8ed1ab_0.tar.bz2#3a8fc8b627d5fb6af827e126a10a86c6 https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.4-pyh9f0ad1d_0.tar.bz2#c08b4c1326b880ed44f3ffb04803332f https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2#a50559fad0affdbb33729a68669ca1cb @@ -143,7 +143,7 @@ https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.3-py_0.ta https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 https://conda.anaconda.org/conda-forge/noarch/toolz-0.11.2-pyhd8ed1ab_0.tar.bz2#f348d1590550371edfac5ed3c1d44f7e https://conda.anaconda.org/conda-forge/noarch/wheel-0.37.1-pyhd8ed1ab_0.tar.bz2#1ca02aaf78d9c70d9a81a3bed5752022 -https://conda.anaconda.org/conda-forge/noarch/zipp-3.7.0-pyhd8ed1ab_0.tar.bz2#947f7f41958eabc0f6e886557512bb76 +https://conda.anaconda.org/conda-forge/noarch/zipp-3.7.0-pyhd8ed1ab_1.tar.bz2#b689b2cbc8481b224777415e1a193170 https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py38h578d9bd_1003.tar.bz2#db8b471d9a764f561a129f94ea215c0a https://conda.anaconda.org/conda-forge/noarch/babel-2.9.1-pyh44b312d_0.tar.bz2#74136ed39bfea0832d338df1e58d013e https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-ha00ac49_1009.tar.bz2#d1dff57b8731c245d3247b46d002e1c9 @@ -157,7 +157,7 @@ 
https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.3.2-py38h1fd1430_1. https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h3cfcdeb_1.tar.bz2#37d7568c595f0cfcd0c493f5ca0344ab https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.0.1-py38h497a2fe_1.tar.bz2#1ef7b5f4826ca48a15e2cd98a5c3436d https://conda.anaconda.org/conda-forge/linux-64/mpi4py-3.1.3-py38he865349_0.tar.bz2#b1b3d6847a68251a1465206ab466b475 -https://conda.anaconda.org/conda-forge/linux-64/numpy-1.22.1-py38h6ae9a64_0.tar.bz2#9ec24c7acb2252816f1f6b6687317432 +https://conda.anaconda.org/conda-forge/linux-64/numpy-1.22.2-py38h6ae9a64_0.tar.bz2#065a900932f904e0182acfcfadc467e3 https://conda.anaconda.org/conda-forge/noarch/packaging-21.3-pyhd8ed1ab_0.tar.bz2#71f1ab2de48613876becddd496371c85 https://conda.anaconda.org/conda-forge/noarch/partd-1.2.0-pyhd8ed1ab_0.tar.bz2#0c32f563d7f22e3a34c95cad8cc95651 https://conda.anaconda.org/conda-forge/linux-64/pillow-6.2.1-py38hd70f55b_1.tar.bz2#80d719bee2b77a106b199150c0829107 @@ -169,7 +169,7 @@ https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0 https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-2.0.2-py38h497a2fe_1.tar.bz2#977d03222271270ea8fe35388bf13752 https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py38h497a2fe_3.tar.bz2#131de7d638aa59fb8afbce59f1a8aa98 https://conda.anaconda.org/conda-forge/linux-64/qt-5.12.9-ha98a1a1_5.tar.bz2#9b27fa0b1044a2119fb1b290617fe06f -https://conda.anaconda.org/conda-forge/linux-64/setuptools-60.5.0-py38h578d9bd_0.tar.bz2#9807c89f3ce846015dbad3c1d04348a5 +https://conda.anaconda.org/conda-forge/linux-64/setuptools-60.7.1-py38h578d9bd_0.tar.bz2#8bf9c51a7e371df1673de909c1f46e6c https://conda.anaconda.org/conda-forge/linux-64/tornado-6.1-py38h497a2fe_2.tar.bz2#63b3b55c98b4239134e0be080f448944 https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-14.0.0-py38h497a2fe_0.tar.bz2#8da7787169411910df2a62dc8ef533e0 
https://conda.anaconda.org/conda-forge/linux-64/virtualenv-20.13.0-py38h578d9bd_0.tar.bz2#561081f4a30990533541979c9ee84732 @@ -177,14 +177,14 @@ https://conda.anaconda.org/conda-forge/linux-64/brotlipy-0.7.0-py38h497a2fe_1003 https://conda.anaconda.org/conda-forge/linux-64/cftime-1.5.2-py38h6c62de6_0.tar.bz2#73892e60ccea826c7f7a2215e48d22cf https://conda.anaconda.org/conda-forge/linux-64/cryptography-36.0.1-py38h3e25421_0.tar.bz2#acc14d0d71dbf74f6a15f2456951b6cf https://conda.anaconda.org/conda-forge/noarch/dask-core-2022.1.1-pyhd8ed1ab_0.tar.bz2#7968db84df10b74d9792d66d7da216df -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.29.0-py38h497a2fe_0.tar.bz2#3d96473ac57b7260a3fc3bdb13d2db79 -https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-3.2.0-hb4a5f5f_0.tar.bz2#d03d53e6bcb97e6a97a1659fb38aa76e +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.29.1-py38h497a2fe_0.tar.bz2#121e02be214af4980911bb2cbd5b2742 +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-3.3.1-hb4a5f5f_0.tar.bz2#abe529a4b140720078f0febe1b6014a4 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.0.3-pyhd8ed1ab_0.tar.bz2#036d872c653780cb26e797e2e2f61b4c https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.8.1-mpi_mpich_h319fa22_1.tar.bz2#7583fbaea3648f692c0c019254bc196c https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py38h6c62de6_1006.tar.bz2#829b1209dfadd431a11048d6eeaf5bef https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.6.0-pyhd8ed1ab_0.tar.bz2#0941325bf48969e2b3b19d0951740950 https://conda.anaconda.org/conda-forge/linux-64/pandas-1.4.0-py38h43a58ef_0.tar.bz2#23427f52c81076594a95c006ebf7552e -https://conda.anaconda.org/conda-forge/noarch/pip-21.3.1-pyhd8ed1ab_0.tar.bz2#e4fe2a9af78ff11f1aced7e62128c6a8 +https://conda.anaconda.org/conda-forge/noarch/pip-22.0.3-pyhd8ed1ab_0.tar.bz2#45dedae69a0ea21cb8566d04b2ca5536 https://conda.anaconda.org/conda-forge/noarch/pygments-2.11.2-pyhd8ed1ab_0.tar.bz2#caef60540e2239e27bf62569a5015e3b 
https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.3.0-py38h5383654_1.tar.bz2#5b600e019fa7c33be73bdb626236936b https://conda.anaconda.org/conda-forge/linux-64/pyqt-impl-5.12.3-py38h0ffb2e6_8.tar.bz2#acfc7625a212c27f7decdca86fdb2aba @@ -195,13 +195,13 @@ https://conda.anaconda.org/conda-forge/linux-64/shapely-1.8.0-py38h596eeab_5.tar https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-napoleon-0.7-py_0.tar.bz2#0bc25ff6f2e34af63ded59692df5f749 https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py38h1fd1430_1.tar.bz2#c494f75082f9c052944fda1b22c83336 https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.0.1-py38h6c62de6_2.tar.bz2#350322b046c129e5802b79358a1343f7 -https://conda.anaconda.org/conda-forge/noarch/identify-2.4.6-pyhd8ed1ab_0.tar.bz2#d4030c75256440b8375b2f32c4ed35cd +https://conda.anaconda.org/conda-forge/noarch/identify-2.4.8-pyhd8ed1ab_0.tar.bz2#d4d25c0b7c1a7a1b0442e061fdd49260 https://conda.anaconda.org/conda-forge/noarch/imagehash-4.2.1-pyhd8ed1ab_0.tar.bz2#01cc8698b6e1a124dc4f585516c27643 https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.5.1-py38hf4fb855_0.tar.bz2#47cf0cab2ae368e1062e75cfbc4277af https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.5.4-mpi_mpich_h1364a43_0.tar.bz2#b6ba4f487ef9fd5d353ff277df06d133 https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.5.8-nompi_py38h2823cc8_101.tar.bz2#1dfe1cdee4532c72f893955259eb3de9 https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.3-h9967ed3_0.tar.bz2#37f1c68380bc5dfe0f5bb2655e207a73 -https://conda.anaconda.org/conda-forge/noarch/pyopenssl-21.0.0-pyhd8ed1ab_0.tar.bz2#8c49efecb7dca466e18b06015e8c88ce +https://conda.anaconda.org/conda-forge/noarch/pyopenssl-22.0.0-pyhd8ed1ab_0.tar.bz2#1d7e241dfaf5475e893d4b824bb71b44 https://conda.anaconda.org/conda-forge/linux-64/pyqtchart-5.12-py38h7400c14_8.tar.bz2#78a2a6cb4ef31f997c1bee8223a9e579 
https://conda.anaconda.org/conda-forge/linux-64/pyqtwebengine-5.12.1-py38h7400c14_8.tar.bz2#857894ea9c5e53c962c3a0932efa71ea https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.20.2-py38ha217159_3.tar.bz2#d7461e191f7a0522e4709612786bdf4e From 8404ff6402488d720daff0062ab81060382bf087 Mon Sep 17 00:00:00 2001 From: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> Date: Thu, 10 Feb 2022 11:44:56 +0000 Subject: [PATCH 46/69] New tool-agnostic ASV environment management (#4571) * New tool-agnostic ASV env management. * Benchmarking only build the latest Python. * Increase benchmark accuracy by increasing rounds. * Fix ASV rounds mistake. * ASV clearer use of _interpolate_commands. --- benchmarks/asv.conf.json | 23 ++- benchmarks/asv_delegated_conda.py | 208 +++++++++++++++++++++++++ benchmarks/nox_asv_plugin.py | 249 ------------------------------ noxfile.py | 9 +- 4 files changed, 231 insertions(+), 258 deletions(-) create mode 100644 benchmarks/asv_delegated_conda.py delete mode 100644 benchmarks/nox_asv_plugin.py diff --git a/benchmarks/asv.conf.json b/benchmarks/asv.conf.json index 9ea1cdb101d..3468b2fca99 100644 --- a/benchmarks/asv.conf.json +++ b/benchmarks/asv.conf.json @@ -3,18 +3,25 @@ "project": "scitools-iris", "project_url": "https://github.com/SciTools/iris", "repo": "..", - "environment_type": "nox-conda", + "environment_type": "conda-delegated", "show_commit_url": "http://github.com/scitools/iris/commit/", "benchmark_dir": "./benchmarks", "env_dir": ".asv/env", "results_dir": ".asv/results", "html_dir": ".asv/html", - "plugins": [".nox_asv_plugin"], - // The commit to checkout to first run Nox to set up the environment. - "nox_setup_commit": "HEAD", - // The path of the noxfile's location relative to the project root. - "noxfile_rel_path": "noxfile.py", - // The ``--session`` arg to be used with ``--install-only`` to prep an environment. 
- "nox_session_name": "tests" + "plugins": [".asv_delegated_conda"], + + // The command(s) that create/update an environment correctly for the + // checked-out commit. + // Interpreted the same as build_command, with following exceptions: + // * No build-time environment variables. + // * Is run in the same environment as the ASV install itself. + "delegated_env_commands": [ + "sed -i 's/_PY_VERSIONS_ALL/_PY_VERSION_LATEST/g' noxfile.py", + "nox --envdir={conf_dir}/.asv/env/nox01 --session=tests --install-only --no-error-on-external-run --verbose" + ], + // The parent directory of the above environment. + // The most recently modified environment in the directory will be used. + "delegated_env_parent": "{conf_dir}/.asv/env/nox01" } diff --git a/benchmarks/asv_delegated_conda.py b/benchmarks/asv_delegated_conda.py new file mode 100644 index 00000000000..250a4e032d8 --- /dev/null +++ b/benchmarks/asv_delegated_conda.py @@ -0,0 +1,208 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +ASV plug-in providing an alternative :class:`asv.plugins.conda.Conda` +subclass that manages the Conda environment via custom user scripts. + +""" + +from os import environ +from os.path import getmtime +from pathlib import Path +from shutil import copy2, copytree, rmtree +from tempfile import TemporaryDirectory + +from asv import util as asv_util +from asv.config import Config +from asv.console import log +from asv.plugins.conda import Conda +from asv.repo import Repo + + +class CondaDelegated(Conda): + """ + Manage a Conda environment using custom user scripts, run at each commit. + + Ignores user input variations - ``matrix`` / ``pythons`` / + ``conda_environment_file``, since environment is being managed outside ASV. 
+ + Original environment creation behaviour is inherited, but upon checking out + a commit the custom script(s) are run and the original environment is + replaced with a symlink to the custom environment. This arrangement is then + re-used in subsequent runs. + + """ + + tool_name = "conda-delegated" + + def __init__( + self, + conf: Config, + python: str, + requirements: dict, + tagged_env_vars: dict, + ) -> None: + """ + Parameters + ---------- + conf : Config instance + + python : str + Version of Python. Must be of the form "MAJOR.MINOR". + + requirements : dict + Dictionary mapping a PyPI package name to a version + identifier string. + + tagged_env_vars : dict + Environment variables, tagged for build vs. non-build + + """ + ignored = ["`python`"] + if requirements: + ignored.append("`requirements`") + if tagged_env_vars: + ignored.append("`tagged_env_vars`") + if conf.conda_environment_file: + ignored.append("`conda_environment_file`") + message = ( + f"Ignoring ASV setting(s): {', '.join(ignored)}. Benchmark " + "environment management is delegated to third party script(s)." + ) + log.warning(message) + requirements = {} + tagged_env_vars = {} + conf.conda_environment_file = None + + super().__init__(conf, python, requirements, tagged_env_vars) + self._update_info() + + self._env_commands = self._interpolate_commands( + conf.delegated_env_commands + ) + # Again using _interpolate_commands to get env parent path - allows use + # of the same ASV env variables. + env_parent_interpolated = self._interpolate_commands( + conf.delegated_env_parent + ) + # Returns list of tuples, we just want the first. + env_parent_first = env_parent_interpolated[0] + # The 'command' is the first item in the returned tuple. 
+ env_parent_string = " ".join(env_parent_first[0]) + self._delegated_env_parent = Path(env_parent_string).resolve() + + @property + def name(self): + """Get a name to uniquely identify this environment.""" + return asv_util.sanitize_filename(self.tool_name) + + def _update_info(self) -> None: + """Make sure class properties reflect the actual environment being used.""" + # Follow symlink if it has been created. + actual_path = Path(self._path).resolve() + self._path = str(actual_path) + + # Get custom environment's Python version if it exists yet. + try: + get_version = ( + "from sys import version_info; " + "print(f'{version_info.major}.{version_info.minor}')" + ) + actual_python = self.run(["-c", get_version]) + self._python = actual_python + except OSError: + pass + + def _prep_env(self) -> None: + """Run the custom environment script(s) and switch to using that environment.""" + message = f"Running delegated environment management for: {self.name}" + log.info(message) + env_path = Path(self._path) + + def copy_asv_files(src_parent: Path, dst_parent: Path) -> None: + """For copying between self._path and a temporary cache.""" + asv_files = list(src_parent.glob("asv*")) + # build_root_path.name usually == "project" . + asv_files += [src_parent / Path(self._build_root).name] + for src_path in asv_files: + dst_path = dst_parent / src_path.name + if not dst_path.exists(): + # Only caching in case the environment has been rebuilt. + # If the dst_path already exists: rebuilding hasn't + # happened. Also a non-issue when copying in the reverse + # direction because the cache dir is temporary. + if src_path.is_dir(): + func = copytree + else: + func = copy2 + func(src_path, dst_path) + + with TemporaryDirectory(prefix="delegated_asv_cache_") as asv_cache: + asv_cache_path = Path(asv_cache) + # Cache all of ASV's files as delegated command may remove and + # re-build the environment. 
+ copy_asv_files(env_path.resolve(), asv_cache_path) + + # Adapt the build_dir to the cache location. + build_root_path = Path(self._build_root) + build_dir_original = build_root_path / self._repo_subdir + build_dir_subpath = build_dir_original.relative_to( + build_root_path.parent + ) + build_dir = asv_cache_path / build_dir_subpath + + # Run the script(s) for delegated environment creation/updating. + # (An adaptation of self._interpolate_and_run_commands). + for command, env, return_codes, cwd in self._env_commands: + local_envs = dict(environ) + local_envs.update(env) + if cwd is None: + cwd = str(build_dir) + _ = asv_util.check_output( + command, + timeout=self._install_timeout, + cwd=cwd, + env=local_envs, + valid_return_codes=return_codes, + ) + + # Replace the env that ASV created with a symlink to the env + # created/updated by the custom script. + delegated_env_path = sorted( + self._delegated_env_parent.glob("*"), + key=getmtime, + reverse=True, + )[0] + if env_path.resolve() != delegated_env_path: + try: + env_path.unlink(missing_ok=True) + except IsADirectoryError: + rmtree(env_path) + env_path.symlink_to( + delegated_env_path, target_is_directory=True + ) + + # Check that environment exists. + try: + env_path.resolve(strict=True) + except FileNotFoundError: + message = f"Path does not resolve to environment: {env_path}" + log.error(message) + raise RuntimeError(message) + + # Restore ASV's files from the cache (if necessary). + copy_asv_files(asv_cache_path, env_path.resolve()) + + # Record new environment information in properties. 
+ self._update_info() + + def checkout_project(self, repo: Repo, commit_hash: str) -> None: + """Check out the working tree of the project at given commit hash.""" + super().checkout_project(repo, commit_hash) + self._prep_env() + log.info( + f"Environment {self.name} updated to spec at {commit_hash[:8]}" + ) diff --git a/benchmarks/nox_asv_plugin.py b/benchmarks/nox_asv_plugin.py deleted file mode 100644 index 6c9ce142721..00000000000 --- a/benchmarks/nox_asv_plugin.py +++ /dev/null @@ -1,249 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -ASV plug-in providing an alternative ``Environment`` subclass, which uses Nox -for environment management. - -""" -from importlib.util import find_spec -from pathlib import Path -from shutil import copy2, copytree -from tempfile import TemporaryDirectory - -from asv import util as asv_util -from asv.config import Config -from asv.console import log -from asv.environment import get_env_name -from asv.plugins.conda import Conda, _find_conda -from asv.repo import Repo, get_repo - - -class NoxConda(Conda): - """ - Manage a Conda environment using Nox, updating environment at each commit. - - Defers environment management to the project's noxfile, which must be able - to create/update the benchmarking environment using ``nox --install-only``, - with the ``--session`` specified in ``asv.conf.json.nox_session_name``. - - Notes - ----- - If not all benchmarked commits support this use of Nox: the plugin will - need to be modified to prep the environment in other ways. 
- - """ - - tool_name = "nox-conda" - - @classmethod - def matches(cls, python: str) -> bool: - """Used by ASV to work out if this type of environment can be used.""" - result = find_spec("nox") is not None - if result: - result = super().matches(python) - - if result: - message = ( - f"NOTE: ASV env match check incomplete. Not possible to know " - f"if selected Nox session (asv.conf.json.nox_session_name) is " - f"compatible with ``--python={python}`` until project is " - f"checked out." - ) - log.warning(message) - - return result - - def __init__(self, conf: Config, python: str, requirements: dict) -> None: - """ - Parameters - ---------- - conf: Config instance - - python : str - Version of Python. Must be of the form "MAJOR.MINOR". - - requirements : dict - Dictionary mapping a PyPI package name to a version - identifier string. - - """ - from nox.sessions import _normalize_path - - # Need to checkout the project BEFORE the benchmark run - to access a noxfile. - self.project_temp_checkout = TemporaryDirectory( - prefix="nox_asv_checkout_" - ) - repo = get_repo(conf) - repo.checkout(self.project_temp_checkout.name, conf.nox_setup_commit) - self.noxfile_rel_path = conf.noxfile_rel_path - self.setup_noxfile = ( - Path(self.project_temp_checkout.name) / self.noxfile_rel_path - ) - self.nox_session_name = conf.nox_session_name - - # Some duplication of parent code - need these attributes BEFORE - # running inherited code. - self._python = python - self._requirements = requirements - self._env_dir = conf.env_dir - - # Prepare the actual environment path, to override self._path. - nox_envdir = str(Path(self._env_dir).absolute() / self.hashname) - nox_friendly_name = self._get_nox_session_name(python) - self._nox_path = Path(_normalize_path(nox_envdir, nox_friendly_name)) - - # For storing any extra conda requirements from asv.conf.json. 
- self._extra_reqs_path = self._nox_path / "asv-extra-reqs.yaml" - - super().__init__(conf, python, requirements) - - @property - def _path(self) -> str: - """ - Using a property to override getting and setting in parent classes - - unable to modify parent classes as this is a plugin. - - """ - return str(self._nox_path) - - @_path.setter - def _path(self, value) -> None: - """Enforce overriding of this variable by disabling modification.""" - pass - - @property - def name(self) -> str: - """Overridden to prevent inclusion of user input requirements.""" - return get_env_name(self.tool_name, self._python, {}) - - def _get_nox_session_name(self, python: str) -> str: - nox_cmd_substring = ( - f"--noxfile={self.setup_noxfile} " - f"--session={self.nox_session_name} " - f"--python={python}" - ) - - list_output = asv_util.check_output( - ["nox", "--list", *nox_cmd_substring.split(" ")], - display_error=False, - dots=False, - ) - list_output = list_output.split("\n") - list_matches = list(filter(lambda s: s.startswith("*"), list_output)) - matches_count = len(list_matches) - - if matches_count == 0: - message = f"No Nox sessions found for: {nox_cmd_substring} ." - log.error(message) - raise RuntimeError(message) - elif matches_count > 1: - message = ( - f"Ambiguous - >1 Nox session found for: {nox_cmd_substring} ." - ) - log.error(message) - raise RuntimeError(message) - else: - line = list_matches[0] - session_name = line.split(" ")[1] - assert isinstance(session_name, str) - return session_name - - def _nox_prep_env(self, setup: bool = False) -> None: - message = f"Running Nox environment update for: {self.name}" - log.info(message) - - build_root_path = Path(self._build_root) - env_path = Path(self._path) - - def copy_asv_files(src_parent: Path, dst_parent: Path) -> None: - """For copying between self._path and a temporary cache.""" - asv_files = list(src_parent.glob("asv*")) - # build_root_path.name usually == "project" . 
- asv_files += [src_parent / build_root_path.name] - for src_path in asv_files: - dst_path = dst_parent / src_path.name - if not dst_path.exists(): - # Only cache-ing in case Nox has rebuilt the env @ - # self._path. If the dst_path already exists: rebuilding - # hasn't happened. Also a non-issue when copying in the - # reverse direction because the cache dir is temporary. - if src_path.is_dir(): - func = copytree - else: - func = copy2 - func(src_path, dst_path) - - with TemporaryDirectory(prefix="nox_asv_cache_") as asv_cache: - asv_cache_path = Path(asv_cache) - if setup: - noxfile = self.setup_noxfile - else: - # Cache all of ASV's files as Nox may remove and re-build the environment. - copy_asv_files(env_path, asv_cache_path) - # Get location of noxfile in cache. - noxfile_original = ( - build_root_path / self._repo_subdir / self.noxfile_rel_path - ) - noxfile_subpath = noxfile_original.relative_to( - build_root_path.parent - ) - noxfile = asv_cache_path / noxfile_subpath - - nox_cmd = [ - "nox", - f"--noxfile={noxfile}", - # Place the env in the ASV env directory, instead of the default. - f"--envdir={env_path.parent}", - f"--session={self.nox_session_name}", - f"--python={self._python}", - "--install-only", - "--no-error-on-external-run", - "--verbose", - ] - - _ = asv_util.check_output(nox_cmd) - if not env_path.is_dir(): - message = f"Expected Nox environment not found: {env_path}" - log.error(message) - raise RuntimeError(message) - - if not setup: - # Restore ASV's files from the cache (if necessary). - copy_asv_files(asv_cache_path, env_path) - - def _setup(self) -> None: - """Used for initial environment creation - mimics parent method where possible.""" - try: - self.conda = _find_conda() - except IOError as e: - raise asv_util.UserError(str(e)) - if find_spec("nox") is None: - raise asv_util.UserError("Module not found: nox") - - message = f"Creating Nox-Conda environment for {self.name} ." 
- log.info(message) - - try: - self._nox_prep_env(setup=True) - finally: - # No longer need the setup checkout now that the environment has been built. - self.project_temp_checkout.cleanup() - - conda_args, pip_args = self._get_requirements(self.conda) - if conda_args or pip_args: - message = ( - "Ignoring user input package requirements. Benchmark " - "environment management is exclusively performed by Nox." - ) - log.warning(message) - - def checkout_project(self, repo: Repo, commit_hash: str) -> None: - """Check out the working tree of the project at given commit hash.""" - super().checkout_project(repo, commit_hash) - self._nox_prep_env() - log.info( - f"Environment {self.name} updated to spec at {commit_hash[:8]}" - ) diff --git a/noxfile.py b/noxfile.py index 8b23948677a..6367b74aef0 100755 --- a/noxfile.py +++ b/noxfile.py @@ -328,7 +328,14 @@ def asv_exec(*sub_args: str) -> None: # Else: compare to previous commit. previous_commit = os.environ.get("PR_BASE_SHA", "HEAD^1") try: - asv_exec("continuous", "--factor=1.2", previous_commit, "HEAD") + asv_exec( + "continuous", + "--factor=1.2", + previous_commit, + "HEAD", + "--attribute", + "rounds=4", + ) finally: asv_exec("compare", previous_commit, "HEAD") else: From e37d30460c4f5d22ce9a167e9f145f21f8ae065d Mon Sep 17 00:00:00 2001 From: Bill Little Date: Thu, 10 Feb 2022 12:18:39 +0000 Subject: [PATCH 47/69] adopt dependabot GHA (#4568) --- .github/dependabot.yml | 15 +++++++++++++++ 1 file changed, 15 insertions(+) create mode 100644 .github/dependabot.yml diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 00000000000..e9b45d116af --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,15 @@ +# Reference: +# - https://docs.github.com/en/code-security/supply-chain-security/keeping-your-dependencies-updated-automatically/keeping-your-actions-up-to-date-with-dependabot +# - 
https://docs.github.com/en/code-security/supply-chain-security/keeping-your-dependencies-updated-automatically/configuration-options-for-dependency-updates + +version: 2 +updates: + + - package-ecosystem: "github-actions" + directory: "/" + schedule: + # Check for updates to GitHub Actions every weekday + interval: "daily" + labels: + - "New: Pull Request" + - "Bot" From 6182d14e448aba8e2e82956a0d4747b0ca8296d1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 10 Feb 2022 12:56:08 +0000 Subject: [PATCH 48/69] Bump peter-evans/create-pull-request from 3.8.2 to 3.12.1 (#4577) Bumps [peter-evans/create-pull-request](https://github.com/peter-evans/create-pull-request) from 3.8.2 to 3.12.1. - [Release notes](https://github.com/peter-evans/create-pull-request/releases) - [Commits](https://github.com/peter-evans/create-pull-request/compare/052fc72b4198ba9fbc81b818c6e1859f747d49a8...f22a7da129c901513876a2380e2dae9f8e145330) --- updated-dependencies: - dependency-name: peter-evans/create-pull-request dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/refresh-lockfiles.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) mode change 100755 => 100644 .github/workflows/refresh-lockfiles.yml diff --git a/.github/workflows/refresh-lockfiles.yml b/.github/workflows/refresh-lockfiles.yml old mode 100755 new mode 100644 index 3106d94a67d..082a06fb5fb --- a/.github/workflows/refresh-lockfiles.yml +++ b/.github/workflows/refresh-lockfiles.yml @@ -108,7 +108,7 @@ jobs: rm -r artifacts - name: Create Pull Request - uses: peter-evans/create-pull-request@052fc72b4198ba9fbc81b818c6e1859f747d49a8 + uses: peter-evans/create-pull-request@f22a7da129c901513876a2380e2dae9f8e145330 with: commit-message: Updated environment lockfiles committer: "Lockfile bot " From 163b49abfe34563f80c92834a4916f10bb99548e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 10 Feb 2022 12:57:03 +0000 Subject: [PATCH 49/69] Bump actions/stale from 4.0.0 to 4.1.0 (#4575) Bumps [actions/stale](https://github.com/actions/stale) from 4.0.0 to 4.1.0. - [Release notes](https://github.com/actions/stale/releases) - [Changelog](https://github.com/actions/stale/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/stale/compare/v4.0.0...v4.1.0) --- updated-dependencies: - dependency-name: actions/stale dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/stale.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index a38a03637e8..f9bb09ce46a 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -10,7 +10,7 @@ jobs: if: "github.repository == 'SciTools/iris'" runs-on: ubuntu-latest steps: - - uses: actions/stale@v4.0.0 + - uses: actions/stale@v4.1.0 with: repo-token: ${{ secrets.GITHUB_TOKEN }} From 7c1529cb4112a1f5ec1ce12603f67079487aa6dc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 10 Feb 2022 13:19:31 +0000 Subject: [PATCH 50/69] Bump actions/script from 4 to 5.1.0 (#4576) --- .github/workflows/refresh-lockfiles.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/refresh-lockfiles.yml b/.github/workflows/refresh-lockfiles.yml index 082a06fb5fb..a48b2a26299 100644 --- a/.github/workflows/refresh-lockfiles.yml +++ b/.github/workflows/refresh-lockfiles.yml @@ -35,7 +35,7 @@ jobs: # the lockfile bot has made the head commit, abort the workflow. # This job can be manually overridden by running directly from the github actions panel # (known as a "workflow_dispatch") and setting the `clobber` input to "yes". 
- - uses: actions/script@v4 + - uses: actions/script@v5.1.0 with: github-token: ${{ secrets.GITHUB_TOKEN }} script: | From e3fbd843d0c64a027d5ff460a5564fdd3a87439d Mon Sep 17 00:00:00 2001 From: Bill Little Date: Thu, 10 Feb 2022 13:47:47 +0000 Subject: [PATCH 51/69] gha: lockfiles labels and auto-pr details (#4578) --- .github/workflows/refresh-lockfiles.yml | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/.github/workflows/refresh-lockfiles.yml b/.github/workflows/refresh-lockfiles.yml index a48b2a26299..f7fa10069f6 100644 --- a/.github/workflows/refresh-lockfiles.yml +++ b/.github/workflows/refresh-lockfiles.yml @@ -108,6 +108,7 @@ jobs: rm -r artifacts - name: Create Pull Request + id: cpr uses: peter-evans/create-pull-request@f22a7da129c901513876a2380e2dae9f8e145330 with: commit-message: Updated environment lockfiles @@ -115,6 +116,17 @@ jobs: author: "Lockfile bot " delete-branch: true branch: auto-update-lockfiles - title: Update CI environment lockfiles + title: [iris.ci] environment lockfiles auto-update body: | Lockfiles updated to the latest resolvable environment. 
+ labels: | + New: Pull Request + Bot + + - name: Check Pull Request + if: steps.cpr.outputs.pull-request-number != '' + run: | + echo "pull-request #${{ steps.cpr.outputs.pull-request-number }}" + echo "pull-request URL ${{ steps.cpr.outputs.pull-request-url }}" + echo "pull-request operation [${{ steps.cpr.outputs.pull-request-operation }}]" + echo "pull-request head SHA ${{ steps.cpr.outputs.pull-request-head-sha }}" From d1d1e005de0854ddecad457e31b285b37cb2245b Mon Sep 17 00:00:00 2001 From: lbdreyer Date: Fri, 11 Feb 2022 09:37:12 +0000 Subject: [PATCH 52/69] Fix refresh lockfile worrkflow pull request title (#4579) --- .github/workflows/refresh-lockfiles.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/refresh-lockfiles.yml b/.github/workflows/refresh-lockfiles.yml index f7fa10069f6..b40c3ca446a 100644 --- a/.github/workflows/refresh-lockfiles.yml +++ b/.github/workflows/refresh-lockfiles.yml @@ -116,7 +116,7 @@ jobs: author: "Lockfile bot " delete-branch: true branch: auto-update-lockfiles - title: [iris.ci] environment lockfiles auto-update + title: "[iris.ci] environment lockfiles auto-update" body: | Lockfiles updated to the latest resolvable environment. labels: | From 611416730970d7603b6e1e4e48dc20c7fb55e2e6 Mon Sep 17 00:00:00 2001 From: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> Date: Thu, 10 Feb 2022 11:44:56 +0000 Subject: [PATCH 53/69] New tool-agnostic ASV environment management (#4571) * New tool-agnostic ASV env management. * Benchmarking only build the latest Python. * Increase benchmark accuracy by increasing rounds. * Fix ASV rounds mistake. * ASV clearer use of _interpolate_commands. 
--- benchmarks/asv.conf.json | 23 ++- benchmarks/asv_delegated_conda.py | 208 +++++++++++++++++++++++++ benchmarks/nox_asv_plugin.py | 249 ------------------------------ noxfile.py | 9 +- 4 files changed, 231 insertions(+), 258 deletions(-) create mode 100644 benchmarks/asv_delegated_conda.py delete mode 100644 benchmarks/nox_asv_plugin.py diff --git a/benchmarks/asv.conf.json b/benchmarks/asv.conf.json index 9ea1cdb101d..3468b2fca99 100644 --- a/benchmarks/asv.conf.json +++ b/benchmarks/asv.conf.json @@ -3,18 +3,25 @@ "project": "scitools-iris", "project_url": "https://github.com/SciTools/iris", "repo": "..", - "environment_type": "nox-conda", + "environment_type": "conda-delegated", "show_commit_url": "http://github.com/scitools/iris/commit/", "benchmark_dir": "./benchmarks", "env_dir": ".asv/env", "results_dir": ".asv/results", "html_dir": ".asv/html", - "plugins": [".nox_asv_plugin"], - // The commit to checkout to first run Nox to set up the environment. - "nox_setup_commit": "HEAD", - // The path of the noxfile's location relative to the project root. - "noxfile_rel_path": "noxfile.py", - // The ``--session`` arg to be used with ``--install-only`` to prep an environment. - "nox_session_name": "tests" + "plugins": [".asv_delegated_conda"], + + // The command(s) that create/update an environment correctly for the + // checked-out commit. + // Interpreted the same as build_command, with following exceptions: + // * No build-time environment variables. + // * Is run in the same environment as the ASV install itself. + "delegated_env_commands": [ + "sed -i 's/_PY_VERSIONS_ALL/_PY_VERSION_LATEST/g' noxfile.py", + "nox --envdir={conf_dir}/.asv/env/nox01 --session=tests --install-only --no-error-on-external-run --verbose" + ], + // The parent directory of the above environment. + // The most recently modified environment in the directory will be used. 
+ "delegated_env_parent": "{conf_dir}/.asv/env/nox01" } diff --git a/benchmarks/asv_delegated_conda.py b/benchmarks/asv_delegated_conda.py new file mode 100644 index 00000000000..250a4e032d8 --- /dev/null +++ b/benchmarks/asv_delegated_conda.py @@ -0,0 +1,208 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +ASV plug-in providing an alternative :class:`asv.plugins.conda.Conda` +subclass that manages the Conda environment via custom user scripts. + +""" + +from os import environ +from os.path import getmtime +from pathlib import Path +from shutil import copy2, copytree, rmtree +from tempfile import TemporaryDirectory + +from asv import util as asv_util +from asv.config import Config +from asv.console import log +from asv.plugins.conda import Conda +from asv.repo import Repo + + +class CondaDelegated(Conda): + """ + Manage a Conda environment using custom user scripts, run at each commit. + + Ignores user input variations - ``matrix`` / ``pythons`` / + ``conda_environment_file``, since environment is being managed outside ASV. + + Original environment creation behaviour is inherited, but upon checking out + a commit the custom script(s) are run and the original environment is + replaced with a symlink to the custom environment. This arrangement is then + re-used in subsequent runs. + + """ + + tool_name = "conda-delegated" + + def __init__( + self, + conf: Config, + python: str, + requirements: dict, + tagged_env_vars: dict, + ) -> None: + """ + Parameters + ---------- + conf : Config instance + + python : str + Version of Python. Must be of the form "MAJOR.MINOR". + + requirements : dict + Dictionary mapping a PyPI package name to a version + identifier string. + + tagged_env_vars : dict + Environment variables, tagged for build vs. 
non-build + + """ + ignored = ["`python`"] + if requirements: + ignored.append("`requirements`") + if tagged_env_vars: + ignored.append("`tagged_env_vars`") + if conf.conda_environment_file: + ignored.append("`conda_environment_file`") + message = ( + f"Ignoring ASV setting(s): {', '.join(ignored)}. Benchmark " + "environment management is delegated to third party script(s)." + ) + log.warning(message) + requirements = {} + tagged_env_vars = {} + conf.conda_environment_file = None + + super().__init__(conf, python, requirements, tagged_env_vars) + self._update_info() + + self._env_commands = self._interpolate_commands( + conf.delegated_env_commands + ) + # Again using _interpolate_commands to get env parent path - allows use + # of the same ASV env variables. + env_parent_interpolated = self._interpolate_commands( + conf.delegated_env_parent + ) + # Returns list of tuples, we just want the first. + env_parent_first = env_parent_interpolated[0] + # The 'command' is the first item in the returned tuple. + env_parent_string = " ".join(env_parent_first[0]) + self._delegated_env_parent = Path(env_parent_string).resolve() + + @property + def name(self): + """Get a name to uniquely identify this environment.""" + return asv_util.sanitize_filename(self.tool_name) + + def _update_info(self) -> None: + """Make sure class properties reflect the actual environment being used.""" + # Follow symlink if it has been created. + actual_path = Path(self._path).resolve() + self._path = str(actual_path) + + # Get custom environment's Python version if it exists yet. 
+ try: + get_version = ( + "from sys import version_info; " + "print(f'{version_info.major}.{version_info.minor}')" + ) + actual_python = self.run(["-c", get_version]) + self._python = actual_python + except OSError: + pass + + def _prep_env(self) -> None: + """Run the custom environment script(s) and switch to using that environment.""" + message = f"Running delegated environment management for: {self.name}" + log.info(message) + env_path = Path(self._path) + + def copy_asv_files(src_parent: Path, dst_parent: Path) -> None: + """For copying between self._path and a temporary cache.""" + asv_files = list(src_parent.glob("asv*")) + # build_root_path.name usually == "project" . + asv_files += [src_parent / Path(self._build_root).name] + for src_path in asv_files: + dst_path = dst_parent / src_path.name + if not dst_path.exists(): + # Only caching in case the environment has been rebuilt. + # If the dst_path already exists: rebuilding hasn't + # happened. Also a non-issue when copying in the reverse + # direction because the cache dir is temporary. + if src_path.is_dir(): + func = copytree + else: + func = copy2 + func(src_path, dst_path) + + with TemporaryDirectory(prefix="delegated_asv_cache_") as asv_cache: + asv_cache_path = Path(asv_cache) + # Cache all of ASV's files as delegated command may remove and + # re-build the environment. + copy_asv_files(env_path.resolve(), asv_cache_path) + + # Adapt the build_dir to the cache location. + build_root_path = Path(self._build_root) + build_dir_original = build_root_path / self._repo_subdir + build_dir_subpath = build_dir_original.relative_to( + build_root_path.parent + ) + build_dir = asv_cache_path / build_dir_subpath + + # Run the script(s) for delegated environment creation/updating. + # (An adaptation of self._interpolate_and_run_commands). 
+ for command, env, return_codes, cwd in self._env_commands: + local_envs = dict(environ) + local_envs.update(env) + if cwd is None: + cwd = str(build_dir) + _ = asv_util.check_output( + command, + timeout=self._install_timeout, + cwd=cwd, + env=local_envs, + valid_return_codes=return_codes, + ) + + # Replace the env that ASV created with a symlink to the env + # created/updated by the custom script. + delegated_env_path = sorted( + self._delegated_env_parent.glob("*"), + key=getmtime, + reverse=True, + )[0] + if env_path.resolve() != delegated_env_path: + try: + env_path.unlink(missing_ok=True) + except IsADirectoryError: + rmtree(env_path) + env_path.symlink_to( + delegated_env_path, target_is_directory=True + ) + + # Check that environment exists. + try: + env_path.resolve(strict=True) + except FileNotFoundError: + message = f"Path does not resolve to environment: {env_path}" + log.error(message) + raise RuntimeError(message) + + # Restore ASV's files from the cache (if necessary). + copy_asv_files(asv_cache_path, env_path.resolve()) + + # Record new environment information in properties. + self._update_info() + + def checkout_project(self, repo: Repo, commit_hash: str) -> None: + """Check out the working tree of the project at given commit hash.""" + super().checkout_project(repo, commit_hash) + self._prep_env() + log.info( + f"Environment {self.name} updated to spec at {commit_hash[:8]}" + ) diff --git a/benchmarks/nox_asv_plugin.py b/benchmarks/nox_asv_plugin.py deleted file mode 100644 index 6c9ce142721..00000000000 --- a/benchmarks/nox_asv_plugin.py +++ /dev/null @@ -1,249 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -ASV plug-in providing an alternative ``Environment`` subclass, which uses Nox -for environment management. 
- -""" -from importlib.util import find_spec -from pathlib import Path -from shutil import copy2, copytree -from tempfile import TemporaryDirectory - -from asv import util as asv_util -from asv.config import Config -from asv.console import log -from asv.environment import get_env_name -from asv.plugins.conda import Conda, _find_conda -from asv.repo import Repo, get_repo - - -class NoxConda(Conda): - """ - Manage a Conda environment using Nox, updating environment at each commit. - - Defers environment management to the project's noxfile, which must be able - to create/update the benchmarking environment using ``nox --install-only``, - with the ``--session`` specified in ``asv.conf.json.nox_session_name``. - - Notes - ----- - If not all benchmarked commits support this use of Nox: the plugin will - need to be modified to prep the environment in other ways. - - """ - - tool_name = "nox-conda" - - @classmethod - def matches(cls, python: str) -> bool: - """Used by ASV to work out if this type of environment can be used.""" - result = find_spec("nox") is not None - if result: - result = super().matches(python) - - if result: - message = ( - f"NOTE: ASV env match check incomplete. Not possible to know " - f"if selected Nox session (asv.conf.json.nox_session_name) is " - f"compatible with ``--python={python}`` until project is " - f"checked out." - ) - log.warning(message) - - return result - - def __init__(self, conf: Config, python: str, requirements: dict) -> None: - """ - Parameters - ---------- - conf: Config instance - - python : str - Version of Python. Must be of the form "MAJOR.MINOR". - - requirements : dict - Dictionary mapping a PyPI package name to a version - identifier string. - - """ - from nox.sessions import _normalize_path - - # Need to checkout the project BEFORE the benchmark run - to access a noxfile. 
- self.project_temp_checkout = TemporaryDirectory( - prefix="nox_asv_checkout_" - ) - repo = get_repo(conf) - repo.checkout(self.project_temp_checkout.name, conf.nox_setup_commit) - self.noxfile_rel_path = conf.noxfile_rel_path - self.setup_noxfile = ( - Path(self.project_temp_checkout.name) / self.noxfile_rel_path - ) - self.nox_session_name = conf.nox_session_name - - # Some duplication of parent code - need these attributes BEFORE - # running inherited code. - self._python = python - self._requirements = requirements - self._env_dir = conf.env_dir - - # Prepare the actual environment path, to override self._path. - nox_envdir = str(Path(self._env_dir).absolute() / self.hashname) - nox_friendly_name = self._get_nox_session_name(python) - self._nox_path = Path(_normalize_path(nox_envdir, nox_friendly_name)) - - # For storing any extra conda requirements from asv.conf.json. - self._extra_reqs_path = self._nox_path / "asv-extra-reqs.yaml" - - super().__init__(conf, python, requirements) - - @property - def _path(self) -> str: - """ - Using a property to override getting and setting in parent classes - - unable to modify parent classes as this is a plugin. 
- - """ - return str(self._nox_path) - - @_path.setter - def _path(self, value) -> None: - """Enforce overriding of this variable by disabling modification.""" - pass - - @property - def name(self) -> str: - """Overridden to prevent inclusion of user input requirements.""" - return get_env_name(self.tool_name, self._python, {}) - - def _get_nox_session_name(self, python: str) -> str: - nox_cmd_substring = ( - f"--noxfile={self.setup_noxfile} " - f"--session={self.nox_session_name} " - f"--python={python}" - ) - - list_output = asv_util.check_output( - ["nox", "--list", *nox_cmd_substring.split(" ")], - display_error=False, - dots=False, - ) - list_output = list_output.split("\n") - list_matches = list(filter(lambda s: s.startswith("*"), list_output)) - matches_count = len(list_matches) - - if matches_count == 0: - message = f"No Nox sessions found for: {nox_cmd_substring} ." - log.error(message) - raise RuntimeError(message) - elif matches_count > 1: - message = ( - f"Ambiguous - >1 Nox session found for: {nox_cmd_substring} ." - ) - log.error(message) - raise RuntimeError(message) - else: - line = list_matches[0] - session_name = line.split(" ")[1] - assert isinstance(session_name, str) - return session_name - - def _nox_prep_env(self, setup: bool = False) -> None: - message = f"Running Nox environment update for: {self.name}" - log.info(message) - - build_root_path = Path(self._build_root) - env_path = Path(self._path) - - def copy_asv_files(src_parent: Path, dst_parent: Path) -> None: - """For copying between self._path and a temporary cache.""" - asv_files = list(src_parent.glob("asv*")) - # build_root_path.name usually == "project" . - asv_files += [src_parent / build_root_path.name] - for src_path in asv_files: - dst_path = dst_parent / src_path.name - if not dst_path.exists(): - # Only cache-ing in case Nox has rebuilt the env @ - # self._path. If the dst_path already exists: rebuilding - # hasn't happened. 
Also a non-issue when copying in the - # reverse direction because the cache dir is temporary. - if src_path.is_dir(): - func = copytree - else: - func = copy2 - func(src_path, dst_path) - - with TemporaryDirectory(prefix="nox_asv_cache_") as asv_cache: - asv_cache_path = Path(asv_cache) - if setup: - noxfile = self.setup_noxfile - else: - # Cache all of ASV's files as Nox may remove and re-build the environment. - copy_asv_files(env_path, asv_cache_path) - # Get location of noxfile in cache. - noxfile_original = ( - build_root_path / self._repo_subdir / self.noxfile_rel_path - ) - noxfile_subpath = noxfile_original.relative_to( - build_root_path.parent - ) - noxfile = asv_cache_path / noxfile_subpath - - nox_cmd = [ - "nox", - f"--noxfile={noxfile}", - # Place the env in the ASV env directory, instead of the default. - f"--envdir={env_path.parent}", - f"--session={self.nox_session_name}", - f"--python={self._python}", - "--install-only", - "--no-error-on-external-run", - "--verbose", - ] - - _ = asv_util.check_output(nox_cmd) - if not env_path.is_dir(): - message = f"Expected Nox environment not found: {env_path}" - log.error(message) - raise RuntimeError(message) - - if not setup: - # Restore ASV's files from the cache (if necessary). - copy_asv_files(asv_cache_path, env_path) - - def _setup(self) -> None: - """Used for initial environment creation - mimics parent method where possible.""" - try: - self.conda = _find_conda() - except IOError as e: - raise asv_util.UserError(str(e)) - if find_spec("nox") is None: - raise asv_util.UserError("Module not found: nox") - - message = f"Creating Nox-Conda environment for {self.name} ." - log.info(message) - - try: - self._nox_prep_env(setup=True) - finally: - # No longer need the setup checkout now that the environment has been built. - self.project_temp_checkout.cleanup() - - conda_args, pip_args = self._get_requirements(self.conda) - if conda_args or pip_args: - message = ( - "Ignoring user input package requirements. 
Benchmark " - "environment management is exclusively performed by Nox." - ) - log.warning(message) - - def checkout_project(self, repo: Repo, commit_hash: str) -> None: - """Check out the working tree of the project at given commit hash.""" - super().checkout_project(repo, commit_hash) - self._nox_prep_env() - log.info( - f"Environment {self.name} updated to spec at {commit_hash[:8]}" - ) diff --git a/noxfile.py b/noxfile.py index 8b23948677a..6367b74aef0 100755 --- a/noxfile.py +++ b/noxfile.py @@ -328,7 +328,14 @@ def asv_exec(*sub_args: str) -> None: # Else: compare to previous commit. previous_commit = os.environ.get("PR_BASE_SHA", "HEAD^1") try: - asv_exec("continuous", "--factor=1.2", previous_commit, "HEAD") + asv_exec( + "continuous", + "--factor=1.2", + previous_commit, + "HEAD", + "--attribute", + "rounds=4", + ) finally: asv_exec("compare", previous_commit, "HEAD") else: From ee9cadc989931dbd93d59495c92ddec88dbf4e68 Mon Sep 17 00:00:00 2001 From: lbdreyer Date: Fri, 11 Feb 2022 11:06:20 +0000 Subject: [PATCH 54/69] Fix load_http bug, extend testing, and note to docs (#4580) * Fix opendap bug, add docs and extra testing * Add whats new entry * Update docs/src/whatsnew/3.2.rst Co-authored-by: Bill Little * Add warning box Co-authored-by: Bill Little --- docs/src/whatsnew/3.2.rst | 3 +++ lib/iris/__init__.py | 8 ++++++++ lib/iris/fileformats/netcdf.py | 4 ++-- lib/iris/io/__init__.py | 8 ++++---- lib/iris/tests/test_load.py | 35 +++++++++++++++++++++++++++------- 5 files changed, 45 insertions(+), 13 deletions(-) diff --git a/docs/src/whatsnew/3.2.rst b/docs/src/whatsnew/3.2.rst index c78e1283d6c..9aa6a788466 100644 --- a/docs/src/whatsnew/3.2.rst +++ b/docs/src/whatsnew/3.2.rst @@ -177,6 +177,9 @@ This document explains the changes made to Iris for this release to indicate cloud fraction greater than 7.9 oktas, rather than 7.5 (:issue:`3305`, :pull:`4535`) +#. 
`@lbdreyer`_ fixed a bug in :class:`iris.io.load_http` which was missing an import + (:pull:`4580`) + 💣 Incompatible Changes ======================= diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py index aca4e77e888..95722c69cf7 100644 --- a/lib/iris/__init__.py +++ b/lib/iris/__init__.py @@ -44,6 +44,10 @@ standard library function :func:`os.path.expanduser` and module :mod:`fnmatch` for more details. + .. warning:: + + If supplying a URL, only OPeNDAP Data Sources are supported. + * constraints: Either a single constraint, or an iterable of constraints. Each constraint can be either a string, an instance of @@ -287,6 +291,7 @@ def load(uris, constraints=None, callback=None): * uris: One or more filenames/URIs, as a string or :class:`pathlib.PurePath`. + If supplying a URL, only OPeNDAP Data Sources are supported. Kwargs: @@ -315,6 +320,7 @@ def load_cube(uris, constraint=None, callback=None): * uris: One or more filenames/URIs, as a string or :class:`pathlib.PurePath`. + If supplying a URL, only OPeNDAP Data Sources are supported. Kwargs: @@ -354,6 +360,7 @@ def load_cubes(uris, constraints=None, callback=None): * uris: One or more filenames/URIs, as a string or :class:`pathlib.PurePath`. + If supplying a URL, only OPeNDAP Data Sources are supported. Kwargs: @@ -399,6 +406,7 @@ def load_raw(uris, constraints=None, callback=None): * uris: One or more filenames/URIs, as a string or :class:`pathlib.PurePath`. + If supplying a URL, only OPeNDAP Data Sources are supported. Kwargs: diff --git a/lib/iris/fileformats/netcdf.py b/lib/iris/fileformats/netcdf.py index 100ab29daaa..dd819fb63ff 100644 --- a/lib/iris/fileformats/netcdf.py +++ b/lib/iris/fileformats/netcdf.py @@ -825,12 +825,12 @@ def inner(cf_datavar): def load_cubes(filenames, callback=None, constraints=None): """ - Loads cubes from a list of NetCDF filenames/URLs. + Loads cubes from a list of NetCDF filenames/OPeNDAP URLs. 
Args: * filenames (string/list): - One or more NetCDF filenames/DAP URLs to load from. + One or more NetCDF filenames/OPeNDAP URLs to load from. Kwargs: diff --git a/lib/iris/io/__init__.py b/lib/iris/io/__init__.py index 034fa4baabe..8d5a2e05d20 100644 --- a/lib/iris/io/__init__.py +++ b/lib/iris/io/__init__.py @@ -216,7 +216,7 @@ def load_files(filenames, callback, constraints=None): def load_http(urls, callback): """ - Takes a list of urls and a callback function, and returns a generator + Takes a list of OPeNDAP URLs and a callback function, and returns a generator of Cubes from the given URLs. .. note:: @@ -226,11 +226,11 @@ def load_http(urls, callback): """ # Create default dict mapping iris format handler to its associated filenames + from iris.fileformats import FORMAT_AGENT + handler_map = collections.defaultdict(list) for url in urls: - handling_format_spec = iris.fileformats.FORMAT_AGENT.get_spec( - url, None - ) + handling_format_spec = FORMAT_AGENT.get_spec(url, None) handler_map[handling_format_spec].append(url) # Call each iris format handler with the appropriate filenames diff --git a/lib/iris/tests/test_load.py b/lib/iris/tests/test_load.py index 86ff2f1eceb..d21b40ee262 100644 --- a/lib/iris/tests/test_load.py +++ b/lib/iris/tests/test_load.py @@ -12,6 +12,9 @@ import iris.tests as tests # isort:skip import pathlib +from unittest import mock + +import netCDF4 import iris import iris.io @@ -148,19 +151,20 @@ def test_path_object(self): self.assertEqual(len(cubes), 1) -class TestOpenDAP(tests.IrisTest): - def test_load(self): - # Check that calling iris.load_* with a http URI triggers a call to - # ``iris.io.load_http`` +class TestOPeNDAP(tests.IrisTest): + def setUp(self): + self.url = "http://geoport.whoi.edu:80/thredds/dodsC/bathy/gom15" - url = "http://geoport.whoi.edu:80/thredds/dodsC/bathy/gom15" + def test_load_http_called(self): + # Check that calling iris.load_* with an http URI triggers a call to + # ``iris.io.load_http`` class 
LoadHTTPCalled(Exception): pass def new_load_http(passed_urls, *args, **kwargs): self.assertEqual(len(passed_urls), 1) - self.assertEqual(url, passed_urls[0]) + self.assertEqual(self.url, passed_urls[0]) raise LoadHTTPCalled() try: @@ -174,11 +178,28 @@ def new_load_http(passed_urls, *args, **kwargs): iris.load_cubes, ]: with self.assertRaises(LoadHTTPCalled): - fn(url) + fn(self.url) finally: iris.io.load_http = orig + def test_netCDF_Dataset_call(self): + # Check that load_http calls netCDF4.Dataset and supplies the expected URL. + + # To avoid making a request to an OPeNDAP server in a test, instead + # mock the call to netCDF.Dataset so that it returns a dataset for a + # local file. + filename = tests.get_data_path( + ("NetCDF", "global", "xyt", "SMALL_total_column_co2.nc") + ) + fake_dataset = netCDF4.Dataset(filename) + + with mock.patch( + "netCDF4.Dataset", return_value=fake_dataset + ) as dataset_loader: + next(iris.io.load_http([self.url], callback=None)) + dataset_loader.assert_called_with(self.url, mode="r") + if __name__ == "__main__": tests.main() From 15bd351f7d68fa0470376d52e52a621ef3566457 Mon Sep 17 00:00:00 2001 From: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> Date: Mon, 14 Feb 2022 15:11:19 +0000 Subject: [PATCH 55/69] Loading Benchmarks (#4477) * Synthetic FF PP NetCDF and loading benchmarks. 
--- .github/workflows/benchmark.yml | 14 +- benchmarks/benchmarks/__init__.py | 41 ---- .../benchmarks/generate_data/__init__.py | 94 ++++++++ .../benchmarks/generate_data/um_files.py | 215 ++++++++++++++++++ benchmarks/benchmarks/loading.py | 185 +++++++++++++++ noxfile.py | 45 +++- 6 files changed, 543 insertions(+), 51 deletions(-) create mode 100644 benchmarks/benchmarks/generate_data/__init__.py create mode 100644 benchmarks/benchmarks/generate_data/um_files.py create mode 100644 benchmarks/benchmarks/loading.py diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml index b489eba0360..a8247a247bf 100644 --- a/.github/workflows/benchmark.yml +++ b/.github/workflows/benchmark.yml @@ -16,7 +16,9 @@ jobs: IRIS_TEST_DATA_PATH: benchmarks/iris-test-data IRIS_TEST_DATA_VERSION: "2.5" # Lets us manually bump the cache to rebuild + ENV_CACHE_BUILD: "0" TEST_DATA_CACHE_BUILD: "2" + PY_VER: 3.8 steps: # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it @@ -32,19 +34,15 @@ jobs: run: | pip install nox - - name: Cache .nox and .asv/env directories + - name: Cache environment directories id: cache-env-dir uses: actions/cache@v2 with: path: | .nox benchmarks/.asv/env - # Make sure GHA never gets an exact cache match by using the unique - # github.sha. This means it will always store this run as a new - # cache (Nox may have made relevant changes during run). Cache - # restoration still succeeds via the partial restore-key match. 
- key: ${{ runner.os }}-${{ github.sha }} - restore-keys: ${{ runner.os }} + $CONDA/pkgs + key: ${{ runner.os }}-${{ hashFiles('requirements/') }}-${{ env.ENV_CACHE_BUILD }} - name: Cache test data directory id: cache-test-data @@ -62,7 +60,7 @@ jobs: unzip -q iris-test-data.zip mkdir --parents ${GITHUB_WORKSPACE}/${IRIS_TEST_DATA_LOC_PATH} mv iris-test-data-${IRIS_TEST_DATA_VERSION} ${GITHUB_WORKSPACE}/${IRIS_TEST_DATA_PATH} - + - name: Set test data var run: | echo "OVERRIDE_TEST_DATA_REPOSITORY=${GITHUB_WORKSPACE}/${IRIS_TEST_DATA_PATH}/test_data" >> $GITHUB_ENV diff --git a/benchmarks/benchmarks/__init__.py b/benchmarks/benchmarks/__init__.py index 2e741c3da03..4a964a648d9 100644 --- a/benchmarks/benchmarks/__init__.py +++ b/benchmarks/benchmarks/__init__.py @@ -5,45 +5,4 @@ # licensing details. """Common code for benchmarks.""" -import os -from pathlib import Path - -# Environment variable names -_ASVDIR_VARNAME = "ASV_DIR" # As set in nightly script "asv_nightly/asv.sh" -_DATADIR_VARNAME = "BENCHMARK_DATA" # For local runs - ARTIFICIAL_DIM_SIZE = int(10e3) # For all artificial cubes, coords etc. - -# Work out where the benchmark data dir is. -asv_dir = os.environ.get("ASV_DIR", None) -if asv_dir: - # For an overnight run, this comes from the 'ASV_DIR' setting. - benchmark_data_dir = Path(asv_dir) / "data" -else: - # For a local run, you set 'BENCHMARK_DATA'. - benchmark_data_dir = os.environ.get(_DATADIR_VARNAME, None) - if benchmark_data_dir is not None: - benchmark_data_dir = Path(benchmark_data_dir) - - -def testdata_path(*path_names): - """ - Return the path of a benchmark test data file. - - These are based from a test-data location dir, which is either - ${}/data (for overnight tests), or ${} for local testing. - - If neither of these were set, an error is raised. - - """.format( - _ASVDIR_VARNAME, _DATADIR_VARNAME - ) - if benchmark_data_dir is None: - msg = ( - "Benchmark data dir is not defined : " - 'Either "${}" or "${}" must be set.' 
- ) - raise (ValueError(msg.format(_ASVDIR_VARNAME, _DATADIR_VARNAME))) - path = benchmark_data_dir.joinpath(*path_names) - path = str(path) # Because Iris doesn't understand Path objects yet. - return path diff --git a/benchmarks/benchmarks/generate_data/__init__.py b/benchmarks/benchmarks/generate_data/__init__.py new file mode 100644 index 00000000000..a56f2e46230 --- /dev/null +++ b/benchmarks/benchmarks/generate_data/__init__.py @@ -0,0 +1,94 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Scripts for generating supporting data for benchmarking. + +Data generated using Iris should use :func:`run_function_elsewhere`, which +means that data is generated using a fixed version of Iris and a fixed +environment, rather than those that get changed when the benchmarking run +checks out a new commit. + +Downstream use of data generated 'elsewhere' requires saving; usually in a +NetCDF file. Could also use pickling but there is a potential risk if the +benchmark sequence runs over two different Python versions. + +""" +from inspect import getsource +from os import environ +from pathlib import Path +from subprocess import CalledProcessError, check_output, run +from textwrap import dedent + +#: Python executable used by :func:`run_function_elsewhere`, set via env +#: variable of same name. Must be path of Python within an environment that +#: includes Iris (including dependencies and test modules) and Mule. +try: + DATA_GEN_PYTHON = environ["DATA_GEN_PYTHON"] + _ = check_output([DATA_GEN_PYTHON, "-c", "a = True"]) +except KeyError: + error = "Env variable DATA_GEN_PYTHON not defined." + raise KeyError(error) +except (CalledProcessError, FileNotFoundError, PermissionError): + error = ( + "Env variable DATA_GEN_PYTHON not a runnable python executable path." 
+ ) + raise ValueError(error) + +# The default location of data files used in benchmarks. Used by CI. +default_data_dir = (Path(__file__).parents[2] / ".data").resolve() +# Optionally override the default data location with environment variable. +BENCHMARK_DATA = Path(environ.get("BENCHMARK_DATA", default_data_dir)) +if BENCHMARK_DATA == default_data_dir: + BENCHMARK_DATA.mkdir(exist_ok=True) +elif not BENCHMARK_DATA.is_dir(): + message = f"Not a directory: {BENCHMARK_DATA} ." + raise ValueError(message) + +# Manual flag to allow the rebuilding of synthetic data. +# False forces a benchmark run to re-make all the data files. +REUSE_DATA = True + + +def run_function_elsewhere(func_to_run, *args, **kwargs): + """ + Run a given function using the :const:`DATA_GEN_PYTHON` executable. + + This structure allows the function to be written natively. + + Parameters + ---------- + func_to_run : FunctionType + The function object to be run. + NOTE: the function must be completely self-contained, i.e. perform all + its own imports (within the target :const:`DATA_GEN_PYTHON` + environment). + *args : tuple, optional + Function call arguments. Must all be expressible as simple literals, + i.e. the ``repr`` must be a valid literal expression. + **kwargs: dict, optional + Function call keyword arguments. All values must be expressible as + simple literals (see ``*args``). + + Returns + ------- + str + The ``stdout`` from the run. 
+ + """ + func_string = dedent(getsource(func_to_run)) + func_string = func_string.replace("@staticmethod\n", "") + func_call_term_strings = [repr(arg) for arg in args] + func_call_term_strings += [ + f"{name}={repr(val)}" for name, val in kwargs.items() + ] + func_call_string = ( + f"{func_to_run.__name__}(" + ",".join(func_call_term_strings) + ")" + ) + python_string = "\n".join([func_string, func_call_string]) + result = run( + [DATA_GEN_PYTHON, "-c", python_string], capture_output=True, check=True + ) + return result.stdout diff --git a/benchmarks/benchmarks/generate_data/um_files.py b/benchmarks/benchmarks/generate_data/um_files.py new file mode 100644 index 00000000000..8792fcc48b7 --- /dev/null +++ b/benchmarks/benchmarks/generate_data/um_files.py @@ -0,0 +1,215 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Generate FF, PP and NetCDF files based on a minimal synthetic FF file. + +NOTE: uses the Mule package, so depends on an environment with Mule installed. +""" + + +def _create_um_files( + len_x: int, len_y: int, len_z: int, len_t: int, compress, save_paths: dict +) -> None: + """ + Generate an FF object of given shape and compression, save to FF/PP/NetCDF. + + This is run externally + (:func:`benchmarks.generate_data.run_function_elsewhere`), so all imports + are self-contained and input parameters are simple types. 
+ """ + from copy import deepcopy + from datetime import datetime + from tempfile import NamedTemporaryFile + + from mo_pack import compress_wgdos as mo_pack_compress + from mule import ArrayDataProvider, Field3, FieldsFile + from mule.pp import fields_to_pp_file + import numpy as np + + from iris import load_cube + from iris import save as save_cube + + def packing_patch(*compress_args, **compress_kwargs) -> bytes: + """ + Force conversion from returned :class:`memoryview` to :class:`bytes`. + + Downstream uses of :func:`mo_pack.compress_wgdos` were written + for the ``Python2`` behaviour, where the returned buffer had a + different ``__len__`` value to the current :class:`memoryview`. + Unable to fix directly in Mule, so monkey patching for now. + """ + return mo_pack_compress(*compress_args, **compress_kwargs).tobytes() + + import mo_pack + + mo_pack.compress_wgdos = packing_patch + + ######## + + template = { + "fixed_length_header": {"dataset_type": 3, "grid_staggering": 3}, + "integer_constants": { + "num_p_levels": len_z, + "num_cols": len_x, + "num_rows": len_y, + }, + "real_constants": {}, + "level_dependent_constants": {"dims": (len_z + 1, None)}, + } + new_ff = FieldsFile.from_template(deepcopy(template)) + + data_array = np.arange(len_x * len_y).reshape(len_x, len_y) + array_provider = ArrayDataProvider(data_array) + + def add_field(level_: int, time_step_: int) -> None: + """ + Add a minimal field to the new :class:`~mule.FieldsFile`. + + Includes the minimum information to allow Mule saving and Iris + loading, as well as incrementation for vertical levels and time + steps to allow generation of z and t dimensions. + """ + new_field = Field3.empty() + # To correspond to the header-release 3 class used. + new_field.lbrel = 3 + # Mule uses the first element of the lookup to test for + # unpopulated fields (and skips them), so the first element should + # be set to something. The year will do. + new_field.raw[1] = datetime.now().year + + # Horizontal. 
+ new_field.lbcode = 1 + new_field.lbnpt = len_x + new_field.lbrow = len_y + new_field.bdx = new_ff.real_constants.col_spacing + new_field.bdy = new_ff.real_constants.row_spacing + new_field.bzx = new_ff.real_constants.start_lon - 0.5 * new_field.bdx + new_field.bzy = new_ff.real_constants.start_lat - 0.5 * new_field.bdy + + # Hemisphere. + new_field.lbhem = 32 + # Processing. + new_field.lbproc = 0 + + # Vertical. + # Hybrid height values by simulating sequences similar to those in a + # theta file. + new_field.lbvc = 65 + if level_ == 0: + new_field.lblev = 9999 + else: + new_field.lblev = level_ + + level_1 = level_ + 1 + six_rec = 20 / 3 + three_rec = six_rec / 2 + + new_field.blev = level_1 ** 2 * six_rec - six_rec + new_field.brsvd1 = ( + level_1 ** 2 * six_rec + (six_rec * level_1) - three_rec + ) + + brsvd2_simulated = np.linspace(0.995, 0, len_z) + shift = min(len_z, 2) + bhrlev_simulated = np.concatenate( + [np.ones(shift), brsvd2_simulated[:-shift]] + ) + new_field.brsvd2 = brsvd2_simulated[level_] + new_field.bhrlev = bhrlev_simulated[level_] + + # Time. + new_field.lbtim = 11 + + new_field.lbyr = time_step_ + for attr_name in ["lbmon", "lbdat", "lbhr", "lbmin", "lbsec"]: + setattr(new_field, attr_name, 0) + + new_field.lbyrd = time_step_ + 1 + for attr_name in ["lbmond", "lbdatd", "lbhrd", "lbmind", "lbsecd"]: + setattr(new_field, attr_name, 0) + + # Data and packing. 
+ new_field.lbuser1 = 1 + new_field.lbpack = int(compress) + new_field.bacc = 0 + new_field.bmdi = -1 + new_field.lbext = 0 + new_field.set_data_provider(array_provider) + + new_ff.fields.append(new_field) + + for time_step in range(len_t): + for level in range(len_z): + add_field(level, time_step + 1) + + ff_path = save_paths.get("FF", None) + pp_path = save_paths.get("PP", None) + nc_path = save_paths.get("NetCDF", None) + + if ff_path: + new_ff.to_file(ff_path) + if pp_path: + fields_to_pp_file(str(pp_path), new_ff.fields) + if nc_path: + temp_ff_path = None + # Need an Iris Cube from the FF content. + if ff_path: + # Use the existing file. + ff_cube = load_cube(ff_path) + else: + # Make a temporary file. + temp_ff_path = NamedTemporaryFile() + new_ff.to_file(temp_ff_path.name) + ff_cube = load_cube(temp_ff_path.name) + + save_cube(ff_cube, nc_path, zlib=compress) + if temp_ff_path: + temp_ff_path.close() + + +FILE_EXTENSIONS = {"FF": "", "PP": ".pp", "NetCDF": ".nc"} + + +def create_um_files( + len_x: int, + len_y: int, + len_z: int, + len_t: int, + compress: bool, + file_types: list, +) -> dict: + """ + Generate FF-based FF / PP / NetCDF files with specified shape and compression. + + All files representing a given shape are saved in a dedicated directory. A + dictionary of the saved paths is returned. + + If the required files exist, they are re-used, unless + :const:`benchmarks.REUSE_DATA` is ``False``. + """ + # Self contained imports to avoid linting confusion with _create_um_files(). + from . 
import BENCHMARK_DATA, REUSE_DATA, run_function_elsewhere + + save_name_sections = ["UM", len_x, len_y, len_z, len_t] + save_name = "_".join(str(section) for section in save_name_sections) + save_dir = BENCHMARK_DATA / save_name + if not save_dir.is_dir(): + save_dir.mkdir(parents=True) + + save_paths = {} + files_exist = True + for file_type in file_types: + file_ext = FILE_EXTENSIONS[file_type] + save_path = (save_dir / f"{compress}").with_suffix(file_ext) + files_exist = files_exist and save_path.is_file() + save_paths[file_type] = str(save_path) + + if not REUSE_DATA or not files_exist: + _ = run_function_elsewhere( + _create_um_files, len_x, len_y, len_z, len_t, compress, save_paths + ) + + return save_paths diff --git a/benchmarks/benchmarks/loading.py b/benchmarks/benchmarks/loading.py new file mode 100644 index 00000000000..4558c3b5cba --- /dev/null +++ b/benchmarks/benchmarks/loading.py @@ -0,0 +1,185 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +File loading benchmark tests. + +Where applicable benchmarks should be parameterised for two sizes of input data: + * minimal: enables detection of regressions in parts of the run-time that do + NOT scale with data size. + * large: large enough to exclusively detect regressions in parts of the + run-time that scale with data size. Size should be _just_ large + enough - don't want to bloat benchmark runtime. 
+ +""" + +from iris import AttributeConstraint, Constraint, load, load_cube +from iris.cube import Cube +from iris.fileformats.um import structured_um_loading + +from .generate_data import BENCHMARK_DATA, REUSE_DATA, run_function_elsewhere +from .generate_data.um_files import create_um_files + + +class LoadAndRealise: + params = [ + [(2, 2, 2), (1280, 960, 5)], + [False, True], + ["FF", "PP", "NetCDF"], + ] + param_names = ["xyz", "compressed", "file_format"] + + def setup_cache(self) -> dict: + file_type_args = self.params[2] + file_path_dict = {} + for xyz in self.params[0]: + file_path_dict[xyz] = {} + x, y, z = xyz + for compress in self.params[1]: + file_path_dict[xyz][compress] = create_um_files( + x, y, z, 1, compress, file_type_args + ) + return file_path_dict + + def setup( + self, + file_path_dict: dict, + xyz: tuple, + compress: bool, + file_format: str, + ) -> None: + self.file_path = file_path_dict[xyz][compress][file_format] + self.cube = self.load() + + def load(self) -> Cube: + return load_cube(self.file_path) + + def time_load(self, _, __, ___, ____) -> None: + _ = self.load() + + def time_realise(self, _, __, ___, ____) -> None: + # Don't touch cube.data - permanent realisation plays badly with ASV's + # re-run strategy. + assert self.cube.has_lazy_data() + self.cube.core_data().compute() + + +class STASHConstraint: + # xyz sizes mimic LoadAndRealise to maximise file re-use. 
+ params = [[(2, 2, 2), (1280, 960, 5)], ["FF", "PP"]] + param_names = ["xyz", "file_format"] + + def setup_cache(self) -> dict: + file_type_args = self.params[1] + file_path_dict = {} + for xyz in self.params[0]: + x, y, z = xyz + file_path_dict[xyz] = create_um_files( + x, y, z, 1, False, file_type_args + ) + return file_path_dict + + def setup( + self, file_path_dict: dict, xyz: tuple, file_format: str + ) -> None: + self.file_path = file_path_dict[xyz][file_format] + + def time_stash_constraint(self, _, __, ___) -> None: + _ = load_cube(self.file_path, AttributeConstraint(STASH="m??s??i901")) + + +class TimeConstraint: + params = [[3, 20], ["FF", "PP", "NetCDF"]] + param_names = ["time_dim_len", "file_format"] + + def setup_cache(self) -> dict: + file_type_args = self.params[1] + file_path_dict = {} + for time_dim_len in self.params[0]: + file_path_dict[time_dim_len] = create_um_files( + 20, 20, 5, time_dim_len, False, file_type_args + ) + return file_path_dict + + def setup( + self, file_path_dict: dict, time_dim_len: int, file_format: str + ) -> None: + self.file_path = file_path_dict[time_dim_len][file_format] + self.time_constr = Constraint(time=lambda cell: cell.point.year < 3) + + def time_time_constraint(self, _, __, ___) -> None: + _ = load_cube(self.file_path, self.time_constr) + + +class ManyVars: + FILE_PATH = BENCHMARK_DATA / "many_var_file.nc" + + @staticmethod + def _create_file(save_path: str) -> None: + """Is run externally - everything must be self-contained.""" + import numpy as np + + from iris import save + from iris.coords import AuxCoord + from iris.cube import Cube + + data_len = 8 + data = np.arange(data_len) + cube = Cube(data, units="unknown") + extra_vars = 80 + names = ["coord_" + str(i) for i in range(extra_vars)] + for name in names: + coord = AuxCoord(data, long_name=name, units="unknown") + cube.add_aux_coord(coord, 0) + save(cube, save_path) + + def setup_cache(self) -> None: + if not REUSE_DATA or not self.FILE_PATH.is_file(): 
+ # See :mod:`benchmarks.generate_data` docstring for full explanation. + _ = run_function_elsewhere( + self._create_file, + str(self.FILE_PATH), + ) + + def time_many_var_load(self) -> None: + _ = load(str(self.FILE_PATH)) + + +class StructuredFF: + """ + Test structured loading of a large-ish fieldsfile. + + Structured load of the larger size should show benefit over standard load, + avoiding the cost of merging. + """ + + params = [[(2, 2, 2), (1280, 960, 5)], [False, True]] + param_names = ["xyz", "structured_loading"] + + def setup_cache(self) -> dict: + file_path_dict = {} + for xyz in self.params[0]: + x, y, z = xyz + file_path_dict[xyz] = create_um_files(x, y, z, 1, False, ["FF"]) + return file_path_dict + + def setup(self, file_path_dict, xyz, structured_load): + self.file_path = file_path_dict[xyz]["FF"] + self.structured_load = structured_load + + def load(self): + """Load the whole file (in fact there is only 1 cube).""" + + def _load(): + _ = load(self.file_path) + + if self.structured_load: + with structured_um_loading(): + _load() + else: + _load() + + def time_structured_load(self, _, __, ___): + self.load() diff --git a/noxfile.py b/noxfile.py index 6367b74aef0..0600540c5b6 100755 --- a/noxfile.py +++ b/noxfile.py @@ -289,7 +289,7 @@ def linkcheck(session: nox.sessions.Session): ) -@nox.session(python=PY_VER[-1], venv_backend="conda") +@nox.session(python=PY_VER, venv_backend="conda") @nox.parametrize( ["ci_mode"], [True, False], @@ -297,7 +297,7 @@ def linkcheck(session: nox.sessions.Session): ) def benchmarks(session: nox.sessions.Session, ci_mode: bool): """ - Perform esmf-regrid performance benchmarks (using Airspeed Velocity). + Perform Iris performance benchmarks (using Airspeed Velocity). 
Parameters ---------- @@ -315,6 +315,47 @@ def benchmarks(session: nox.sessions.Session, ci_mode: bool): """ session.install("asv", "nox") + + data_gen_var = "DATA_GEN_PYTHON" + if data_gen_var in os.environ: + print("Using existing data generation environment.") + else: + print("Setting up the data generation environment...") + # Get Nox to build an environment for the `tests` session, but don't + # run the session. Will re-use a cached environment if appropriate. + session.run_always( + "nox", + "--session=tests", + "--install-only", + f"--python={session.python}", + ) + # Find the environment built above, set it to be the data generation + # environment. + data_gen_python = next( + Path(".nox").rglob(f"tests*/bin/python{session.python}") + ).resolve() + session.env[data_gen_var] = data_gen_python + + mule_dir = data_gen_python.parents[1] / "resources" / "mule" + if not mule_dir.is_dir(): + print("Installing Mule into data generation environment...") + session.run_always( + "git", + "clone", + "https://github.com/metomi/mule.git", + str(mule_dir), + external=True, + ) + session.run_always( + str(data_gen_python), + "-m", + "pip", + "install", + str(mule_dir / "mule"), + external=True, + ) + + print("Running ASV...") session.cd("benchmarks") # Skip over setup questions for a new machine. 
session.run("asv", "machine", "--yes") From cbc31c7f010c1538b8260e431eb7265683cf11ad Mon Sep 17 00:00:00 2001 From: Ruth Comer <10599679+rcomer@users.noreply.github.com> Date: Mon, 14 Feb 2022 15:43:52 +0000 Subject: [PATCH 56/69] fix test (#4585) --- .../pp_load_rules/test__convert_time_coords.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_time_coords.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_time_coords.py index d975884cb0e..2aae32b1ae8 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_time_coords.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_time_coords.py @@ -13,8 +13,6 @@ # importing anything else. import iris.tests as tests # isort:skip -import unittest - from cf_units import CALENDAR_360_DAY, CALENDAR_GREGORIAN, Unit from cftime import datetime as nc_datetime import numpy as np @@ -733,7 +731,6 @@ def test_t1_list_t2_scalar(self): class TestArrayInputWithLBTIM_0_3_1(TestField): - @unittest.skip("#3508 investigate unit test failure") def test_t1_scalar_t2_list(self): lbtim = _lbtim(ib=3, ic=1) lbcode = _lbcode(1) @@ -756,9 +753,13 @@ def test_t1_scalar_t2_list(self): ) # Expected coords. 
+ leap_year_adjust = np.array([0, 24, 24]) points = np.ones_like(years) * lbft bounds = np.array( - [lbft - ((years - 1970) * 365 * 24 + 2 * 24), points] + [ + lbft - ((years - 1970) * 365 * 24 + 2 * 24 + leap_year_adjust), + points, + ] ).transpose() fp_coord = AuxCoord( points, @@ -766,7 +767,7 @@ def test_t1_scalar_t2_list(self): units="hours", bounds=bounds, ) - points = (years - 1970) * 365 * 24 + 10 * 24 + 9 + points = (years - 1970) * 365 * 24 + 10 * 24 + 9 + leap_year_adjust bounds = np.array( [np.ones_like(points) * (8 * 24 + 9), points] ).transpose() From cf5413c1549b1614b345949e1e87ccda1b1846a6 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 15 Feb 2022 13:45:28 +0000 Subject: [PATCH 57/69] [pre-commit.ci] pre-commit autoupdate (#4587) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * [pre-commit.ci] pre-commit autoupdate updates: - [github.com/psf/black: 21.12b0 → 22.1.0](https://github.com/psf/black/compare/21.12b0...22.1.0) - [github.com/asottile/blacken-docs: v1.12.0 → v1.12.1](https://github.com/asottile/blacken-docs/compare/v1.12.0...v1.12.1) - [github.com/aio-libs/sort-all: v1.1.0 → v1.2.0](https://github.com/aio-libs/sort-all/compare/v1.1.0...v1.2.0) * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 6 +-- .../benchmarks/generate_data/um_files.py | 4 +- benchmarks/benchmarks/plot.py | 2 +- .../meteorology/plot_wind_barbs.py | 4 +- .../meteorology/plot_wind_speed.py | 2 +- lib/iris/analysis/__init__.py | 2 +- lib/iris/analysis/_grid_angles.py | 2 +- lib/iris/analysis/_scipy_interpolate.py | 2 +- lib/iris/analysis/calculus.py | 12 ++---- lib/iris/analysis/cartography.py | 6 +-- lib/iris/analysis/maths.py | 2 +- lib/iris/analysis/stats.py | 4 +- 
lib/iris/fileformats/netcdf.py | 4 +- lib/iris/fileformats/pp.py | 4 +- lib/iris/tests/integration/test_netcdf.py | 6 +-- lib/iris/tests/test_basic_maths.py | 38 +++++++++---------- .../analysis/cartography/test_rotate_winds.py | 8 ++-- .../regrid/test_RectilinearRegridder.py | 2 +- .../test_add_categorised_coord.py | 2 +- ...__collapse_degenerate_points_and_bounds.py | 2 +- 20 files changed, 55 insertions(+), 59 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 97dff666cfc..ee036038e45 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -29,7 +29,7 @@ repos: - id: no-commit-to-branch - repo: https://github.com/psf/black - rev: 21.12b0 + rev: 22.1.0 hooks: - id: black pass_filenames: false @@ -50,14 +50,14 @@ repos: args: [--filter-files] - repo: https://github.com/asottile/blacken-docs - rev: v1.12.0 + rev: v1.12.1 hooks: - id: blacken-docs types: [file, rst] additional_dependencies: [black==21.6b0] - repo: https://github.com/aio-libs/sort-all - rev: v1.1.0 + rev: v1.2.0 hooks: - id: sort-all types: [file, python] diff --git a/benchmarks/benchmarks/generate_data/um_files.py b/benchmarks/benchmarks/generate_data/um_files.py index 8792fcc48b7..1037954f08f 100644 --- a/benchmarks/benchmarks/generate_data/um_files.py +++ b/benchmarks/benchmarks/generate_data/um_files.py @@ -107,9 +107,9 @@ def add_field(level_: int, time_step_: int) -> None: six_rec = 20 / 3 three_rec = six_rec / 2 - new_field.blev = level_1 ** 2 * six_rec - six_rec + new_field.blev = level_1**2 * six_rec - six_rec new_field.brsvd1 = ( - level_1 ** 2 * six_rec + (six_rec * level_1) - three_rec + level_1**2 * six_rec + (six_rec * level_1) - three_rec ) brsvd2_simulated = np.linspace(0.995, 0, len_z) diff --git a/benchmarks/benchmarks/plot.py b/benchmarks/benchmarks/plot.py index 45905abd2ff..24899776dc8 100644 --- a/benchmarks/benchmarks/plot.py +++ b/benchmarks/benchmarks/plot.py @@ -22,7 +22,7 @@ def setup(self): # Should generate 10 distinct 
contours, regardless of dim size. dim_size = int(ARTIFICIAL_DIM_SIZE / 5) repeat_number = int(dim_size / 10) - repeat_range = range(int((dim_size ** 2) / repeat_number)) + repeat_range = range(int((dim_size**2) / repeat_number)) data = np.repeat(repeat_range, repeat_number) data = data.reshape((dim_size,) * 2) diff --git a/docs/gallery_code/meteorology/plot_wind_barbs.py b/docs/gallery_code/meteorology/plot_wind_barbs.py index c3c056eb4ac..b09040c64e9 100644 --- a/docs/gallery_code/meteorology/plot_wind_barbs.py +++ b/docs/gallery_code/meteorology/plot_wind_barbs.py @@ -30,7 +30,7 @@ def main(): # To illustrate the full range of barbs, scale the wind speed up to pretend # that a storm is passing over - magnitude = (uwind ** 2 + vwind ** 2) ** 0.5 + magnitude = (uwind**2 + vwind**2) ** 0.5 magnitude.convert_units("knot") max_speed = magnitude.collapsed( ("latitude", "longitude"), iris.analysis.MAX @@ -41,7 +41,7 @@ def main(): vwind = vwind / max_speed * max_desired # Create a cube containing the wind speed - windspeed = (uwind ** 2 + vwind ** 2) ** 0.5 + windspeed = (uwind**2 + vwind**2) ** 0.5 windspeed.rename("windspeed") windspeed.convert_units("knot") diff --git a/docs/gallery_code/meteorology/plot_wind_speed.py b/docs/gallery_code/meteorology/plot_wind_speed.py index fd03f542057..40d9d0da002 100644 --- a/docs/gallery_code/meteorology/plot_wind_speed.py +++ b/docs/gallery_code/meteorology/plot_wind_speed.py @@ -27,7 +27,7 @@ def main(): vwind = iris.load_cube(infile, "y_wind") # Create a cube containing the wind speed. - windspeed = (uwind ** 2 + vwind ** 2) ** 0.5 + windspeed = (uwind**2 + vwind**2) ** 0.5 windspeed.rename("windspeed") # Plot the wind speed as a contour plot. diff --git a/lib/iris/analysis/__init__.py b/lib/iris/analysis/__init__.py index 465a5210657..b1a9e1d2598 100644 --- a/lib/iris/analysis/__init__.py +++ b/lib/iris/analysis/__init__.py @@ -1394,7 +1394,7 @@ def _lazy_rms(array, axis, **kwargs): # all. 
Thus trying to use this aggregator with weights will currently # raise an error in dask due to the unexpected keyword `weights`, # rather than silently returning the wrong answer. - return da.sqrt(da.mean(array ** 2, axis=axis, **kwargs)) + return da.sqrt(da.mean(array**2, axis=axis, **kwargs)) @_build_dask_mdtol_function diff --git a/lib/iris/analysis/_grid_angles.py b/lib/iris/analysis/_grid_angles.py index 127aec7c1e9..0b52f54568e 100644 --- a/lib/iris/analysis/_grid_angles.py +++ b/lib/iris/analysis/_grid_angles.py @@ -120,7 +120,7 @@ def _angle(p, q, r): mid_lons = np.deg2rad(q[0]) pr = _3d_xyz_from_latlon(r[0], r[1]) - _3d_xyz_from_latlon(p[0], p[1]) - pr_norm = np.sqrt(np.sum(pr ** 2, axis=0)) + pr_norm = np.sqrt(np.sum(pr**2, axis=0)) pr_top = pr[1] * np.cos(mid_lons) - pr[0] * np.sin(mid_lons) index = pr_norm == 0 diff --git a/lib/iris/analysis/_scipy_interpolate.py b/lib/iris/analysis/_scipy_interpolate.py index c6b33c56a4d..fc642497292 100644 --- a/lib/iris/analysis/_scipy_interpolate.py +++ b/lib/iris/analysis/_scipy_interpolate.py @@ -229,7 +229,7 @@ def compute_interp_weights(self, xi, method=None): xi_shape, method, indices, norm_distances, out_of_bounds = prepared # Allocate arrays for describing the sparse matrix. 
- n_src_values_per_result_value = 2 ** ndim + n_src_values_per_result_value = 2**ndim n_result_values = len(indices[0]) n_non_zero = n_result_values * n_src_values_per_result_value weights = np.ones(n_non_zero, dtype=norm_distances[0].dtype) diff --git a/lib/iris/analysis/calculus.py b/lib/iris/analysis/calculus.py index 409782f256a..4630f47967f 100644 --- a/lib/iris/analysis/calculus.py +++ b/lib/iris/analysis/calculus.py @@ -629,14 +629,10 @@ def curl(i_cube, j_cube, k_cube=None): # (d/dtheta (i_cube * sin(lat)) - d_j_cube_dphi) # phi_cmpt = 1/r * ( d/dr (r * j_cube) - d_k_cube_dtheta) # theta_cmpt = 1/r * ( 1/cos(lat) * d_k_cube_dphi - d/dr (r * i_cube) - if ( - y_coord.name() - not in [ - "latitude", - "grid_latitude", - ] - or x_coord.name() not in ["longitude", "grid_longitude"] - ): + if y_coord.name() not in [ + "latitude", + "grid_latitude", + ] or x_coord.name() not in ["longitude", "grid_longitude"]: raise ValueError( "Expecting latitude as the y coord and " "longitude as the x coord for spherical curl." diff --git a/lib/iris/analysis/cartography.py b/lib/iris/analysis/cartography.py index 373487af532..f704468e332 100644 --- a/lib/iris/analysis/cartography.py +++ b/lib/iris/analysis/cartography.py @@ -335,7 +335,7 @@ def _quadrant_area(radian_lat_bounds, radian_lon_bounds, radius_of_earth): raise ValueError("Bounds must be [n,2] array") # fill in a new array of areas - radius_sqr = radius_of_earth ** 2 + radius_sqr = radius_of_earth**2 radian_lat_64 = radian_lat_bounds.astype(np.float64) radian_lon_64 = radian_lon_bounds.astype(np.float64) @@ -1010,8 +1010,8 @@ def _transform_distance_vectors_tolerance_mask( # Squared magnitudes should be equal to one within acceptable tolerance. # A value of atol=2e-3 is used, which corresponds to a change in magnitude # of approximately 0.1%. 
- sqmag_1_0 = u_one_t ** 2 + v_zero_t ** 2 - sqmag_0_1 = u_zero_t ** 2 + v_one_t ** 2 + sqmag_1_0 = u_one_t**2 + v_zero_t**2 + sqmag_0_1 = u_zero_t**2 + v_one_t**2 mask = np.logical_not( np.logical_and( np.isclose(sqmag_1_0, ones, atol=2e-3), diff --git a/lib/iris/analysis/maths.py b/lib/iris/analysis/maths.py index 107d964ed4e..1cbc90cc60a 100644 --- a/lib/iris/analysis/maths.py +++ b/lib/iris/analysis/maths.py @@ -540,7 +540,7 @@ def power(data, out=None): return _math_op_common( cube, power, - cube.units ** exponent, + cube.units**exponent, new_dtype=new_dtype, in_place=in_place, ) diff --git a/lib/iris/analysis/stats.py b/lib/iris/analysis/stats.py index 89dde1818bc..711e3c5bfbb 100644 --- a/lib/iris/analysis/stats.py +++ b/lib/iris/analysis/stats.py @@ -168,10 +168,10 @@ def _ones_like(cube): covar = (s1 * s2).collapsed( corr_coords, iris.analysis.SUM, weights=weights_1, mdtol=mdtol ) - var_1 = (s1 ** 2).collapsed( + var_1 = (s1**2).collapsed( corr_coords, iris.analysis.SUM, weights=weights_1 ) - var_2 = (s2 ** 2).collapsed( + var_2 = (s2**2).collapsed( corr_coords, iris.analysis.SUM, weights=weights_2 ) diff --git a/lib/iris/fileformats/netcdf.py b/lib/iris/fileformats/netcdf.py index 100ab29daaa..73a137b4aff 100644 --- a/lib/iris/fileformats/netcdf.py +++ b/lib/iris/fileformats/netcdf.py @@ -2738,9 +2738,9 @@ def _create_cf_data_variable( cmin, cmax = _co_realise_lazy_arrays([cmin, cmax]) n = dtype.itemsize * 8 if masked: - scale_factor = (cmax - cmin) / (2 ** n - 2) + scale_factor = (cmax - cmin) / (2**n - 2) else: - scale_factor = (cmax - cmin) / (2 ** n - 1) + scale_factor = (cmax - cmin) / (2**n - 1) if dtype.kind == "u": add_offset = cmin elif dtype.kind == "i": diff --git a/lib/iris/fileformats/pp.py b/lib/iris/fileformats/pp.py index 9f213ec4db9..9bda98bf61e 100644 --- a/lib/iris/fileformats/pp.py +++ b/lib/iris/fileformats/pp.py @@ -403,7 +403,7 @@ def _calculate_str_value_from_value(self): def _calculate_value_from_str_value(self): self._value = 
np.sum( - [10 ** i * val for i, val in enumerate(self._strvalue)] + [10**i * val for i, val in enumerate(self._strvalue)] ) def __len__(self): @@ -418,7 +418,7 @@ def __getitem__(self, key): # if the key returns a list of values, then combine them together # to an integer if isinstance(val, list): - val = sum([10 ** i * val for i, val in enumerate(val)]) + val = sum([10**i * val for i, val in enumerate(val)]) return val diff --git a/lib/iris/tests/integration/test_netcdf.py b/lib/iris/tests/integration/test_netcdf.py index f7aaa1d05c0..2a45561e177 100644 --- a/lib/iris/tests/integration/test_netcdf.py +++ b/lib/iris/tests/integration/test_netcdf.py @@ -416,7 +416,7 @@ def setUp(self): levels.units = "centimeters" levels.positive = "down" levels.axis = "Z" - levels[:] = np.linspace(0, 10 ** 5, 3) + levels[:] = np.linspace(0, 10**5, 3) volcello.id = "volcello" volcello.out_name = "volcello" @@ -507,9 +507,9 @@ def _get_scale_factor_add_offset(cube, datatype): else: masked = False if masked: - scale_factor = (cmax - cmin) / (2 ** n - 2) + scale_factor = (cmax - cmin) / (2**n - 2) else: - scale_factor = (cmax - cmin) / (2 ** n - 1) + scale_factor = (cmax - cmin) / (2**n - 1) if dt.kind == "u": add_offset = cmin elif dt.kind == "i": diff --git a/lib/iris/tests/test_basic_maths.py b/lib/iris/tests/test_basic_maths.py index e753adbae85..24f2b894426 100644 --- a/lib/iris/tests/test_basic_maths.py +++ b/lib/iris/tests/test_basic_maths.py @@ -249,7 +249,7 @@ def test_apply_ufunc(self): np.square, a, new_name="squared temperature", - new_unit=a.units ** 2, + new_unit=a.units**2, in_place=False, ) self.assertCMLApproxData(a, ("analysis", "apply_ufunc_original.cml")) @@ -259,14 +259,14 @@ def test_apply_ufunc(self): np.square, a, new_name="squared temperature", - new_unit=a.units ** 2, + new_unit=a.units**2, in_place=True, ) self.assertCMLApproxData(b, ("analysis", "apply_ufunc.cml")) self.assertCMLApproxData(a, ("analysis", "apply_ufunc.cml")) def vec_mag(u, v): - return 
math.sqrt(u ** 2 + v ** 2) + return math.sqrt(u**2 + v**2) c = a.copy() + 2 @@ -295,7 +295,7 @@ def test_apply_ufunc_fail(self): def test_ifunc(self): a = self.cube - my_ifunc = iris.analysis.maths.IFunc(np.square, lambda a: a.units ** 2) + my_ifunc = iris.analysis.maths.IFunc(np.square, lambda a: a.units**2) b = my_ifunc(a, new_name="squared temperature", in_place=False) self.assertCMLApproxData(a, ("analysis", "apply_ifunc_original.cml")) @@ -307,7 +307,7 @@ def test_ifunc(self): self.assertCMLApproxData(a, ("analysis", "apply_ifunc.cml")) def vec_mag(u, v): - return math.sqrt(u ** 2 + v ** 2) + return math.sqrt(u**2 + v**2) c = a.copy() + 2 @@ -347,7 +347,7 @@ def test_ifunc_init_fail(self): def test_ifunc_call_fail(self): a = self.cube - my_ifunc = iris.analysis.maths.IFunc(np.square, lambda a: a.units ** 2) + my_ifunc = iris.analysis.maths.IFunc(np.square, lambda a: a.units**2) # should now NOT fail because giving 2 arguments to an ifunc that # expects only one will now ignore the surplus argument and raise @@ -367,7 +367,7 @@ def test_ifunc_call_fail(self): my_ifunc(a) my_ifunc = iris.analysis.maths.IFunc( - lambda a: (a, a ** 2.0), lambda cube: cf_units.Unit("1") + lambda a: (a, a**2.0), lambda cube: cf_units.Unit("1") ) # should fail because data function returns a tuple @@ -553,9 +553,9 @@ def test_square_root(self): a.data = abs(a.data) a.units **= 2 - e = a ** 0.5 + e = a**0.5 - self.assertArrayAllClose(e.data, a.data ** 0.5) + self.assertArrayAllClose(e.data, a.data**0.5) self.assertCML(e, ("analysis", "sqrt.cml"), checksum=False) self.assertRaises(ValueError, iris.analysis.maths.exponentiate, a, 0.3) @@ -585,26 +585,26 @@ def test_apply_ufunc(self): np.square, a, new_name="more_thingness", - new_unit=a.units ** 2, + new_unit=a.units**2, in_place=False, ) - ans = a.data ** 2 + ans = a.data**2 self.assertArrayEqual(b.data, ans) self.assertEqual(b.name(), "more_thingness") self.assertEqual(b.units, cf_units.Unit("m^2")) def vec_mag(u, v): - return 
math.sqrt(u ** 2 + v ** 2) + return math.sqrt(u**2 + v**2) c = a.copy() + 2 vec_mag_ufunc = np.frompyfunc(vec_mag, 2, 1) b = iris.analysis.maths.apply_ufunc(vec_mag_ufunc, a, c) - ans = a.data ** 2 + c.data ** 2 - b2 = b ** 2 + ans = a.data**2 + c.data**2 + b2 = b**2 self.assertArrayAlmostEqual(b2.data, ans) @@ -617,17 +617,17 @@ def test_ifunc(self): a = self.cube a.units = cf_units.Unit("meters") - my_ifunc = iris.analysis.maths.IFunc(np.square, lambda x: x.units ** 2) + my_ifunc = iris.analysis.maths.IFunc(np.square, lambda x: x.units**2) b = my_ifunc(a, new_name="more_thingness", in_place=False) - ans = a.data ** 2 + ans = a.data**2 self.assertArrayEqual(b.data, ans) self.assertEqual(b.name(), "more_thingness") self.assertEqual(b.units, cf_units.Unit("m^2")) def vec_mag(u, v): - return math.sqrt(u ** 2 + v ** 2) + return math.sqrt(u**2 + v**2) c = a.copy() + 2 @@ -637,12 +637,12 @@ def vec_mag(u, v): ) b = my_ifunc(a, c) - ans = (a.data ** 2 + c.data ** 2) ** 0.5 + ans = (a.data**2 + c.data**2) ** 0.5 self.assertArrayAlmostEqual(b.data, ans) def vec_mag_data_func(u_data, v_data): - return np.sqrt(u_data ** 2 + v_data ** 2) + return np.sqrt(u_data**2 + v_data**2) vec_mag_ifunc = iris.analysis.maths.IFunc( vec_mag_data_func, lambda a, b: (a + b).units diff --git a/lib/iris/tests/unit/analysis/cartography/test_rotate_winds.py b/lib/iris/tests/unit/analysis/cartography/test_rotate_winds.py index 9e3af90603e..eafaa20ec88 100644 --- a/lib/iris/tests/unit/analysis/cartography/test_rotate_winds.py +++ b/lib/iris/tests/unit/analysis/cartography/test_rotate_winds.py @@ -343,8 +343,8 @@ def test_orig_coords(self): def test_magnitude_preservation(self): u, v = self._uv_cubes_limited_extent() ut, vt = rotate_winds(u, v, iris.coord_systems.OSGB()) - orig_sq_mag = u.data ** 2 + v.data ** 2 - res_sq_mag = ut.data ** 2 + vt.data ** 2 + orig_sq_mag = u.data**2 + v.data**2 + res_sq_mag = ut.data**2 + vt.data**2 self.assertArrayAllClose(orig_sq_mag, res_sq_mag, rtol=5e-4) def 
test_data_values(self): @@ -437,9 +437,9 @@ def test_rotated_to_osgb(self): self.assertArrayEqual(expected_mask, vt.data.mask) # Check unmasked values have sufficiently small error in mag. - expected_mag = np.sqrt(u.data ** 2 + v.data ** 2) + expected_mag = np.sqrt(u.data**2 + v.data**2) # Use underlying data to ignore mask in calculation. - res_mag = np.sqrt(ut.data.data ** 2 + vt.data.data ** 2) + res_mag = np.sqrt(ut.data.data**2 + vt.data.data**2) # Calculate percentage error (note there are no zero magnitudes # so we can divide safely). anom = 100.0 * np.abs(res_mag - expected_mag) / expected_mag diff --git a/lib/iris/tests/unit/analysis/regrid/test_RectilinearRegridder.py b/lib/iris/tests/unit/analysis/regrid/test_RectilinearRegridder.py index f0dba837489..a018507fb35 100644 --- a/lib/iris/tests/unit/analysis/regrid/test_RectilinearRegridder.py +++ b/lib/iris/tests/unit/analysis/regrid/test_RectilinearRegridder.py @@ -33,7 +33,7 @@ def setUp(self): self.xs, self.ys = np.meshgrid(self.x.points, self.y.points) def transformation(x, y): - return x + y ** 2 + return x + y**2 # Construct a function which adds dimensions to the 2D data array # so that we can test higher dimensional functionality. diff --git a/lib/iris/tests/unit/coord_categorisation/test_add_categorised_coord.py b/lib/iris/tests/unit/coord_categorisation/test_add_categorised_coord.py index b7c59ff5660..0c20f16f5a6 100644 --- a/lib/iris/tests/unit/coord_categorisation/test_add_categorised_coord.py +++ b/lib/iris/tests/unit/coord_categorisation/test_add_categorised_coord.py @@ -36,7 +36,7 @@ def test_vectorise_call(self): # The reason we use numpy.vectorize is to support multi-dimensional # coordinate points. 
def fn(coord, v): - return v ** 2 + return v**2 with mock.patch( "numpy.vectorize", return_value=self.vectorised diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__collapse_degenerate_points_and_bounds.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__collapse_degenerate_points_and_bounds.py index 0f2a8a2d4b9..c9c4821e0aa 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__collapse_degenerate_points_and_bounds.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__collapse_degenerate_points_and_bounds.py @@ -65,7 +65,7 @@ def test_3d(self): def test_multiple_odd_dims(self): # Test to ensure multiple collapsed dimensions don't interfere. # make a 5-D array where dimensions 0, 2 and 3 are degenerate. - array = np.arange(3 ** 5).reshape([3] * 5) + array = np.arange(3**5).reshape([3] * 5) array[1:] = array[0:1] array[:, :, 1:] = array[:, :, 0:1] array[:, :, :, 1:] = array[:, :, :, 0:1] From 683ed1879b4d43219875cca67467c17fad82b3f4 Mon Sep 17 00:00:00 2001 From: lbdreyer Date: Tue, 15 Feb 2022 13:57:37 +0000 Subject: [PATCH 58/69] Finalise whatsnew and version string update (#4588) --- docs/src/whatsnew/3.2.rst | 6 +++--- lib/iris/__init__.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/src/whatsnew/3.2.rst b/docs/src/whatsnew/3.2.rst index 9aa6a788466..ef3764daa5a 100644 --- a/docs/src/whatsnew/3.2.rst +++ b/docs/src/whatsnew/3.2.rst @@ -1,7 +1,7 @@ .. include:: ../common_links.inc -v3.2 (31 Jan 2022) [unreleased] -******************************* +v3.2 (15 Feb 2022) +****************** This document explains the changes made to Iris for this release (:doc:`View all changes `.) @@ -351,7 +351,7 @@ This document explains the changes made to Iris for this release #. `@lbdreyer`_ corrected the license PyPI classifier. (:pull:`4435`) -#. `@aaronspring `_ exchanged ``dask`` with +#. 
`@aaronspring`_ exchanged ``dask`` with ``dask-core`` in testing environments reducing the number of dependencies installed for testing. (:pull:`4434`) diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py index 95722c69cf7..009a83aed50 100644 --- a/lib/iris/__init__.py +++ b/lib/iris/__init__.py @@ -108,7 +108,7 @@ def callback(cube, field, filename): # Iris revision. -__version__ = "3.2.0rc0" +__version__ = "3.2.0" # Restrict the names imported when using "from iris import *" __all__ = [ From 0365407396f7606b40487c84afe4ff3cc0b9cc45 Mon Sep 17 00:00:00 2001 From: Bill Little Date: Tue, 15 Feb 2022 15:31:22 +0000 Subject: [PATCH 59/69] docs linkcheck skip (#4590) --- docs/src/conf.py | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/src/conf.py b/docs/src/conf.py index 19f22e808f6..db2cdc36330 100644 --- a/docs/src/conf.py +++ b/docs/src/conf.py @@ -316,6 +316,7 @@ def _dotv(version): "https://software.ac.uk/how-cite-software", "http://www.esrl.noaa.gov/psd/data/gridded/conventions/cdc_netcdf_standard.shtml", "http://www.nationalarchives.gov.uk/doc/open-government-licence", + "https://www.metoffice.gov.uk/", ] # list of sources to exclude from the build. From f66353f611a1bfc7ee708da32ea330a100bc577f Mon Sep 17 00:00:00 2001 From: lbdreyer Date: Wed, 16 Feb 2022 11:36:58 +0000 Subject: [PATCH 60/69] Add missing commit to v3.2.x and update version number (#4593) * fix trove classifier (#4324) * update version to 3.2. post release Co-authored-by: Bill Little --- lib/iris/__init__.py | 2 +- setup.cfg | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py index 009a83aed50..a28a7cd479f 100644 --- a/lib/iris/__init__.py +++ b/lib/iris/__init__.py @@ -108,7 +108,7 @@ def callback(cube, field, filename): # Iris revision. 
-__version__ = "3.2.0" +__version__ = "3.2.0.post0" # Restrict the names imported when using "from iris import *" __all__ = [ diff --git a/setup.cfg b/setup.cfg index c2d31a5ddba..1aabe33d835 100644 --- a/setup.cfg +++ b/setup.cfg @@ -2,7 +2,7 @@ author = SciTools Developers author_email = scitools-iris-dev@googlegroups.com classifiers = - Development Status :: 5 Production/Stable + Development Status :: 5 - Production/Stable Intended Audience :: Science/Research License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+) Operating System :: MacOS From 2c29705d9e6286f75c802afa4a23f1c610189ca8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 16 Feb 2022 13:50:30 +0000 Subject: [PATCH 61/69] Bump actions/script from 5.1.0 to 6 (#4586) Bumps [actions/script](https://github.com/actions/script) from 5.1.0 to 6. - [Release notes](https://github.com/actions/script/releases) - [Commits](https://github.com/actions/script/compare/v5.1.0...v6) --- updated-dependencies: - dependency-name: actions/script dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/refresh-lockfiles.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/refresh-lockfiles.yml b/.github/workflows/refresh-lockfiles.yml index b40c3ca446a..28e01e4511c 100644 --- a/.github/workflows/refresh-lockfiles.yml +++ b/.github/workflows/refresh-lockfiles.yml @@ -35,7 +35,7 @@ jobs: # the lockfile bot has made the head commit, abort the workflow. # This job can be manually overridden by running directly from the github actions panel # (known as a "workflow_dispatch") and setting the `clobber` input to "yes". 
- - uses: actions/script@v5.1.0 + - uses: actions/script@v6 with: github-token: ${{ secrets.GITHUB_TOKEN }} script: | From 36935a4775a4586cba65cddc43bc9c7fec69bfc4 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 16 Feb 2022 16:38:06 +0000 Subject: [PATCH 62/69] Yaml fixes + clarifications. (#4594) * Yaml fixes + clarifications. * Update .github/workflows/stale.yml Co-authored-by: Ruth Comer <10599679+rcomer@users.noreply.github.com> Co-authored-by: Bill Little Co-authored-by: Ruth Comer <10599679+rcomer@users.noreply.github.com> --- .github/workflows/refresh-lockfiles.yml | 4 +++- .github/workflows/stale.yml | 10 +++++++--- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/.github/workflows/refresh-lockfiles.yml b/.github/workflows/refresh-lockfiles.yml index 28e01e4511c..ff2f6c4d758 100644 --- a/.github/workflows/refresh-lockfiles.yml +++ b/.github/workflows/refresh-lockfiles.yml @@ -22,7 +22,9 @@ on: default: "no" schedule: # Run once a week on a Saturday night - - cron: 1 0 * * 6 + # N.B. "should" be quoted, according to + # https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#onschedule + - cron: "1 0 * * 6" jobs: diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index f9bb09ce46a..a1bb0fca6cc 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -1,9 +1,13 @@ # See https://github.com/actions/stale name: Stale issues and pull-requests + on: schedule: - - cron: 0 0 * * * + # Run once a day + # N.B. "should" be quoted, according to + # https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#onschedule + - cron: "0 0 * * *" jobs: stale: @@ -59,11 +63,11 @@ jobs: stale-pr-label: Stale # Labels on issues exempted from stale. - exempt-issue-labels: | + exempt-issue-labels: "Status: Blocked,Status: Decision Required,Peloton 🚴‍♂️,Good First Issue" # Labels on prs exempted from stale. 
- exempt-pr-labels: | + exempt-pr-labels: "Status: Blocked,Status: Decision Required,Peloton 🚴‍♂️,Good First Issue" # Max number of operations per run. From 70583f96849789ad4af35caface1a4474466d983 Mon Sep 17 00:00:00 2001 From: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> Date: Wed, 16 Feb 2022 23:14:27 +0000 Subject: [PATCH 63/69] Overnight benchmarks (#4583) --- .github/workflows/benchmark.yml | 59 +++++++++--- benchmarks/README.md | 80 ++++++++++++++++ noxfile.py | 165 +++++++++++++++++++++++++------- 3 files changed, 253 insertions(+), 51 deletions(-) create mode 100644 benchmarks/README.md diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml index a8247a247bf..d4c01af48a0 100644 --- a/.github/workflows/benchmark.yml +++ b/.github/workflows/benchmark.yml @@ -1,10 +1,11 @@ -# This is a basic workflow to help you get started with Actions +# Use ASV to check for performance regressions in the last 24 hours' commits. name: benchmark-check on: - # Triggers the workflow on push or pull request events but only for the master branch - pull_request: + schedule: + # Runs every day at 23:00. 
+ - cron: "0 23 * * *" jobs: benchmark: @@ -23,12 +24,8 @@ jobs: steps: # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - uses: actions/checkout@v2 - - - name: Fetch the PR base branch too - run: | - git fetch --depth=1 origin ${{ github.event.pull_request.base.ref }} - git branch _base FETCH_HEAD - echo PR_BASE_SHA=$(git rev-parse _base) >> $GITHUB_ENV + with: + fetch-depth: 0 - name: Install Nox run: | @@ -65,11 +62,46 @@ jobs: run: | echo "OVERRIDE_TEST_DATA_REPOSITORY=${GITHUB_WORKSPACE}/${IRIS_TEST_DATA_PATH}/test_data" >> $GITHUB_ENV - - name: Run CI benchmarks + - name: Run overnight benchmarks + run: | + first_commit=$(git log --after="$(date -d "1 day ago" +"%Y-%m-%d") 23:00:00" --pretty=format:"%h" | tail -n 1) + if [ "$first_commit" != "" ] + then + nox --session="benchmarks(overnight)" -- $first_commit + fi + + - name: Create issues for performance shifts + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | - mkdir --parents benchmarks/.asv - set -o pipefail - nox --session="benchmarks(ci compare)" | tee benchmarks/.asv/ci_compare.txt + if [ -d benchmarks/.asv/performance-shifts ] + then + cd benchmarks/.asv/performance-shifts + for commit_file in * + do + pr_number=$(git log "$commit_file"^! --oneline | grep -o "#[0-9]*" | tail -1 | cut -c 2-) + assignee=$(gh pr view $pr_number --json author -q '.["author"]["login"]' --repo $GITHUB_REPOSITORY) + title="Performance Shift(s): \`$commit_file\`" + body=" + Benchmark comparison has identified performance shifts at commit \ + $commit_file (#$pr_number). Please review the report below and \ + take corrective/congratulatory action as appropriate \ + :slightly_smiling_face: + +

+ Performance shift report + + \`\`\` + $(cat $commit_file) + \`\`\` + +
+ + Generated by GHA run [\`${{github.run_id}}\`](https://github.com/${{github.repository}}/actions/runs/${{github.run_id}}) + " + gh issue create --title "$title" --body "$body" --assignee $assignee --label "Bot" --label "Type: Performance" --repo $GITHUB_REPOSITORY + done + fi - name: Archive asv results if: ${{ always() }} @@ -78,4 +110,3 @@ jobs: name: asv-report path: | benchmarks/.asv/results - benchmarks/.asv/ci_compare.txt diff --git a/benchmarks/README.md b/benchmarks/README.md new file mode 100644 index 00000000000..baa1afe7001 --- /dev/null +++ b/benchmarks/README.md @@ -0,0 +1,80 @@ +# Iris Performance Benchmarking + +Iris uses an [Airspeed Velocity](https://github.com/airspeed-velocity/asv) +(ASV) setup to benchmark performance. This is primarily designed to check for +performance shifts between commits using statistical analysis, but can also +be easily repurposed for manual comparative and scalability analyses. + +The benchmarks are automatically run overnight +[by a GitHub Action](../.github/workflows/benchmark.yml), with any notable +shifts in performance being flagged in a new GitHub issue. + +## Running benchmarks + +`asv ...` commands must be run from this directory. You will need to have ASV +installed, as well as Nox (see +[Benchmark environments](#benchmark-environments)). + +[Iris' noxfile](../noxfile.py) includes a `benchmarks` session that provides +conveniences for setting up before benchmarking, and can also replicate the +automated overnight run locally. See the session docstring for detail. + +### Environment variables + +* ``DATA_GEN_PYTHON`` - required - path to a Python executable that can be +used to generate benchmark test objects/files; see +[Data generation](#data-generation). The Nox session sets this automatically, +but will defer to any value already set in the shell. +* ``BENCHMARK_DATA`` - optional - path to a directory for benchmark synthetic +test data, which the benchmark scripts will create if it doesn't already +exist. 
Defaults to ``/benchmarks/.data/`` if not set. + +## Writing benchmarks + +[See the ASV docs](https://asv.readthedocs.io/) for full detail. + +### Data generation +**Important:** be sure not to use the benchmarking environment to generate any +test objects/files, as this environment changes with each commit being +benchmarked, creating inconsistent benchmark 'conditions'. The +[generate_data](./benchmarks/generate_data/__init__.py) module offers a +solution; read more detail there. + +### ASV re-run behaviour + +Note that ASV re-runs a benchmark multiple times between its `setup()` routine. +This is a problem for benchmarking certain Iris operations such as data +realisation, since the data will no longer be lazy after the first run. +Consider writing extra steps to restore objects' original state _within_ the +benchmark itself. + +If adding steps to the benchmark will skew the result too much then re-running +can be disabled by setting an attribute on the benchmark: `number = 1`. To +maintain result accuracy this should be accompanied by increasing the number of +repeats _between_ `setup()` calls using the `repeat` attribute. +`warmup_time = 0` is also advisable since ASV performs independent re-runs to +estimate run-time, and these will still be subject to the original problem. + +### Scaling / non-Scaling Performance Differences + +When comparing performance between commits/file-type/whatever it can be helpful +to know if the differences exist in scaling or non-scaling parts of the Iris +functionality in question. This can be done using a size parameter, setting +one value to be as small as possible (e.g. a scalar `Cube`), and the other to +be significantly larger (e.g. a 1000x1000 `Cube`). Performance differences +might only be seen for the larger value, or the smaller, or both, getting you +closer to the root cause. 
+ +## Benchmark environments + +We have disabled ASV's standard environment management, instead using an +environment built using the same Nox scripts as Iris' test environments. This +is done using ASV's plugin architecture - see +[asv_delegated_conda.py](asv_delegated_conda.py) and the extra config items in +[asv.conf.json](asv.conf.json). + +(ASV is written to control the environment(s) that benchmarks are run in - +minimising external factors and also allowing it to compare between a matrix +of dependencies (each in a separate environment). We have chosen to sacrifice +these features in favour of testing each commit with its intended dependencies, +controlled by Nox + lock-files). diff --git a/noxfile.py b/noxfile.py index 0600540c5b6..e4d91c6bab1 100755 --- a/noxfile.py +++ b/noxfile.py @@ -8,6 +8,8 @@ import hashlib import os from pathlib import Path +from tempfile import NamedTemporaryFile +from typing import Literal import nox from nox.logger import logger @@ -289,31 +291,60 @@ def linkcheck(session: nox.sessions.Session): ) -@nox.session(python=PY_VER, venv_backend="conda") +@nox.session @nox.parametrize( - ["ci_mode"], - [True, False], - ids=["ci compare", "full"], + "run_type", + ["overnight", "branch", "custom"], + ids=["overnight", "branch", "custom"], ) -def benchmarks(session: nox.sessions.Session, ci_mode: bool): +def benchmarks( + session: nox.sessions.Session, + run_type: Literal["overnight", "branch", "custom"], +): """ Perform Iris performance benchmarks (using Airspeed Velocity). + All run types require a single Nox positional argument (e.g. + ``nox --session="foo" -- my_pos_arg``) - detailed in the parameters + section - and can optionally accept a series of further arguments that will + be added to session's ASV command. + Parameters ---------- session: object A `nox.sessions.Session` object. - ci_mode: bool - Run a cut-down selection of benchmarks, comparing the current commit to - the last commit for performance regressions. 
- - Notes - ----- - ASV is set up to use ``nox --session=tests --install-only`` to prepare - the benchmarking environment. This session environment must use a Python - version that is also available for ``--session=tests``. + run_type: {"overnight", "branch", "custom"} + * ``overnight``: benchmarks all commits between the input **first + commit** to ``HEAD``, comparing each to its parent for performance + shifts. If a commit causes shifts, the output is saved to a file: + ``.asv/performance-shifts/``. Designed for checking the + previous 24 hours' commits, typically in a scheduled script. + * ``branch``: Performs the same operations as ``overnight``, but always + on two commits only - ``HEAD``, and ``HEAD``'s merge-base with the + input **base branch**. Output from this run is never saved to a file. + Designed for testing if the active branch's changes cause performance + shifts - anticipating what would be caught by ``overnight`` once + merged. + **For maximum accuracy, avoid using the machine that is running this + session. Run time could be >1 hour for the full benchmark suite.** + * ``custom``: run ASV with the input **ASV sub-command**, without any + preset arguments - must all be supplied by the user. So just like + running ASV manually, with the convenience of re-using the session's + scripted setup steps. + + Examples + -------- + * ``nox --session="benchmarks(overnight)" -- a1b23d4`` + * ``nox --session="benchmarks(branch)" -- upstream/main`` + * ``nox --session="benchmarks(branch)" -- upstream/mesh-data-model`` + * ``nox --session="benchmarks(branch)" -- upstream/main --bench=regridding`` + * ``nox --session="benchmarks(custom)" -- continuous a1b23d4 HEAD --quick`` """ + # The threshold beyond which shifts are 'notable'. See `asv compare`` docs + # for more. 
+ COMPARE_FACTOR = 1.2 + session.install("asv", "nox") data_gen_var = "DATA_GEN_PYTHON" @@ -327,12 +358,12 @@ def benchmarks(session: nox.sessions.Session, ci_mode: bool): "nox", "--session=tests", "--install-only", - f"--python={session.python}", + f"--python={_PY_VERSION_LATEST}", ) # Find the environment built above, set it to be the data generation # environment. data_gen_python = next( - Path(".nox").rglob(f"tests*/bin/python{session.python}") + Path(".nox").rglob(f"tests*/bin/python{_PY_VERSION_LATEST}") ).resolve() session.env[data_gen_var] = data_gen_python @@ -360,25 +391,85 @@ def benchmarks(session: nox.sessions.Session, ci_mode: bool): # Skip over setup questions for a new machine. session.run("asv", "machine", "--yes") - def asv_exec(*sub_args: str) -> None: - run_args = ["asv", *sub_args] - session.run(*run_args) - - if ci_mode: - # If on a PR: compare to the base (target) branch. - # Else: compare to previous commit. - previous_commit = os.environ.get("PR_BASE_SHA", "HEAD^1") - try: - asv_exec( - "continuous", - "--factor=1.2", - previous_commit, - "HEAD", - "--attribute", - "rounds=4", - ) - finally: - asv_exec("compare", previous_commit, "HEAD") + # All run types require one Nox posarg. + run_type_arg = { + "overnight": "first commit", + "branch": "base branch", + "custom": "ASV sub-command", + } + if run_type not in run_type_arg.keys(): + message = f"Unsupported run-type: {run_type}" + raise NotImplementedError(message) + if not session.posargs: + message = ( + f"Missing mandatory first Nox session posarg: " + f"{run_type_arg[run_type]}" + ) + raise ValueError(message) + first_arg = session.posargs[0] + # Optional extra arguments to be passed down to ASV. 
+ asv_args = session.posargs[1:] + + def asv_compare(*commits): + """Run through a list of commits comparing each one to the next.""" + commits = [commit[:8] for commit in commits] + shifts_dir = Path(".asv") / "performance-shifts" + for i in range(len(commits) - 1): + before = commits[i] + after = commits[i + 1] + asv_command_ = f"asv compare {before} {after} --factor={COMPARE_FACTOR} --split" + session.run(*asv_command_.split(" ")) + + if run_type == "overnight": + # Record performance shifts. + # Run the command again but limited to only showing performance + # shifts. + shifts = session.run( + *asv_command_.split(" "), "--only-changed", silent=True + ) + if shifts: + # Write the shifts report to a file. + # Dir is used by .github/workflows/benchmarks.yml, + # but not cached - intended to be discarded after run. + shifts_dir.mkdir(exist_ok=True, parents=True) + shifts_path = shifts_dir / after + with shifts_path.open("w") as shifts_file: + shifts_file.write(shifts) + + # Common ASV arguments used for both `overnight` and `bench` run_types. + asv_harness = "asv run {posargs} --attribute rounds=4 --interleave-rounds --strict --show-stderr" + + if run_type == "overnight": + first_commit = first_arg + commit_range = f"{first_commit}^^.." + asv_command = asv_harness.format(posargs=commit_range) + session.run(*asv_command.split(" "), *asv_args) + + # git rev-list --first-parent is the command ASV uses. 
+ git_command = f"git rev-list --first-parent {commit_range}" + commit_string = session.run( + *git_command.split(" "), silent=True, external=True + ) + commit_list = commit_string.rstrip().split("\n") + asv_compare(*reversed(commit_list)) + + elif run_type == "branch": + base_branch = first_arg + git_command = f"git merge-base HEAD {base_branch}" + merge_base = session.run( + *git_command.split(" "), silent=True, external=True + )[:8] + + with NamedTemporaryFile("w") as hashfile: + hashfile.writelines([merge_base, "\n", "HEAD"]) + hashfile.flush() + commit_range = f"HASHFILE:{hashfile.name}" + asv_command = asv_harness.format(posargs=commit_range) + session.run(*asv_command.split(" "), *asv_args) + + asv_compare(merge_base, "HEAD") + else: - # f5ceb808 = first commit supporting nox --install-only . - asv_exec("run", "f5ceb808..HEAD") + asv_subcommand = first_arg + assert run_type == "custom" + session.run("asv", asv_subcommand, *asv_args) From 8f3e3b90dfb68945c1e32e8c3c4d3c6d11c6a7a0 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Thu, 17 Feb 2022 07:29:35 +0000 Subject: [PATCH 64/69] Utility class in netcdf loader should not be public. (#4592) * Utility class in netcdf loader should not be public. * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * Rename container for better clarity. 
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .../fileformats/_nc_load_rules/actions.py | 4 +-- lib/iris/fileformats/netcdf.py | 26 ++++++++++++------- 2 files changed, 19 insertions(+), 11 deletions(-) diff --git a/lib/iris/fileformats/_nc_load_rules/actions.py b/lib/iris/fileformats/_nc_load_rules/actions.py index d286abbf3d6..4c5184deb13 100644 --- a/lib/iris/fileformats/_nc_load_rules/actions.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -18,7 +18,7 @@ 3) Iris-specific info is (still) stored in additional properties created on the engine object : - engine.cf_var, .cube, .cube_parts, .requires, .rule_triggered, .filename + engine.cf_var, .cube, .cube_parts, .requires, .rules_triggered, .filename Our "rules" are just action routines. The top-level 'run_actions' routine decides which actions to call, based on the @@ -78,7 +78,7 @@ def inner(engine, *args, **kwargs): # but also may vary depending on whether it successfully # triggered, and if so what it matched. rule_name = _default_rulenamesfunc(func.__name__) - engine.rule_triggered.add(rule_name) + engine.rules_triggered.add(rule_name) func._rulenames_func = _default_rulenamesfunc return inner diff --git a/lib/iris/fileformats/netcdf.py b/lib/iris/fileformats/netcdf.py index 8eb2b7d8302..4526963972a 100644 --- a/lib/iris/fileformats/netcdf.py +++ b/lib/iris/fileformats/netcdf.py @@ -498,7 +498,7 @@ def _actions_activation_stats(engine, cf_name): print("Rules Triggered:") - for rule in sorted(list(engine.rule_triggered)): + for rule in sorted(list(engine.rules_triggered)): print("\t%s" % rule) print("Case Specific Facts:") @@ -570,13 +570,21 @@ def _get_cf_var_data(cf_var, filename): return as_lazy_data(proxy, chunks=chunks) -class OrderedAddableList(list): - # Used purely in actions debugging, to accumulate a record of which actions - # were activated. 
- # It replaces a set, so as to record the ordering of operations, with - # possible repeats, and it also numbers the entries. - # Actions routines invoke the 'add' method, which thus effectively converts - # a set.add into a list.append. +class _OrderedAddableList(list): + """ + A custom container object for actions recording. + + Used purely in actions debugging, to accumulate a record of which actions + were activated. + + It replaces a set, so as to preserve the ordering of operations, with + possible repeats, and it also numbers the entries. + + The actions routines invoke an 'add' method, so this effectively replaces + a set.add with a list.append. + + """ + def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self._n_add = 0 @@ -602,7 +610,7 @@ def _load_cube(engine, cf, cf_var, filename): engine.cube = cube engine.cube_parts = {} engine.requires = {} - engine.rule_triggered = OrderedAddableList() + engine.rules_triggered = _OrderedAddableList() engine.filename = filename # Assert all the case-specific facts. From ebc2039c8d1990d5230e2b105b15526ca08383fa Mon Sep 17 00:00:00 2001 From: Ruth Comer <10599679+rcomer@users.noreply.github.com> Date: Thu, 24 Feb 2022 11:58:11 +0000 Subject: [PATCH 65/69] Stop using nc_time_axis.CalendarDateTime (#4584) * stop using CalendarDateTime * update dependencies * stronger tests for _fixup_dates * add whatsnew --- docs/src/whatsnew/dev.rst | 4 +++- lib/iris/plot.py | 8 +++----- lib/iris/tests/integration/plot/test_netcdftime.py | 9 ++------- lib/iris/tests/unit/plot/test__fixup_dates.py | 10 ++++------ requirements/ci/py38.yml | 2 +- setup.cfg | 2 +- 6 files changed, 14 insertions(+), 21 deletions(-) diff --git a/docs/src/whatsnew/dev.rst b/docs/src/whatsnew/dev.rst index 27ed876a204..857264f43fb 100644 --- a/docs/src/whatsnew/dev.rst +++ b/docs/src/whatsnew/dev.rst @@ -61,7 +61,9 @@ This document explains the changes made to Iris for this release 🔗 Dependencies =============== -#. N/A +#. 
`@rcomer`_ introduced the ``nc-time-axis >=1.4`` minimum pin, reflecting that + we no longer use the deprecated :class:`nc_time_axis.CalendarDateTime` + when plotting against time coordinates. (:pull:`4584`) 📚 Documentation diff --git a/lib/iris/plot.py b/lib/iris/plot.py index 0e9645c7835..3cd54ef08fe 100644 --- a/lib/iris/plot.py +++ b/lib/iris/plot.py @@ -591,7 +591,7 @@ def _fixup_dates(coord, values): r = [datetime.datetime(*date) for date in dates] else: try: - import nc_time_axis + import nc_time_axis # noqa: F401 except ImportError: msg = ( "Cannot plot against time in a non-gregorian " @@ -603,12 +603,10 @@ def _fixup_dates(coord, values): raise IrisError(msg) r = [ - nc_time_axis.CalendarDateTime( - cftime.datetime(*date, calendar=coord.units.calendar), - coord.units.calendar, - ) + cftime.datetime(*date, calendar=coord.units.calendar) for date in dates ] + values = np.empty(len(r), dtype=object) values[:] = r return values diff --git a/lib/iris/tests/integration/plot/test_netcdftime.py b/lib/iris/tests/integration/plot/test_netcdftime.py index 340f37dda71..9f0baeda35f 100644 --- a/lib/iris/tests/integration/plot/test_netcdftime.py +++ b/lib/iris/tests/integration/plot/test_netcdftime.py @@ -18,10 +18,6 @@ from iris.coords import AuxCoord -if tests.NC_TIME_AXIS_AVAILABLE: - from nc_time_axis import CalendarDateTime - - # Run tests in no graphics mode if matplotlib is not available. 
if tests.MPL_AVAILABLE: import iris.plot as iplt @@ -48,9 +44,8 @@ def test_360_day_calendar(self): ) for atime in times ] - expected_ydata = np.array( - [CalendarDateTime(time, calendar) for time in times] - ) + + expected_ydata = times (line1,) = iplt.plot(time_coord) result_ydata = line1.get_ydata() self.assertArrayEqual(expected_ydata, result_ydata) diff --git a/lib/iris/tests/unit/plot/test__fixup_dates.py b/lib/iris/tests/unit/plot/test__fixup_dates.py index 157780dcae7..1ad5c876919 100644 --- a/lib/iris/tests/unit/plot/test__fixup_dates.py +++ b/lib/iris/tests/unit/plot/test__fixup_dates.py @@ -23,6 +23,7 @@ def test_gregorian_calendar(self): unit = Unit("hours since 2000-04-13 00:00:00", calendar="gregorian") coord = AuxCoord([1, 3, 6], "time", units=unit) result = _fixup_dates(coord, coord.points) + self.assertIsInstance(result[0], datetime.datetime) expected = [ datetime.datetime(2000, 4, 13, 1), datetime.datetime(2000, 4, 13, 3), @@ -34,6 +35,7 @@ def test_gregorian_calendar_sub_second(self): unit = Unit("seconds since 2000-04-13 00:00:00", calendar="gregorian") coord = AuxCoord([1, 1.25, 1.5], "time", units=unit) result = _fixup_dates(coord, coord.points) + self.assertIsInstance(result[0], datetime.datetime) expected = [ datetime.datetime(2000, 4, 13, 0, 0, 1), datetime.datetime(2000, 4, 13, 0, 0, 1), @@ -52,9 +54,7 @@ def test_360_day_calendar(self): cftime.datetime(2000, 2, 29, calendar=calendar), cftime.datetime(2000, 2, 30, calendar=calendar), ] - self.assertArrayEqual( - [cdt.datetime for cdt in result], expected_datetimes - ) + self.assertArrayEqual(result, expected_datetimes) @tests.skip_nc_time_axis def test_365_day_calendar(self): @@ -67,9 +67,7 @@ def test_365_day_calendar(self): cftime.datetime(2000, 2, 25, 1, 0, calendar=calendar), cftime.datetime(2000, 2, 25, 2, 30, calendar=calendar), ] - self.assertArrayEqual( - [cdt.datetime for cdt in result], expected_datetimes - ) + self.assertArrayEqual(result, expected_datetimes) 
@tests.skip_nc_time_axis def test_360_day_calendar_attribute(self): diff --git a/requirements/ci/py38.yml b/requirements/ci/py38.yml index d3d7f9d0c2c..ef095815c9b 100644 --- a/requirements/ci/py38.yml +++ b/requirements/ci/py38.yml @@ -25,7 +25,7 @@ dependencies: - graphviz - iris-sample-data >=2.4.0 - mo_pack - - nc-time-axis >=1.3 + - nc-time-axis >=1.4 - pandas - pip - python-stratify diff --git a/setup.cfg b/setup.cfg index 1aabe33d835..ecdcad85b2c 100644 --- a/setup.cfg +++ b/setup.cfg @@ -81,7 +81,7 @@ test = requests all = mo_pack - nc-time-axis>=1.3 + nc-time-axis>=1.4 pandas stratify %(docs)s From 2314e6b565c9fe147113fc50a0c44a1492bf95fb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 28 Feb 2022 10:55:00 +0000 Subject: [PATCH 66/69] Bump peter-evans/create-pull-request from 3.12.1 to 3.13.0 (#4607) Bumps [peter-evans/create-pull-request](https://github.com/peter-evans/create-pull-request) from 3.12.1 to 3.13.0. - [Release notes](https://github.com/peter-evans/create-pull-request/releases) - [Commits](https://github.com/peter-evans/create-pull-request/compare/f22a7da129c901513876a2380e2dae9f8e145330...89265e8d24a5dea438a2577fdc409a11e9f855ca) --- updated-dependencies: - dependency-name: peter-evans/create-pull-request dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/refresh-lockfiles.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/refresh-lockfiles.yml b/.github/workflows/refresh-lockfiles.yml index ff2f6c4d758..5a06c971893 100644 --- a/.github/workflows/refresh-lockfiles.yml +++ b/.github/workflows/refresh-lockfiles.yml @@ -111,7 +111,7 @@ jobs: - name: Create Pull Request id: cpr - uses: peter-evans/create-pull-request@f22a7da129c901513876a2380e2dae9f8e145330 + uses: peter-evans/create-pull-request@89265e8d24a5dea438a2577fdc409a11e9f855ca with: commit-message: Updated environment lockfiles committer: "Lockfile bot " From c2e75572facc50833eb69e0f6559aaeea1fad5c1 Mon Sep 17 00:00:00 2001 From: Will Benfold <69585101+wjbenfold@users.noreply.github.com> Date: Tue, 1 Mar 2022 09:41:33 +0000 Subject: [PATCH 67/69] Support false-easting and false-northing when loading Mercator-projected data (#4524) * Add extra pieces to get false easting and northing * tests * Add extra test file ref and cml * lib/iris/tests/results/netcdf/netcdf_merc_false.cml * Update cml for new test data * Bump test data version for cirrus * What's new --- .cirrus.yml | 2 +- docs/src/whatsnew/dev.rst | 3 +- lib/iris/coord_systems.py | 20 +++++- .../fileformats/_nc_load_rules/helpers.py | 38 +++------- lib/iris/fileformats/netcdf.py | 6 +- .../tests/results/coord_systems/Mercator.xml | 2 +- lib/iris/tests/results/netcdf/netcdf_merc.cml | 8 +-- .../results/netcdf/netcdf_merc_false.cml | 33 +++++++++ lib/iris/tests/test_netcdf.py | 10 +++ .../tests/unit/coord_systems/test_Mercator.py | 29 +++++++- .../test_has_supported_mercator_parameters.py | 71 +++++-------------- 11 files changed, 127 insertions(+), 95 deletions(-) create mode 100644 lib/iris/tests/results/netcdf/netcdf_merc_false.cml diff --git a/.cirrus.yml b/.cirrus.yml index 92b8d788e6b..c9c1d718596 100644 --- a/.cirrus.yml +++ 
b/.cirrus.yml @@ -38,7 +38,7 @@ env: # Conda packages to be installed. CONDA_CACHE_PACKAGES: "nox pip" # Git commit hash for iris test data. - IRIS_TEST_DATA_VERSION: "2.5" + IRIS_TEST_DATA_VERSION: "2.7" # Base directory for the iris-test-data. IRIS_TEST_DATA_DIR: ${HOME}/iris-test-data diff --git a/docs/src/whatsnew/dev.rst b/docs/src/whatsnew/dev.rst index 857264f43fb..5952dc45b01 100644 --- a/docs/src/whatsnew/dev.rst +++ b/docs/src/whatsnew/dev.rst @@ -31,7 +31,8 @@ This document explains the changes made to Iris for this release ✨ Features =========== -#. N/A +#. `@wjbenfold`_ added support for ``false_easting`` and ``false_northing`` to + :class:`~iris.coord_system.Mercator`. (:issue:`3107`, :pull:`4524`) 🐛 Bugs Fixed diff --git a/lib/iris/coord_systems.py b/lib/iris/coord_systems.py index 2f875bb1591..311ed35f44e 100644 --- a/lib/iris/coord_systems.py +++ b/lib/iris/coord_systems.py @@ -1083,6 +1083,8 @@ def __init__( longitude_of_projection_origin=None, ellipsoid=None, standard_parallel=None, + false_easting=None, + false_northing=None, ): """ Constructs a Mercator coord system. @@ -1098,6 +1100,12 @@ def __init__( * standard_parallel: The latitude where the scale is 1. Defaults to 0.0 . + * false_easting: + X offset from the planar origin in metres. Defaults to 0.0. + + * false_northing: + Y offset from the planar origin in metres. Defaults to 0.0. + """ #: True longitude of planar origin in degrees. self.longitude_of_projection_origin = _arg_default( @@ -1110,12 +1118,20 @@ def __init__( #: The latitude where the scale is 1. self.standard_parallel = _arg_default(standard_parallel, 0) + #: X offset from the planar origin in metres. + self.false_easting = _arg_default(false_easting, 0) + + #: Y offset from the planar origin in metres. 
+ self.false_northing = _arg_default(false_northing, 0) + def __repr__(self): res = ( "Mercator(longitude_of_projection_origin=" "{self.longitude_of_projection_origin!r}, " "ellipsoid={self.ellipsoid!r}, " - "standard_parallel={self.standard_parallel!r})" + "standard_parallel={self.standard_parallel!r}, " + "false_easting={self.false_easting!r}, " + "false_northing={self.false_northing!r})" ) return res.format(self=self) @@ -1126,6 +1142,8 @@ def as_cartopy_crs(self): central_longitude=self.longitude_of_projection_origin, globe=globe, latitude_true_scale=self.standard_parallel, + false_easting=self.false_easting, + false_northing=self.false_northing, ) def as_cartopy_projection(self): diff --git a/lib/iris/fileformats/_nc_load_rules/helpers.py b/lib/iris/fileformats/_nc_load_rules/helpers.py index a5b507d5830..198daeceeaa 100644 --- a/lib/iris/fileformats/_nc_load_rules/helpers.py +++ b/lib/iris/fileformats/_nc_load_rules/helpers.py @@ -440,10 +440,13 @@ def build_mercator_coordinate_system(engine, cf_grid_var): longitude_of_projection_origin = getattr( cf_grid_var, CF_ATTR_GRID_LON_OF_PROJ_ORIGIN, None ) + standard_parallel = getattr( + cf_grid_var, CF_ATTR_GRID_STANDARD_PARALLEL, None + ) + false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) + false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) # Iris currently only supports Mercator projections with specific - # values for false_easting, false_northing, - # scale_factor_at_projection_origin and standard_parallel. These are - # checked elsewhere. + # scale_factor_at_projection_origin. This is checked elsewhere. 
ellipsoid = None if ( @@ -454,7 +457,11 @@ def build_mercator_coordinate_system(engine, cf_grid_var): ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) cs = iris.coord_systems.Mercator( - longitude_of_projection_origin, ellipsoid=ellipsoid + longitude_of_projection_origin, + ellipsoid=ellipsoid, + standard_parallel=standard_parallel, + false_easting=false_easting, + false_northing=false_northing, ) return cs @@ -1244,27 +1251,10 @@ def has_supported_mercator_parameters(engine, cf_name): is_valid = True cf_grid_var = engine.cf_var.cf_group[cf_name] - false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) - false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) scale_factor_at_projection_origin = getattr( cf_grid_var, CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN, None ) - standard_parallel = getattr( - cf_grid_var, CF_ATTR_GRID_STANDARD_PARALLEL, None - ) - if false_easting is not None and false_easting != 0: - warnings.warn( - "False eastings other than 0.0 not yet supported " - "for Mercator projections" - ) - is_valid = False - if false_northing is not None and false_northing != 0: - warnings.warn( - "False northings other than 0.0 not yet supported " - "for Mercator projections" - ) - is_valid = False if ( scale_factor_at_projection_origin is not None and scale_factor_at_projection_origin != 1 @@ -1274,12 +1264,6 @@ def has_supported_mercator_parameters(engine, cf_name): "Mercator projections" ) is_valid = False - if standard_parallel is not None and standard_parallel != 0: - warnings.warn( - "Standard parallels other than 0.0 not yet " - "supported for Mercator projections" - ) - is_valid = False return is_valid diff --git a/lib/iris/fileformats/netcdf.py b/lib/iris/fileformats/netcdf.py index 4526963972a..80f213dbc2f 100644 --- a/lib/iris/fileformats/netcdf.py +++ b/lib/iris/fileformats/netcdf.py @@ -2561,10 +2561,8 @@ def add_ellipsoid(ellipsoid): cf_var_grid.longitude_of_projection_origin = ( 
cs.longitude_of_projection_origin ) - # The Mercator class has implicit defaults for certain - # parameters - cf_var_grid.false_easting = 0.0 - cf_var_grid.false_northing = 0.0 + cf_var_grid.false_easting = cs.false_easting + cf_var_grid.false_northing = cs.false_northing cf_var_grid.scale_factor_at_projection_origin = 1.0 # lcc diff --git a/lib/iris/tests/results/coord_systems/Mercator.xml b/lib/iris/tests/results/coord_systems/Mercator.xml index e8036ef8244..db3ccffec78 100644 --- a/lib/iris/tests/results/coord_systems/Mercator.xml +++ b/lib/iris/tests/results/coord_systems/Mercator.xml @@ -1,2 +1,2 @@ - + diff --git a/lib/iris/tests/results/netcdf/netcdf_merc.cml b/lib/iris/tests/results/netcdf/netcdf_merc.cml index 02fc4e7c345..5e17400158a 100644 --- a/lib/iris/tests/results/netcdf/netcdf_merc.cml +++ b/lib/iris/tests/results/netcdf/netcdf_merc.cml @@ -53,15 +53,15 @@ 45.5158, 45.9993]]" shape="(192, 192)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="lon"/> - - + - - + diff --git a/lib/iris/tests/results/netcdf/netcdf_merc_false.cml b/lib/iris/tests/results/netcdf/netcdf_merc_false.cml new file mode 100644 index 00000000000..d916f5f7539 --- /dev/null +++ b/lib/iris/tests/results/netcdf/netcdf_merc_false.cml @@ -0,0 +1,33 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/test_netcdf.py b/lib/iris/tests/test_netcdf.py index 2c22c6d0886..8cdbe272574 100644 --- a/lib/iris/tests/test_netcdf.py +++ b/lib/iris/tests/test_netcdf.py @@ -218,6 +218,16 @@ def test_load_merc_grid(self): ) self.assertCML(cube, ("netcdf", "netcdf_merc.cml")) + def test_load_merc_false_en_grid(self): + # Test loading a single CF-netCDF file with a Mercator grid_mapping that + # includes false easting and northing + cube = iris.load_cube( + tests.get_data_path( + ("NetCDF", "mercator", "false_east_north_merc.nc") + ) + ) + self.assertCML(cube, ("netcdf", "netcdf_merc_false.cml")) + def 
test_load_stereographic_grid(self): # Test loading a single CF-netCDF file with a stereographic # grid_mapping. diff --git a/lib/iris/tests/unit/coord_systems/test_Mercator.py b/lib/iris/tests/unit/coord_systems/test_Mercator.py index 33efaef9dac..8a37a8fcc55 100644 --- a/lib/iris/tests/unit/coord_systems/test_Mercator.py +++ b/lib/iris/tests/unit/coord_systems/test_Mercator.py @@ -29,7 +29,8 @@ def test_repr(self): "Mercator(longitude_of_projection_origin=90.0, " "ellipsoid=GeogCS(semi_major_axis=6377563.396, " "semi_minor_axis=6356256.909), " - "standard_parallel=0.0)" + "standard_parallel=0.0, " + "false_easting=0.0, false_northing=0.0)" ) self.assertEqual(expected, repr(self.tm)) @@ -38,16 +39,23 @@ class Test_init_defaults(tests.IrisTest): def test_set_optional_args(self): # Check that setting the optional (non-ellipse) args works. crs = Mercator( - longitude_of_projection_origin=27, standard_parallel=157.4 + longitude_of_projection_origin=27, + standard_parallel=157.4, + false_easting=13, + false_northing=12, ) self.assertEqualAndKind(crs.longitude_of_projection_origin, 27.0) self.assertEqualAndKind(crs.standard_parallel, 157.4) + self.assertEqualAndKind(crs.false_easting, 13.0) + self.assertEqualAndKind(crs.false_northing, 12.0) def _check_crs_defaults(self, crs): # Check for property defaults when no kwargs options were set. # NOTE: except ellipsoid, which is done elsewhere. self.assertEqualAndKind(crs.longitude_of_projection_origin, 0.0) self.assertEqualAndKind(crs.standard_parallel, 0.0) + self.assertEqualAndKind(crs.false_easting, 0.0) + self.assertEqualAndKind(crs.false_northing, 0.0) def test_no_optional_args(self): # Check expected defaults with no optional args. @@ -57,7 +65,10 @@ def test_no_optional_args(self): def test_optional_args_None(self): # Check expected defaults with optional args=None. 
crs = Mercator( - longitude_of_projection_origin=None, standard_parallel=None + longitude_of_projection_origin=None, + standard_parallel=None, + false_easting=None, + false_northing=None, ) self._check_crs_defaults(crs) @@ -77,6 +88,8 @@ def test_extra_kwargs(self): # converted to a cartopy CRS. longitude_of_projection_origin = 90.0 true_scale_lat = 14.0 + false_easting = 13 + false_northing = 12 ellipsoid = GeogCS( semi_major_axis=6377563.396, semi_minor_axis=6356256.909 ) @@ -85,6 +98,8 @@ def test_extra_kwargs(self): longitude_of_projection_origin, ellipsoid=ellipsoid, standard_parallel=true_scale_lat, + false_easting=false_easting, + false_northing=false_northing, ) expected = ccrs.Mercator( @@ -95,6 +110,8 @@ def test_extra_kwargs(self): ellipse=None, ), latitude_true_scale=true_scale_lat, + false_easting=false_easting, + false_northing=false_northing, ) res = merc_cs.as_cartopy_crs() @@ -113,6 +130,8 @@ def test_simple(self): def test_extra_kwargs(self): longitude_of_projection_origin = 90.0 true_scale_lat = 14.0 + false_easting = 13 + false_northing = 12 ellipsoid = GeogCS( semi_major_axis=6377563.396, semi_minor_axis=6356256.909 ) @@ -121,6 +140,8 @@ def test_extra_kwargs(self): longitude_of_projection_origin, ellipsoid=ellipsoid, standard_parallel=true_scale_lat, + false_easting=false_easting, + false_northing=false_northing, ) expected = ccrs.Mercator( @@ -131,6 +152,8 @@ def test_extra_kwargs(self): ellipse=None, ), latitude_true_scale=true_scale_lat, + false_easting=false_easting, + false_northing=false_northing, ) res = merc_cs.as_cartopy_projection() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py index dfe2895f298..1b9857c0be0 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py +++ 
b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py @@ -28,7 +28,7 @@ def _engine(cf_grid_var, cf_name): class TestHasSupportedMercatorParameters(tests.IrisTest): - def test_valid(self): + def test_valid_base(self): cf_name = "mercator" cf_grid_var = mock.Mock( spec=[], @@ -45,85 +45,50 @@ def test_valid(self): self.assertTrue(is_valid) - def test_invalid_scale_factor(self): - # Iris does not yet support scale factors other than one for - # Mercator projections + def test_valid_false_easting_northing(self): cf_name = "mercator" cf_grid_var = mock.Mock( spec=[], - longitude_of_projection_origin=0, - false_easting=0, - false_northing=0, - scale_factor_at_projection_origin=0.9, + longitude_of_projection_origin=-90, + false_easting=15, + false_northing=10, + scale_factor_at_projection_origin=1, semi_major_axis=6377563.396, semi_minor_axis=6356256.909, ) engine = _engine(cf_grid_var, cf_name) - with warnings.catch_warnings(record=True) as warns: - warnings.simplefilter("always") - is_valid = has_supported_mercator_parameters(engine, cf_name) + is_valid = has_supported_mercator_parameters(engine, cf_name) - self.assertFalse(is_valid) - self.assertEqual(len(warns), 1) - self.assertRegex(str(warns[0]), "Scale factor") + self.assertTrue(is_valid) - def test_invalid_standard_parallel(self): - # Iris does not yet support standard parallels other than zero for - # Mercator projections + def test_valid_standard_parallel(self): cf_name = "mercator" cf_grid_var = mock.Mock( spec=[], - longitude_of_projection_origin=0, + longitude_of_projection_origin=-90, false_easting=0, false_northing=0, - standard_parallel=30, - semi_major_axis=6377563.396, - semi_minor_axis=6356256.909, - ) - engine = _engine(cf_grid_var, cf_name) - - with warnings.catch_warnings(record=True) as warns: - warnings.simplefilter("always") - is_valid = has_supported_mercator_parameters(engine, cf_name) - - self.assertFalse(is_valid) - self.assertEqual(len(warns), 1) - 
self.assertRegex(str(warns[0]), "Standard parallel") - - def test_invalid_false_easting(self): - # Iris does not yet support false eastings other than zero for - # Mercator projections - cf_name = "mercator" - cf_grid_var = mock.Mock( - spec=[], - longitude_of_projection_origin=0, - false_easting=100, - false_northing=0, - scale_factor_at_projection_origin=1, + standard_parallel=15, semi_major_axis=6377563.396, semi_minor_axis=6356256.909, ) engine = _engine(cf_grid_var, cf_name) - with warnings.catch_warnings(record=True) as warns: - warnings.simplefilter("always") - is_valid = has_supported_mercator_parameters(engine, cf_name) + is_valid = has_supported_mercator_parameters(engine, cf_name) - self.assertFalse(is_valid) - self.assertEqual(len(warns), 1) - self.assertRegex(str(warns[0]), "False easting") + self.assertTrue(is_valid) - def test_invalid_false_northing(self): - # Iris does not yet support false northings other than zero for + def test_invalid_scale_factor(self): + # Iris does not yet support scale factors other than one for # Mercator projections cf_name = "mercator" cf_grid_var = mock.Mock( spec=[], longitude_of_projection_origin=0, false_easting=0, - false_northing=100, - scale_factor_at_projection_origin=1, + false_northing=0, + scale_factor_at_projection_origin=0.9, semi_major_axis=6377563.396, semi_minor_axis=6356256.909, ) @@ -135,7 +100,7 @@ def test_invalid_false_northing(self): self.assertFalse(is_valid) self.assertEqual(len(warns), 1) - self.assertRegex(str(warns[0]), "False northing") + self.assertRegex(str(warns[0]), "Scale factor") if __name__ == "__main__": From de88403ee5aece18c2fe89f5068dc9ea30fb9099 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 1 Mar 2022 09:47:18 +0000 Subject: [PATCH 68/69] Bump peter-evans/create-pull-request from 3.13.0 to 3.14.0 (#4608) Bumps [peter-evans/create-pull-request](https://github.com/peter-evans/create-pull-request) from 3.13.0 to 3.14.0. 
- [Release notes](https://github.com/peter-evans/create-pull-request/releases) - [Commits](https://github.com/peter-evans/create-pull-request/compare/89265e8d24a5dea438a2577fdc409a11e9f855ca...18f7dc018cc2cd597073088f7c7591b9d1c02672) --- updated-dependencies: - dependency-name: peter-evans/create-pull-request dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/refresh-lockfiles.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/refresh-lockfiles.yml b/.github/workflows/refresh-lockfiles.yml index 5a06c971893..96572fb815a 100644 --- a/.github/workflows/refresh-lockfiles.yml +++ b/.github/workflows/refresh-lockfiles.yml @@ -111,7 +111,7 @@ jobs: - name: Create Pull Request id: cpr - uses: peter-evans/create-pull-request@89265e8d24a5dea438a2577fdc409a11e9f855ca + uses: peter-evans/create-pull-request@18f7dc018cc2cd597073088f7c7591b9d1c02672 with: commit-message: Updated environment lockfiles committer: "Lockfile bot " From 0adcbfa12931455eacf4a537edfd7f9ed39d27e1 Mon Sep 17 00:00:00 2001 From: Ruth Comer <10599679+rcomer@users.noreply.github.com> Date: Tue, 1 Mar 2022 17:22:35 +0000 Subject: [PATCH 69/69] Revert plotting-vs-y (#4601) * revert plotting-vs-y * whatsnew --- docs/src/whatsnew/dev.rst | 4 +++- lib/iris/plot.py | 2 +- lib/iris/tests/results/imagerepo.json | 10 ++++++++-- 3 files changed, 12 insertions(+), 4 deletions(-) diff --git a/docs/src/whatsnew/dev.rst b/docs/src/whatsnew/dev.rst index 5952dc45b01..b9d5989bfc5 100644 --- a/docs/src/whatsnew/dev.rst +++ b/docs/src/whatsnew/dev.rst @@ -38,7 +38,9 @@ This document explains the changes made to Iris for this release 🐛 Bugs Fixed ============= -#. N/A +#. `@rcomer`_ reverted part of the change from :pull:`3906` so that + :func:`iris.plot.plot` no longer defaults to placing a "Y" coordinate (e.g. 
+ latitude) on the y-axis of the plot. (:issue:`4493`, :pull:`4601`) 💣 Incompatible Changes diff --git a/lib/iris/plot.py b/lib/iris/plot.py index 3cd54ef08fe..aefca889cf5 100644 --- a/lib/iris/plot.py +++ b/lib/iris/plot.py @@ -673,7 +673,7 @@ def _get_plot_objects(args): if ( isinstance(v_object, iris.cube.Cube) and isinstance(u_object, iris.coords.Coord) - and iris.util.guess_coord_axis(u_object) in ["Y", "Z"] + and iris.util.guess_coord_axis(u_object) == "Z" ): u_object, v_object = v_object, u_object u, v = v, u diff --git a/lib/iris/tests/results/imagerepo.json b/lib/iris/tests/results/imagerepo.json index 79560a53658..6a997c38b43 100644 --- a/lib/iris/tests/results/imagerepo.json +++ b/lib/iris/tests/results/imagerepo.json @@ -684,7 +684,10 @@ "https://scitools.github.io/test-iris-imagehash/images/v4/8bfe956b7c01c2f26300929dfc1e3c6690736f91817e3b0c84be6be5d1603ed1.png" ], "iris.tests.test_plot.TestPlot.test_y.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/8ff99c067e01e7166101c9c6b04396b5cd4e2f0993163de9c4fe7b79207e36a1.png" + "https://scitools.github.io/test-iris-imagehash/images/v4/8fe896266f068d873b83cb71e435725cd07c607ad07e70fcd0007a7881fe7ab8.png", + "https://scitools.github.io/test-iris-imagehash/images/v4/8fe896066f068d873b83cb71e435725cd07c607ad07c70fcd0007af881fe7bb8.png", + "https://scitools.github.io/test-iris-imagehash/images/v4/8fe896366f0f8d93398bcb71e435f24ed074646ed07670acf010726d81f2798c.png", + "https://scitools.github.io/test-iris-imagehash/images/v4/aff8946c7a14c99fb193d263e42432d8d00c2d27944a3f8dc5223ef703ff6b90.png" ], "iris.tests.test_plot.TestPlot.test_z.0": [ "https://scitools.github.io/test-iris-imagehash/images/v4/8fffc1dc7e019c70f001b70ee4386de1814e7938837b6a7f84d07c9f15b02f21.png" @@ -874,7 +877,10 @@ "https://scitools.github.io/test-iris-imagehash/images/v4/82ff950b7f81c0d6620199bcfc5e986695734da1816e1b2c85be2b65d96276d1.png" ], "iris.tests.test_plot.TestQuickplotPlot.test_y.0": [ - 
"https://scitools.github.io/test-iris-imagehash/images/v4/a3f9bc067e01c6166009c9c6b5439ee5cd4e0d2993361de9ccf65b79887636a9.png" + "https://scitools.github.io/test-iris-imagehash/images/v4/a7ffb6067f008d87339bc973e435d86ef034c87ad07c586cd001da69897e5838.png", + "https://scitools.github.io/test-iris-imagehash/images/v4/a7ffb6067f008d87339bc973e435d86ef034c87ad07cd86cd001da68897e58a8.png", + "https://scitools.github.io/test-iris-imagehash/images/v4/a7efb6367f008d97338fc973e435d86ef030c86ed070d86cd030d86d89f0d82c.png", + "https://scitools.github.io/test-iris-imagehash/images/v4/a2fbb46e7f10c99f2013d863e46498dcd06c0d2798421fa5dd221e7789ff6f10.png" ], "iris.tests.test_plot.TestQuickplotPlot.test_z.0": [ "https://scitools.github.io/test-iris-imagehash/images/v4/a3ffc1de7e009c7030019786f438cde3810fd93c9b734a778ce47c9799b02731.png"