diff --git a/docs/iris/gallery_code/general/plot_SOI_filtering.py b/docs/iris/gallery_code/general/plot_SOI_filtering.py
index 116e819af7..d7948ac965 100644
--- a/docs/iris/gallery_code/general/plot_SOI_filtering.py
+++ b/docs/iris/gallery_code/general/plot_SOI_filtering.py
@@ -1,5 +1,5 @@
"""
-Applying a filter to a time-series
+Applying a Filter to a Time-Series
==================================
This example demonstrates low pass filtering a time-series by applying a
diff --git a/docs/iris/gallery_code/general/plot_anomaly_log_colouring.py b/docs/iris/gallery_code/general/plot_anomaly_log_colouring.py
index b0cee818de..778f92db1b 100644
--- a/docs/iris/gallery_code/general/plot_anomaly_log_colouring.py
+++ b/docs/iris/gallery_code/general/plot_anomaly_log_colouring.py
@@ -1,5 +1,5 @@
"""
-Colouring anomaly data with logarithmic scaling
+Colouring Anomaly Data With Logarithmic Scaling
===============================================
In this example, we need to plot anomaly data where the values have a
diff --git a/docs/iris/gallery_code/general/plot_coriolis.py b/docs/iris/gallery_code/general/plot_coriolis.py
index cc67d1267c..77066d362a 100644
--- a/docs/iris/gallery_code/general/plot_coriolis.py
+++ b/docs/iris/gallery_code/general/plot_coriolis.py
@@ -1,5 +1,5 @@
"""
-Deriving the Coriolis frequency over the globe
+Deriving the Coriolis Frequency Over the Globe
==============================================
This code computes the Coriolis frequency and stores it in a cube with
diff --git a/docs/iris/gallery_code/general/plot_cross_section.py b/docs/iris/gallery_code/general/plot_cross_section.py
index a4bc918fc7..12f4bdb0dc 100644
--- a/docs/iris/gallery_code/general/plot_cross_section.py
+++ b/docs/iris/gallery_code/general/plot_cross_section.py
@@ -1,5 +1,5 @@
"""
-Cross section plots
+Cross Section Plots
===================
This example demonstrates contour plots of a cross-sectioned multi-dimensional
diff --git a/docs/iris/gallery_code/general/plot_custom_aggregation.py b/docs/iris/gallery_code/general/plot_custom_aggregation.py
index 9c847be779..5fba3669b6 100644
--- a/docs/iris/gallery_code/general/plot_custom_aggregation.py
+++ b/docs/iris/gallery_code/general/plot_custom_aggregation.py
@@ -1,5 +1,5 @@
"""
-Calculating a custom statistic
+Calculating a Custom Statistic
==============================
This example shows how to define and use a custom
diff --git a/docs/iris/gallery_code/general/plot_custom_file_loading.py b/docs/iris/gallery_code/general/plot_custom_file_loading.py
index b96e152bf8..6890650704 100644
--- a/docs/iris/gallery_code/general/plot_custom_file_loading.py
+++ b/docs/iris/gallery_code/general/plot_custom_file_loading.py
@@ -1,5 +1,5 @@
"""
-Loading a cube from a custom file format
+Loading a Cube From a Custom File Format
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
This example shows how a custom text file can be loaded using the standard Iris
diff --git a/docs/iris/gallery_code/general/plot_global_map.py b/docs/iris/gallery_code/general/plot_global_map.py
index 41fd226921..8d2bdee174 100644
--- a/docs/iris/gallery_code/general/plot_global_map.py
+++ b/docs/iris/gallery_code/general/plot_global_map.py
@@ -1,5 +1,5 @@
"""
-Quickplot of a 2d cube on a map
+Quickplot of a 2D Cube on a Map
===============================
This example demonstrates a contour plot of global air temperature. The plot
diff --git a/docs/iris/gallery_code/general/plot_lineplot_with_legend.py b/docs/iris/gallery_code/general/plot_lineplot_with_legend.py
index 5641b9c4d0..78401817ba 100644
--- a/docs/iris/gallery_code/general/plot_lineplot_with_legend.py
+++ b/docs/iris/gallery_code/general/plot_lineplot_with_legend.py
@@ -1,5 +1,5 @@
"""
-Multi-line temperature profile plot
+Multi-Line Temperature Profile Plot
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
"""
diff --git a/docs/iris/gallery_code/general/plot_polar_stereo.py b/docs/iris/gallery_code/general/plot_polar_stereo.py
index bd4a11923d..71c0f3b00e 100644
--- a/docs/iris/gallery_code/general/plot_polar_stereo.py
+++ b/docs/iris/gallery_code/general/plot_polar_stereo.py
@@ -1,5 +1,5 @@
"""
-Example of a polar stereographic plot
+Example of a Polar Stereographic Plot
=====================================
Demonstrates plotting data that are defined on a polar stereographic
diff --git a/docs/iris/gallery_code/general/plot_polynomial_fit.py b/docs/iris/gallery_code/general/plot_polynomial_fit.py
index 237f4044b6..5da5d50571 100644
--- a/docs/iris/gallery_code/general/plot_polynomial_fit.py
+++ b/docs/iris/gallery_code/general/plot_polynomial_fit.py
@@ -1,5 +1,5 @@
"""
-Fitting a polynomial
+Fitting a Polynomial
====================
This example demonstrates computing a polynomial fit to 1D data from an Iris
diff --git a/docs/iris/gallery_code/general/plot_projections_and_annotations.py b/docs/iris/gallery_code/general/plot_projections_and_annotations.py
index e59bb236d7..f93ac3714f 100644
--- a/docs/iris/gallery_code/general/plot_projections_and_annotations.py
+++ b/docs/iris/gallery_code/general/plot_projections_and_annotations.py
@@ -1,5 +1,5 @@
"""
-Plotting in different projections
+Plotting in Different Projections
=================================
This example shows how to overlay data and graphics in different projections,
diff --git a/docs/iris/gallery_code/general/plot_rotated_pole_mapping.py b/docs/iris/gallery_code/general/plot_rotated_pole_mapping.py
index 063fe93674..8a0c80c707 100644
--- a/docs/iris/gallery_code/general/plot_rotated_pole_mapping.py
+++ b/docs/iris/gallery_code/general/plot_rotated_pole_mapping.py
@@ -1,5 +1,5 @@
"""
-Rotated pole mapping
+Rotated Pole Mapping
=====================
This example uses several visualisation methods to achieve an array of
diff --git a/docs/iris/gallery_code/meteorology/plot_COP_1d.py b/docs/iris/gallery_code/meteorology/plot_COP_1d.py
index 2f93627b77..bebbad4224 100644
--- a/docs/iris/gallery_code/meteorology/plot_COP_1d.py
+++ b/docs/iris/gallery_code/meteorology/plot_COP_1d.py
@@ -1,5 +1,5 @@
"""
-Global average annual temperature plot
+Global Average Annual Temperature Plot
======================================
Produces a time-series plot of North American temperature forecasts for 2
diff --git a/docs/iris/gallery_code/meteorology/plot_COP_maps.py b/docs/iris/gallery_code/meteorology/plot_COP_maps.py
index a8e6055a77..5555a0b85c 100644
--- a/docs/iris/gallery_code/meteorology/plot_COP_maps.py
+++ b/docs/iris/gallery_code/meteorology/plot_COP_maps.py
@@ -1,5 +1,5 @@
"""
-Global average annual temperature maps
+Global Average Annual Temperature Maps
======================================
Produces maps of global temperature forecasts from the A1B and E1 scenarios.
diff --git a/docs/iris/gallery_code/meteorology/plot_TEC.py b/docs/iris/gallery_code/meteorology/plot_TEC.py
index df2e29ef19..71a743a161 100644
--- a/docs/iris/gallery_code/meteorology/plot_TEC.py
+++ b/docs/iris/gallery_code/meteorology/plot_TEC.py
@@ -1,5 +1,5 @@
"""
-Ionosphere space weather
+Ionosphere Space Weather
========================
This space weather example plots a filled contour of rotated pole point
diff --git a/docs/iris/gallery_code/meteorology/plot_hovmoller.py b/docs/iris/gallery_code/meteorology/plot_hovmoller.py
index 9f18b8021e..e9f8207a94 100644
--- a/docs/iris/gallery_code/meteorology/plot_hovmoller.py
+++ b/docs/iris/gallery_code/meteorology/plot_hovmoller.py
@@ -1,5 +1,5 @@
"""
-Hovmoller diagram of monthly surface temperature
+Hovmoller Diagram of Monthly Surface Temperature
================================================
This example demonstrates the creation of a Hovmoller diagram with fine control
diff --git a/docs/iris/gallery_code/meteorology/plot_lagged_ensemble.py b/docs/iris/gallery_code/meteorology/plot_lagged_ensemble.py
index cdd39028c8..5cd2752f39 100644
--- a/docs/iris/gallery_code/meteorology/plot_lagged_ensemble.py
+++ b/docs/iris/gallery_code/meteorology/plot_lagged_ensemble.py
@@ -1,5 +1,5 @@
"""
-Seasonal ensemble model plots
+Seasonal Ensemble Model Plots
=============================
This example demonstrates the loading of a lagged ensemble dataset from the
diff --git a/docs/iris/gallery_code/meteorology/plot_wind_speed.py b/docs/iris/gallery_code/meteorology/plot_wind_speed.py
index 6844d3874c..79be64ddd7 100644
--- a/docs/iris/gallery_code/meteorology/plot_wind_speed.py
+++ b/docs/iris/gallery_code/meteorology/plot_wind_speed.py
@@ -1,6 +1,6 @@
"""
-Plotting wind direction using quiver
-===========================================================
+Plotting Wind Direction Using Quiver
+====================================
This example demonstrates using quiver to plot wind speed contours and wind
direction arrows from wind vector component input data. The vector components
diff --git a/docs/iris/gallery_code/oceanography/plot_atlantic_profiles.py b/docs/iris/gallery_code/oceanography/plot_atlantic_profiles.py
index 89d99c80b4..dc038ecffe 100644
--- a/docs/iris/gallery_code/oceanography/plot_atlantic_profiles.py
+++ b/docs/iris/gallery_code/oceanography/plot_atlantic_profiles.py
@@ -1,5 +1,5 @@
"""
-Oceanographic profiles and T-S diagrams
+Oceanographic Profiles and T-S Diagrams
=======================================
This example demonstrates how to plot vertical profiles of different
diff --git a/docs/iris/gallery_code/oceanography/plot_load_nemo.py b/docs/iris/gallery_code/oceanography/plot_load_nemo.py
index 5f2b72c956..c7ad5aaee4 100644
--- a/docs/iris/gallery_code/oceanography/plot_load_nemo.py
+++ b/docs/iris/gallery_code/oceanography/plot_load_nemo.py
@@ -1,5 +1,5 @@
"""
-Load a time series of data from the NEMO model
+Load a Time Series of Data From the NEMO Model
==============================================
This example demonstrates how to load multiple files containing data output by
diff --git a/docs/iris/src/common_links.inc b/docs/iris/src/common_links.inc
index 0bc8ca60e6..050752a483 100644
--- a/docs/iris/src/common_links.inc
+++ b/docs/iris/src/common_links.inc
@@ -1,28 +1,57 @@
-.. _SciTools: https://github.com/SciTools
+.. comment
+ Common resources in alphabetical order:
+
+.. _.cirrus.yml: https://github.com/SciTools/iris/blob/master/.cirrus.yml
+.. _.flake8.yml: https://github.com/SciTools/iris/blob/master/.flake8
+.. _cirrus-ci: https://cirrus-ci.com/github/SciTools/iris
+.. _conda: https://docs.conda.io/en/latest/
+.. _contributor: https://github.com/SciTools/scitools.org.uk/blob/master/contributors.json
+.. _core developers: https://github.com/SciTools/scitools.org.uk/blob/master/contributors.json
+.. _generating sss keys for GitHub: https://docs.github.com/en/github/authenticating-to-github/adding-a-new-ssh-key-to-your-github-account
+.. _GitHub Help Documentation: https://docs.github.com/en/github
.. _Iris: https://github.com/SciTools/iris
.. _Iris GitHub: https://github.com/SciTools/iris
.. _iris mailing list: https://groups.google.com/forum/#!forum/scitools-iris
+.. _iris-sample-data: https://github.com/SciTools/iris-sample-data
+.. _iris-test-data: https://github.com/SciTools/iris-test-data
.. _issue: https://github.com/SciTools/iris/issues
.. _issues: https://github.com/SciTools/iris/issues
+.. _legacy documentation: https://scitools.org.uk/iris/docs/v2.4.0/
+.. _matplotlib: https://matplotlib.org/
+.. _napolean: https://sphinxcontrib-napoleon.readthedocs.io/en/latest/sphinxcontrib.napoleon.html
+.. _New Issue: https://github.com/scitools/iris/issues/new/choose
.. _pull request: https://github.com/SciTools/iris/pulls
.. _pull requests: https://github.com/SciTools/iris/pulls
-.. _contributor: https://github.com/SciTools/scitools.org.uk/blob/master/contributors.json
-.. _core developers: https://github.com/SciTools/scitools.org.uk/blob/master/contributors.json
-.. _iris-test-data: https://github.com/SciTools/iris-test-data
-.. _iris-sample-data: https://github.com/SciTools/iris-sample-data
-.. _test-iris-imagehash: https://github.com/SciTools/test-iris-imagehash
.. _readthedocs.yml: https://github.com/SciTools/iris/blob/master/requirements/ci/readthedocs.yml
-.. _travis-ci: https://travis-ci.org/github/SciTools/iris
-.. _.travis.yml: https://github.com/SciTools/iris/blob/master/.travis.yml
-.. _.stickler.yml: https://github.com/SciTools/iris/blob/master/.stickler.yml
-.. _.flake8.yml: https://github.com/SciTools/iris/blob/master/.flake8
-.. _GitHub Help Documentation: https://docs.github.com/en/github
-.. _using git: https://docs.github.com/en/github/using-git
-.. _generating sss keys for GitHub: https://docs.github.com/en/github/authenticating-to-github/adding-a-new-ssh-key-to-your-github-account
-.. _New Issue: https://github.com/scitools/iris/issues/new/choose
-.. _matplotlib: https://matplotlib.org/
-.. _conda: https://docs.conda.io/en/latest/
+.. _SciTools: https://github.com/SciTools
.. _sphinx: https://www.sphinx-doc.org/en/master/
-.. _napolean: https://sphinxcontrib-napoleon.readthedocs.io/en/latest/sphinxcontrib.napoleon.html
-.. _legacy documentation: https://scitools.org.uk/iris/docs/v2.4.0/
-.. _cirrus-ci: https://cirrus-ci.com/github/SciTools/iris
+.. _test-iris-imagehash: https://github.com/SciTools/test-iris-imagehash
+.. _using git: https://docs.github.com/en/github/using-git
+
+
+.. comment
+ Core developers (@github names) in alphabetical order:
+
+.. _@abooton: https://github.com/abooton
+.. _@alastair-gemmell: https://github.com/alastair-gemmell
+.. _@ajdawson: https://github.com/ajdawson
+.. _@bjlittle: https://github.com/bjlittle
+.. _@bouweandela: https://github.com/bouweandela
+.. _@corinnebosley: https://github.com/corinnebosley
+.. _@cpelley: https://github.com/cpelley
+.. _@djkirkham: https://github.com/djkirkham
+.. _@DPeterK: https://github.com/DPeterK
+.. _@esc24: https://github.com/esc24
+.. _@jonseddon: https://github.com/jonseddon
+.. _@jvegasbsc: https://github.com/jvegasbsc
+.. _@lbdreyer: https://github.com/lbdreyer
+.. _@marqh: https://github.com/marqh
+.. _@pelson: https://github.com/pelson
+.. _@pp-mo: https://github.com/pp-mo
+.. _@QuLogic: https://github.com/QuLogic
+.. _@rcomer: https://github.com/rcomer
+.. _@rhattersley: https://github.com/rhattersley
+.. _@stephenworsley: https://github.com/stephenworsley
+.. _@tkknight: https://github.com/tkknight
+.. _@trexfeathers: https://github.com/trexfeathers
+.. _@zklaus: https://github.com/zklaus
diff --git a/docs/iris/src/conf.py b/docs/iris/src/conf.py
index 524e9a2ae4..de80745d91 100644
--- a/docs/iris/src/conf.py
+++ b/docs/iris/src/conf.py
@@ -82,8 +82,8 @@ def autolog(message):
if iris.__version__ == "dev":
version = "dev"
else:
- # major.feature(.minor)-dev -> major.minor
- version = ".".join(iris.__version__.split("-")[0].split(".")[:2])
+ # major.minor.patch-dev -> major.minor.patch
+ version = ".".join(iris.__version__.split("-")[0].split(".")[:3])
# The full version, including alpha/beta/rc tags.
release = iris.__version__
@@ -232,7 +232,7 @@ def autolog(message):
"menu_links_name": "Support",
"menu_links": [
(
- ' Source code',
+ ' Source Code',
"https://github.com/SciTools/iris",
),
(
@@ -244,11 +244,11 @@ def autolog(message):
"https://groups.google.com/forum/#!forum/scitools-iris-dev",
),
(
- ' StackOverflow for "How do I?"',
+ ' StackOverflow for "How Do I?"',
"https://stackoverflow.com/questions/tagged/python-iris",
),
(
- ' Legacy documentation',
+ ' Legacy Documentation',
"https://scitools.org.uk/iris/docs/v2.4.0/index.html",
),
],
@@ -271,6 +271,8 @@ def autolog(message):
"http://schacon.github.com/git",
"http://scitools.github.com/cartopy",
"http://www.wmo.int/pages/prog/www/DPFS/documents/485_Vol_I_en_colour.pdf",
+ "https://software.ac.uk/how-cite-software",
+ "http://www.esrl.noaa.gov/psd/data/gridded/conventions/cdc_netcdf_standard.shtml",
]
# list of sources to exclude from the build.
diff --git a/docs/iris/src/copyright.rst b/docs/iris/src/copyright.rst
index 08a40e5a1e..16ac07acb3 100644
--- a/docs/iris/src/copyright.rst
+++ b/docs/iris/src/copyright.rst
@@ -1,8 +1,8 @@
-Iris copyright, licensing and contributors
+Iris Copyright, Licensing and Contributors
==========================================
-Iris code
+Iris Code
---------
All Iris source code, unless explicitly stated, is ``Copyright Iris
@@ -20,7 +20,7 @@ You should find all source files with the following header:
licensing details.
-Iris documentation and examples
+Iris Documentation and Examples
-------------------------------
All documentation, examples and sample data found on this website and in source repository
diff --git a/docs/iris/src/developers_guide/ci_checks.png b/docs/iris/src/developers_guide/ci_checks.png
old mode 100644
new mode 100755
index cf93239dea..e088e03a66
Binary files a/docs/iris/src/developers_guide/ci_checks.png and b/docs/iris/src/developers_guide/ci_checks.png differ
diff --git a/docs/iris/src/developers_guide/contributing_changes.rst b/docs/iris/src/developers_guide/contributing_changes.rst
index a752986ec4..48357874a7 100644
--- a/docs/iris/src/developers_guide/contributing_changes.rst
+++ b/docs/iris/src/developers_guide/contributing_changes.rst
@@ -1,7 +1,7 @@
.. _contributing.changes:
-Contributing your changes
+Contributing Your Changes
=========================
.. toctree::
diff --git a/docs/iris/src/developers_guide/contributing_ci_tests.rst b/docs/iris/src/developers_guide/contributing_ci_tests.rst
index c7a041bcb2..a6bdac4ae0 100644
--- a/docs/iris/src/developers_guide/contributing_ci_tests.rst
+++ b/docs/iris/src/developers_guide/contributing_ci_tests.rst
@@ -10,8 +10,7 @@ automatically when a pull request is created, updated or merged against
Iris **master**. The checks performed are:
* :ref:`testing_cla`
-* :ref:`testing_travis`
-* :ref:`testing_stickler`
+* :ref:`testing_cirrus`
.. _testing_cla:
@@ -24,33 +23,23 @@ A bot that checks the user who created the pull request has signed the
please see https://scitools.org.uk/organisation.html#governance
-.. _testing_stickler:
+.. _testing_cirrus:
-Stickler CI
-***********
-
-Automatically enforces coding standards. The configuration file named
-`.stickler.yml`_ is in the Iris_ root directory. For more information see
-https://stickler-ci.com/.
-
-
-.. _testing_travis:
-
-Travis-CI
+Cirrus-CI
*********
The unit and integration tests in Iris are an essential mechanism to ensure
that the Iris code base is working as expected. :ref:`developer_running_tests`
may be run manually but to ensure the checks are performed a
-continuous integration testing tool named `travis-ci`_ is used.
+continuous integration testing tool named `cirrus-ci`_ is used.
-A `travis-ci`_ configuration file named `.travis.yml`_
-is in the Iris repository which tells travis-ci what commands to run. The
+A `cirrus-ci`_ configuration file named `.cirrus.yml`_
+is in the Iris repository which tells Cirrus-CI what commands to run. The
commands include retrieving the Iris code base and associated test files using
-conda and then running the tests. `travis-ci`_ allows for a matrix of tests to
+conda and then running the tests. `cirrus-ci`_ allows for a matrix of tests to
be performed to ensure that all expected variations test successfully.
-The `travis-ci`_ tests are run automatically against the `Iris`_ master
+The `cirrus-ci`_ tests are run automatically against the `Iris`_ master
repository when a pull request is submitted, updated or merged.
GitHub Checklist
diff --git a/docs/iris/src/developers_guide/contributing_code_formatting.rst b/docs/iris/src/developers_guide/contributing_code_formatting.rst
index b3f23f655a..6bf8dca717 100644
--- a/docs/iris/src/developers_guide/contributing_code_formatting.rst
+++ b/docs/iris/src/developers_guide/contributing_code_formatting.rst
@@ -2,7 +2,7 @@
.. _code_formatting:
-Code formatting
+Code Formatting
===============
To ensure a consistent code format throughout Iris, we recommend using
diff --git a/docs/iris/src/developers_guide/contributing_codebase_index.rst b/docs/iris/src/developers_guide/contributing_codebase_index.rst
index 8d7eed8c84..88986c0c7a 100644
--- a/docs/iris/src/developers_guide/contributing_codebase_index.rst
+++ b/docs/iris/src/developers_guide/contributing_codebase_index.rst
@@ -1,6 +1,6 @@
.. _contributing.documentation.codebase:
-Contributing to the code base
+Contributing to the Code Base
=============================
.. toctree::
diff --git a/docs/iris/src/developers_guide/contributing_deprecations.rst b/docs/iris/src/developers_guide/contributing_deprecations.rst
index c7a6888984..1ecafdca9f 100644
--- a/docs/iris/src/developers_guide/contributing_deprecations.rst
+++ b/docs/iris/src/developers_guide/contributing_deprecations.rst
@@ -10,12 +10,12 @@ one release, before removing/updating it in the next
`major release `_.
-Adding a deprecation
+Adding a Deprecation
====================
.. _removing-a-public-api:
-Removing a public API
+Removing a Public API
---------------------
The simplest form of deprecation occurs when you need to remove a public
@@ -49,7 +49,7 @@ Under these circumstances the following points apply:
- You should check the documentation for references to the deprecated
API and update them as appropriate.
-Changing a default
+Changing a Default
------------------
When you need to change the default behaviour of a public API the
@@ -74,7 +74,7 @@ API:
deprecation warning and corresponding Sphinx deprecation directive.
-Removing a deprecation
+Removing a Deprecation
======================
When the time comes to make a new major release you should locate any
@@ -83,7 +83,7 @@ minimum period described previously. Locating deprecated APIs can easily
be done by searching for the Sphinx deprecation directives and/or
deprecation warnings.
-Removing a public API
+Removing a Public API
---------------------
The deprecated API should be removed and any corresponding documentation
@@ -91,7 +91,7 @@ and/or example code should be removed/updated as appropriate.
.. _iris_developer_future:
-Changing a default
+Changing a Default
------------------
- You should update the initial state of the relevant boolean attribute
diff --git a/docs/iris/src/developers_guide/contributing_documentation.rst b/docs/iris/src/developers_guide/contributing_documentation.rst
index 9674289568..56d2257a55 100644
--- a/docs/iris/src/developers_guide/contributing_documentation.rst
+++ b/docs/iris/src/developers_guide/contributing_documentation.rst
@@ -1,7 +1,7 @@
.. _contributing.documentation:
-Contributing to the documentation
+Contributing to the Documentation
---------------------------------
Documentation is important and we encourage any improvements that can be made.
@@ -28,7 +28,7 @@ The build can be run from the documentation directory ``iris/docs/iris/src``.
The build output for the html is found in the ``_build/html`` sub directory.
When updating the documentation ensure the html build has *no errors* or
-*warnings* otherwise it may fail the automated `travis-ci`_ build.
+*warnings* otherwise it may fail the automated `cirrus-ci`_ build.
Once the build is complete, if it is rerun it will only rebuild the impacted
build artefacts so should take less time.
@@ -50,7 +50,7 @@ This is useful for a final test before committing your changes.
have been promoted to be **errors** to ensure they are addressed.
This **only** applies when ``make html`` is run.
-.. _travis-ci: https://travis-ci.org/github/SciTools/iris
+.. _cirrus-ci: https://cirrus-ci.com/github/SciTools/iris
.. _contributing.documentation.testing:
@@ -99,7 +99,7 @@ or ignore the url.
``spelling_word_list_filename``.
-.. note:: In addition to the automated `travis-ci`_ build of all the
+.. note:: In addition to the automated `cirrus-ci`_ build of all the
documentation build options above, the
https://readthedocs.org/ service is also used. The configuration
of this held in a file in the root of the
@@ -112,7 +112,7 @@ or ignore the url.
.. _contributing.documentation.api:
-Generating API documentation
+Generating API Documentation
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In order to auto generate the API documentation based upon the docstrings a
diff --git a/docs/iris/src/developers_guide/contributing_getting_involved.rst b/docs/iris/src/developers_guide/contributing_getting_involved.rst
index edcbbaf726..0fd873517f 100644
--- a/docs/iris/src/developers_guide/contributing_getting_involved.rst
+++ b/docs/iris/src/developers_guide/contributing_getting_involved.rst
@@ -2,7 +2,7 @@
.. _development_where_to_start:
-Getting involved
+Getting Involved
----------------
Iris_ is an Open Source project hosted on Github and as such anyone with a
diff --git a/docs/iris/src/developers_guide/contributing_graphics_tests.rst b/docs/iris/src/developers_guide/contributing_graphics_tests.rst
index a276f520d6..8d8189c69b 100644
--- a/docs/iris/src/developers_guide/contributing_graphics_tests.rst
+++ b/docs/iris/src/developers_guide/contributing_graphics_tests.rst
@@ -2,7 +2,7 @@
.. _testing.graphics:
-Graphics tests
+Graphics Tests
**************
Iris may be used to create various forms of graphical output; to ensure
@@ -31,10 +31,10 @@ known acceptable output may fail. The failure may also not be visually
perceived as it may be a simple pixel shift.
-Testing strategy
+Testing Strategy
================
-The `Iris Travis matrix`_ defines multiple test runs that use
+The `Iris Cirrus-CI matrix`_ defines multiple test runs that use
different versions of Python to ensure Iris is working as expected.
To make this manageable, the ``iris.tests.IrisTest_nometa.check_graphic`` test
@@ -64,7 +64,7 @@ This consists of:
against the existing accepted reference images, for each failing test.
-Reviewing failing tests
+Reviewing Failing Tests
=======================
When you find that a graphics test in the Iris testing suite has failed,
@@ -122,7 +122,7 @@ you should follow:
happens, simply repeat the check-and-accept process until all tests pass.
-Add your changes to Iris
+Add Your Changes to Iris
========================
To add your changes to Iris, you need to make two pull requests (PR).
@@ -155,7 +155,7 @@ To add your changes to Iris, you need to make two pull requests (PR).
.. important::
- The Iris pull-request will not test successfully in Travis until the
+ The Iris pull-request will not test successfully in Cirrus-CI until the
``test-iris-imagehash`` pull request has been merged. This is because there
is an Iris_ test which ensures the existence of the reference images (uris)
for all the targets in the image results database. It will also fail
@@ -163,4 +163,4 @@ To add your changes to Iris, you need to make two pull requests (PR).
image-listing file in ``test-iris-imagehash``.
-.. _Iris travis matrix: https://github.com/scitools/iris/blob/master/.travis.yml#L15
+.. _Iris Cirrus-CI matrix: https://github.com/scitools/iris/blob/master/.cirrus.yml
diff --git a/docs/iris/src/developers_guide/contributing_pull_request_checklist.rst b/docs/iris/src/developers_guide/contributing_pull_request_checklist.rst
index b01f370ea2..3e7a9f1ae3 100644
--- a/docs/iris/src/developers_guide/contributing_pull_request_checklist.rst
+++ b/docs/iris/src/developers_guide/contributing_pull_request_checklist.rst
@@ -2,8 +2,8 @@
.. _pr_check:
-Pull request check list
-=======================
+Pull Request Checklist
+======================
All pull request will be reviewed by a core developer who will manage the
process of merging. It is the responsibility of a developer submitting a
@@ -38,7 +38,7 @@ is merged. Before submitting a pull request please consider this list.
#. **Check the documentation builds without warnings or errors**. See
:ref:`contributing.documentation.building`
-#. **Check for any new dependencies in the** `.travis.yml`_ **config file.**
+#. **Check for any new dependencies in the** `.cirrus.yml`_ **config file.**
#. **Check for any new dependencies in the** `readthedocs.yml`_ **file**. This
file is used to build the documentation that is served from
diff --git a/docs/iris/src/developers_guide/contributing_running_tests.rst b/docs/iris/src/developers_guide/contributing_running_tests.rst
index cadf3710db..99ea4e831c 100644
--- a/docs/iris/src/developers_guide/contributing_running_tests.rst
+++ b/docs/iris/src/developers_guide/contributing_running_tests.rst
@@ -2,9 +2,14 @@
.. _developer_running_tests:
-Running the tests
+Running the Tests
*****************
+Using setuptools for Testing Iris
+=================================
+
+.. warning:: The `setuptools`_ ``test`` command was deprecated in `v41.5.0`_. See :ref:`using nox`.
+
A prerequisite of running the tests is to have the Python environment
setup. For more information on this see :ref:`installing_from_source`.
@@ -90,4 +95,93 @@ due to an experimental dependency not being present.
All Python decorators that skip tests will be defined in
``lib/iris/tests/__init__.py`` with a function name with a prefix of
- ``skip_``.
\ No newline at end of file
+ ``skip_``.
+
+
+.. _using nox:
+
+Using Nox for Testing Iris
+==========================
+
+Iris has adopted the use of the `nox`_ tool for automated testing on `cirrus-ci`_
+and also locally on the command-line for developers.
+
+`nox`_ is similar to `tox`_, but instead leverages the expressiveness and power of a Python
+configuration file rather than an ``.ini`` style file. As with `tox`_, `nox`_ can use `virtualenv`_
+to create isolated Python environments, but in addition also supports `conda`_ as a testing
+environment backend.
+
+
+Where is Nox Used?
+------------------
+
+Iris uses `nox`_ as a convenience to fully automate the process of executing the Iris tests, but also
+automates the process of:
+
+* building the documentation and executing the doc-tests
+* building the documentation gallery
+* running the documentation URL link check
+* linting the code-base
+* ensuring the code-base style conforms to the `black`_ standard
+
+
+You can perform all of these tasks manually yourself, however the onus is on you to first ensure
+that all of the required package dependencies are installed and available in the testing environment.
+
+`Nox`_ has been configured to automatically do this for you, and provides a means to easily replicate
+the remote testing behaviour of `cirrus-ci`_ locally for the developer.
+
+
+Installing Nox
+--------------
+
+We recommend installing `nox`_ using `conda`_. To install `nox`_ in a separate `conda`_ environment::
+
+ conda create -n nox -c conda-forge nox
+ conda activate nox
+
+To install `nox`_ in an existing active `conda`_ environment::
+
+ conda install -c conda-forge nox
+
+The `nox`_ package is also available on `PyPI`_, however `nox`_ has been configured to use the `conda`_
+backend for Iris, so an installation of `conda`_ must always be available.
+
+
+Testing with Nox
+----------------
+
+The `nox`_ configuration file ``noxfile.py`` is available in the root ``iris`` project directory, and
+defines all the `nox`_ sessions (i.e., tasks) that may be performed. `nox`_ must always be executed
+from the ``iris`` root directory.
+
+To list the configured `nox`_ sessions for Iris::
+
+ nox --list
+
+To run the Iris tests for all configured versions of Python::
+
+ nox --session tests
+
+To build the Iris documentation specifically for Python 3.7::
+
+ nox --session doctest-3.7
+
+To run all the Iris `nox`_ sessions::
+
+ nox
+
+For further `nox`_ command-line options::
+
+ nox --help
+
+.. note:: `nox`_ will cache its testing environments in the ``.nox`` directory within the root ``iris`` project directory.
+
+
+.. _black: https://black.readthedocs.io/en/stable/
+.. _nox: https://nox.thea.codes/en/latest/
+.. _setuptools: https://setuptools.readthedocs.io/en/latest/
+.. _tox: https://tox.readthedocs.io/en/latest/
+.. _virtualenv: https://virtualenv.pypa.io/en/latest/
+.. _PyPI: https://pypi.org/project/nox/
+.. _v41.5.0: https://setuptools.readthedocs.io/en/latest/history.html#v41-5-0
diff --git a/docs/iris/src/developers_guide/contributing_testing.rst b/docs/iris/src/developers_guide/contributing_testing.rst
index 375ad57003..486af706d3 100644
--- a/docs/iris/src/developers_guide/contributing_testing.rst
+++ b/docs/iris/src/developers_guide/contributing_testing.rst
@@ -3,7 +3,7 @@
.. _developer_test_categories:
-Test categories
+Test Categories
***************
There are two main categories of tests within Iris:
@@ -20,7 +20,7 @@ feel free to submit a pull-request in any state and ask for assistance.
.. _testing.unit_test:
-Unit tests
+Unit Tests
==========
Code changes should be accompanied by enough unit tests to give a
@@ -128,7 +128,7 @@ Within that file the tests might look something like:
.. _testing.integration:
-Integration tests
+Integration Tests
=================
Some code changes may require tests which exercise several units in
@@ -141,4 +141,4 @@ tests. But folders and files must be created as required to help
developers locate relevant tests. It is recommended they are named
according to the capabilities under test, e.g.
``metadata/test_pp_preservation.py``, and not named according to the
-module(s) under test.
\ No newline at end of file
+module(s) under test.
diff --git a/docs/iris/src/developers_guide/documenting/docstrings.rst b/docs/iris/src/developers_guide/documenting/docstrings.rst
index 34ec790d03..8a06024ee2 100644
--- a/docs/iris/src/developers_guide/documenting/docstrings.rst
+++ b/docs/iris/src/developers_guide/documenting/docstrings.rst
@@ -27,7 +27,7 @@ There are two forms of docstrings: **single-line** and **multi-line**
docstrings.
-Single-line docstrings
+Single-Line Docstrings
======================
The single line docstring of an object must state the **purpose** of that
@@ -35,7 +35,7 @@ object, known as the **purpose section**. This terse overview must be on one
line and ideally no longer than 80 characters.
-Multi-line docstrings
+Multi-Line Docstrings
=====================
Multi-line docstrings must consist of at least a purpose section akin to the
@@ -53,7 +53,7 @@ not to document *argument* and *keyword argument* details. Such information
should be documented in the following *arguments and keywords section*.
-Sample multi-line docstring
+Sample Multi-Line Docstring
---------------------------
Here is a simple example of a standard docstring:
@@ -75,7 +75,7 @@ Additionally, a summary can be extracted automatically, which would result in:
documenting.docstrings_sample_routine.sample_routine
-Documenting classes
+Documenting Classes
===================
The class constructor should be documented in the docstring for its
@@ -90,7 +90,7 @@ superclass method and does not call the superclass method; use the verb
(in addition to its own behaviour).
-Attribute and property docstrings
+Attribute and Property Docstrings
---------------------------------
Here is a simple example of a class containing an attribute docstring and a
diff --git a/docs/iris/src/developers_guide/documenting/rest_guide.rst b/docs/iris/src/developers_guide/documenting/rest_guide.rst
index bc34d16cd8..4845132b15 100644
--- a/docs/iris/src/developers_guide/documenting/rest_guide.rst
+++ b/docs/iris/src/developers_guide/documenting/rest_guide.rst
@@ -3,7 +3,7 @@
.. _reST_quick_start:
================
-reST quick start
+reST Quick Start
================
`reST`_ is used to create the documentation for Iris_. It is used to author
@@ -19,7 +19,7 @@ reST markup syntaxes, for the basics of reST the following links may be useful:
Reference documentation for reST can be found at http://docutils.sourceforge.net/rst.html.
-Creating links
+Creating Links
--------------
Basic links can be created with ```Text of the link `_``
which will look like `Text of the link `_
diff --git a/docs/iris/src/developers_guide/documenting/whats_new_contributions.rst b/docs/iris/src/developers_guide/documenting/whats_new_contributions.rst
index 856d9af0a9..4bd9021333 100644
--- a/docs/iris/src/developers_guide/documenting/whats_new_contributions.rst
+++ b/docs/iris/src/developers_guide/documenting/whats_new_contributions.rst
@@ -1,7 +1,7 @@
.. _whats_new_contributions:
=================================
-Contributing a "What's New" entry
+Contributing a "What's New" Entry
=================================
Iris uses a file named ``latest.rst`` to keep a draft of upcoming changes
@@ -38,7 +38,7 @@ situation is thought likely (large PR, high repo activity etc.):
* PR author: create the "What's New" pull request
* PR reviewer: once the "What's New" PR is created, **merge the main PR**.
- (this will fix any `travis-ci`_ linkcheck errors where the links in the
+ (this will fix any `cirrus-ci`_ linkcheck errors where the links in the
"What's New" PR reference new features introduced in the main PR)
* PR reviewer: review the "What's New" PR, merge once acceptable
@@ -48,7 +48,7 @@ for the minimum time, minimising conflicts and minimising the need to rebase or
merge from trunk.
-Writing a contribution
+Writing a Contribution
======================
As introduced above, a contribution is the description of a change to Iris
@@ -59,16 +59,15 @@ what's new document.
The appropriate contribution for a pull request might in fact be an addition or
change to an existing "What's New" entry.
-Each contribution will ideally be written as a single concise bullet point
-in a reStructuredText format. Where possible do not exceed **column 80** and
-ensure that any subsequent lines of the same bullet point are aligned with the
-first. The content should target an Iris user as the audience. The required
-content, in order, is as follows:
+Each contribution will ideally be written as a single concise entry using a
+reStructuredText auto-enumerated list ``#.`` directive. Where possible do not
+exceed **column 80** and ensure that any subsequent lines of the same entry are
+aligned with the first. The content should target an Iris user as the audience.
+The required content, in order, is as follows:
* Names of those who contributed the change. These should be their GitHub
user name. Link the name to their GitHub profile. E.g.
- ```@bjlittle `_ and
- `@tkknight `_ changed...``
+ ```@tkknight `_ changed...``
* The new/changed behaviour
@@ -79,15 +78,14 @@ content, in order, is as follows:
* Pull request references, bracketed, following the final period. E.g.
``(:pull:`1111`, :pull:`9999`)``
-* A trailing blank line (standard reStructuredText bullet format)
+* A trailing blank line (standard reStructuredText list format)
For example::
- * `@bjlittle `_ and
- `@tkknight `_ changed changed argument ``x``
- to be optional in :class:`~iris.module.class` and
- :meth:`iris.module.method`. This allows greater flexibility as requested in
- :issue:`9999`. (:pull:`1111`, :pull:`9999`)
+ #. `@tkknight `_ changed argument ``x``
+ to be optional in :class:`~iris.module.class` and
+ :meth:`iris.module.method`. This allows greater flexibility as requested in
+ :issue:`9999`. (:pull:`1111`, :pull:`9999`)
The above example also demonstrates some of the possible syntax for including
@@ -96,14 +94,14 @@ examine past what's :ref:`iris_whatsnew` entries.
.. note:: The reStructuredText syntax will be checked as part of building
the documentation. Any warnings should be corrected.
- `travis-ci`_ will automatically build the documentation when
+ `cirrus-ci`_ will automatically build the documentation when
creating a pull request, however you can also manually
:ref:`build ` the documentation.
-.. _travis-ci: https://travis-ci.org/github/SciTools/iris
+.. _cirrus-ci: https://cirrus-ci.com/github/SciTools/iris
-Contribution categories
+Contribution Categories
=======================
The structure of the what's new release note should be easy to read by
diff --git a/docs/iris/src/developers_guide/gitwash/configure_git.rst b/docs/iris/src/developers_guide/gitwash/configure_git.rst
index b958a683ee..6fc288daf9 100644
--- a/docs/iris/src/developers_guide/gitwash/configure_git.rst
+++ b/docs/iris/src/developers_guide/gitwash/configure_git.rst
@@ -3,7 +3,7 @@
.. _configure-git:
=============
-Configure git
+Configure Git
=============
.. _git-config-basic:
@@ -51,7 +51,7 @@ command::
To set up on another computer, you can copy your ``~/.gitconfig`` file,
or run the commands above.
-In detail
+In Detail
=========
user.name and user.email
@@ -124,7 +124,7 @@ Or from the command line::
.. _fancy-log:
-Fancy log output
+Fancy Log Output
----------------
This is a very nice alias to get a fancy log output; it should go in the
diff --git a/docs/iris/src/developers_guide/gitwash/development_workflow.rst b/docs/iris/src/developers_guide/gitwash/development_workflow.rst
index b67885e6bd..f6144a05e9 100644
--- a/docs/iris/src/developers_guide/gitwash/development_workflow.rst
+++ b/docs/iris/src/developers_guide/gitwash/development_workflow.rst
@@ -1,14 +1,14 @@
.. _development-workflow:
####################
-Development workflow
+Development Workflow
####################
You already have your own forked copy of the `iris`_ repository, by
following :ref:`forking`. You have :ref:`set-up-fork`. You have configured
git by following :ref:`configure-git`. Now you are ready for some real work.
-Workflow summary
+Workflow Summary
================
In what follows we'll refer to the upstream iris ``master`` branch, as
@@ -34,7 +34,7 @@ what you've done, and why you did it.
See `linux git workflow`_ for some explanation.
-Consider deleting your master branch
+Consider Deleting Your Master Branch
====================================
It may sound strange, but deleting your own ``master`` branch can help reduce
@@ -43,7 +43,7 @@ details.
.. _update-mirror-trunk:
-Update the mirror of trunk
+Update the Mirror of Trunk
==========================
First make sure you have done :ref:`linking-to-upstream`.
@@ -59,7 +59,7 @@ you last checked, ``upstream/master`` will change after you do the fetch.
.. _make-feature-branch:
-Make a new feature branch
+Make a New Feature Branch
=========================
When you are ready to make some changes to the code, you should start a new
@@ -99,7 +99,7 @@ From now on git will know that ``my-new-feature`` is related to the
.. _edit-flow:
-The editing workflow
+The Editing Workflow
====================
Overview
@@ -112,7 +112,7 @@ Overview
git commit -am 'NF - some message'
git push
-In more detail
+In More Detail
--------------
#. Make some changes
@@ -144,14 +144,14 @@ In more detail
push`` (see `git push`_).
-Testing your changes
+Testing Your Changes
====================
Once you are happy with your changes, work thorough the :ref:`pr_check` and
make sure your branch passes all the relevant tests.
-Ask for your changes to be reviewed or merged
+Ask for Your Changes to be Reviewed or Merged
=============================================
When you are ready to ask for someone to review your code and consider a merge:
@@ -175,10 +175,10 @@ When you are ready to ask for someone to review your code and consider a merge:
pull request message. This is still a good way of getting some preliminary
code review.
-Some other things you might want to do
+Some Other Things You Might Want to Do
======================================
-Delete a branch on github
+Delete a Branch on GitHub
-------------------------
::
@@ -193,7 +193,7 @@ Note the colon ``:`` before ``test-branch``. See also:
http://github.com/guides/remove-a-remote-branch
-Several people sharing a single repository
+Several People Sharing a Single Repository
------------------------------------------
If you want to work on some stuff with other people, where you are all
@@ -225,7 +225,7 @@ usual::
git commit -am 'ENH - much better code'
git push origin master # pushes directly into your repo
-Explore your repository
+Explore Your Repository
-----------------------
To see a graphical representation of the repository branches and
@@ -243,7 +243,7 @@ graph of the repository.
.. _rebase-on-trunk:
-Rebasing on trunk
+Rebasing on Trunk
-----------------
For more information please see the
diff --git a/docs/iris/src/developers_guide/gitwash/forking.rst b/docs/iris/src/developers_guide/gitwash/forking.rst
index e10b8f84ca..161847ed79 100644
--- a/docs/iris/src/developers_guide/gitwash/forking.rst
+++ b/docs/iris/src/developers_guide/gitwash/forking.rst
@@ -3,7 +3,7 @@
.. _forking:
===================================
-Making your own copy (fork) of Iris
+Making Your Own Copy (fork) of Iris
===================================
You need to do this only once. The instructions here are very similar
@@ -12,7 +12,7 @@ that page for more detail. We're repeating some of it here just to give the
specifics for the `Iris`_ project, and to suggest some default names.
-Set up and configure a github account
+Set up and Configure a GitHub Account
=====================================
If you don't have a github account, go to the github page, and make one.
@@ -21,7 +21,7 @@ You then need to configure your account to allow write access, see
the `generating sss keys for GitHub`_ help on `github help`_.
-Create your own forked copy of Iris
+Create Your Own Forked Copy of Iris
===================================
#. Log into your github account.
diff --git a/docs/iris/src/developers_guide/gitwash/index.rst b/docs/iris/src/developers_guide/gitwash/index.rst
index d0e70597f1..3cde622583 100644
--- a/docs/iris/src/developers_guide/gitwash/index.rst
+++ b/docs/iris/src/developers_guide/gitwash/index.rst
@@ -1,6 +1,6 @@
.. _using-git:
-Working with Iris source code
+Working With Iris Source Code
=============================
.. toctree::
diff --git a/docs/iris/src/developers_guide/gitwash/set_up_fork.rst b/docs/iris/src/developers_guide/gitwash/set_up_fork.rst
index 9dc6618c64..70d602c97c 100644
--- a/docs/iris/src/developers_guide/gitwash/set_up_fork.rst
+++ b/docs/iris/src/developers_guide/gitwash/set_up_fork.rst
@@ -3,7 +3,7 @@
.. _set-up-fork:
================
-Set up your fork
+Set up Your Fork
================
First you follow the instructions for :ref:`forking`.
@@ -17,10 +17,10 @@ Overview
cd iris
git remote add upstream git://github.com/SciTools/iris.git
-In detail
+In Detail
=========
-Clone your fork
+Clone Your Fork
---------------
#. Clone your fork to the local computer with ``git clone
@@ -42,7 +42,7 @@ Clone your fork
.. _linking-to-upstream:
-Linking your repository to the upstream repo
+Linking Your Repository to the Upstream Repo
--------------------------------------------
::
diff --git a/docs/iris/src/developers_guide/release.rst b/docs/iris/src/developers_guide/release.rst
index 2ec787a780..6ac3af5c75 100644
--- a/docs/iris/src/developers_guide/release.rst
+++ b/docs/iris/src/developers_guide/release.rst
@@ -10,7 +10,7 @@ The summary below is of the main areas that constitute the release. The final
section details the :ref:`iris_development_releases_steps` to take.
-Before release
+Before Release
--------------
Deprecations
@@ -21,7 +21,7 @@ previous releases is now finally changed. More detail, including the correct
number of releases, is in :ref:`iris_development_deprecations`.
-Release branch
+Release Branch
--------------
Once the features intended for the release are on master, a release branch
@@ -37,7 +37,7 @@ This branch shall be used to finalise the release details in preparation for
the release candidate.
-Release candidate
+Release Candidate
-----------------
Prior to a release, a release candidate tag may be created, marked as a
@@ -67,7 +67,7 @@ This content should be reviewed and adapted as required.
Steps to achieve this can be found in the :ref:`iris_development_releases_steps`.
-The release
+The Release
-----------
The final steps are to change the version string in the source of
@@ -78,7 +78,7 @@ Once all checks are complete, the release is cut by the creation of a new tag
in the SciTools Iris repository.
-Conda recipe
+Conda Recipe
------------
Once a release is cut, the `Iris feedstock`_ for the conda recipe must be
@@ -88,7 +88,7 @@ updated to build the latest release of Iris and push this artefact to
.. _Iris feedstock: https://github.com/conda-forge/iris-feedstock/tree/master/recipe
.. _conda forge: https://anaconda.org/conda-forge/iris
-Merge back
+Merge Back
----------
After the release is cut, the changes shall be merged back onto the
@@ -101,7 +101,7 @@ pull request to master. This work flow ensures that the commit identifiers are
consistent between the :literal:`.x` branch and :literal:`master`.
-Point releases
+Point Releases
--------------
Bug fixes may be implemented and targeted as the :literal:`.x` branch. These
@@ -118,12 +118,12 @@ release process is to be followed, including the merge back of changes into
.. _iris_development_releases_steps:
-Maintainer steps
+Maintainer Steps
----------------
These steps assume a release for ``v1.9`` is to be created
-Release steps
+Release Steps
~~~~~~~~~~~~~
#. Create the branch ``1.9.x`` on the main repo, not in a forked repo, for the
@@ -156,7 +156,7 @@ Release steps
`Iris release page `_
-Post release steps
+Post Release Steps
~~~~~~~~~~~~~~~~~~
#. Check the documentation has built on `Read The Docs`_. The build is
diff --git a/docs/iris/src/further_topics/index.rst b/docs/iris/src/further_topics/index.rst
index 8a4d95b6cd..dc162d6a1e 100644
--- a/docs/iris/src/further_topics/index.rst
+++ b/docs/iris/src/further_topics/index.rst
@@ -5,7 +5,7 @@ Introduction
Some specific areas of Iris may require further explanation or a deep dive
into additional detail above and beyond that offered by the
-:ref:`User guide `.
+:ref:`User Guide `.
This section provides a collection of additional material on focused topics
that may be of interest to the more advanced or curious user.
diff --git a/docs/iris/src/further_topics/lenient_maths.rst b/docs/iris/src/further_topics/lenient_maths.rst
index 6f139fd9bf..4aad721780 100644
--- a/docs/iris/src/further_topics/lenient_maths.rst
+++ b/docs/iris/src/further_topics/lenient_maths.rst
@@ -1,6 +1,6 @@
.. _lenient maths:
-Lenient cube maths
+Lenient Cube Maths
******************
This section provides an overview of lenient cube maths. In particular, it explains
@@ -46,7 +46,7 @@ a practical worked example, which we'll explore together next.
.. _lenient example:
-Lenient example
+Lenient Example
===============
.. testsetup:: lenient-example
@@ -154,7 +154,7 @@ Now let's compare and contrast this lenient result with the strict alternative.
But before we do so, let's first clarify how to control the behaviour of cube maths.
-Control the behaviour
+Control the Behaviour
=====================
As stated earlier, lenient cube maths is the default behaviour from Iris ``3.0.0``.
@@ -191,7 +191,7 @@ scope of the ``LENIENT`` `context manager`_,
Lenient(maths=True)
-Strict example
+Strict Example
==============
Now that we know how to control the underlying behaviour of cube maths,
@@ -229,7 +229,7 @@ This is because strict cube maths, in general, will only return common metadata
and common coordinates that are :ref:`strictly equivalent `.
-Finer detail
+Finer Detail
============
In general, if you want to preserve as much metadata and coordinate information as
@@ -278,4 +278,4 @@ resultant :class:`~iris.cube.Cube`,
.. _atmosphere hybrid height parametric vertical coordinate: https://cfconventions.org/Data/cf-conventions/cf-conventions-1.8/cf-conventions.html#atmosphere-hybrid-height-coordinate
-.. _context manager: https://docs.python.org/3/library/contextlib.html
\ No newline at end of file
+.. _context manager: https://docs.python.org/3/library/contextlib.html
diff --git a/docs/iris/src/further_topics/lenient_metadata.rst b/docs/iris/src/further_topics/lenient_metadata.rst
index ada7049786..b68ed501ba 100644
--- a/docs/iris/src/further_topics/lenient_metadata.rst
+++ b/docs/iris/src/further_topics/lenient_metadata.rst
@@ -1,6 +1,6 @@
.. _lenient metadata:
-Lenient metadata
+Lenient Metadata
****************
This section discusses lenient metadata; what it is, what it means, and how you
@@ -27,7 +27,7 @@ methods that provide this rich metadata behaviour, all of which are explored
more fully in :ref:`metadata`.
-Strict behaviour
+Strict Behaviour
================
.. testsetup:: strict-behaviour
@@ -137,7 +137,7 @@ practical behaviour is available.
.. _lenient behaviour:
-Lenient behaviour
+Lenient Behaviour
=================
.. testsetup:: lenient-behaviour
@@ -210,7 +210,7 @@ lenient behaviour for each of the metadata classes.
.. _lenient equality:
-Lenient equality
+Lenient Equality
----------------
Lenient equality is enabled using the ``lenient`` keyword argument, therefore
@@ -273,7 +273,7 @@ forgiving and practical alternative to strict behaviour.
.. _lenient difference:
-Lenient difference
+Lenient Difference
------------------
Similar to :ref:`lenient equality`, the lenient ``difference`` method
@@ -330,7 +330,7 @@ highlights the change in how such dissimilar metadata is treated gracefully,
.. _lenient combination:
-Lenient combination
+Lenient Combination
-------------------
The behaviour of the lenient ``combine`` metadata class method is outlined
@@ -380,7 +380,7 @@ for more inclusive, richer metadata,
.. _lenient members:
-Lenient members
+Lenient Members
---------------
:ref:`lenient behaviour` is not applied regardlessly across all metadata members
@@ -429,7 +429,7 @@ strict behaviour, regardlessly.
.. _special lenient name:
-Special lenient name behaviour
+Special Lenient Name Behaviour
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
The ``standard_name``, ``long_name`` and ``var_name`` have a closer association
diff --git a/docs/iris/src/further_topics/metadata.rst b/docs/iris/src/further_topics/metadata.rst
index 3536c87a2b..e6d6ebc57a 100644
--- a/docs/iris/src/further_topics/metadata.rst
+++ b/docs/iris/src/further_topics/metadata.rst
@@ -42,7 +42,7 @@ Collectively, the aforementioned classes will be known here as the Iris
`SciTools/iris`_
-Common metadata
+Common Metadata
===============
Each of the Iris `CF Conventions`_ classes use **metadata** to define them and
@@ -69,7 +69,7 @@ actual `data attribute`_ names of the metadata members on the Iris class.
:align: center
=================== ======================================= ============================== ========================================== ================================= ======================== ============================== ===================
- Metadata members :class:`~iris.coords.AncillaryVariable` :class:`~iris.coords.AuxCoord` :class:`~iris.aux_factory.AuxCoordFactory` :class:`~iris.coords.CellMeasure` :class:`~iris.cube.Cube` :class:`~iris.coords.DimCoord` Metadata members
+ Metadata Members :class:`~iris.coords.AncillaryVariable` :class:`~iris.coords.AuxCoord` :class:`~iris.aux_factory.AuxCoordFactory` :class:`~iris.coords.CellMeasure` :class:`~iris.cube.Cube` :class:`~iris.coords.DimCoord` Metadata Members
=================== ======================================= ============================== ========================================== ================================= ======================== ============================== ===================
``standard_name`` ✔ ✔ ✔ ✔ ✔ ✔ ``standard_name``
``long_name`` ✔ ✔ ✔ ✔ ✔ ✔ ``long_name``
@@ -90,7 +90,7 @@ actual `data attribute`_ names of the metadata members on the Iris class.
terms.
-Common metadata API
+Common Metadata API
===================
.. testsetup::
@@ -149,7 +149,7 @@ a **common** and **consistent** approach to managing your metadata, which we'll
now explore a little more fully.
-Metadata classes
+Metadata Classes
----------------
The ``metadata`` property will return an appropriate `namedtuple`_ metadata class
@@ -162,7 +162,7 @@ each container class is shown in :numref:`metadata classes table` below,
:align: center
========================================== ========================================================
- Container class Metadata class
+ Container Class Metadata Class
========================================== ========================================================
:class:`~iris.coords.AncillaryVariable` :class:`~iris.common.metadata.AncillaryVariableMetadata`
:class:`~iris.coords.AuxCoord` :class:`~iris.common.metadata.CoordMetadata`
@@ -232,7 +232,7 @@ discussion on options how to **set** and **get** metadata on the instance of
an Iris `CF Conventions`_ container class (:numref:`metadata classes table`).
-Metadata class behaviour
+Metadata Class Behaviour
------------------------
As mentioned previously, the metadata classes in :numref:`metadata classes table`
@@ -301,7 +301,7 @@ which we explore next.
.. _richer metadata:
-Richer metadata behaviour
+Richer Metadata Behaviour
-------------------------
.. testsetup:: richer-metadata
@@ -320,7 +320,7 @@ allows you to easily **compare**, **combine**, **convert** and understand the
.. _metadata equality:
-Metadata equality
+Metadata Equality
^^^^^^^^^^^^^^^^^
The metadata classes support both **equality** (``__eq__``) and **inequality**
@@ -357,7 +357,7 @@ a means to enable **lenient** equality, as discussed in :ref:`lenient equality`.
.. _strict equality:
-Strict equality
+Strict Equality
"""""""""""""""
By default, metadata class equality will perform a **strict** comparison between
@@ -426,7 +426,7 @@ However, metadata class equality is rich enough to handle this eventuality,
.. _compare like:
-Comparing like with like
+Comparing Like With Like
""""""""""""""""""""""""
So far in our journey through metadata class equality, we have only considered
@@ -446,7 +446,7 @@ metadata class contains **different** members, as shown in
.. _exception rule:
-Exception to the rule
+Exception to the Rule
~~~~~~~~~~~~~~~~~~~~~
In general, **different** metadata classes cannot be compared, however support
@@ -502,7 +502,7 @@ methods of metadata classes.
.. _metadata difference:
-Metadata difference
+Metadata Difference
^^^^^^^^^^^^^^^^^^^
Being able to compare metadata is valuable, especially when we have the
@@ -605,7 +605,7 @@ Now, let's compare the two above instances and see what ``attributes`` member di
.. _diff like:
-Diffing like with like
+Diffing Like With Like
""""""""""""""""""""""
As discussed in :ref:`compare like`, it only makes sense to determine the
@@ -655,7 +655,7 @@ In general, however, comparing **different** metadata classes will result in a
.. _metadata combine:
-Metadata combination
+Metadata Combination
^^^^^^^^^^^^^^^^^^^^
.. testsetup:: metadata-combine
@@ -740,7 +740,7 @@ metadata class. This is explored in a little further detail next.
.. _combine like:
-Combine like with like
+Combine Like With Like
""""""""""""""""""""""
Akin to the :ref:`equal ` and
@@ -788,7 +788,7 @@ However, note that commutativity in this case cannot be honoured, for obvious re
.. _metadata conversion:
-Metadata conversion
+Metadata Conversion
^^^^^^^^^^^^^^^^^^^
.. testsetup:: metadata-convert
@@ -853,7 +853,7 @@ class instance,
.. _metadata assignment:
-Metadata assignment
+Metadata Assignment
^^^^^^^^^^^^^^^^^^^
.. testsetup:: metadata-assign
@@ -888,7 +888,7 @@ coordinate,
DimCoordMetadata(standard_name='latitude', long_name=None, var_name='latitude', units=Unit('degrees'), attributes={}, coord_system=GeogCS(6371229.0), climatological=False, circular=False)
-Assign by iterable
+Assign by Iterable
""""""""""""""""""
It is also possible to assign to the ``metadata`` property of an Iris
@@ -903,7 +903,7 @@ number** of associated member values, e.g.,
DimCoordMetadata(standard_name='latitude', long_name=None, var_name='latitude', units=Unit('degrees'), attributes={}, coord_system=GeogCS(6371229.0), climatological=False, circular=False)
-Assign by namedtuple
+Assign by Namedtuple
""""""""""""""""""""
A `namedtuple`_ may also be used to assign to the ``metadata`` property of an
@@ -933,7 +933,7 @@ of the ``longitude`` coordinate,
DimCoordMetadata(standard_name='latitude', long_name=None, var_name='latitude', units=Unit('degrees'), attributes={}, coord_system=GeogCS(6371229.0), climatological=False, circular=False)
-Assign by mapping
+Assign by Mapping
"""""""""""""""""
It is also possible to assign to the ``metadata`` property using a `mapping`_,
diff --git a/docs/iris/src/index.rst b/docs/iris/src/index.rst
index e18ae409cd..892c31f36a 100644
--- a/docs/iris/src/index.rst
+++ b/docs/iris/src/index.rst
@@ -46,7 +46,7 @@ For **Iris 2.4** and earlier documentation please see the
:container: container-lg pb-3
:column: col-lg-4 col-md-4 col-sm-6 col-xs-12 p-2
- Install Iris to use or for development.
+ Install Iris as a user or developer.
+++
.. link-button:: installing_iris
:type: ref
@@ -91,7 +91,7 @@ For **Iris 2.4** and earlier documentation please see the
.. toctree::
:maxdepth: 1
- :caption: Getting started
+ :caption: Getting Started
:hidden:
installing
diff --git a/docs/iris/src/installing.rst b/docs/iris/src/installing.rst
index 762fe60e4d..8b3ae8d3e7 100644
--- a/docs/iris/src/installing.rst
+++ b/docs/iris/src/installing.rst
@@ -22,7 +22,7 @@ any WSL_ distributions.
.. _installing_using_conda:
-Installing using conda (users)
+Installing Using Conda (Users)
------------------------------
To install Iris using conda, you must first download and install conda,
@@ -41,11 +41,45 @@ need the Iris sample data. This can also be installed using conda::
Further documentation on using conda and the features it provides can be found
at https://conda.io/en/latest/index.html.
+.. _installing_from_source_without_conda:
+
+Installing from Source Without Conda on Debian-Based Linux Distros (Developers)
+-------------------------------------------------------------------------------
+
+Iris can also be installed without a conda environment. The instructions in
+this section are valid for Debian-based Linux distributions (Debian, Ubuntu,
+Kubuntu, etc.).
+
+Iris and its dependencies need some shared libraries in order to work properly.
+These can be installed
+with apt::
+
+ sudo apt-get install python3-pip python3-tk libudunits2-dev libproj-dev proj-bin libgeos-dev libcunit1-dev
+
+Consider executing::
+
+ sudo apt-get update
+
+before and after installation of Debian packages.
+
+The rest can be done with pip. Begin with numpy::
+
+ pip3 install numpy
+
+Finally, Iris and its Python dependencies can be installed with the following
+command::
+
+ pip3 install setuptools cftime==1.2.1 cf-units scitools-pyke scitools-iris
+
+This procedure was tested on an Ubuntu 20.04 system on the
+27th of January, 2021.
+Be aware that through updates of the involved Debian and/or Python packages,
+dependency conflicts might arise or the procedure might have to be modified.
.. _installing_from_source:
-Installing from source (devs)
------------------------------
+Installing from Source with Conda (Developers)
+----------------------------------------------
The latest Iris source release is available from
https://github.com/SciTools/iris.
@@ -81,7 +115,7 @@ to find your local Iris code::
python setup.py develop
-Running the tests
+Running the Tests
-----------------
To ensure your setup is configured correctly you can run the test suite using
@@ -92,7 +126,7 @@ the command::
For more information see :ref:`developer_running_tests`.
-Custom site configuration
+Custom Site Configuration
-------------------------
The default site configuration values can be overridden by creating the file
diff --git a/docs/iris/src/techpapers/change_management.rst b/docs/iris/src/techpapers/change_management.rst
index ab45fe7926..f39d64f430 100644
--- a/docs/iris/src/techpapers/change_management.rst
+++ b/docs/iris/src/techpapers/change_management.rst
@@ -4,7 +4,7 @@
.. _change_management:
-Change Management in Iris from the User's perspective
+Change Management in Iris From the User's Perspective
*****************************************************
As Iris changes, user code will need revising from time to time to keep it
@@ -16,7 +16,7 @@ Here, we define ways to make this as easy as possible.
.. include:: ../userguide/change_management_goals.txt
-Key principles you can rely on
+Key Principles You Can Rely On
==============================
Iris code editions are published as defined version releases, with a given
@@ -42,7 +42,7 @@ If your code produces :ref:`deprecation warnings `, then it
-User Actions : How you should respond to changes and releases
+User Actions : How You Should Respond to Changes and Releases
=============================================================
Checklist :
@@ -96,7 +96,7 @@ Key concepts covered here:
.. _iris_backward_compatibility:
-Backwards compatibility
+Backwards Compatibility
-----------------------
"Backwards-compatible" changes are those that leave any existing valid API
@@ -135,7 +135,7 @@ See :ref:`Usage of iris.FUTURE `, below.
.. _iris_api:
-Terminology : API, features, usages and behaviours
+Terminology : API, Features, Usages and Behaviours
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
The API is the components of the iris module and its submodules which are
@@ -320,7 +320,7 @@ This is to warn users :
* eventually to rewrite old code to use the newer or better alternatives
-Deprecated features support through the Release cycle
+Deprecated Features Support Through the Release Cycle
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
The whole point of a deprecation is that the feature continues to work, but
@@ -341,7 +341,7 @@ follows:
.. _iris_future_usage:
-Future options, `iris.FUTURE`
+Future Options, `iris.FUTURE`
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
A special approach is needed where the replacement behaviour is not controlled
diff --git a/docs/iris/src/techpapers/index.rst b/docs/iris/src/techpapers/index.rst
index 3074569eae..773c8f7059 100644
--- a/docs/iris/src/techpapers/index.rst
+++ b/docs/iris/src/techpapers/index.rst
@@ -1,7 +1,7 @@
.. _techpapers_index:
-Iris technical papers
+Iris Technical Papers
=====================
Extra information on specific technical issues.
diff --git a/docs/iris/src/techpapers/missing_data_handling.rst b/docs/iris/src/techpapers/missing_data_handling.rst
index 46279bc566..13b00d3424 100644
--- a/docs/iris/src/techpapers/missing_data_handling.rst
+++ b/docs/iris/src/techpapers/missing_data_handling.rst
@@ -1,5 +1,5 @@
=============================
-Missing data handling in Iris
+Missing Data Handling in Iris
=============================
This document provides a brief overview of how Iris handles missing data values
@@ -73,7 +73,7 @@ all have the same fill-value. If the components have differing fill-values, a
default fill-value will be used instead.
-Other operations
+Other Operations
----------------
Other operations, such as :class:`~iris.cube.Cube` arithmetic operations,
diff --git a/docs/iris/src/techpapers/um_files_loading.rst b/docs/iris/src/techpapers/um_files_loading.rst
index d8c796b31f..72d34962ce 100644
--- a/docs/iris/src/techpapers/um_files_loading.rst
+++ b/docs/iris/src/techpapers/um_files_loading.rst
@@ -14,7 +14,7 @@
===================================
-Iris handling of PP and Fieldsfiles
+Iris Handling of PP and Fieldsfiles
===================================
This document provides a basic account of how PP and Fieldsfiles data is
@@ -40,7 +40,7 @@ For details of Iris terms (cubes, coordinates, attributes), refer to
For details of CF conventions, see http://cfconventions.org/.
-Overview of loading process
+Overview of Loading Process
---------------------------
The basics of Iris loading are explained at :ref:`loading_iris_cubes`.
@@ -165,7 +165,7 @@ For example:
sections are written only if the actual values are unevenly spaced.
-Phenomenon identification
+Phenomenon Identification
-------------------------
**UM Field elements**
@@ -218,7 +218,7 @@ For example:
LBUSER4 and LBUSER7 elements.
-Vertical coordinates
+Vertical Coordinates
--------------------
**UM Field elements**
@@ -319,7 +319,7 @@ See an example printout of a hybrid height cube,
.. _um_time_metadata:
-Time information
+Time Information
----------------
**UM Field elements**
@@ -391,7 +391,7 @@ See an example printout of a forecast data cube,
'forecast_reference_time' is a constant.
-Statistical measures
+Statistical Measures
--------------------
**UM Field elements**
@@ -438,7 +438,7 @@ For example:
(CellMethod(method='mean', coord_names=('time',), intervals=('6 hour',), comments=()),)
-Other metadata
+Other Metadata
--------------
LBRSVD4
diff --git a/docs/iris/src/userguide/citation.rst b/docs/iris/src/userguide/citation.rst
index 56eab0a4eb..0a3a85fb89 100644
--- a/docs/iris/src/userguide/citation.rst
+++ b/docs/iris/src/userguide/citation.rst
@@ -8,7 +8,7 @@ If Iris played an important part in your research then please add us to your
reference list by using one of the recommendations below.
************
-BibTeX entry
+BibTeX Entry
************
For example::
@@ -24,7 +24,7 @@ For example::
*******************
-Downloaded software
+Downloaded Software
*******************
Suggested format::
@@ -37,7 +37,7 @@ For example::
********************
-Checked out software
+Checked Out Software
********************
Suggested format::
@@ -48,7 +48,7 @@ For example::
Iris. Met Office. git@github.com:SciTools/iris.git 06-03-2013
-.. _How to cite and describe software: http://software.ac.uk/so-exactly-what-software-did-you-use
+.. _How to cite and describe software: https://software.ac.uk/how-cite-software
Reference: [Jackson]_.
diff --git a/docs/iris/src/userguide/code_maintenance.rst b/docs/iris/src/userguide/code_maintenance.rst
index d03808e18f..b2b498bc80 100644
--- a/docs/iris/src/userguide/code_maintenance.rst
+++ b/docs/iris/src/userguide/code_maintenance.rst
@@ -1,11 +1,11 @@
-Code maintenance
+Code Maintenance
================
From a user point of view "code maintenance" means ensuring that your existing
working code stays working, in the face of changes to Iris.
-Stability and change
+Stability and Change
---------------------
In practice, as Iris develops, most users will want to periodically upgrade
@@ -25,7 +25,7 @@ maintenance effort is probably still necessary:
for some completely unconnected reason.
-Principles of change management
+Principles of Change Management
-------------------------------
When you upgrade software to a new version, you often find that you need to
diff --git a/docs/iris/src/userguide/cube_maths.rst b/docs/iris/src/userguide/cube_maths.rst
index eebff53e62..d2d4d84b68 100644
--- a/docs/iris/src/userguide/cube_maths.rst
+++ b/docs/iris/src/userguide/cube_maths.rst
@@ -1,7 +1,7 @@
.. _cube maths:
==========
-Cube maths
+Cube Maths
==========
@@ -29,7 +29,7 @@ In order to reduce the amount of metadata which becomes inconsistent,
fundamental arithmetic operations such as addition, subtraction, division
and multiplication can be applied directly to any cube.
-Calculating the difference between two cubes
+Calculating the Difference Between Two Cubes
--------------------------------------------
Let's load some air temperature which runs from 1860 to 2100::
@@ -77,7 +77,7 @@ but with the data representing their difference:
.. _cube-maths_anomaly:
-Calculating a cube anomaly
+Calculating a Cube Anomaly
--------------------------
In section :doc:`cube_statistics` we discussed how the dimensionality of a cube
@@ -165,7 +165,7 @@ broadcasting behaviour::
>>> print(result.summary(True))
unknown / (K) (time: 240; latitude: 37; longitude: 49)
-Combining multiple phenomena to form a new one
+Combining Multiple Phenomena to Form a New One
----------------------------------------------
Combining cubes of potential-temperature and pressure we can calculate
@@ -223,7 +223,7 @@ The result could now be plotted using the guidance provided in the
.. _cube_maths_combining_units:
-Combining units
+Combining Units
---------------
It should be noted that when combining cubes by multiplication, division or
@@ -243,7 +243,7 @@ unit (if ``a`` had units ``'m2'`` then ``a ** 0.5`` would result in a cube
with units ``'m'``).
Iris inherits units from `cf_units `_
-which in turn inherits from `UDUNITS `_.
+which in turn inherits from `UDUNITS `_.
As well as the units UDUNITS provides, cf units also provides the units
``'no-unit'`` and ``'unknown'``. A unit of ``'no-unit'`` means that the
associated data is not suitable for describing with a unit, cf units
diff --git a/docs/iris/src/userguide/cube_statistics.rst b/docs/iris/src/userguide/cube_statistics.rst
index 310551c76f..4eb016078e 100644
--- a/docs/iris/src/userguide/cube_statistics.rst
+++ b/docs/iris/src/userguide/cube_statistics.rst
@@ -1,12 +1,12 @@
.. _cube-statistics:
===============
-Cube statistics
+Cube Statistics
===============
.. _cube-statistics-collapsing:
-Collapsing entire data dimensions
+Collapsing Entire Data Dimensions
---------------------------------
.. testsetup::
@@ -100,7 +100,7 @@ in the gallery takes a zonal mean of an ``XYT`` cube by using the
.. _cube-statistics-collapsing-average:
-Area averaging
+Area Averaging
^^^^^^^^^^^^^^
Some operators support additional keywords to the ``cube.collapsed`` method.
@@ -152,14 +152,14 @@ including an example on taking a :ref:`global area-weighted mean
.. _cube-statistics-aggregated-by:
-Partially reducing data dimensions
+Partially Reducing Data Dimensions
----------------------------------
Instead of completely collapsing a dimension, other methods can be applied
to reduce or filter the number of data points of a particular dimension.
-Aggregation of grouped data
+Aggregation of Grouped Data
^^^^^^^^^^^^^^^^^^^^^^^^^^^
The :meth:`Cube.aggregated_by ` operation
diff --git a/docs/iris/src/userguide/interpolation_and_regridding.rst b/docs/iris/src/userguide/interpolation_and_regridding.rst
index ffed21a7f5..5a5a985ccb 100644
--- a/docs/iris/src/userguide/interpolation_and_regridding.rst
+++ b/docs/iris/src/userguide/interpolation_and_regridding.rst
@@ -8,7 +8,7 @@
warnings.simplefilter('ignore')
=================================
-Cube interpolation and regridding
+Cube Interpolation and Regridding
=================================
Iris provides powerful cube-aware interpolation and regridding functionality,
@@ -123,7 +123,7 @@ will be orthogonal:
air_temperature / (K) (latitude: 13; longitude: 14)
-Interpolating non-horizontal coordinates
+Interpolating Non-Horizontal Coordinates
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Interpolation in Iris is not limited to horizontal-spatial coordinates - any
@@ -195,7 +195,7 @@ For example, to mask values that lie beyond the range of the original data:
.. _caching_an_interpolator:
-Caching an interpolator
+Caching an Interpolator
^^^^^^^^^^^^^^^^^^^^^^^
If you need to interpolate a cube on multiple sets of sample points you can
@@ -305,7 +305,7 @@ cells have now become rectangular in a plate carrée (equirectangular) projectio
The spatial grid of the resulting cube is really global, with a large proportion of the
data being masked.
-Area-weighted regridding
+Area-Weighted Regridding
^^^^^^^^^^^^^^^^^^^^^^^^
It is often the case that a point-based regridding scheme (such as
@@ -384,7 +384,7 @@ To visualise the above regrid, let's plot the original data, along with 3 distin
.. _caching_a_regridder:
-Caching a regridder
+Caching a Regridder
^^^^^^^^^^^^^^^^^^^
If you need to regrid multiple cubes with a common source grid onto a common
@@ -415,7 +415,7 @@ In each case ``result`` will be the input cube regridded to the grid defined by
the target grid cube (in this case ``rotated_psl``) that we used to define the
cached regridder.
-Regridding lazy data
+Regridding Lazy Data
^^^^^^^^^^^^^^^^^^^^
If you are working with large cubes, especially when you are regridding to a
diff --git a/docs/iris/src/userguide/iris_cubes.rst b/docs/iris/src/userguide/iris_cubes.rst
index 5929c402f2..de206486d3 100644
--- a/docs/iris/src/userguide/iris_cubes.rst
+++ b/docs/iris/src/userguide/iris_cubes.rst
@@ -1,7 +1,7 @@
.. _iris_data_structures:
====================
-Iris data structures
+Iris Data Structures
====================
The top level object in Iris is called a cube. A cube contains data and metadata about a phenomenon.
@@ -71,11 +71,11 @@ A cube consists of:
* a list of coordinate "factories" used for deriving coordinates from the values of other coordinates in the cube
-Cubes in practice
+Cubes in Practice
-----------------
-A simple cube example
+A Simple Cube Example
=====================
Suppose we have some gridded data which has 24 air temperature readings (in Kelvin) which is located at
@@ -137,7 +137,7 @@ For example, it is possible to attach any of the following:
a collection of "ensembles" (i.e. multiple model runs).
-Printing a cube
+Printing a Cube
===============
Every Iris cube can be printed to screen as you will see later in the user guide. It is worth familiarising yourself with the
diff --git a/docs/iris/src/userguide/loading_iris_cubes.rst b/docs/iris/src/userguide/loading_iris_cubes.rst
index 006a919408..659c28420a 100644
--- a/docs/iris/src/userguide/loading_iris_cubes.rst
+++ b/docs/iris/src/userguide/loading_iris_cubes.rst
@@ -1,7 +1,7 @@
.. _loading_iris_cubes:
===================
-Loading Iris cubes
+Loading Iris Cubes
===================
To load a single file into a **list** of Iris cubes
@@ -116,7 +116,7 @@ This was the output discussed at the end of the :doc:`iris_cubes` section.
appropriate column for each cube data dimension that they describe.
-Loading multiple files
+Loading Multiple Files
-----------------------
To load more than one file into a list of cubes, a list of filenames can be
@@ -142,7 +142,7 @@ star wildcards can be used::
The cubes returned will not necessarily be in the same order as the
order of the filenames.
-Lazy loading
+Lazy Loading
------------
In fact when Iris loads data from most file types, it normally only reads the
@@ -155,7 +155,7 @@ For more on the benefits, handling and uses of lazy data, see :doc:`Real and Laz
.. _constrained-loading:
-Constrained loading
+Constrained Loading
-----------------------
Given a large dataset, it is possible to restrict or constrain the load
to match specific Iris cube metadata.
@@ -261,7 +261,7 @@ then specific STASH codes can be filtered::
:class:`iris.Constraint` reference documentation.
-Constraining a circular coordinate across its boundary
+Constraining a Circular Coordinate Across Its Boundary
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Occasionally you may need to constrain your cube with a region that crosses the
@@ -403,7 +403,7 @@ Notice how the dates printed are between the range specified in the ``st_swithun
and that they span multiple years.
-Strict loading
+Strict Loading
--------------
The :py:func:`iris.load_cube` and :py:func:`iris.load_cubes` functions are
diff --git a/docs/iris/src/userguide/merge_and_concat.rst b/docs/iris/src/userguide/merge_and_concat.rst
index 0d844ac403..ffa36ccdeb 100644
--- a/docs/iris/src/userguide/merge_and_concat.rst
+++ b/docs/iris/src/userguide/merge_and_concat.rst
@@ -1,7 +1,7 @@
.. _merge_and_concat:
=====================
-Merge and concatenate
+Merge and Concatenate
=====================
We saw in the :doc:`loading_iris_cubes` chapter that Iris tries to load as few cubes as
@@ -203,7 +203,7 @@ single cube. An example of fixing an issue like this can be found in the
:ref:`merge_concat_common_issues` section.
-Merge in Iris load
+Merge in Iris Load
==================
The CubeList's :meth:`~iris.cube.CubeList.merge` method is used internally
@@ -365,7 +365,7 @@ single cube. An example of fixing an issue like this can be found in the
.. _merge_concat_common_issues:
-Common issues with merge and concatenate
+Common Issues With Merge and Concatenate
----------------------------------------
The Iris algorithms that drive :meth:`~iris.cube.CubeList.merge` and
@@ -529,7 +529,7 @@ Trying to merge the input cubes with duplicate cubes not allowed raises an
error highlighting the presence of the duplicate cube.
-**Single value coordinates**
+**Single Value Coordinates**
Coordinates containing only a single value can cause confusion when
combining input cubes. Remember:
diff --git a/docs/iris/src/userguide/navigating_a_cube.rst b/docs/iris/src/userguide/navigating_a_cube.rst
index a7b7717ae3..df18c032c1 100644
--- a/docs/iris/src/userguide/navigating_a_cube.rst
+++ b/docs/iris/src/userguide/navigating_a_cube.rst
@@ -1,5 +1,5 @@
=================
-Navigating a cube
+Navigating a Cube
=================
.. testsetup::
@@ -15,7 +15,7 @@ Navigating a cube
After loading any cube, you will want to investigate precisely what it contains. This section is all about accessing
and manipulating the metadata contained within a cube.
-Cube string representations
+Cube String Representations
---------------------------
We have already seen a basic string representation of a cube when printing:
@@ -52,7 +52,7 @@ variable. In most cases it is reasonable to ignore anything starting with a "``_
dir(cube)
help(cube)
-Working with cubes
+Working With Cubes
------------------
Every cube has a standard name, long name and units which are accessed with
@@ -111,7 +111,7 @@ cube with the :attr:`Cube.cell_methods ` attribute:
print(cube.cell_methods)
-Accessing coordinates on the cube
+Accessing Coordinates on the Cube
---------------------------------
A cube's coordinates can be retrieved via :meth:`Cube.coords `.
@@ -148,7 +148,7 @@ numpy array. If the coordinate has no bounds ``None`` will be returned::
print(type(coord.bounds))
-Adding metadata to a cube
+Adding Metadata to a Cube
-------------------------
We can add and remove coordinates via :func:`Cube.add_dim_coord`,
@@ -177,7 +177,7 @@ We can add and remove coordinates via :func:`Cube.add_dim_coord`_ package in order to generate
@@ -13,7 +13,7 @@ been extended within Iris to facilitate easy visualisation of a cube's data.
***************************
-Matplotlib's pyplot basics
+Matplotlib's Pyplot Basics
***************************
A simple line plot can be created using the
@@ -35,7 +35,7 @@ There are two modes of rendering within Matplotlib; **interactive** and
**non-interactive**.
-Interactive plot rendering
+Interactive Plot Rendering
==========================
The previous example was *non-interactive* as the figure is only rendered
*after* the call to :py:func:`plt.show() `.
@@ -84,7 +84,7 @@ so ensure that interactive mode is turned off with::
plt.interactive(False)
-Saving a plot
+Saving a Plot
=============
The :py:func:`matplotlib.pyplot.savefig` function is similar to **plt.show()**
@@ -113,7 +113,7 @@ Some of the formats which are supported by **plt.savefig**:
====== ====== ======================================================================
******************
-Iris cube plotting
+Iris Cube Plotting
******************
The Iris modules :py:mod:`iris.quickplot` and :py:mod:`iris.plot` extend the
@@ -149,7 +149,7 @@ where appropriate.
import iris.quickplot as qplt
-Plotting 1-dimensional cubes
+Plotting 1-Dimensional Cubes
============================
The simplest 1D plot is achieved with the :py:func:`iris.plot.plot` function.
@@ -181,7 +181,7 @@ For example, the previous plot can be improved quickly by replacing
-Multi-line plot
+Multi-Line Plot
---------------
A multi-lined (or over-plotted) plot, with a legend, can be achieved easily by
@@ -212,10 +212,10 @@ the temperature at some latitude cross-sections.
and run it using ``python my_file.py``.
-Plotting 2-dimensional cubes
+Plotting 2-Dimensional Cubes
============================
-Creating maps
+Creating Maps
-------------
Whenever a 2D plot is created using an :class:`iris.coord_systems.CoordSystem`,
a cartopy :class:`~cartopy.mpl.GeoAxes` instance is created, which can be
@@ -230,7 +230,7 @@ things.
:meth:`cartopy's coastlines() `.
-Cube contour
+Cube Contour
------------
A simple contour plot of a cube can be created with either the
:func:`iris.plot.contour` or :func:`iris.quickplot.contour` functions:
@@ -239,7 +239,7 @@ A simple contour plot of a cube can be created with either the
:include-source:
-Cube filled contour
+Cube Filled Contour
-------------------
Similarly a filled contour plot of a cube can be created with the
:func:`iris.plot.contourf` or :func:`iris.quickplot.contourf` functions:
@@ -248,7 +248,7 @@ Similarly a filled contour plot of a cube can be created with the
:include-source:
-Cube block plot
+Cube Block Plot
---------------
In some situations the underlying coordinates are better represented with a
continuous bounded coordinate, in which case a "block" plot may be more
@@ -268,7 +268,7 @@ or :func:`iris.quickplot.pcolormesh`.
.. _brewer-info:
***********************
-Brewer colour palettes
+Brewer Colour Palettes
***********************
Iris includes colour specifications and designs developed by
@@ -303,7 +303,7 @@ The following subset of Brewer palettes found at
.. plot:: userguide/plotting_examples/brewer.py
-Plotting with Brewer
+Plotting With Brewer
====================
To plot a cube using a Brewer colour palette, simply select one of the Iris
@@ -316,7 +316,7 @@ become available once :mod:`iris.plot` or :mod:`iris.quickplot` are imported.
.. _brewer-cite:
-Adding a citation
+Adding a Citation
=================
Citations can be easily added to a plot using the
diff --git a/docs/iris/src/userguide/real_and_lazy_data.rst b/docs/iris/src/userguide/real_and_lazy_data.rst
index 574ca4e1a0..0bc1846457 100644
--- a/docs/iris/src/userguide/real_and_lazy_data.rst
+++ b/docs/iris/src/userguide/real_and_lazy_data.rst
@@ -10,7 +10,7 @@
==================
-Real and lazy data
+Real and Lazy Data
==================
We have seen in the :doc:`iris_cubes` section of the user guide that
@@ -21,7 +21,7 @@ In this section of the user guide we will look specifically at the concepts of
real and lazy data as they apply to the cube and other data structures in Iris.
-What is real and lazy data?
+What Is Real and Lazy Data?
---------------------------
In Iris, we use the term **real data** to describe data arrays that are loaded
@@ -97,7 +97,7 @@ In such cases, a required portion can be extracted and realised without calculat
.. _when_real_data:
-When does my data become real?
+When Does My Data Become Real?
------------------------------
Certain operations, such as cube indexing and statistics, can be
@@ -134,7 +134,7 @@ You can also realise (and so load into memory) your cube's lazy data if you 'tou
To 'touch' the data means directly accessing the data by calling ``cube.data``,
as in the previous example.
-Core data
+Core Data
^^^^^^^^^
Cubes have the concept of "core data". This returns the cube's data in its
@@ -225,7 +225,7 @@ coordinates' lazy points and bounds:
Printing a lazy :class:`~iris.coords.AuxCoord` will realise its points and bounds arrays!
-Dask processing options
+Dask Processing Options
-----------------------
Iris uses dask to provide lazy data arrays for both Iris cubes and coordinates,
diff --git a/docs/iris/src/userguide/saving_iris_cubes.rst b/docs/iris/src/userguide/saving_iris_cubes.rst
index 3a30321979..237ceb18b6 100644
--- a/docs/iris/src/userguide/saving_iris_cubes.rst
+++ b/docs/iris/src/userguide/saving_iris_cubes.rst
@@ -1,7 +1,7 @@
.. _saving_iris_cubes:
==================
-Saving Iris cubes
+Saving Iris Cubes
==================
Iris supports the saving of cubes and cube lists to:
@@ -39,8 +39,8 @@ and the keyword argument `saver` is not required.
attempting to overwrite an existing file.
-Controlling the save process
------------------------------
+Controlling the Save Process
+----------------------------
The :py:func:`iris.save` function passes all other keywords through to the saver function defined, or automatically set from the file extension. This enables saver specific functionality to be called.
@@ -73,8 +73,8 @@ See
for more details on supported arguments for the individual savers.
-Customising the save process
------------------------------
+Customising the Save Process
+----------------------------
When saving to GRIB or PP, the save process may be intercepted between the translation step and the file writing. This enables customisation of the output messages, based on Cube metadata if required, over and above the translations supplied by Iris.
@@ -103,14 +103,14 @@ Similarly a PP field may need to be written out with a specific value for LBEXP.
iris.fileformats.pp.save_fields(tweaked_fields(cubes[0]), '/tmp/app.pp')
-netCDF
-^^^^^^^
+NetCDF
+^^^^^^
NetCDF is a flexible container for metadata and cube metadata is closely related to the CF for netCDF semantics. This means that cube metadata is well represented in netCDF files, closely resembling the in memory metadata representation.
Thus there is no provision for similar save customisation functionality for netCDF saving, all customisations should be applied to the cube prior to saving to netCDF.
-Bespoke saver
---------------
+Bespoke Saver
+-------------
A bespoke saver may be written to support an alternative file format. This can be provided to the :py:func:`iris.save` function, enabling Iris to write to a different file format.
Such a custom saver will need be written to meet the needs of the file format and to handle the metadata translation from cube metadata effectively.
diff --git a/docs/iris/src/userguide/subsetting_a_cube.rst b/docs/iris/src/userguide/subsetting_a_cube.rst
index 5d9a560be9..02cf1645a1 100644
--- a/docs/iris/src/userguide/subsetting_a_cube.rst
+++ b/docs/iris/src/userguide/subsetting_a_cube.rst
@@ -1,7 +1,7 @@
.. _subsetting_a_cube:
=================
-Subsetting a cube
+Subsetting a Cube
=================
The :doc:`loading_iris_cubes` section of the user guide showed how to load data into multidimensional Iris cubes.
@@ -11,7 +11,7 @@ Iris provides several ways of reducing both the amount of data and/or the number
In all cases **the subset of a valid cube is itself a valid cube**.
-Cube extraction
+Cube Extraction
^^^^^^^^^^^^^^^^
A subset of a cube can be "extracted" from a multi-dimensional cube in order to reduce its dimensionality:
@@ -101,7 +101,7 @@ same way as loading with constraints:
um_version: 7.3
-Cube iteration
+Cube Iteration
^^^^^^^^^^^^^^^
It is not possible to directly iterate over an Iris cube. That is, you cannot use code such as
``for x in cube:``. However, you can iterate over cube slices, as this section details.
@@ -152,7 +152,7 @@ slicing the 3 dimensional cube (15, 100, 100) by longitude (i starts at 0 and 15
cube using the slices method.
-Cube indexing
+Cube Indexing
^^^^^^^^^^^^^
In the same way that you would expect a numeric multidimensional array to be **indexed** to take a subset of your
original array, you can **index** a Cube for the same purpose.
diff --git a/docs/iris/src/whatsnew/1.0.rst b/docs/iris/src/whatsnew/1.0.rst
index 11d29320b6..b226dc609b 100644
--- a/docs/iris/src/whatsnew/1.0.rst
+++ b/docs/iris/src/whatsnew/1.0.rst
@@ -10,7 +10,7 @@ work. Following this release we plan to deliver significant performance
improvements and additional features.
-The role of 1.x
+The Role of 1.x
===============
The 1.x series of releases is intended to provide a relatively stable,
@@ -58,7 +58,7 @@ A summary of the main features added with version 1.0:
contain bounds.
-CF-netCDF coordinate systems
+CF-NetCDF Coordinate Systems
----------------------------
The coordinate systems in Iris are now defined by the CF-netCDF
@@ -73,7 +73,7 @@ The coordinate systems available in Iris 1.0 and their corresponding
Iris classes are:
================================================================================================================= =========================================
-CF name Iris class
+CF Name Iris Class
================================================================================================================= =========================================
`Latitude-longitude `_ :class:`~iris.coord_systems.GeogCS`
`Rotated pole `_ :class:`~iris.coord_systems.RotatedGeogCS`
@@ -88,7 +88,7 @@ coordinate system used by the British
.. _whats-new-cartopy:
-Using Cartopy for mapping in matplotlib
+Using Cartopy for Mapping in Matplotlib
---------------------------------------
The underlying map drawing package has now been updated to use
@@ -135,7 +135,7 @@ For more examples of what can be done with Cartopy, see the Iris gallery and
`Cartopy's documentation `_.
-Hybrid-pressure
+Hybrid-Pressure
---------------
With the introduction of the :class:`~iris.aux_factory.HybridPressureFactory`
@@ -181,7 +181,7 @@ dealing with large numbers of netCDF files, or in long running
processes.
-Brewer colour palettes
+Brewer Colour Palettes
----------------------
Iris includes a selection of carefully designed colour palettes produced
@@ -207,7 +207,7 @@ To include a reference in a journal article or report please refer to
in the citation guidance provided by Cynthia Brewer.
-Metadata attributes
+Metadata Attributes
-------------------
Iris now stores "source" and "history" metadata in Cube attributes.
@@ -241,7 +241,7 @@ Where previously it would have appeared as::
cube.add_aux_coord(src_coord)
-New loading functions
+New Loading Functions
---------------------
The main functions for loading cubes are now:
@@ -264,7 +264,7 @@ now use the :func:`iris.load_cube()` and :func:`iris.load_cubes()`
functions instead.
-Cube projection
+Cube Projection
---------------
Iris now has the ability to project a cube into a number of map projections.
@@ -302,7 +302,7 @@ preserved. This function currently assumes global data and will if
necessary extrapolate beyond the geographical extent of the source cube.
-Incompatible changes
+Incompatible Changes
====================
* The "source" and "history" metadata are now represented as Cube
diff --git a/docs/iris/src/whatsnew/1.1.rst b/docs/iris/src/whatsnew/1.1.rst
index f2b0995fa0..86f0bb16fa 100644
--- a/docs/iris/src/whatsnew/1.1.rst
+++ b/docs/iris/src/whatsnew/1.1.rst
@@ -44,7 +44,7 @@ some notable improvements to netCDF/PP import.
with product template 4.9.
-Coordinate categorisation
+Coordinate Categorisation
-------------------------
An :func:`~iris.coord_categorisation.add_day_of_year` categorisation
@@ -52,7 +52,7 @@ function has been added to the existing suite in
:mod:`iris.coord_categorisation`.
-Custom seasons
+Custom Seasons
~~~~~~~~~~~~~~
The conventional seasonal categorisation functions have been
@@ -87,7 +87,7 @@ This function adds a coordinate containing True/False values determined
by membership of a single custom season.
-Bugs fixed
+Bugs Fixed
==========
* PP export no longer attempts to set/overwrite the STASH code based on
diff --git a/docs/iris/src/whatsnew/1.10.rst b/docs/iris/src/whatsnew/1.10.rst
index 3f51287fa1..92822087dd 100644
--- a/docs/iris/src/whatsnew/1.10.rst
+++ b/docs/iris/src/whatsnew/1.10.rst
@@ -1,5 +1,5 @@
v1.10 (05 Sep 2016)
-*********************
+*******************
This document explains the changes made to Iris for this release
(:doc:`View all changes `.)
@@ -137,7 +137,7 @@ Features
attributes is now allowed.
-Bugs fixed
+Bugs Fixed
==========
* Altered Cell Methods to display coordinate's standard_name rather than
@@ -215,7 +215,7 @@ Bugs fixed
thrown while trying to subset over a non-dimensional scalar coordinate.
-Incompatible changes
+Incompatible Changes
====================
* The source and target for
diff --git a/docs/iris/src/whatsnew/1.11.rst b/docs/iris/src/whatsnew/1.11.rst
index e0d46d0f09..356e6ec85b 100644
--- a/docs/iris/src/whatsnew/1.11.rst
+++ b/docs/iris/src/whatsnew/1.11.rst
@@ -16,7 +16,7 @@ Features
* The coordinate system :class:`iris.coord_systems.LambertAzimuthalEqualArea`
has been added with NetCDF saving support.
-Bugs fixed
+Bugs Fixed
==========
* Fixed a floating point tolerance bug in
diff --git a/docs/iris/src/whatsnew/1.13.rst b/docs/iris/src/whatsnew/1.13.rst
index 2d3b3ffce5..028c298505 100644
--- a/docs/iris/src/whatsnew/1.13.rst
+++ b/docs/iris/src/whatsnew/1.13.rst
@@ -1,5 +1,5 @@
v1.13 (17 May 2017)
-*************************
+*******************
This document explains the changes made to Iris for this release
(:doc:`View all changes `.)
@@ -17,7 +17,7 @@ Features
:meth:`iris.cube.share_data` flag.
-Bug fixes
+Bug Fixes
=========
* The bounds are now set correctly on the longitude coordinate if a zonal mean
diff --git a/docs/iris/src/whatsnew/1.2.rst b/docs/iris/src/whatsnew/1.2.rst
index d4bb863a3b..dce0b6dc04 100644
--- a/docs/iris/src/whatsnew/1.2.rst
+++ b/docs/iris/src/whatsnew/1.2.rst
@@ -44,7 +44,7 @@ Features
:class:`~iris.cube.Cube`.
-Bugs fixed
+Bugs Fixed
==========
* The GRIB hindcast interpretation of negative forecast times can be enabled
@@ -54,7 +54,7 @@ Bugs fixed
coordinates.
-Incompatible changes
+Incompatible Changes
====================
* The deprecated :attr:`iris.cube.Cube.unit` and :attr:`iris.coords.Coord.unit`
diff --git a/docs/iris/src/whatsnew/1.3.rst b/docs/iris/src/whatsnew/1.3.rst
index 9a2ac2eba1..beaa594ab5 100644
--- a/docs/iris/src/whatsnew/1.3.rst
+++ b/docs/iris/src/whatsnew/1.3.rst
@@ -30,7 +30,7 @@ Features
.. _whats-new-abf:
-Loading ABF/ABL files
+Loading ABF/ABL Files
---------------------
Support for the ABF and ABL file formats (as
@@ -51,7 +51,7 @@ For example::
.. _whats-new-cf-profile:
-Customised CF profiles
+Customised CF Profiles
----------------------
Iris now provides hooks in the CF-netCDF export process to allow
@@ -74,7 +74,7 @@ For further implementation details see ``iris/fileformats/netcdf.py``.
.. _whats-new-concat:
-Cube concatenation
+Cube Concatenation
------------------
Iris now provides initial support for concatenating Cubes along one or
@@ -101,7 +101,7 @@ combine these into a single Cube as follows::
As this is an experimental feature, your feedback is especially welcome.
-Bugs fixed
+Bugs Fixed
==========
* Printing a Cube now supports Unicode attribute values.
@@ -123,7 +123,7 @@ Deprecations
naming conventions.
====================================== ===========================================
- Deprecated property/method New method
+ Deprecated Property/Method New Method
====================================== ===========================================
:meth:`~iris.unit.Unit.convertible()` :meth:`~iris.unit.Unit.is_convertible()`
:attr:`~iris.unit.Unit.dimensionless` :meth:`~iris.unit.Unit.is_dimensionless()`
diff --git a/docs/iris/src/whatsnew/1.4.rst b/docs/iris/src/whatsnew/1.4.rst
index 29f2079af8..858f985ec6 100644
--- a/docs/iris/src/whatsnew/1.4.rst
+++ b/docs/iris/src/whatsnew/1.4.rst
@@ -61,7 +61,7 @@ Features
.. _OPeNDAP: http://www.opendap.org/about
.. _exp-regrid:
-Experimental regridding enhancements
+Experimental Regridding Enhancements
------------------------------------
Bilinear, area-weighted and area-conservative regridding functions are now
@@ -72,7 +72,7 @@ development.
In the meantime:
-Bilinear rectilinear regridding
+Bilinear Rectilinear Regridding
-------------------------------
:func:`~iris.experimental.regrid.regrid_bilinear_rectilinear_src_and_grid`
@@ -85,7 +85,7 @@ For example::
regridded_cube = regrid_bilinear_rectilinear_src_and_grid(source_cube, target_grid_cube)
-Area-weighted regridding
+Area-Weighted Regridding
------------------------
:func:`~iris.experimental.regrid.regrid_area_weighted_rectilinear_src_and_grid`
@@ -98,7 +98,7 @@ For example::
regridded_cube = regrid_area_weighted(source_cube, target_grid_cube)
-Area-conservative regridding
+Area-Conservative Regridding
----------------------------
:func:`~iris.experimental.regrid_conservative.regrid_conservative_via_esmpy`
@@ -113,7 +113,7 @@ For example::
.. _iris-pandas:
-Iris-Pandas interoperability
+Iris-Pandas Interoperability
----------------------------
Conversion to and from Pandas Series_ and DataFrames_ is now available.
@@ -125,7 +125,7 @@ See :mod:`iris.pandas` for more details.
.. _load-opendap:
-Load cubes from the internet via OPeNDAP
+Load Cubes From the Internet via OPeNDAP
----------------------------------------
Cubes can now be loaded directly from the internet, via OPeNDAP_.
@@ -137,7 +137,7 @@ For example::
.. _geotiff_export:
-GeoTiff export
+GeoTiff Export
--------------
With this experimental feature, two dimensional cubes can now be exported to
@@ -155,7 +155,7 @@ For example::
.. _cube-merge-update:
-Cube merge update
+Cube Merge Update
-----------------
Cube merging now favours numerical coordinates over string coordinates
@@ -167,7 +167,7 @@ dimensions"*.
.. _season-year-name:
-Unambiguous season year naming
+Unambiguous Season Year Naming
------------------------------
The default names of categorisation coordinates are now less ambiguous.
@@ -178,7 +178,7 @@ For example, :func:`~iris.coord_categorisation.add_month_number` and
.. _grib-novert:
-Cubes with no vertical coord can now be exported to GRIB
+Cubes With No Vertical Coord Can Now Be Exported to GRIB
--------------------------------------------------------
Iris can now export cubes with no vertical coord to GRIB.
@@ -188,7 +188,7 @@ https://github.com/SciTools/iris/issues/519.
.. _simple_cfg:
-Simplified resource configuration
+Simplified Resource Configuration
---------------------------------
A new configuration variable called :data:`iris.config.TEST_DATA_DIR`
@@ -202,7 +202,7 @@ be set by adding a ``test_data_dir`` entry to the ``Resources`` section of
.. _grib_params:
-Extended GRIB parameter translation
+Extended GRIB Parameter Translation
-----------------------------------
- More GRIB2 params are recognised on input.
@@ -213,7 +213,7 @@ Extended GRIB parameter translation
.. _one-d-linear:
-One dimensional linear interpolation fix
+One Dimensional Linear Interpolation Fix
----------------------------------------
:func:`~iris.analysis.interpolate.linear` can now extrapolate from a single
@@ -232,7 +232,7 @@ to cause the loss of coordinate metadata when calculating the curl or the
derivative of a cube has been fixed.
-Incompatible changes
+Incompatible Changes
====================
* As part of simplifying the mechanism for accessing test data,
diff --git a/docs/iris/src/whatsnew/1.5.rst b/docs/iris/src/whatsnew/1.5.rst
index ea7965fe15..72bdbac480 100644
--- a/docs/iris/src/whatsnew/1.5.rst
+++ b/docs/iris/src/whatsnew/1.5.rst
@@ -125,7 +125,7 @@ Features
systems and mapping 0 to 360 longitudes to the -180 to 180 range.
-Bugs fixed
+Bugs Fixed
==========
* NetCDF error handling on save has been extended to capture file path and
diff --git a/docs/iris/src/whatsnew/1.6.rst b/docs/iris/src/whatsnew/1.6.rst
index 3855d71479..8b0205b86f 100644
--- a/docs/iris/src/whatsnew/1.6.rst
+++ b/docs/iris/src/whatsnew/1.6.rst
@@ -146,7 +146,7 @@ Features
.. _caching:
-A new utility function to assist with caching
+A New Utility Function to Assist With Caching
---------------------------------------------
To assist with management of caching results to file, the new utility
function :func:`iris.util.file_is_newer_than` may be used to easily determine whether
@@ -173,7 +173,7 @@ consuming processing, or to reap the benefit of fast-loading a pickled cube.
.. _rms:
-The RMS aggregator supports weights
+The RMS Aggregator Supports Weights
-----------------------------------
The :data:`iris.analysis.RMS` aggregator has been extended to allow the use of
@@ -189,7 +189,7 @@ For example, an RMS weighted cube collapse is performed as follows:
.. _equalise:
-Equalise cube attributes
+Equalise Cube Attributes
------------------------
To assist with :class:`iris.cube.Cube` merging, the new experimental in-place
@@ -202,7 +202,7 @@ have the same attributes.
.. _tolerance:
-Masking a collapsed result by missing-data tolerance
+Masking a Collapsed Result by Missing-Data Tolerance
----------------------------------------------------
The result from collapsing masked cube data may now be completely
@@ -216,7 +216,7 @@ less than or equal to the provided tolerance.
.. _promote:
-Promote a scalar coordinate
+Promote a Scalar Coordinate
---------------------------
The new utility function :func:`iris.util.new_axis` creates a new cube with
@@ -229,7 +229,7 @@ Note that, this function will load the data payload of the cube.
.. _peak:
-A new PEAK aggregator providing spline interpolation
+A New PEAK Aggregator Providing Spline Interpolation
----------------------------------------------------
The new :data:`iris.analysis.PEAK` aggregator calculates the global peak
@@ -244,7 +244,7 @@ For example, to calculate the peak time:
collapsed_cube = cube.collapsed('time', PEAK)
-Bugs fixed
+Bugs Fixed
==========
* :meth:`iris.cube.Cube.rolling_window` has been extended to support masked
@@ -283,7 +283,7 @@ Bugs fixed
* Exception no longer raised for any ellipsoid definition in nimrod loading.
-Incompatible changes
+Incompatible Changes
====================
* The experimental 'concatenate' function is now a method of a
@@ -312,7 +312,7 @@ Incompatible changes
been removed.
====================================== ===========================================
- Removed property/method New method
+ Removed Property/Method New Method
====================================== ===========================================
:meth:`~iris.unit.Unit.convertible()` :meth:`~iris.unit.Unit.is_convertible()`
:attr:`~iris.unit.Unit.dimensionless` :meth:`~iris.unit.Unit.is_dimensionless()`
@@ -335,7 +335,7 @@ Incompatible changes
removed.
=============================================================== =======================================================
- Removed function New function
+ Removed Function New Function
=============================================================== =======================================================
:func:`~iris.coord_categorisation.add_custom_season` :func:`~iris.coord_categorisation.add_season`
:func:`~iris.coord_categorisation.add_custom_season_number` :func:`~iris.coord_categorisation.add_season_number`
diff --git a/docs/iris/src/whatsnew/1.7.rst b/docs/iris/src/whatsnew/1.7.rst
index f6e818fedf..44ebe9ec60 100644
--- a/docs/iris/src/whatsnew/1.7.rst
+++ b/docs/iris/src/whatsnew/1.7.rst
@@ -1,5 +1,5 @@
v1.7 (04 Jul 2014)
-********************
+******************
This document explains the changes made to Iris for this release
(:doc:`View all changes `.)
@@ -196,7 +196,7 @@ Features
* A speed improvement when loading PP or FF data and constraining on STASH code.
-Bugs fixed
+Bugs Fixed
==========
* Data containing more than one reference cube for constructing hybrid height
@@ -282,7 +282,7 @@ v1.7.4 (15 Apr 2015)
create LambertConformal coordinate systems with Cartopy >= 0.12.
-Incompatible changes
+Incompatible Changes
====================
* Saving a cube with a STASH attribute to NetCDF now produces a variable
diff --git a/docs/iris/src/whatsnew/1.8.rst b/docs/iris/src/whatsnew/1.8.rst
index 579d4d20c5..0e327b4f5a 100644
--- a/docs/iris/src/whatsnew/1.8.rst
+++ b/docs/iris/src/whatsnew/1.8.rst
@@ -1,5 +1,5 @@
v1.8 (14 Apr 2015)
-********************
+******************
This document explains the changes made to Iris for this release
(:doc:`View all changes `.)
@@ -151,7 +151,7 @@ Features
"iris.experimental.regrid.regrid_bilinear_rectilinear_src_and_grid".
-Bugs fixed
+Bugs Fixed
==========
* Fix in netCDF loader to correctly determine whether the longitude coordinate
diff --git a/docs/iris/src/whatsnew/1.9.rst b/docs/iris/src/whatsnew/1.9.rst
index c9d91bf33c..9829d8ff3b 100644
--- a/docs/iris/src/whatsnew/1.9.rst
+++ b/docs/iris/src/whatsnew/1.9.rst
@@ -1,5 +1,5 @@
v1.9 (10 Dec 2015)
-********************
+******************
This document explains the changes made to Iris for this release
(:doc:`View all changes `.)
@@ -93,7 +93,7 @@ Features
read Fieldsfile data after the original
:class:`iris.experimental.um.FieldsFileVariant` has been closed.
-Bugs fixed
+Bugs Fixed
==========
* Fixed a bug in :meth:`iris.unit.Unit.convert`
@@ -170,7 +170,7 @@ v1.9.2 (28 Jan 2016)
* Fixed a bug regarding unsuccessful dot import.
-Incompatible changes
+Incompatible Changes
====================
* GRIB message/file reading and writing may not be available for Python 3 due
diff --git a/docs/iris/src/whatsnew/2.0.rst b/docs/iris/src/whatsnew/2.0.rst
index fbd012dd1f..400a395e90 100644
--- a/docs/iris/src/whatsnew/2.0.rst
+++ b/docs/iris/src/whatsnew/2.0.rst
@@ -60,7 +60,7 @@ Features
respectively.
-The :data:`iris.FUTURE` has arrived!
+The :data:`iris.FUTURE` Has Arrived!
------------------------------------
Throughout version 1 of Iris a set of toggles in
@@ -111,7 +111,7 @@ all existing toggles in :attr:`iris.FUTURE` now default to :data:`True`.
off is now deprecated.
-Bugs fixed
+Bugs Fixed
==========
* Indexing or slicing an :class:`~iris.coords.AuxCoord` coordinate will return a coordinate with
diff --git a/docs/iris/src/whatsnew/2.1.rst b/docs/iris/src/whatsnew/2.1.rst
index ef03f023b2..18c562d3da 100644
--- a/docs/iris/src/whatsnew/2.1.rst
+++ b/docs/iris/src/whatsnew/2.1.rst
@@ -43,7 +43,7 @@ Features
the ``standard_parallel`` keyword argument (:pull:`3041`).
-Bugs fixed
+Bugs Fixed
==========
* All var names being written to NetCDF are now CF compliant.
@@ -59,7 +59,7 @@ Bugs fixed
``axes`` keyword (:pull:`3010`).
-Incompatible changes
+Incompatible Changes
====================
* The deprecated :mod:`iris.experimental.um` was removed.
@@ -94,4 +94,4 @@ Internal
* Iris now requires version 2 of Matplotlib, and ``>=1.14`` of NumPy.
Full requirements can be seen in the `requirements `_
- directory of the Iris' the source.
\ No newline at end of file
+ directory of the Iris source.
diff --git a/docs/iris/src/whatsnew/2.2.rst b/docs/iris/src/whatsnew/2.2.rst
index 48280895fe..a1f48f962b 100644
--- a/docs/iris/src/whatsnew/2.2.rst
+++ b/docs/iris/src/whatsnew/2.2.rst
@@ -66,7 +66,7 @@ Features
a NaN-tolerant array comparison.
-Bugs fixed
+Bugs Fixed
==========
* The bug has been fixed that prevented printing time coordinates with bounds
diff --git a/docs/iris/src/whatsnew/2.3.rst b/docs/iris/src/whatsnew/2.3.rst
index 5997a7f4dc..2509242c05 100644
--- a/docs/iris/src/whatsnew/2.3.rst
+++ b/docs/iris/src/whatsnew/2.3.rst
@@ -147,7 +147,7 @@ Features
`metarelate/metOcean commit 448f2ef, 2019-11-29 `_
-Bugs fixed
+Bugs Fixed
==========
* Cube equality of boolean data is now handled correctly.
diff --git a/docs/iris/src/whatsnew/2.4.rst b/docs/iris/src/whatsnew/2.4.rst
index c62e84c129..0e271389b5 100644
--- a/docs/iris/src/whatsnew/2.4.rst
+++ b/docs/iris/src/whatsnew/2.4.rst
@@ -47,7 +47,7 @@ Features
``STASH`` from the attributes dictionary of a :class:`~iris.cube.Cube`.
-Bugs fixed
+Bugs Fixed
==========
* Fixed a problem which was causing file loads to fetch *all* field data
diff --git a/docs/iris/src/whatsnew/3.0.1.rst b/docs/iris/src/whatsnew/3.0.1.rst
new file mode 100644
index 0000000000..163fe4ff3e
--- /dev/null
+++ b/docs/iris/src/whatsnew/3.0.1.rst
@@ -0,0 +1,522 @@
+.. include:: ../common_links.inc
+
+v3.0.1 (27 Jan 2021)
+********************
+
+This document explains the changes made to Iris for this release
+(:doc:`View all changes <index>`.)
+
+
+.. dropdown:: :opticon:`alert` v3.0.1 Patches
+ :container: + shadow
+ :title: text-primary text-center font-weight-bold
+ :body: bg-light
+ :animate: fade-in
+ :open:
+
+ The patches included in this release include:
+
+ 💼 **Internal**
+
+ #. `@bjlittle`_ gracefully promote formula terms within :mod:`~iris.aux_factory` that have ``units`` of ``unknown``
+ to ``units`` of ``1`` (dimensionless), where the formula term **must** have dimensionless ``units``. Without this
+ graceful treatment of ``units`` the resulting :class:`~iris.cube.Cube` will **not** contain the expected auxiliary
+ factory, and the associated derived coordinate will be missing. (:pull:`3965`)
+
+
+.. dropdown:: :opticon:`report` Release Highlights
+ :container: + shadow
+ :title: text-primary text-center font-weight-bold
+ :body: bg-light
+ :animate: fade-in
+ :open:
+
+ The highlights for this major release of Iris include:
+
+ * We've finally dropped support for ``Python 2``, so welcome to ``Iris 3``
+ and ``Python 3``!
+ * We've extended our coverage of the `CF Conventions and Metadata`_ by
+ introducing support for `CF Ancillary Data`_ and `Quality Flags`_,
+ * Lazy regridding is now available for several regridding schemes,
+ * Managing and manipulating metadata within Iris is now easier and more
+ consistent thanks to the introduction of a new common metadata API,
+ * :ref:`Cube arithmetic ` has been significantly improved with
+ regards to extended broadcasting, auto-transposition and a more lenient
+ behaviour towards handling metadata and coordinates,
+ * Our :ref:`documentation ` has been refreshed,
+ restructured, revitalised and rehosted on `readthedocs`_,
+ * It's now easier than ever to :ref:`install Iris `
+ as a user or a developer, and the newly revamped developers guide walks
+ you though how you can :ref:`get involved `
+ and contribute to Iris,
+ * Also, this is a major release of Iris, so please be aware of the
+ :ref:`incompatible changes <whatsnew 3.0.1 changes>` and
+ :ref:`deprecations <whatsnew 3.0.1 deprecations>`.
+
+ And finally, get in touch with us on `GitHub`_ if you have any issues or
+ feature requests for improving Iris. Enjoy!
+
+
+📢 Announcements
+================
+
+#. Congratulations to `@bouweandela`_, `@jvegasbsc`_, and `@zklaus`_ who
+ recently became Iris core developers. They bring a wealth of expertise to the
+ team, and are using Iris to underpin `ESMValTool`_ - "*A community diagnostic
+ and performance metrics tool for routine evaluation of Earth system models
+ in CMIP*". Welcome aboard! 🎉
+
+#. Congratulations also goes to `@jonseddon`_ who recently became an Iris core
+ developer. We look forward to seeing more of your awesome contributions! 🎉
+
+
+✨ Features
+===========
+
+#. `@MoseleyS`_ greatly enhanced the :mod:`~iris.fileformats.nimrod`
+ module to provide richer meta-data translation when loading ``Nimrod`` data
+ into cubes. This covers most known operational use-cases. (:pull:`3647`)
+
+#. `@stephenworsley`_ improved the handling of
+ :class:`iris.coords.CellMeasure`\ s in the :class:`~iris.cube.Cube`
+ statistical operations :meth:`~iris.cube.Cube.collapsed`,
+ :meth:`~iris.cube.Cube.aggregated_by` and
+ :meth:`~iris.cube.Cube.rolling_window`. These previously removed every
+ :class:`~iris.coords.CellMeasure` attached to the cube. Now, a
+ :class:`~iris.coords.CellMeasure` will only be removed if it is associated
+ with an axis over which the statistic is being run. (:pull:`3549`)
+
+#. `@stephenworsley`_, `@pp-mo`_ and `@abooton`_ added support for
+ `CF Ancillary Data`_ variables. These are created as
+ :class:`iris.coords.AncillaryVariable`, and appear as components of cubes
+ much like :class:`~iris.coords.AuxCoord`\ s, with the new
+ :class:`~iris.cube.Cube` methods
+ :meth:`~iris.cube.Cube.add_ancillary_variable`,
+ :meth:`~iris.cube.Cube.remove_ancillary_variable`,
+ :meth:`~iris.cube.Cube.ancillary_variable`,
+ :meth:`~iris.cube.Cube.ancillary_variables` and
+ :meth:`~iris.cube.Cube.ancillary_variable_dims`.
+ They are loaded from and saved to NetCDF-CF files. Special support for
+ `Quality Flags`_ is also provided, to ensure they load and save with
+ appropriate units. (:pull:`3800`)
+
+#. `@bouweandela`_ implemented lazy regridding for the
+ :class:`~iris.analysis.Linear`, :class:`~iris.analysis.Nearest`, and
+ :class:`~iris.analysis.AreaWeighted` regridding schemes. (:pull:`3701`)
+
+#. `@bjlittle`_ added `logging`_ support within :mod:`iris.analysis.maths`,
+ :mod:`iris.common.metadata`, and :mod:`iris.common.resolve`. Each module
+ defines a :class:`logging.Logger` instance called ``logger`` with a default
+ ``level`` of ``INFO``. To enable ``DEBUG`` logging use
+ ``logger.setLevel("DEBUG")``. (:pull:`3785`)
+
+#. `@bjlittle`_ added the :mod:`iris.common.resolve` module, which provides
+ infrastructure to support the analysis, identification and combination
+ of metadata common between two :class:`~iris.cube.Cube` operands into a
+ single resultant :class:`~iris.cube.Cube` that will be auto-transposed,
+ and with the appropriate broadcast shape. (:pull:`3785`)
+
+#. `@bjlittle`_ added the :ref:`common metadata API <metadata>`, which provides
+ a unified treatment of metadata across Iris, and allows users to easily
+ manage and manipulate their metadata in a consistent way. (:pull:`3785`)
+
+#. `@bjlittle`_ added :ref:`lenient metadata <lenient metadata>` support, to
+ allow users to control **strict** or **lenient** metadata equivalence,
+ difference and combination. (:pull:`3785`)
+
+#. `@bjlittle`_ added :ref:`lenient cube maths <lenient maths>` support and
+ resolved several long standing major issues with cube arithmetic regarding
+ a more robust treatment of cube broadcasting, cube dimension auto-transposition,
+ and preservation of common metadata and coordinates during cube math operations.
+ Resolves :issue:`1887`, :issue:`2765`, and :issue:`3478`. (:pull:`3785`)
+
+#. `@pp-mo`_ and `@TomekTrzeciak`_ enhanced :meth:`~iris.cube.Cube.collapsed` to allow a 1-D weights array when
+ collapsing over a single dimension.
+ Previously, the weights had to be the same shape as the whole cube, which could cost a lot of memory in some cases.
+ The 1-D form is supported by most weighted array statistics (such as :meth:`np.average`), so this now works
+ with the corresponding Iris schemes (in that case, :const:`~iris.analysis.MEAN`). (:pull:`3943`)
+
+
+🐛 Bugs Fixed
+=============
+
+#. `@stephenworsley`_ fixed :meth:`~iris.cube.Cube.remove_coord` to now also
+ remove derived coordinates by removing aux_factories. (:pull:`3641`)
+
+#. `@jonseddon`_ fixed ``isinstance(cube, collections.Iterable)`` to now behave
+ as expected if a :class:`~iris.cube.Cube` is iterated over, while also
+ ensuring that ``TypeError`` is still raised. (Fixed by setting the
+ ``__iter__()`` method in :class:`~iris.cube.Cube` to ``None``).
+ (:pull:`3656`)
+
+#. `@stephenworsley`_ enabled cube concatenation along an axis shared by cell
+ measures; these cell measures are now concatenated together in the resulting
+ cube. Such a scenario would previously cause concatenation to inappropriately
+ fail. (:pull:`3566`)
+
+#. `@stephenworsley`_ newly included :class:`~iris.coords.CellMeasure`\ s in
+ :class:`~iris.cube.Cube` copy operations. Previously copying a
+ :class:`~iris.cube.Cube` would ignore any attached
+ :class:`~iris.coords.CellMeasure`. (:pull:`3546`)
+
+#. `@bjlittle`_ set a :class:`~iris.coords.CellMeasure`'s
+ ``measure`` attribute to have a default value of ``area``.
+ Previously, the ``measure`` was provided as a keyword argument to
+ :class:`~iris.coords.CellMeasure` with a default value of ``None``, which
+ caused a ``TypeError`` when no ``measure`` was provided, since ``area`` or
+ ``volume`` are the only accepted values. (:pull:`3533`)
+
+#. `@trexfeathers`_ set **all** plot types in :mod:`iris.plot` to now use
+ `matplotlib.dates.date2num`_ to format date/time coordinates for use on a plot
+ axis (previously :meth:`~iris.plot.pcolor` and :meth:`~iris.plot.pcolormesh`
+ did not include this behaviour). (:pull:`3762`)
+
+#. `@trexfeathers`_ changed date/time axis labels in :mod:`iris.quickplot` to
+ now **always** be based on the ``epoch`` used in `matplotlib.dates.date2num`_
+ (previously would take the unit from a time coordinate, if present, even
+ though the coordinate's value had been changed via ``date2num``).
+ (:pull:`3762`)
+
+#. `@pp-mo`_ newly included attributes of cell measures in NETCDF-CF
+ file loading; they were previously being discarded. They are now available on
+ the :class:`~iris.coords.CellMeasure` in the loaded :class:`~iris.cube.Cube`.
+ (:pull:`3800`)
+
+#. `@pp-mo`_ fixed the netcdf loader to now handle any grid-mapping
+ variables with missing ``false_easting`` and ``false_northing`` properties,
+ which was previously failing for some coordinate systems. See :issue:`3629`.
+ (:pull:`3804`)
+
+#. `@stephenworsley`_ changed the way tick labels are assigned from string coords.
+ Previously, the first tick label would occasionally be duplicated. This also
+ removes the use of Matplotlib's deprecated ``IndexFormatter``. (:pull:`3857`)
+
+#. `@znicholls`_ fixed :meth:`~iris.quickplot._title` to only check
+ ``units.is_time_reference`` if the ``units`` symbol is not used. (:pull:`3902`)
+
+#. `@rcomer`_ fixed a bug whereby numpy array type attributes on a cube's
+ coordinates could prevent printing it. See :issue:`3921`. (:pull:`3922`)
+
+.. _whatsnew 3.0.1 changes:
+
+💣 Incompatible Changes
+=======================
+
+#. `@pp-mo`_ rationalised :class:`~iris.cube.CubeList` extraction
+ methods:
+
+ The former method ``iris.cube.CubeList.extract_strict``, and the ``strict``
+ keyword of the :meth:`~iris.cube.CubeList.extract` method have been removed,
+ and are replaced by the new routines :meth:`~iris.cube.CubeList.extract_cube`
+ and :meth:`~iris.cube.CubeList.extract_cubes`.
+ The new routines perform the same operation, but in a style more like other
+ ``Iris`` functions such as :meth:`~iris.load_cube` and :meth:`~iris.load_cubes`.
+ Unlike ``strict`` extraction, the type of return value is now completely
+ consistent : :meth:`~iris.cube.CubeList.extract_cube` always returns a
+ :class:`~iris.cube.Cube`, and :meth:`~iris.cube.CubeList.extract_cubes`
+ always returns an :class:`iris.cube.CubeList` of a length equal to the
+ number of constraints. (:pull:`3715`)
+
+#. `@pp-mo`_ removed the former function
+ ``iris.analysis.coord_comparison``. (:pull:`3562`)
+
+#. `@bjlittle`_ moved the
+ :func:`iris.experimental.equalise_cubes.equalise_attributes` function from
+ the :mod:`iris.experimental` module into the :mod:`iris.util` module. Please
+ use the :func:`iris.util.equalise_attributes` function instead.
+ (:pull:`3527`)
+
+#. `@bjlittle`_ removed the module ``iris.experimental.concatenate``. In
+ ``v1.6.0`` the experimental ``concatenate`` functionality was moved to the
+ :meth:`iris.cube.CubeList.concatenate` method. Since then, calling the
+ :func:`iris.experimental.concatenate.concatenate` function raised an
+ exception. (:pull:`3523`)
+
+#. `@stephenworsley`_ changed the default units of :class:`~iris.coords.DimCoord`
+ and :class:`~iris.coords.AuxCoord` from `"1"` to `"unknown"`. (:pull:`3795`)
+
+#. `@stephenworsley`_ changed Iris objects loaded from NetCDF-CF files to have
+ ``units='unknown'`` where the corresponding NetCDF variable has no ``units``
+ property. Previously these cases defaulted to ``units='1'``.
+ This affects loading of coordinates whose file variable has no "units"
+ attribute (not valid, under `CF units rules`_): These will now have units
+ of `"unknown"`, rather than `"1"`, which **may prevent the creation of
+ a hybrid vertical coordinate**. While these cases used to "work", this was
+ never really correct behaviour. (:pull:`3795`)
+
+#. `@SimonPeatman`_ added attribute ``var_name`` to coordinates created by the
+ :func:`iris.analysis.trajectory.interpolate` function. This prevents
+ duplicate coordinate errors in certain circumstances. (:pull:`3718`)
+
+#. `@bjlittle`_ aligned the :func:`iris.analysis.maths.apply_ufunc` with the
+ rest of the :mod:`iris.analysis.maths` API by changing its keyword argument
+ from ``other_cube`` to ``other``. (:pull:`3785`)
+
+#. `@bjlittle`_ changed the :meth:`iris.analysis.maths.IFunc.__call__` to ignore
+ any surplus ``other`` keyword argument for a ``data_func`` that requires
+ **only one** argument. This aligns the behaviour of
+ :meth:`iris.analysis.maths.IFunc.__call__` with
+ :func:`~iris.analysis.maths.apply_ufunc`. Previously a ``ValueError``
+ exception was raised. (:pull:`3785`)
+
+
+.. _whatsnew 3.0.1 deprecations:
+
+🔥 Deprecations
+===============
+
+#. `@stephenworsley`_ removed the deprecated :class:`iris.Future` flags
+ ``cell_date_time_objects``, ``netcdf_promote``, ``netcdf_no_unlimited`` and
+ ``clip_latitudes``. (:pull:`3459`)
+
+#. `@stephenworsley`_ changed :attr:`iris.fileformats.pp.PPField.lbproc` to be an
+ ``int``. The deprecated attributes ``flag1``, ``flag2`` etc. have been
+ removed from it. (:pull:`3461`)
+
+#. `@bjlittle`_ deprecated :func:`~iris.util.as_compatible_shape` in preference
+ for :class:`~iris.common.resolve.Resolve` e.g., ``Resolve(src, tgt)(tgt.core_data())``.
+ The :func:`~iris.util.as_compatible_shape` function will be removed in a future
+ release of Iris. (:pull:`3892`)
+
+
+🔗 Dependencies
+===============
+
+#. `@stephenworsley`_, `@trexfeathers`_ and `@bjlittle`_ removed ``Python2``
+ support, modernising the codebase by switching to exclusive ``Python3``
+ support. (:pull:`3513`)
+
+#. `@bjlittle`_ improved the developer set up process. Configuring Iris and
+ :ref:`installing_from_source` as a developer with all the required package
+ dependencies is now easier with our curated conda environment YAML files.
+ (:pull:`3812`)
+
+#. `@stephenworsley`_ pinned Iris to require `Dask`_ ``>=2.0``. (:pull:`3460`)
+
+#. `@stephenworsley`_ and `@trexfeathers`_ pinned Iris to require
+ `Cartopy`_ ``>=0.18``, in order to remain compatible with the latest version
+ of `Matplotlib`_. (:pull:`3762`)
+
+#. `@bjlittle`_ unpinned Iris to use the latest version of `Matplotlib`_.
+ Supporting ``Iris`` for both ``Python2`` and ``Python3`` had resulted in
+ pinning our dependency on `Matplotlib`_ at ``v2.x``. But this is no longer
+ necessary now that ``Python2`` support has been dropped. (:pull:`3468`)
+
+#. `@stephenworsley`_ and `@trexfeathers`_ unpinned Iris to use the latest version
+ of `Proj`_. (:pull:`3762`)
+
+#. `@stephenworsley`_ and `@trexfeathers`_ removed GDAL from the extensions
+ dependency group. We no longer consider it to be an extension. (:pull:`3762`)
+
+
+.. _whatsnew 3.0.1 docs:
+
+📚 Documentation
+================
+
+#. `@tkknight`_ moved the
+ :ref:`sphx_glr_generated_gallery_oceanography_plot_orca_projection.py`
+ from the general part of the gallery to oceanography. (:pull:`3761`)
+
+#. `@tkknight`_ updated documentation to use a modern sphinx theme and be
+ served from https://scitools-iris.readthedocs.io/en/latest/. (:pull:`3752`)
+
+#. `@bjlittle`_ added support for the `black`_ code formatter. This is
+ now automatically checked on GitHub PRs, replacing the older, unittest-based
+ ``iris.tests.test_coding_standards.TestCodeFormat``. Black provides automatic
+ code format correction for most IDEs. See the new developer guide section on
+ :ref:`code_formatting`. (:pull:`3518`)
+
+#. `@tkknight`_ and `@trexfeathers`_ refreshed the :ref:`whats_new_contributions`
+ for the :ref:`iris_whatsnew`. This includes always creating the ``latest``
+ what's new page so it appears on the latest documentation at
+ https://scitools-iris.readthedocs.io/en/latest/whatsnew. This resolves
+ :issue:`2104`, :issue:`3451`, :issue:`3818`, :issue:`3837`. Also updated the
+ :ref:`iris_development_releases_steps` to follow when making a release.
+ (:pull:`3769`, :pull:`3838`, :pull:`3843`)
+
+#. `@tkknight`_ enabled the PDF creation of the documentation on the
+ `Read the Docs`_ service. The PDF may be accessed by clicking on the version
+ at the bottom of the side bar, then selecting ``PDF`` from the ``Downloads``
+ section. (:pull:`3765`)
+
+#. `@stephenworsley`_ added a warning to the
+ :func:`iris.analysis.cartography.project` function regarding its behaviour on
+ projections with non-rectangular boundaries. (:pull:`3762`)
+
+#. `@stephenworsley`_ added the :ref:`cube_maths_combining_units` section to the
+ user guide to clarify how ``Units`` are handled during cube arithmetic.
+ (:pull:`3803`)
+
+#. `@tkknight`_ overhauled the :ref:`developers_guide` including information on
+ getting involved in becoming a contributor and general structure of the
+ guide. This resolves :issue:`2170`, :issue:`2331`, :issue:`3453`,
+ :issue:`314`, :issue:`2902`. (:pull:`3852`)
+
+#. `@rcomer`_ added argument descriptions to the :class:`~iris.coords.DimCoord`
+ docstring. (:pull:`3681`)
+
+#. `@tkknight`_ added two url's to be ignored for the ``make linkcheck``. This
+ will ensure the Iris github project is not repeatedly hit during the
+ linkcheck for issues and pull requests as it can result in connection
+ refused and thus travis-ci_ job failures. For more information on linkcheck,
+ see :ref:`contributing.documentation.testing`. (:pull:`3873`)
+
+#. `@tkknight`_ enabled the napolean_ package that is used by sphinx_ to cater
+ for the existing google style docstrings and to also allow for `numpy`_
+ docstrings. This resolves :issue:`3841`. (:pull:`3871`)
+
+#. `@tkknight`_ configured ``sphinx-build`` to promote warnings to errors when
+ building the documentation via ``make html``. This will minimise technical
+ debt accruing for the documentation. (:pull:`3877`)
+
+#. `@tkknight`_ updated :ref:`installing_iris` to include a reference to
+ Windows Subsystem for Linux. (:pull:`3885`)
+
+#. `@tkknight`_ updated the :ref:`iris_docs` homepage to include panels so the
+ links are more visible to users. This uses the sphinx-panels_ extension.
+ (:pull:`3884`)
+
+#. `@bjlittle`_ created the :ref:`Further topics <further topics>` section and
+ included documentation for :ref:`metadata`, :ref:`lenient metadata`, and
+ :ref:`lenient maths`. (:pull:`3890`)
+
+#. `@jonseddon`_ updated the CF version of the netCDF saver in the
+ :ref:`saving_iris_cubes` section and in the equivalent function docstring.
+ (:pull:`3925`)
+
+#. `@bjlittle`_ applied `Title Case Capitalization`_ to the documentation.
+ (:pull:`3940`)
+
+
+💼 Internal
+===========
+
+#. `@pp-mo`_ and `@lbdreyer`_ removed all Iris test dependencies on `iris-grib`_
+ by transferring all relevant content to the `iris-grib`_ repository. (:pull:`3662`,
+ :pull:`3663`, :pull:`3664`, :pull:`3665`, :pull:`3666`, :pull:`3669`,
+ :pull:`3670`, :pull:`3671`, :pull:`3672`, :pull:`3742`, :pull:`3746`)
+
+#. `@lbdreyer`_ and `@pp-mo`_ overhauled the handling of dimensional
+ metadata to remove duplication. (:pull:`3422`, :pull:`3551`)
+
+#. `@trexfeathers`_ simplified the standard license header for all files, which
+ removes the need to repeatedly update year numbers in the header.
+ (:pull:`3489`)
+
+#. `@stephenworsley`_ changed the numerical values in tests involving the
+ Robinson projection due to improvements made in
+ `Proj`_. (:pull:`3762`) (see also `Proj#1292`_ and `Proj#2151`_)
+
+#. `@stephenworsley`_ changed tests to account for more detailed descriptions of
+ projections in `GDAL`_. (:pull:`3762`) (see also `GDAL#1185`_)
+
+#. `@stephenworsley`_ changed tests to account for `GDAL`_ now saving fill values
+ for data without masked points. (:pull:`3762`)
+
+#. `@trexfeathers`_ changed every graphics test that includes `Cartopy's coastlines`_
+ to account for new adaptive coastline scaling. (:pull:`3762`)
+ (see also `Cartopy#1105`_)
+
+#. `@trexfeathers`_ changed graphics tests to account for some new default
+ grid-line spacing in `Cartopy`_. (:pull:`3762`) (see also `Cartopy#1117`_)
+
+#. `@trexfeathers`_ added additional acceptable graphics test targets to account
+ for very minor changes in `Matplotlib`_ version ``3.3`` (colormaps, fonts and
+ axes borders). (:pull:`3762`)
+
+#. `@rcomer`_ corrected the Matplotlib backend in Iris tests to ignore
+ `matplotlib.rcdefaults`_, instead the tests will **always** use ``agg``.
+ (:pull:`3846`)
+
+#. `@bjlittle`_ migrated the `black`_ support from ``19.10b0`` to ``20.8b1``.
+ (:pull:`3866`)
+
+#. `@lbdreyer`_ updated the CF standard name table to the latest version: `v75`_.
+ (:pull:`3867`)
+
+#. `@bjlittle`_ added :pep:`517` and :pep:`518` support for building and
+ installing Iris, in particular to handle the `PyKE`_ package dependency.
+ (:pull:`3812`)
+
+#. `@bjlittle`_ added metadata support for comparing :attr:`~iris.cube.Cube.attributes`
+ dictionaries that contain `numpy`_ arrays using `xxHash`_, an extremely fast
+ non-cryptographic hash algorithm, running at RAM speed limits.
+
+#. `@bjlittle`_ added the ``iris.tests.assertDictEqual`` method to override
+ :meth:`unittest.TestCase.assertDictEqual` in order to cope with testing
+ metadata :attr:`~iris.cube.Cube.attributes` dictionary comparison where
+ the value of a key may be a `numpy`_ array. (:pull:`3785`)
+
+#. `@bjlittle`_ added the :func:`~iris.config.get_logger` function for creating
+ a generic :class:`logging.Logger` with a :class:`logging.StreamHandler` and
+ custom :class:`logging.Formatter`. (:pull:`3785`)
+
+#. `@owena11`_ identified and optimised a bottleneck in ``FieldsFile`` header
+ loading due to the use of :func:`numpy.fromfile`. (:pull:`3791`)
+
+#. `@znicholls`_ added a test for plotting with the label being taken from the unit's symbol,
+ see :meth:`~iris.tests.test_quickplot.TestLabels.test_pcolormesh_str_symbol` (:pull:`3902`).
+
+#. `@znicholls`_ made :func:`~iris.tests.idiff.step_over_diffs` robust to hyphens (``-``) in
+ the input path (i.e. the ``result_dir`` argument) (:pull:`3902`).
+
+#. `@bjlittle`_ migrated the CIaaS from `travis-ci`_ to `cirrus-ci`_, and removed `stickler-ci`_
+ support. (:pull:`3928`)
+
+#. `@bjlittle`_ introduced `nox`_ as a common and easy entry-point for test automation.
+ It can be used both from `cirrus-ci`_ in the cloud, and locally by the developer to
+ run the Iris tests, the doc-tests, the gallery doc-tests, and lint Iris
+ with `flake8`_ and `black`_. (:pull:`3928`)
+
+.. _Read the Docs: https://scitools-iris.readthedocs.io/en/latest/
+.. _Matplotlib: https://matplotlib.org/
+.. _CF units rules: https://cfconventions.org/Data/cf-conventions/cf-conventions-1.8/cf-conventions.html#units
+.. _CF Ancillary Data: https://cfconventions.org/Data/cf-conventions/cf-conventions-1.8/cf-conventions.html#ancillary-data
+.. _Quality Flags: https://cfconventions.org/Data/cf-conventions/cf-conventions-1.8/cf-conventions.html#flags
+.. _iris-grib: https://github.com/SciTools/iris-grib
+.. _Cartopy: https://github.com/SciTools/cartopy
+.. _Cartopy's coastlines: https://scitools.org.uk/cartopy/docs/latest/matplotlib/geoaxes.html?highlight=coastlines#cartopy.mpl.geoaxes.GeoAxes.coastlines
+.. _Cartopy#1105: https://github.com/SciTools/cartopy/pull/1105
+.. _Cartopy#1117: https://github.com/SciTools/cartopy/pull/1117
+.. _Dask: https://github.com/dask/dask
+.. _matplotlib.dates.date2num: https://matplotlib.org/api/dates_api.html#matplotlib.dates.date2num
+.. _Proj: https://github.com/OSGeo/PROJ
+.. _black: https://black.readthedocs.io/en/stable/
+.. _Proj#1292: https://github.com/OSGeo/PROJ/pull/1292
+.. _Proj#2151: https://github.com/OSGeo/PROJ/pull/2151
+.. _GDAL: https://github.com/OSGeo/gdal
+.. _GDAL#1185: https://github.com/OSGeo/gdal/pull/1185
+.. _@MoseleyS: https://github.com/MoseleyS
+.. _@stephenworsley: https://github.com/stephenworsley
+.. _@pp-mo: https://github.com/pp-mo
+.. _@abooton: https://github.com/abooton
+.. _@bouweandela: https://github.com/bouweandela
+.. _@bjlittle: https://github.com/bjlittle
+.. _@trexfeathers: https://github.com/trexfeathers
+.. _@jonseddon: https://github.com/jonseddon
+.. _@tkknight: https://github.com/tkknight
+.. _@lbdreyer: https://github.com/lbdreyer
+.. _@SimonPeatman: https://github.com/SimonPeatman
+.. _@TomekTrzeciak: https://github.com/TomekTrzeciak
+.. _@rcomer: https://github.com/rcomer
+.. _@jvegasbsc: https://github.com/jvegasbsc
+.. _@zklaus: https://github.com/zklaus
+.. _@znicholls: https://github.com/znicholls
+.. _ESMValTool: https://github.com/ESMValGroup/ESMValTool
+.. _v75: https://cfconventions.org/Data/cf-standard-names/75/build/cf-standard-name-table.html
+.. _sphinx-panels: https://sphinx-panels.readthedocs.io/en/latest/
+.. _logging: https://docs.python.org/3/library/logging.html
+.. _numpy: https://github.com/numpy/numpy
+.. _xxHash: https://github.com/Cyan4973/xxHash
+.. _PyKE: https://pypi.org/project/scitools-pyke/
+.. _matplotlib.rcdefaults: https://matplotlib.org/3.1.1/api/matplotlib_configuration_api.html?highlight=rcdefaults#matplotlib.rcdefaults
+.. _@owena11: https://github.com/owena11
+.. _GitHub: https://github.com/SciTools/iris/issues/new/choose
+.. _readthedocs: https://readthedocs.org/
+.. _CF Conventions and Metadata: https://cfconventions.org/
+.. _flake8: https://flake8.pycqa.org/en/stable/
+.. _nox: https://nox.thea.codes/en/stable/
+.. _Title Case Capitalization: https://apastyle.apa.org/style-grammar-guidelines/capitalization/title-case
+.. _travis-ci: https://travis-ci.org/github/SciTools/iris
+.. _stickler-ci: https://stickler-ci.com/
diff --git a/docs/iris/src/whatsnew/3.0.rst b/docs/iris/src/whatsnew/3.0.rst
index 0a9dcd89b0..0f61d62033 100644
--- a/docs/iris/src/whatsnew/3.0.rst
+++ b/docs/iris/src/whatsnew/3.0.rst
@@ -1,6 +1,6 @@
.. include:: ../common_links.inc
-v3.0 (02 Oct 2020)
+v3.0 (25 Jan 2021)
******************
This document explains the changes made to Iris for this release
@@ -43,194 +43,204 @@ This document explains the changes made to Iris for this release
📢 Announcements
================
-* Congratulations to `@bouweandela`_, `@jvegasbsc`_, and `@zklaus`_ who
- recently became Iris core developers. They bring a wealth of expertise to the
- team, and are using Iris to underpin `ESMValTool`_ - "*A community diagnostic
- and performance metrics tool for routine evaluation of Earth system models
- in CMIP*". Welcome aboard! 🎉
+#. Congratulations to `@bouweandela`_, `@jvegasbsc`_, and `@zklaus`_ who
+ recently became Iris core developers. They bring a wealth of expertise to the
+ team, and are using Iris to underpin `ESMValTool`_ - "*A community diagnostic
+ and performance metrics tool for routine evaluation of Earth system models
+ in CMIP*". Welcome aboard! 🎉
-* Congratulations also goes to `@jonseddon`_ who recently became an Iris core
- developer. We look forward to seeing more of your awesome contributions! 🎉
+#. Congratulations also goes to `@jonseddon`_ who recently became an Iris core
+ developer. We look forward to seeing more of your awesome contributions! 🎉
✨ Features
===========
-* `@MoseleyS`_ greatly enhanced the :mod:`~iris.fileformats.nimrod`
- module to provide richer meta-data translation when loading ``Nimrod`` data
- into cubes. This covers most known operational use-cases. (:pull:`3647`)
-
-* `@stephenworsley`_ improved the handling of
- :class:`iris.coords.CellMeasure`\ s in the :class:`~iris.cube.Cube`
- statistical operations :meth:`~iris.cube.Cube.collapsed`,
- :meth:`~iris.cube.Cube.aggregated_by` and
- :meth:`~iris.cube.Cube.rolling_window`. These previously removed every
- :class:`~iris.coords.CellMeasure` attached to the cube. Now, a
- :class:`~iris.coords.CellMeasure` will only be removed if it is associated
- with an axis over which the statistic is being run. (:pull:`3549`)
-
-* `@stephenworsley`_, `@pp-mo`_ and `@abooton`_ added support for
- `CF Ancillary Data`_ variables. These are created as
- :class:`iris.coords.AncillaryVariable`, and appear as components of cubes
- much like :class:`~iris.coords.AuxCoord`\ s, with the new
- :class:`~iris.cube.Cube` methods
- :meth:`~iris.cube.Cube.add_ancillary_variable`,
- :meth:`~iris.cube.Cube.remove_ancillary_variable`,
- :meth:`~iris.cube.Cube.ancillary_variable`,
- :meth:`~iris.cube.Cube.ancillary_variables` and
- :meth:`~iris.cube.Cube.ancillary_variable_dims`.
- They are loaded from and saved to NetCDF-CF files. Special support for
- `Quality Flags`_ is also provided, to ensure they load and save with
- appropriate units. (:pull:`3800`)
-
-* `@bouweandela`_ implemented lazy regridding for the
- :class:`~iris.analysis.Linear`, :class:`~iris.analysis.Nearest`, and
- :class:`~iris.analysis.AreaWeighted` regridding schemes. (:pull:`3701`)
-
-* `@bjlittle`_ added `logging`_ support within :mod:`iris.analysis.maths`,
- :mod:`iris.common.metadata`, and :mod:`iris.common.resolve`. Each module
- defines a :class:`logging.Logger` instance called ``logger`` with a default
- ``level`` of ``INFO``. To enable ``DEBUG`` logging use
- ``logger.setLevel("DEBUG")``. (:pull:`3785`)
-
-* `@bjlittle`_ added the :mod:`iris.common.resolve` module, which provides
- infrastructure to support the analysis, identification and combination
- of metadata common between two :class:`~iris.cube.Cube` operands into a
- single resultant :class:`~iris.cube.Cube` that will be auto-transposed,
- and with the appropriate broadcast shape. (:pull:`3785`)
-
-* `@bjlittle`_ added the :ref:`common metadata API `, which provides
- a unified treatment of metadata across Iris, and allows users to easily
- manage and manipulate their metadata in a consistent way. (:pull:`3785`)
-
-* `@bjlittle`_ added :ref:`lenient metadata ` support, to
- allow users to control **strict** or **lenient** metadata equivalence,
- difference and combination. (:pull:`3785`)
-
-* `@bjlittle`_ added :ref:`lenient cube maths ` support and
- resolved several long standing major issues with cube arithmetic regarding
- a more robust treatment of cube broadcasting, cube dimension auto-transposition,
- and preservation of common metadata and coordinates during cube math operations.
- Resolves :issue:`1887`, :issue:`2765`, and :issue:`3478`. (:pull:`3785`)
+#. `@MoseleyS`_ greatly enhanced the :mod:`~iris.fileformats.nimrod`
+ module to provide richer meta-data translation when loading ``Nimrod`` data
+ into cubes. This covers most known operational use-cases. (:pull:`3647`)
+
+#. `@stephenworsley`_ improved the handling of
+ :class:`iris.coords.CellMeasure`\ s in the :class:`~iris.cube.Cube`
+ statistical operations :meth:`~iris.cube.Cube.collapsed`,
+ :meth:`~iris.cube.Cube.aggregated_by` and
+ :meth:`~iris.cube.Cube.rolling_window`. These previously removed every
+ :class:`~iris.coords.CellMeasure` attached to the cube. Now, a
+ :class:`~iris.coords.CellMeasure` will only be removed if it is associated
+ with an axis over which the statistic is being run. (:pull:`3549`)
+
+#. `@stephenworsley`_, `@pp-mo`_ and `@abooton`_ added support for
+ `CF Ancillary Data`_ variables. These are created as
+ :class:`iris.coords.AncillaryVariable`, and appear as components of cubes
+ much like :class:`~iris.coords.AuxCoord`\ s, with the new
+ :class:`~iris.cube.Cube` methods
+ :meth:`~iris.cube.Cube.add_ancillary_variable`,
+ :meth:`~iris.cube.Cube.remove_ancillary_variable`,
+ :meth:`~iris.cube.Cube.ancillary_variable`,
+ :meth:`~iris.cube.Cube.ancillary_variables` and
+ :meth:`~iris.cube.Cube.ancillary_variable_dims`.
+ They are loaded from and saved to NetCDF-CF files. Special support for
+ `Quality Flags`_ is also provided, to ensure they load and save with
+ appropriate units. (:pull:`3800`)
+
+#. `@bouweandela`_ implemented lazy regridding for the
+ :class:`~iris.analysis.Linear`, :class:`~iris.analysis.Nearest`, and
+ :class:`~iris.analysis.AreaWeighted` regridding schemes. (:pull:`3701`)
+
+#. `@bjlittle`_ added `logging`_ support within :mod:`iris.analysis.maths`,
+ :mod:`iris.common.metadata`, and :mod:`iris.common.resolve`. Each module
+ defines a :class:`logging.Logger` instance called ``logger`` with a default
+ ``level`` of ``INFO``. To enable ``DEBUG`` logging use
+ ``logger.setLevel("DEBUG")``. (:pull:`3785`)
+
+#. `@bjlittle`_ added the :mod:`iris.common.resolve` module, which provides
+ infrastructure to support the analysis, identification and combination
+ of metadata common between two :class:`~iris.cube.Cube` operands into a
+ single resultant :class:`~iris.cube.Cube` that will be auto-transposed,
+ and with the appropriate broadcast shape. (:pull:`3785`)
+
+#. `@bjlittle`_ added the :ref:`common metadata API <metadata>`, which provides
+ a unified treatment of metadata across Iris, and allows users to easily
+ manage and manipulate their metadata in a consistent way. (:pull:`3785`)
+
+#. `@bjlittle`_ added :ref:`lenient metadata <lenient metadata>` support, to
+ allow users to control **strict** or **lenient** metadata equivalence,
+ difference and combination. (:pull:`3785`)
+
+#. `@bjlittle`_ added :ref:`lenient cube maths <lenient maths>` support and
+ resolved several long standing major issues with cube arithmetic regarding
+ a more robust treatment of cube broadcasting, cube dimension auto-transposition,
+ and preservation of common metadata and coordinates during cube math operations.
+ Resolves :issue:`1887`, :issue:`2765`, and :issue:`3478`. (:pull:`3785`)
+
+#. `@pp-mo`_ and `@TomekTrzeciak`_ enhanced :meth:`~iris.cube.Cube.collapsed` to allow a 1-D weights array when
+ collapsing over a single dimension.
+ Previously, the weights had to be the same shape as the whole cube, which could cost a lot of memory in some cases.
+ The 1-D form is supported by most weighted array statistics (such as :meth:`np.average`), so this now works
+ with the corresponding Iris schemes (in that case, :const:`~iris.analysis.MEAN`). (:pull:`3943`)
🐛 Bugs Fixed
=============
-* `@stephenworsley`_ fixed :meth:`~iris.cube.Cube.remove_coord` to now also
- remove derived coordinates by removing aux_factories. (:pull:`3641`)
-
-* `@jonseddon`_ fixed ``isinstance(cube, collections.Iterable)`` to now behave
- as expected if a :class:`~iris.cube.Cube` is iterated over, while also
- ensuring that ``TypeError`` is still raised. (Fixed by setting the
- ``__iter__()`` method in :class:`~iris.cube.Cube` to ``None``).
- (:pull:`3656`)
-
-* `@stephenworsley`_ enabled cube concatenation along an axis shared by cell
- measures; these cell measures are now concatenated together in the resulting
- cube. Such a scenario would previously cause concatenation to inappropriately
- fail. (:pull:`3566`)
-
-* `@stephenworsley`_ newly included :class:`~iris.coords.CellMeasure`\ s in
- :class:`~iris.cube.Cube` copy operations. Previously copying a
- :class:`~iris.cube.Cube` would ignore any attached
- :class:`~iris.coords.CellMeasure`. (:pull:`3546`)
-
-* `@bjlittle`_ set a :class:`~iris.coords.CellMeasure`'s
- ``measure`` attribute to have a default value of ``area``.
- Previously, the ``measure`` was provided as a keyword argument to
- :class:`~iris.coords.CellMeasure` with a default value of ``None``, which
- caused a ``TypeError`` when no ``measure`` was provided, since ``area`` or
- ``volume`` are the only accepted values. (:pull:`3533`)
-
-* `@trexfeathers`_ set **all** plot types in :mod:`iris.plot` to now use
- `matplotlib.dates.date2num`_ to format date/time coordinates for use on a plot
- axis (previously :meth:`~iris.plot.pcolor` and :meth:`~iris.plot.pcolormesh`
- did not include this behaviour). (:pull:`3762`)
-
-* `@trexfeathers`_ changed date/time axis labels in :mod:`iris.quickplot` to
- now **always** be based on the ``epoch`` used in `matplotlib.dates.date2num`_
- (previously would take the unit from a time coordinate, if present, even
- though the coordinate's value had been changed via ``date2num``).
- (:pull:`3762`)
-
-* `@pp-mo`_ newly included attributes of cell measures in NETCDF-CF
- file loading; they were previously being discarded. They are now available on
- the :class:`~iris.coords.CellMeasure` in the loaded :class:`~iris.cube.Cube`.
- (:pull:`3800`)
-
-* `@pp-mo`_ fixed the netcdf loader to now handle any grid-mapping
- variables with missing ``false_easting`` and ``false_northing`` properties,
- which was previously failing for some coordinate systems. See :issue:`3629`.
- (:pull:`3804`)
-
-* `@stephenworsley`_ changed the way tick labels are assigned from string coords.
- Previously, the first tick label would occasionally be duplicated. This also
- removes the use of Matplotlib's deprecated ``IndexFormatter``. (:pull:`3857`)
-
-* `@znicholls`_ fixed :meth:`~iris.quickplot._title` to only check ``units.is_time_reference`` if the ``units`` symbol is not used. (:pull:`3902`)
+#. `@stephenworsley`_ fixed :meth:`~iris.cube.Cube.remove_coord` to now also
+ remove derived coordinates by removing aux_factories. (:pull:`3641`)
+
+#. `@jonseddon`_ fixed ``isinstance(cube, collections.Iterable)`` to now behave
+ as expected if a :class:`~iris.cube.Cube` is iterated over, while also
+ ensuring that ``TypeError`` is still raised. (Fixed by setting the
+ ``__iter__()`` method in :class:`~iris.cube.Cube` to ``None``).
+ (:pull:`3656`)
+
+#. `@stephenworsley`_ enabled cube concatenation along an axis shared by cell
+ measures; these cell measures are now concatenated together in the resulting
+ cube. Such a scenario would previously cause concatenation to inappropriately
+ fail. (:pull:`3566`)
+
+#. `@stephenworsley`_ newly included :class:`~iris.coords.CellMeasure`\ s in
+ :class:`~iris.cube.Cube` copy operations. Previously copying a
+ :class:`~iris.cube.Cube` would ignore any attached
+ :class:`~iris.coords.CellMeasure`. (:pull:`3546`)
+
+#. `@bjlittle`_ set a :class:`~iris.coords.CellMeasure`'s
+ ``measure`` attribute to have a default value of ``area``.
+ Previously, the ``measure`` was provided as a keyword argument to
+ :class:`~iris.coords.CellMeasure` with a default value of ``None``, which
+ caused a ``TypeError`` when no ``measure`` was provided, since ``area`` or
+ ``volume`` are the only accepted values. (:pull:`3533`)
+
+#. `@trexfeathers`_ set **all** plot types in :mod:`iris.plot` to now use
+ `matplotlib.dates.date2num`_ to format date/time coordinates for use on a plot
+ axis (previously :meth:`~iris.plot.pcolor` and :meth:`~iris.plot.pcolormesh`
+ did not include this behaviour). (:pull:`3762`)
+
+#. `@trexfeathers`_ changed date/time axis labels in :mod:`iris.quickplot` to
+ now **always** be based on the ``epoch`` used in `matplotlib.dates.date2num`_
+ (previously would take the unit from a time coordinate, if present, even
+ though the coordinate's value had been changed via ``date2num``).
+ (:pull:`3762`)
+
+#. `@pp-mo`_ newly included attributes of cell measures in NETCDF-CF
+ file loading; they were previously being discarded. They are now available on
+ the :class:`~iris.coords.CellMeasure` in the loaded :class:`~iris.cube.Cube`.
+ (:pull:`3800`)
+
+#. `@pp-mo`_ fixed the netcdf loader to now handle any grid-mapping
+ variables with missing ``false_easting`` and ``false_northing`` properties,
+ which was previously failing for some coordinate systems. See :issue:`3629`.
+ (:pull:`3804`)
+
+#. `@stephenworsley`_ changed the way tick labels are assigned from string coords.
+ Previously, the first tick label would occasionally be duplicated. This also
+ removes the use of Matplotlib's deprecated ``IndexFormatter``. (:pull:`3857`)
+
+#. `@znicholls`_ fixed :meth:`~iris.quickplot._title` to only check
+ ``units.is_time_reference`` if the ``units`` symbol is not used. (:pull:`3902`)
+
+#. `@rcomer`_ fixed a bug whereby numpy array type attributes on a cube's
+ coordinates could prevent printing it. See :issue:`3921`. (:pull:`3922`)
.. _whatsnew 3.0 changes:
💣 Incompatible Changes
=======================
-* `@pp-mo`_ rationalised :class:`~iris.cube.CubeList` extraction
- methods:
-
- The former method ``iris.cube.CubeList.extract_strict``, and the ``strict``
- keyword of the :meth:`~iris.cube.CubeList.extract` method have been removed,
- and are replaced by the new routines :meth:`~iris.cube.CubeList.extract_cube`
- and :meth:`~iris.cube.CubeList.extract_cubes`.
- The new routines perform the same operation, but in a style more like other
- ``Iris`` functions such as :meth:`~iris.load_cube` and :meth:`~iris.load_cubes`.
- Unlike ``strict`` extraction, the type of return value is now completely
- consistent : :meth:`~iris.cube.CubeList.extract_cube` always returns a
- :class:`~iris.cube.Cube`, and :meth:`~iris.cube.CubeList.extract_cubes`
- always returns an :class:`iris.cube.CubeList` of a length equal to the
- number of constraints. (:pull:`3715`)
-
-* `@pp-mo`_ removed the former function
- ``iris.analysis.coord_comparison``. (:pull:`3562`)
-
-* `@bjlittle`_ moved the
- :func:`iris.experimental.equalise_cubes.equalise_attributes` function from
- the :mod:`iris.experimental` module into the :mod:`iris.util` module. Please
- use the :func:`iris.util.equalise_attributes` function instead.
- (:pull:`3527`)
-
-* `@bjlittle`_ removed the module ``iris.experimental.concatenate``. In
- ``v1.6.0`` the experimental ``concatenate`` functionality was moved to the
- :meth:`iris.cube.CubeList.concatenate` method. Since then, calling the
- :func:`iris.experimental.concatenate.concatenate` function raised an
- exception. (:pull:`3523`)
-
-* `@stephenworsley`_ changed the default units of :class:`~iris.coords.DimCoord`
- and :class:`~iris.coords.AuxCoord` from `"1"` to `"unknown"`. (:pull:`3795`)
-
-* `@stephenworsley`_ changed Iris objects loaded from NetCDF-CF files to have
- ``units='unknown'`` where the corresponding NetCDF variable has no ``units``
- property. Previously these cases defaulted to ``units='1'``.
- This affects loading of coordinates whose file variable has no "units"
- attribute (not valid, under `CF units rules`_): These will now have units
- of `"unknown"`, rather than `"1"`, which **may prevent the creation of
- a hybrid vertical coordinate**. While these cases used to "work", this was
- never really correct behaviour. (:pull:`3795`)
-
-* `@SimonPeatman`_ added attribute ``var_name`` to coordinates created by the
- :func:`iris.analysis.trajectory.interpolate` function. This prevents
- duplicate coordinate errors in certain circumstances. (:pull:`3718`)
-
-* `@bjlittle`_ aligned the :func:`iris.analysis.maths.apply_ufunc` with the
- rest of the :mod:`iris.analysis.maths` API by changing its keyword argument
- from ``other_cube`` to ``other``. (:pull:`3785`)
-
-* `@bjlittle`_ changed the :meth:`iris.analysis.maths.IFunc.__call__` to ignore
- any surplus ``other`` keyword argument for a ``data_func`` that requires
- **only one** argument. This aligns the behaviour of
- :meth:`iris.analysis.maths.IFunc.__call__` with
- :func:`~iris.analysis.maths.apply_ufunc`. Previously a ``ValueError``
- exception was raised. (:pull:`3785`)
+#. `@pp-mo`_ rationalised :class:`~iris.cube.CubeList` extraction
+ methods:
+
+ The former method ``iris.cube.CubeList.extract_strict``, and the ``strict``
+ keyword of the :meth:`~iris.cube.CubeList.extract` method have been removed,
+ and are replaced by the new routines :meth:`~iris.cube.CubeList.extract_cube`
+ and :meth:`~iris.cube.CubeList.extract_cubes`.
+ The new routines perform the same operation, but in a style more like other
+ ``Iris`` functions such as :meth:`~iris.load_cube` and :meth:`~iris.load_cubes`.
+ Unlike ``strict`` extraction, the type of return value is now completely
+   consistent: :meth:`~iris.cube.CubeList.extract_cube` always returns a
+ :class:`~iris.cube.Cube`, and :meth:`~iris.cube.CubeList.extract_cubes`
+ always returns an :class:`iris.cube.CubeList` of a length equal to the
+ number of constraints. (:pull:`3715`)
+
+#. `@pp-mo`_ removed the former function
+ ``iris.analysis.coord_comparison``. (:pull:`3562`)
+
+#. `@bjlittle`_ moved the
+ :func:`iris.experimental.equalise_cubes.equalise_attributes` function from
+ the :mod:`iris.experimental` module into the :mod:`iris.util` module. Please
+ use the :func:`iris.util.equalise_attributes` function instead.
+ (:pull:`3527`)
+
+#. `@bjlittle`_ removed the module ``iris.experimental.concatenate``. In
+ ``v1.6.0`` the experimental ``concatenate`` functionality was moved to the
+ :meth:`iris.cube.CubeList.concatenate` method. Since then, calling the
+ :func:`iris.experimental.concatenate.concatenate` function raised an
+ exception. (:pull:`3523`)
+
+#. `@stephenworsley`_ changed the default units of :class:`~iris.coords.DimCoord`
+ and :class:`~iris.coords.AuxCoord` from `"1"` to `"unknown"`. (:pull:`3795`)
+
+#. `@stephenworsley`_ changed Iris objects loaded from NetCDF-CF files to have
+ ``units='unknown'`` where the corresponding NetCDF variable has no ``units``
+ property. Previously these cases defaulted to ``units='1'``.
+ This affects loading of coordinates whose file variable has no "units"
+ attribute (not valid, under `CF units rules`_): These will now have units
+ of `"unknown"`, rather than `"1"`, which **may prevent the creation of
+ a hybrid vertical coordinate**. While these cases used to "work", this was
+ never really correct behaviour. (:pull:`3795`)
+
+#. `@SimonPeatman`_ added attribute ``var_name`` to coordinates created by the
+ :func:`iris.analysis.trajectory.interpolate` function. This prevents
+ duplicate coordinate errors in certain circumstances. (:pull:`3718`)
+
+#. `@bjlittle`_ aligned the :func:`iris.analysis.maths.apply_ufunc` with the
+ rest of the :mod:`iris.analysis.maths` API by changing its keyword argument
+ from ``other_cube`` to ``other``. (:pull:`3785`)
+
+#. `@bjlittle`_ changed the :meth:`iris.analysis.maths.IFunc.__call__` to ignore
+ any surplus ``other`` keyword argument for a ``data_func`` that requires
+ **only one** argument. This aligns the behaviour of
+ :meth:`iris.analysis.maths.IFunc.__call__` with
+ :func:`~iris.analysis.maths.apply_ufunc`. Previously a ``ValueError``
+ exception was raised. (:pull:`3785`)
.. _whatsnew 3.0 deprecations:
@@ -238,48 +248,48 @@ This document explains the changes made to Iris for this release
🔥 Deprecations
===============
-* `@stephenworsley`_ removed the deprecated :class:`iris.Future` flags
- ``cell_date_time_objects``, ``netcdf_promote``, ``netcdf_no_unlimited`` and
- ``clip_latitudes``. (:pull:`3459`)
+#. `@stephenworsley`_ removed the deprecated :class:`iris.Future` flags
+ ``cell_date_time_objects``, ``netcdf_promote``, ``netcdf_no_unlimited`` and
+ ``clip_latitudes``. (:pull:`3459`)
-* `@stephenworsley`_ changed :attr:`iris.fileformats.pp.PPField.lbproc` to be an
- ``int``. The deprecated attributes ``flag1``, ``flag2`` etc. have been
- removed from it. (:pull:`3461`)
+#. `@stephenworsley`_ changed :attr:`iris.fileformats.pp.PPField.lbproc` to be an
+ ``int``. The deprecated attributes ``flag1``, ``flag2`` etc. have been
+ removed from it. (:pull:`3461`)
-* `@bjlittle`_ deprecated :func:`~iris.util.as_compatible_shape` in preference
- for :class:`~iris.common.resolve.Resolve` e.g., ``Resolve(src, tgt)(tgt.core_data())``.
- The :func:`~iris.util.as_compatible_shape` function will be removed in a future
- release of Iris. (:pull:`3892`)
+#. `@bjlittle`_ deprecated :func:`~iris.util.as_compatible_shape` in preference
+ for :class:`~iris.common.resolve.Resolve` e.g., ``Resolve(src, tgt)(tgt.core_data())``.
+ The :func:`~iris.util.as_compatible_shape` function will be removed in a future
+ release of Iris. (:pull:`3892`)
🔗 Dependencies
===============
-* `@stephenworsley`_, `@trexfeathers`_ and `@bjlittle`_ removed ``Python2``
- support, modernising the codebase by switching to exclusive ``Python3``
- support. (:pull:`3513`)
+#. `@stephenworsley`_, `@trexfeathers`_ and `@bjlittle`_ removed ``Python2``
+ support, modernising the codebase by switching to exclusive ``Python3``
+ support. (:pull:`3513`)
-* `@bjlittle`_ improved the developer set up process. Configuring Iris and
- :ref:`installing_from_source` as a developer with all the required package
- dependencies is now easier with our curated conda environment YAML files.
- (:pull:`3812`)
+#. `@bjlittle`_ improved the developer set up process. Configuring Iris and
+ :ref:`installing_from_source` as a developer with all the required package
+ dependencies is now easier with our curated conda environment YAML files.
+ (:pull:`3812`)
-* `@stephenworsley`_ pinned Iris to require `Dask`_ ``>=2.0``. (:pull:`3460`)
+#. `@stephenworsley`_ pinned Iris to require `Dask`_ ``>=2.0``. (:pull:`3460`)
-* `@stephenworsley`_ and `@trexfeathers`_ pinned Iris to require
- `Cartopy`_ ``>=0.18``, in order to remain compatible with the latest version
- of `Matplotlib`_. (:pull:`3762`)
+#. `@stephenworsley`_ and `@trexfeathers`_ pinned Iris to require
+ `Cartopy`_ ``>=0.18``, in order to remain compatible with the latest version
+ of `Matplotlib`_. (:pull:`3762`)
-* `@bjlittle`_ unpinned Iris to use the latest version of `Matplotlib`_.
- Supporting ``Iris`` for both ``Python2`` and ``Python3`` had resulted in
- pinning our dependency on `Matplotlib`_ at ``v2.x``. But this is no longer
- necessary now that ``Python2`` support has been dropped. (:pull:`3468`)
+#. `@bjlittle`_ unpinned Iris to use the latest version of `Matplotlib`_.
+ Supporting ``Iris`` for both ``Python2`` and ``Python3`` had resulted in
+ pinning our dependency on `Matplotlib`_ at ``v2.x``. But this is no longer
+ necessary now that ``Python2`` support has been dropped. (:pull:`3468`)
-* `@stephenworsley`_ and `@trexfeathers`_ unpinned Iris to use the latest version
- of `Proj`_. (:pull:`3762`)
+#. `@stephenworsley`_ and `@trexfeathers`_ unpinned Iris to use the latest version
+ of `Proj`_. (:pull:`3762`)
-* `@stephenworsley`_ and `@trexfeathers`_ removed GDAL from the extensions
- dependency group. We no longer consider it to be an extension. (:pull:`3762`)
+#. `@stephenworsley`_ and `@trexfeathers`_ removed GDAL from the extensions
+ dependency group. We no longer consider it to be an extension. (:pull:`3762`)
.. _whatsnew 3.0 docs:
@@ -287,152 +297,160 @@ This document explains the changes made to Iris for this release
📚 Documentation
================
-* `@tkknight`_ moved the
- :ref:`sphx_glr_generated_gallery_oceanography_plot_orca_projection.py`
- from the general part of the gallery to oceanography. (:pull:`3761`)
+#. `@tkknight`_ moved the
+ :ref:`sphx_glr_generated_gallery_oceanography_plot_orca_projection.py`
+ from the general part of the gallery to oceanography. (:pull:`3761`)
-* `@tkknight`_ updated documentation to use a modern sphinx theme and be
- served from https://scitools-iris.readthedocs.io/en/latest/. (:pull:`3752`)
+#. `@tkknight`_ updated documentation to use a modern sphinx theme and be
+ served from https://scitools-iris.readthedocs.io/en/latest/. (:pull:`3752`)
-* `@bjlittle`_ added support for the `black`_ code formatter. This is
- now automatically checked on GitHub PRs, replacing the older, unittest-based
- ``iris.tests.test_coding_standards.TestCodeFormat``. Black provides automatic
- code format correction for most IDEs. See the new developer guide section on
- :ref:`code_formatting`. (:pull:`3518`)
+#. `@bjlittle`_ added support for the `black`_ code formatter. This is
+ now automatically checked on GitHub PRs, replacing the older, unittest-based
+ ``iris.tests.test_coding_standards.TestCodeFormat``. Black provides automatic
+ code format correction for most IDEs. See the new developer guide section on
+ :ref:`code_formatting`. (:pull:`3518`)
-* `@tkknight`_ and `@trexfeathers`_ refreshed the :ref:`whats_new_contributions`
- for the :ref:`iris_whatsnew`. This includes always creating the ``latest``
- what's new page so it appears on the latest documentation at
- https://scitools-iris.readthedocs.io/en/latest/whatsnew. This resolves
- :issue:`2104`, :issue:`3451`, :issue:`3818`, :issue:`3837`. Also updated the
- :ref:`iris_development_releases_steps` to follow when making a release.
- (:pull:`3769`, :pull:`3838`, :pull:`3843`)
+#. `@tkknight`_ and `@trexfeathers`_ refreshed the :ref:`whats_new_contributions`
+ for the :ref:`iris_whatsnew`. This includes always creating the ``latest``
+ what's new page so it appears on the latest documentation at
+ https://scitools-iris.readthedocs.io/en/latest/whatsnew. This resolves
+ :issue:`2104`, :issue:`3451`, :issue:`3818`, :issue:`3837`. Also updated the
+ :ref:`iris_development_releases_steps` to follow when making a release.
+ (:pull:`3769`, :pull:`3838`, :pull:`3843`)
-* `@tkknight`_ enabled the PDF creation of the documentation on the
- `Read the Docs`_ service. The PDF may be accessed by clicking on the version
- at the bottom of the side bar, then selecting ``PDF`` from the ``Downloads``
- section. (:pull:`3765`)
+#. `@tkknight`_ enabled the PDF creation of the documentation on the
+ `Read the Docs`_ service. The PDF may be accessed by clicking on the version
+ at the bottom of the side bar, then selecting ``PDF`` from the ``Downloads``
+ section. (:pull:`3765`)
-* `@stephenworsley`_ added a warning to the
- :func:`iris.analysis.cartography.project` function regarding its behaviour on
- projections with non-rectangular boundaries. (:pull:`3762`)
+#. `@stephenworsley`_ added a warning to the
+ :func:`iris.analysis.cartography.project` function regarding its behaviour on
+ projections with non-rectangular boundaries. (:pull:`3762`)
-* `@stephenworsley`_ added the :ref:`cube_maths_combining_units` section to the
- user guide to clarify how ``Units`` are handled during cube arithmetic.
- (:pull:`3803`)
+#. `@stephenworsley`_ added the :ref:`cube_maths_combining_units` section to the
+ user guide to clarify how ``Units`` are handled during cube arithmetic.
+ (:pull:`3803`)
-* `@tkknight`_ overhauled the :ref:`developers_guide` including information on
- getting involved in becoming a contributor and general structure of the
- guide. This resolves :issue:`2170`, :issue:`2331`, :issue:`3453`,
- :issue:`314`, :issue:`2902`. (:pull:`3852`)
+#. `@tkknight`_ overhauled the :ref:`developers_guide` including information on
+ getting involved in becoming a contributor and general structure of the
+ guide. This resolves :issue:`2170`, :issue:`2331`, :issue:`3453`,
+ :issue:`314`, :issue:`2902`. (:pull:`3852`)
-* `@rcomer`_ added argument descriptions to the :class:`~iris.coords.DimCoord`
- docstring. (:pull:`3681`)
+#. `@rcomer`_ added argument descriptions to the :class:`~iris.coords.DimCoord`
+ docstring. (:pull:`3681`)
-* `@tkknight`_ added two url's to be ignored for the ``make linkcheck``. This
- will ensure the Iris github project is not repeatedly hit during the
- linkcheck for issues and pull requests as it can result in connection
- refused and thus travis-ci_ job failures. For more information on linkcheck,
- see :ref:`contributing.documentation.testing`. (:pull:`3873`)
+#. `@tkknight`_ added two URLs to be ignored for the ``make linkcheck``. This
+ will ensure the Iris github project is not repeatedly hit during the
+ linkcheck for issues and pull requests as it can result in connection
+ refused and thus travis-ci_ job failures. For more information on linkcheck,
+ see :ref:`contributing.documentation.testing`. (:pull:`3873`)
-* `@tkknight`_ enabled the napolean_ package that is used by sphinx_ to cater
- for the existing google style docstrings and to also allow for `numpy`_
- docstrings. This resolves :issue:`3841`. (:pull:`3871`)
+#. `@tkknight`_ enabled the napolean_ package that is used by sphinx_ to cater
+ for the existing google style docstrings and to also allow for `numpy`_
+ docstrings. This resolves :issue:`3841`. (:pull:`3871`)
-* `@tkknight`_ configured ``sphinx-build`` to promote warnings to errors when
- building the documentation via ``make html``. This will minimise technical
- debt accruing for the documentation. (:pull:`3877`)
+#. `@tkknight`_ configured ``sphinx-build`` to promote warnings to errors when
+ building the documentation via ``make html``. This will minimise technical
+ debt accruing for the documentation. (:pull:`3877`)
-* `@tkknight`_ updated :ref:`installing_iris` to include a reference to
- Windows Subsystem for Linux. (:pull:`3885`)
+#. `@tkknight`_ updated :ref:`installing_iris` to include a reference to
+ Windows Subsystem for Linux. (:pull:`3885`)
-* `@tkknight`_ updated the :ref:`iris_docs` homepage to include panels so the
- links are more visible to users. This uses the sphinx-panels_ extension.
- (:pull:`3884`)
+#. `@tkknight`_ updated the :ref:`iris_docs` homepage to include panels so the
+ links are more visible to users. This uses the sphinx-panels_ extension.
+ (:pull:`3884`)
-* `@bjlittle`_ created the :ref:`Further topics <further topics>` section and
- included documentation for :ref:`metadata`, :ref:`lenient metadata`, and
- :ref:`lenient maths`. (:pull:`3890`)
+#. `@bjlittle`_ created the :ref:`Further topics <further topics>` section and
+ included documentation for :ref:`metadata`, :ref:`lenient metadata`, and
+ :ref:`lenient maths`. (:pull:`3890`)
+
+#. `@jonseddon`_ updated the CF version of the netCDF saver in the
+ :ref:`saving_iris_cubes` section and in the equivalent function docstring.
+ (:pull:`3925`)
+
+#. `@bjlittle`_ applied `Title Case Capitalization`_ to the documentation.
+ (:pull:`3940`)
-* `@jonseddon`_ updated the CF version of the netCDF saver in the
- :ref:`saving_iris_cubes` section and in the equivalent function docstring.
💼 Internal
===========
-* `@pp-mo`_ and `@lbdreyer`_ removed all Iris test dependencies on `iris-grib`_
- by transferring all relevant content to the `iris-grib`_ repository. (:pull:`3662`,
- :pull:`3663`, :pull:`3664`, :pull:`3665`, :pull:`3666`, :pull:`3669`,
- :pull:`3670`, :pull:`3671`, :pull:`3672`, :pull:`3742`, :pull:`3746`)
+#. `@pp-mo`_ and `@lbdreyer`_ removed all Iris test dependencies on `iris-grib`_
+ by transferring all relevant content to the `iris-grib`_ repository. (:pull:`3662`,
+ :pull:`3663`, :pull:`3664`, :pull:`3665`, :pull:`3666`, :pull:`3669`,
+ :pull:`3670`, :pull:`3671`, :pull:`3672`, :pull:`3742`, :pull:`3746`)
-* `@lbdreyer`_ and `@pp-mo`_ overhauled the handling of dimensional
- metadata to remove duplication. (:pull:`3422`, :pull:`3551`)
+#. `@lbdreyer`_ and `@pp-mo`_ overhauled the handling of dimensional
+ metadata to remove duplication. (:pull:`3422`, :pull:`3551`)
-* `@trexfeathers`_ simplified the standard license header for all files, which
- removes the need to repeatedly update year numbers in the header.
- (:pull:`3489`)
+#. `@trexfeathers`_ simplified the standard license header for all files, which
+ removes the need to repeatedly update year numbers in the header.
+ (:pull:`3489`)
-* `@stephenworsley`_ changed the numerical values in tests involving the
- Robinson projection due to improvements made in
- `Proj`_. (:pull:`3762`) (see also `Proj#1292`_ and `Proj#2151`_)
+#. `@stephenworsley`_ changed the numerical values in tests involving the
+ Robinson projection due to improvements made in
+ `Proj`_. (:pull:`3762`) (see also `Proj#1292`_ and `Proj#2151`_)
-* `@stephenworsley`_ changed tests to account for more detailed descriptions of
- projections in `GDAL`_. (:pull:`3762`) (see also `GDAL#1185`_)
+#. `@stephenworsley`_ changed tests to account for more detailed descriptions of
+ projections in `GDAL`_. (:pull:`3762`) (see also `GDAL#1185`_)
-* `@stephenworsley`_ changed tests to account for `GDAL`_ now saving fill values
- for data without masked points. (:pull:`3762`)
+#. `@stephenworsley`_ changed tests to account for `GDAL`_ now saving fill values
+ for data without masked points. (:pull:`3762`)
-* `@trexfeathers`_ changed every graphics test that includes `Cartopy's coastlines`_
- to account for new adaptive coastline scaling. (:pull:`3762`)
- (see also `Cartopy#1105`_)
+#. `@trexfeathers`_ changed every graphics test that includes `Cartopy's coastlines`_
+ to account for new adaptive coastline scaling. (:pull:`3762`)
+ (see also `Cartopy#1105`_)
-* `@trexfeathers`_ changed graphics tests to account for some new default
- grid-line spacing in `Cartopy`_. (:pull:`3762`) (see also `Cartopy#1117`_)
+#. `@trexfeathers`_ changed graphics tests to account for some new default
+ grid-line spacing in `Cartopy`_. (:pull:`3762`) (see also `Cartopy#1117`_)
-* `@trexfeathers`_ added additional acceptable graphics test targets to account
- for very minor changes in `Matplotlib`_ version ``3.3`` (colormaps, fonts and
- axes borders). (:pull:`3762`)
+#. `@trexfeathers`_ added additional acceptable graphics test targets to account
+ for very minor changes in `Matplotlib`_ version ``3.3`` (colormaps, fonts and
+ axes borders). (:pull:`3762`)
-* `@rcomer`_ corrected the Matplotlib backend in Iris tests to ignore
- `matplotlib.rcdefaults`_, instead the tests will **always** use ``agg``.
- (:pull:`3846`)
+#. `@rcomer`_ corrected the Matplotlib backend in Iris tests to ignore
+ `matplotlib.rcdefaults`_, instead the tests will **always** use ``agg``.
+ (:pull:`3846`)
-* `@bjlittle`_ migrated the `black`_ support from ``19.10b0`` to ``20.8b1``.
- (:pull:`3866`)
+#. `@bjlittle`_ migrated the `black`_ support from ``19.10b0`` to ``20.8b1``.
+ (:pull:`3866`)
-* `@lbdreyer`_ updated the CF standard name table to the latest version: `v75`_.
- (:pull:`3867`)
+#. `@lbdreyer`_ updated the CF standard name table to the latest version: `v75`_.
+ (:pull:`3867`)
-* `@bjlittle`_ added :pep:`517` and :pep:`518` support for building and
- installing Iris, in particular to handle the `PyKE`_ package dependency.
- (:pull:`3812`)
+#. `@bjlittle`_ added :pep:`517` and :pep:`518` support for building and
+ installing Iris, in particular to handle the `PyKE`_ package dependency.
+ (:pull:`3812`)
-* `@bjlittle`_ added metadata support for comparing :attr:`~iris.cube.Cube.attributes`
- dictionaries that contain `numpy`_ arrays using `xxHash`_, an extremely fast
- non-cryptographic hash algorithm, running at RAM speed limits.
+#. `@bjlittle`_ added metadata support for comparing :attr:`~iris.cube.Cube.attributes`
+ dictionaries that contain `numpy`_ arrays using `xxHash`_, an extremely fast
+ non-cryptographic hash algorithm, running at RAM speed limits.
-* `@bjlittle`_ added the ``iris.tests.assertDictEqual`` method to override
- :meth:`unittest.TestCase.assertDictEqual` in order to cope with testing
- metadata :attr:`~iris.cube.Cube.attributes` dictionary comparison where
- the value of a key may be a `numpy`_ array. (:pull:`3785`)
+#. `@bjlittle`_ added the ``iris.tests.assertDictEqual`` method to override
+ :meth:`unittest.TestCase.assertDictEqual` in order to cope with testing
+ metadata :attr:`~iris.cube.Cube.attributes` dictionary comparison where
+ the value of a key may be a `numpy`_ array. (:pull:`3785`)
-* `@bjlittle`_ added the :func:`~iris.config.get_logger` function for creating
- a generic :class:`logging.Logger` with a :class:`logging.StreamHandler` and
- custom :class:`logging.Formatter`. (:pull:`3785`)
+#. `@bjlittle`_ added the :func:`~iris.config.get_logger` function for creating
+ a generic :class:`logging.Logger` with a :class:`logging.StreamHandler` and
+ custom :class:`logging.Formatter`. (:pull:`3785`)
-* `@owena11`_ identified and optimised a bottleneck in ``FieldsFile`` header
- loading due to the use of :func:`numpy.fromfile`. (:pull:`3791`)
+#. `@owena11`_ identified and optimised a bottleneck in ``FieldsFile`` header
+ loading due to the use of :func:`numpy.fromfile`. (:pull:`3791`)
-* `@znicholls`_ added a test for plotting with the label being taken from the unit's symbol, see :meth:`~iris.tests.test_quickplot.TestLabels.test_pcolormesh_str_symbol` (:pull:`3902`).
+#. `@znicholls`_ added a test for plotting with the label being taken from the unit's symbol,
+ see :meth:`~iris.tests.test_quickplot.TestLabels.test_pcolormesh_str_symbol` (:pull:`3902`).
-* `@znicholls`_ made :func:`~iris.tests.idiff.step_over_diffs` robust to hyphens (``-``) in the input path (i.e. the ``result_dir`` argument) (:pull:`3902`).
+#. `@znicholls`_ made :func:`~iris.tests.idiff.step_over_diffs` robust to hyphens (``-``) in
+ the input path (i.e. the ``result_dir`` argument) (:pull:`3902`).
-* `@bjlittle`_ migrated the CIaaS from `travis-ci`_ to `cirrus-ci`_. (:pull:`3928`)
+#. `@bjlittle`_ migrated the CIaaS from `travis-ci`_ to `cirrus-ci`_, and removed `stickler-ci`_
+ support. (:pull:`3928`)
-* `@bjlittle`_ introduced `nox`_ as a common and easy entry-point for test automation.
- It can be used both from `cirrus-ci`_ in the cloud, and locally by the developer to
- run the Iris tests, the doc-tests, the gallery doc-tests, and lint Iris
- with `flake8`_ and `black`_. (:pull:`3928`)
+#. `@bjlittle`_ introduced `nox`_ as a common and easy entry-point for test automation.
+ It can be used both from `cirrus-ci`_ in the cloud, and locally by the developer to
+ run the Iris tests, the doc-tests, the gallery doc-tests, and lint Iris
+ with `flake8`_ and `black`_. (:pull:`3928`)
.. _Read the Docs: https://scitools-iris.readthedocs.io/en/latest/
.. _Matplotlib: https://matplotlib.org/
@@ -463,6 +481,7 @@ This document explains the changes made to Iris for this release
.. _@tkknight: https://github.com/tkknight
.. _@lbdreyer: https://github.com/lbdreyer
.. _@SimonPeatman: https://github.com/SimonPeatman
+.. _@TomekTrzeciak: https://github.com/TomekTrzeciak
.. _@rcomer: https://github.com/rcomer
.. _@jvegasbsc: https://github.com/jvegasbsc
.. _@zklaus: https://github.com/zklaus
@@ -481,3 +500,6 @@ This document explains the changes made to Iris for this release
.. _CF Conventions and Metadata: https://cfconventions.org/
.. _flake8: https://flake8.pycqa.org/en/stable/
.. _nox: https://nox.thea.codes/en/stable/
+.. _Title Case Capitalization: https://apastyle.apa.org/style-grammar-guidelines/capitalization/title-case
+.. _travis-ci: https://travis-ci.org/github/SciTools/iris
+.. _stickler-ci: https://stickler-ci.com/
diff --git a/docs/iris/src/whatsnew/index.rst b/docs/iris/src/whatsnew/index.rst
index 3fd5fe6070..257674718a 100644
--- a/docs/iris/src/whatsnew/index.rst
+++ b/docs/iris/src/whatsnew/index.rst
@@ -1,6 +1,6 @@
.. _iris_whatsnew:
-What's new in Iris
+What's New in Iris
******************
These "What's new" pages describe the important changes between major
@@ -11,6 +11,7 @@ Iris versions.
:maxdepth: 1
latest.rst
+ 3.0.1.rst
3.0.rst
2.4.rst
2.3.rst
diff --git a/docs/iris/src/whatsnew/latest.rst b/docs/iris/src/whatsnew/latest.rst
index 302cab7817..3cdf5fe691 100644
--- a/docs/iris/src/whatsnew/latest.rst
+++ b/docs/iris/src/whatsnew/latest.rst
@@ -7,62 +7,92 @@ This document explains the changes made to Iris for this release
(:doc:`View all changes <index>`.)
+.. dropdown:: :opticon:`report` Release Highlights
+ :container: + shadow
+ :title: text-primary text-center font-weight-bold
+ :body: bg-light
+ :animate: fade-in
+ :open:
+
+ The highlights for this major/minor release of Iris include:
+
+ * N/A
+
+ And finally, get in touch with us on `GitHub`_ if you have any issues or
+ feature requests for improving Iris. Enjoy!
+
+
📢 Announcements
================
-* N/A
+#. N/A
✨ Features
===========
-* `@pelson`_ and `@trexfeathers`_ enhanced :meth:iris.plot.plot and
- :meth:iris.quickplot.plot to automatically place the cube on the x axis if
- the primary coordinate being plotted against is a vertical coordinate. E.g.
- ``iris.plot.plot(z_cube)`` will produce a z-vs-phenomenon plot, where before
- it would have produced a phenomenon-vs-z plot. (:pull:`3906`)
+#. `@pelson`_ and `@trexfeathers`_ enhanced :meth:`iris.plot.plot` and
+ :meth:`iris.quickplot.plot` to automatically place the cube on the x axis if
+ the primary coordinate being plotted against is a vertical coordinate. E.g.
+ ``iris.plot.plot(z_cube)`` will produce a z-vs-phenomenon plot, where before
+ it would have produced a phenomenon-vs-z plot. (:pull:`3906`)
🐛 Bugs Fixed
=============
-* `@gcaria`_ fixed :meth:`~iris.cube.Cube.cell_measure_dims` to also accept the string name of a :class:`~iris.coords.CellMeasure`. (:pull:`3931`)
-* `@gcaria`_ fixed :meth:`~iris.cube.Cube.ancillary_variable_dims` to also accept the string name of a :class:`~iris.coords.AncillaryVariable`. (:pull:`3931`)
+#. `@gcaria`_ fixed :meth:`~iris.cube.Cube.cell_measure_dims` to also accept the
+ string name of a :class:`~iris.coords.CellMeasure`. (:pull:`3931`)
+
+#. `@gcaria`_ fixed :meth:`~iris.cube.Cube.ancillary_variable_dims` to also accept
+ the string name of a :class:`~iris.coords.AncillaryVariable`. (:pull:`3931`)
💣 Incompatible Changes
=======================
-* N/A
+#. N/A
🔥 Deprecations
===============
-* N/A
+#. N/A
🔗 Dependencies
===============
-* N/A
+#. N/A
📚 Documentation
================
-* `@rcomer`_ updated the "Seasonal ensemble model plots" Gallery example.
- (:pull:`3933`)
+#. `@rcomer`_ updated the "Seasonal ensemble model plots" Gallery example. (:pull:`3933`)
+
+#. `@MHBalsmeier`_ described non-conda installation on Debian-based distros. (:pull:`3958`)
+
+#. `@bjlittle`_ clarified in the doc-string that :class:`~iris.coords.Coord` is now an `abstract base class`_ of
+ coordinates since ``v3.0.0``, and it is **not** possible to create an instance of it. (:pull:`3971`)
💼 Internal
===========
-* `@rcomer`_ removed an old unused test file. (:pull:`3913`)
+#. `@rcomer`_ removed an old unused test file. (:pull:`3913`)
+.. comment
+ Whatsnew author names (@github name) in alphabetical order. Note that,
+ core dev names are automatically included by the common_links.inc:
-.. _@pelson: https://github.com/pelson
-.. _@trexfeathers: https://github.com/trexfeathers
.. _@gcaria: https://github.com/gcaria
-.. _@rcomer: https://github.com/rcomer
+.. _@MHBalsmeier: https://github.com/MHBalsmeier
+
+
+.. comment
+ Whatsnew resources in alphabetical order:
+
+.. _abstract base class: https://docs.python.org/3/library/abc.html
+.. _GitHub: https://github.com/SciTools/iris/issues/new/choose
diff --git a/docs/iris/src/whatsnew/latest.rst.template b/docs/iris/src/whatsnew/latest.rst.template
index 67518e539a..0992a5c9bc 100644
--- a/docs/iris/src/whatsnew/latest.rst.template
+++ b/docs/iris/src/whatsnew/latest.rst.template
@@ -7,49 +7,89 @@ This document explains the changes made to Iris for this release
(:doc:`View all changes <index>`.)
+.. dropdown:: :opticon:`alert` v3.X.X Patches
+ :container: + shadow
+ :title: text-primary text-center font-weight-bold
+ :body: bg-light
+ :animate: fade-in
+ :open:
+
+ The patches in this release of Iris include:
+
+ #. N/A
+
+
+.. dropdown:: :opticon:`report` Release Highlights
+ :container: + shadow
+ :title: text-primary text-center font-weight-bold
+ :body: bg-light
+ :animate: fade-in
+ :open:
+
+ The highlights for this major/minor release of Iris include:
+
+ * N/A
+
+ And finally, get in touch with us on `GitHub`_ if you have any issues or
+ feature requests for improving Iris. Enjoy!
+
+
📢 Announcements
================
-* N/A
+#. N/A
✨ Features
===========
-* N/A
+#. N/A
🐛 Bugs Fixed
=============
-* N/A
+#. N/A
💣 Incompatible Changes
=======================
-* N/A
+#. N/A
🔥 Deprecations
===============
-* N/A
+#. N/A
🔗 Dependencies
===============
-* N/A
+#. N/A
📚 Documentation
================
-* N/A
+#. N/A
💼 Internal
===========
-* N/A
+#. N/A
+
+
+.. comment
+ Whatsnew author names (@github name) in alphabetical order. Note that,
+ core dev names are automatically included by the common_links.inc:
+
+
+
+
+.. comment
+ Whatsnew resources in alphabetical order:
+
+.. _GitHub: https://github.com/SciTools/iris/issues/new/choose
diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py
index e31c7b58d7..a78d0a7682 100644
--- a/lib/iris/__init__.py
+++ b/lib/iris/__init__.py
@@ -106,7 +106,7 @@ def callback(cube, field, filename):
# Iris revision.
-__version__ = "3.1.dev0"
+__version__ = "3.1.0dev0"
# Restrict the names imported when using "from iris import *"
__all__ = [
diff --git a/lib/iris/aux_factory.py b/lib/iris/aux_factory.py
index 5b63ff53ed..962b46e9e2 100644
--- a/lib/iris/aux_factory.py
+++ b/lib/iris/aux_factory.py
@@ -11,6 +11,7 @@
from abc import ABCMeta, abstractmethod
import warnings
+import cf_units
import dask.array as da
import numpy as np
@@ -619,6 +620,10 @@ def _check_dependencies(delta, sigma, surface_air_pressure):
warnings.warn(msg, UserWarning, stacklevel=2)
# Check units.
+ if sigma is not None and sigma.units.is_unknown():
+ # Be graceful, and promote unknown to dimensionless units.
+ sigma.units = cf_units.Unit("1")
+
if sigma is not None and not sigma.units.is_dimensionless():
raise ValueError("Invalid units: sigma must be dimensionless.")
if (
@@ -863,6 +868,10 @@ def _check_dependencies(sigma, eta, depth, depth_c, nsigma, zlev):
)
raise ValueError(msg)
+ if sigma is not None and sigma.units.is_unknown():
+ # Be graceful, and promote unknown to dimensionless units.
+ sigma.units = cf_units.Unit("1")
+
if sigma is not None and not sigma.units.is_dimensionless():
msg = (
"Invalid units: sigma coordinate {!r} "
@@ -1127,6 +1136,10 @@ def _check_dependencies(sigma, eta, depth):
warnings.warn(msg, UserWarning, stacklevel=2)
# Check units.
+ if sigma is not None and sigma.units.is_unknown():
+ # Be graceful, and promote unknown to dimensionless units.
+ sigma.units = cf_units.Unit("1")
+
if sigma is not None and not sigma.units.is_dimensionless():
msg = (
"Invalid units: sigma coordinate {!r} "
@@ -1335,6 +1348,10 @@ def _check_dependencies(s, c, eta, depth, depth_c):
# Check units.
coords = ((s, "s"), (c, "c"))
for coord, term in coords:
+ if coord is not None and coord.units.is_unknown():
+ # Be graceful, and promote unknown to dimensionless units.
+ coord.units = cf_units.Unit("1")
+
if coord is not None and not coord.units.is_dimensionless():
msg = (
"Invalid units: {} coordinate {!r} "
@@ -1551,6 +1568,10 @@ def _check_dependencies(s, eta, depth, a, b, depth_c):
raise ValueError(msg)
# Check units.
+ if s is not None and s.units.is_unknown():
+ # Be graceful, and promote unknown to dimensionless units.
+ s.units = cf_units.Unit("1")
+
if s is not None and not s.units.is_dimensionless():
msg = (
"Invalid units: s coordinate {!r} "
@@ -1776,6 +1797,10 @@ def _check_dependencies(s, c, eta, depth, depth_c):
# Check units.
coords = ((s, "s"), (c, "c"))
for coord, term in coords:
+ if coord is not None and coord.units.is_unknown():
+ # Be graceful, and promote unknown to dimensionless units.
+ coord.units = cf_units.Unit("1")
+
if coord is not None and not coord.units.is_dimensionless():
msg = (
"Invalid units: {} coordinate {!r} "
diff --git a/lib/iris/common/resolve.py b/lib/iris/common/resolve.py
index ad37247809..e772eeefce 100644
--- a/lib/iris/common/resolve.py
+++ b/lib/iris/common/resolve.py
@@ -230,7 +230,7 @@ def __init__(self, lhs=None, rhs=None):
"""
#: The ``lhs`` operand to be resolved into the resultant :class:`~iris.cube.Cube`.
- self.lhs_cube = None # set in _call__
+ self.lhs_cube = None # set in __call__
#: The ``rhs`` operand to be resolved into the resultant :class:`~iris.cube.Cube`.
self.rhs_cube = None # set in __call__
@@ -294,6 +294,25 @@ def __init__(self, lhs=None, rhs=None):
self(lhs, rhs)
def __call__(self, lhs, rhs):
+ """
+ Resolve the ``lhs`` :class:`~iris.cube.Cube` operand and ``rhs``
+ :class:`~iris.cube.Cube` operand metadata.
+
+ Involves determining all the common coordinate metadata shared between
+ the operands, and the metadata that is local to each operand. Given
+ the common metadata, the broadcast shape of the resultant resolved
+ :class:`~iris.cube.Cube`, which may be auto-transposed, can be
+ determined.
+
+ Args:
+
+ * lhs:
+ The left-hand-side :class:`~iris.cube.Cube` operand.
+
+ * rhs:
+ The right-hand-side :class:`~iris.cube.Cube` operand.
+
+ """
from iris.cube import Cube
emsg = (
@@ -338,11 +357,31 @@ def __call__(self, lhs, rhs):
return self
def _as_compatible_cubes(self):
+ """
+ Determine whether the ``src`` and ``tgt`` :class:`~iris.cube.Cube` can
+ be transposed and/or broadcast successfully together.
+
+ If compatible, the ``_broadcast_shape`` of the resultant resolved cube is
+ calculated, and the ``_src_cube_resolved`` (transposed/broadcast ``src``
+ cube) and ``_tgt_cube_resolved`` (same as the ``tgt`` cube) are
+ calculated.
+
+ An exception will be raised if the ``src`` and ``tgt`` cannot be
+ broadcast, even after a suitable transpose has been performed.
+
+ .. note::
+
+ Requires that **all** ``src`` cube dimensions have been mapped
+ successfully to an appropriate ``tgt`` cube dimension.
+
+ """
from iris.cube import Cube
src_cube = self._src_cube
tgt_cube = self._tgt_cube
+ assert src_cube.ndim == len(self.mapping)
+
# Use the mapping to calculate the new src cube shape.
new_src_shape = [1] * tgt_cube.ndim
for src_dim, tgt_dim in self.mapping.items():
@@ -430,6 +469,40 @@ def _aux_coverage(
common_aux_metadata,
common_scalar_metadata,
):
+ """
+ Determine the dimensions covered by each of the local and common
+ auxiliary coordinates of the provided :class:`~iris.cube.Cube`.
+
+ The cube dimensions not covered by any of the auxiliary coordinates is
+ also determined; these are known as `free` dimensions.
+
+ The scalar coordinates local to the cube are also determined.
+
+ Args:
+
+ * cube:
+ The :class:`~iris.cube.Cube` to be analysed for coverage.
+
+ * cube_items_aux:
+ The list of associated :class:`~iris.common.resolve._Item` metadata
+ for each auxiliary coordinate owned by the cube.
+
+ * cube_items_scalar:
+ The list of associated :class:`~iris.common.resolve._Item` metadata
+ for each scalar coordinate owned by the cube.
+
+ * common_aux_metadata:
+ The list of common auxiliary coordinate metadata shared by both
+ the LHS and RHS cube operands being resolved.
+
+ * common_scalar_metadata:
+ The list of common scalar coordinate metadata shared by both
+ the LHS and RHS cube operands being resolved.
+
+ Returns:
+ :class:`~iris.common.resolve._AuxCoverage`
+
+ """
common_items_aux = []
common_items_scalar = []
local_items_aux = []
@@ -465,7 +538,33 @@ def _aux_coverage(
dims_free=sorted(dims_free),
)
- def _aux_mapping(self, src_coverage, tgt_coverage):
+ @staticmethod
+ def _aux_mapping(src_coverage, tgt_coverage):
+ """
+ Establish the mapping of dimensions from the ``src`` to ``tgt``
+ :class:`~iris.cube.Cube` using the auxiliary coordinate metadata
+ common between each of the operands.
+
+ The ``src`` to ``tgt`` common auxiliary coordinate mapping is held by
+ the :attr:`~iris.common.resolve.Resolve.mapping`.
+
+ Args:
+
+ * src_coverage:
+ The :class:`~iris.common.resolve._DimCoverage` of the ``src``
+ :class:`~iris.cube.Cube` i.e., map from the common ``src``
+ dimensions.
+
+ * tgt_coverage:
+ The :class:`~iris.common.resolve._DimCoverage` of the ``tgt``
+ :class:`~iris.cube.Cube` i.e., map to the common ``tgt``
+ dimensions.
+
+ Returns:
+ Dictionary of ``src`` to ``tgt`` dimension mapping.
+
+ """
+ mapping = {}
for tgt_item in tgt_coverage.common_items_aux:
# Search for a src aux metadata match.
tgt_metadata = tgt_item.metadata
@@ -484,7 +583,7 @@ def _aux_mapping(self, src_coverage, tgt_coverage):
tgt_dims = tgt_item.dims
if len(src_dims) == len(tgt_dims):
for src_dim, tgt_dim in zip(src_dims, tgt_dims):
- self.mapping[src_dim] = tgt_dim
+ mapping[src_dim] = tgt_dim
logger.debug(f"{src_dim}->{tgt_dim}")
else:
# This situation can only occur due to a systemic internal
@@ -504,9 +603,26 @@ def _aux_mapping(self, src_coverage, tgt_coverage):
tgt_item.dims,
)
)
+ return mapping
@staticmethod
def _categorise_items(cube):
+ """
+ Inspect the provided :class:`~iris.cube.Cube` and group its
+ coordinates and associated metadata into dimension, auxiliary and
+ scalar categories.
+
+ Args:
+
+ * cube:
+ The :class:`~iris.cube.Cube` that will have its coordinates and
+ metadata grouped into their associated dimension, auxiliary and
+ scalar categories.
+
+ Returns:
+ :class:`~iris.common.resolve._CategoryItems`
+
+ """
category = _CategoryItems(items_dim=[], items_aux=[], items_scalar=[])
# Categorise the dim coordinates of the cube.
@@ -530,15 +646,40 @@ def _categorise_items(cube):
return category
@staticmethod
- def _create_prepared_item(coord, dims, src=None, tgt=None):
- if src is not None and tgt is not None:
- combined = src.combine(tgt)
+ def _create_prepared_item(
+ coord, dims, src_metadata=None, tgt_metadata=None
+ ):
+ """
+ Convenience method that creates a :class:`~iris.common.resolve._PreparedItem`
+ containing the data and metadata required to construct and attach a coordinate
+ to the resultant resolved cube.
+
+ Args:
+
+ * coord:
+ The coordinate with the ``points`` and ``bounds`` to be extracted.
+
+ * dims:
+ The dimensions that the ``coord`` spans on the resulting resolved :class:`~iris.cube.Cube`.
+
+ * src_metadata:
+ The coordinate metadata from the ``src`` :class:`~iris.cube.Cube`.
+
+ * tgt_metadata:
+ The coordinate metadata from the ``tgt`` :class:`~iris.cube.Cube`.
+
+ Returns:
+ The :class:`~iris.common.resolve._PreparedItem`.
+
+ """
+ if src_metadata is not None and tgt_metadata is not None:
+ combined = src_metadata.combine(tgt_metadata)
else:
- combined = src or tgt
+ combined = src_metadata or tgt_metadata
if not isinstance(dims, Iterable):
dims = (dims,)
prepared_metadata = _PreparedMetadata(
- combined=combined, src=src, tgt=tgt
+ combined=combined, src=src_metadata, tgt=tgt_metadata
)
bounds = coord.bounds
result = _PreparedItem(
@@ -573,6 +714,30 @@ def _show(items, heading):
@staticmethod
def _dim_coverage(cube, cube_items_dim, common_dim_metadata):
+ """
+ Determine the dimensions covered by each of the local and common
+ dimension coordinates of the provided :class:`~iris.cube.Cube`.
+
+ The cube dimensions not covered by any of the dimension coordinates is
+ also determined; these are known as `free` dimensions.
+
+ Args:
+
+ * cube:
+ The :class:`~iris.cube.Cube` to be analysed for coverage.
+
+ * cube_items_dim:
+ The list of associated :class:`~iris.common.resolve._Item` metadata
+ for each dimension coordinate owned by the cube.
+
+ * common_dim_metadata:
+ The list of common dimension coordinate metadata shared by both
+ the LHS and RHS cube operands being resolved.
+
+ Returns:
+ :class:`~iris.common.resolve._DimCoverage`
+
+ """
ndim = cube.ndim
metadata = [None] * ndim
coords = [None] * ndim
@@ -599,13 +764,39 @@ def _dim_coverage(cube, cube_items_dim, common_dim_metadata):
dims_free=sorted(dims_free),
)
- def _dim_mapping(self, src_coverage, tgt_coverage):
+ @staticmethod
+ def _dim_mapping(src_coverage, tgt_coverage):
+ """
+ Establish the mapping of dimensions from the ``src`` to ``tgt``
+ :class:`~iris.cube.Cube` using the dimension coordinate metadata
+ common between each of the operands.
+
+ The ``src`` to ``tgt`` common dimension coordinate mapping is held by
+ the :attr:`~iris.common.resolve.Resolve.mapping`.
+
+ Args:
+
+ * src_coverage:
+ The :class:`~iris.common.resolve._DimCoverage` of the ``src``
+ :class:`~iris.cube.Cube` i.e., map from the common ``src``
+ dimensions.
+
+ * tgt_coverage:
+ The :class:`~iris.common.resolve._DimCoverage` of the ``tgt``
+ :class:`~iris.cube.Cube` i.e., map to the common ``tgt``
+ dimensions.
+
+ Returns:
+ Dictionary of ``src`` to ``tgt`` dimension mapping.
+
+ """
+ mapping = {}
for tgt_dim in tgt_coverage.dims_common:
# Search for a src dim metadata match.
tgt_metadata = tgt_coverage.metadata[tgt_dim]
try:
src_dim = src_coverage.metadata.index(tgt_metadata)
- self.mapping[src_dim] = tgt_dim
+ mapping[src_dim] = tgt_dim
logger.debug(f"{src_dim}->{tgt_dim}")
except ValueError:
# This exception can only occur due to a systemic internal
@@ -621,9 +812,10 @@ def _dim_mapping(self, src_coverage, tgt_coverage):
src_coverage.cube.name(),
tgt_coverage.cube.name(),
tgt_metadata,
- tuple([tgt_dim]),
+ (tgt_dim,),
)
)
+ return mapping
def _free_mapping(
self,
@@ -632,6 +824,57 @@ def _free_mapping(
src_aux_coverage,
tgt_aux_coverage,
):
+ """
+ Attempt to update the :attr:`~iris.common.resolve.Resolve.mapping` with
+ ``src`` to ``tgt`` :class:`~iris.cube.Cube` mappings from unmapped ``src``
+ dimensions that are free from coordinate metadata coverage to ``tgt``
+ dimensions that have local metadata coverage (i.e., is not common between
+ the ``src`` and ``tgt``) or dimensions that are free from coordinate
+ metadata coverage.
+
+ If the ``src`` :class:`~iris.cube.Cube` does not have any free dimensions,
+ then attempt to map unmapped ``tgt`` dimensions that have local metadata
+ coverage to ``src`` dimensions that are free from coordinate metadata
+ coverage.
+
+ An exception will be raised if there are any ``src`` :class:`~iris.cube.Cube`
+ dimensions not mapped to an associated ``tgt`` dimension.
+
+ Args:
+
+ * src_dim_coverage:
+ The :class:`~iris.common.resolve._DimCoverage` of the ``src``
+ :class:`~iris.cube.Cube`.
+
+ * tgt_dim_coverage:
+ The :class:`~iris.common.resolve._DimCoverage` of the ``tgt``
+ :class:`~iris.cube.Cube`.
+
+ * src_aux_coverage:
+ The :class:`~iris.common.resolve._AuxCoverage` of the ``src``
+ :class:`~iris.cube.Cube`.
+
+ * tgt_aux_coverage:
+ The :class:`~iris.common.resolve._AuxCoverage` of the ``tgt``
+ :class:`~iris.cube.Cube`.
+
+ .. note::
+
+ All unmapped dimensions with an extent >1 are mapped before those
+ with an extent of 1, as such dimensions cannot be broadcast. It
+ is important to map specific non-broadcastable dimensions before
+ generic broadcastable dimensions otherwise we are open to failing to
+ map all the src dimensions as a generic src broadcast dimension has
+ been mapped to the only tgt dimension that a specific non-broadcastable
+ dimension can be mapped to.
+
+ .. note::
+
+ A local dimension cannot be mapped to another local dimension,
+ by definition, otherwise this dimension would be classed as a
+ common dimension.
+
+ """
src_cube = src_dim_coverage.cube
tgt_cube = tgt_dim_coverage.cube
src_ndim = src_cube.ndim
@@ -663,11 +906,16 @@ def _free_mapping(
tgt_shape = tgt_cube.shape
src_max, tgt_max = max(src_shape), max(tgt_shape)
- def assign_mapping(extent, unmapped_local_items, free_items=None):
+ def _assign_mapping(extent, unmapped_local_items, free_items=None):
result = None
if free_items is None:
free_items = []
if extent == 1:
+ # Map to the first available unmapped local dimension or
+ # the first available free dimension.
+ # Dimension shape doesn't matter here as the extent is 1,
+ # therefore broadcasting will take care of any discrepancy
+ # between src and tgt dimension extent.
if unmapped_local_items:
result, _ = unmapped_local_items.pop(0)
elif free_items:
@@ -680,10 +928,10 @@ def _filter(items):
)
def _pop(item, items):
- result, _ = item
+ dim, _ = item
index = items.index(item)
items.pop(index)
- return result
+ return dim
items = _filter(unmapped_local_items)
if items:
@@ -700,11 +948,12 @@ def _pop(item, items):
(dim, tgt_shape[dim]) for dim in tgt_unmapped_local
]
tgt_free_items = [(dim, tgt_shape[dim]) for dim in tgt_free]
+ # Sort by decreasing src dimension extent and increasing src dimension
+ # as we want broadcast src dimensions to be mapped last.
+ src_key_func = lambda dim: (src_max - src_shape[dim], dim)
- for src_dim in sorted(
- src_free, key=lambda dim: (src_max - src_shape[dim], dim)
- ):
- tgt_dim = assign_mapping(
+ for src_dim in sorted(src_free, key=src_key_func):
+ tgt_dim = _assign_mapping(
src_shape[src_dim],
tgt_unmapped_local_items,
tgt_free_items,
@@ -725,11 +974,12 @@ def _pop(item, items):
src_unmapped_local_items = [
(dim, src_shape[dim]) for dim in src_unmapped_local
]
+ # Sort by decreasing tgt dimension extent and increasing tgt dimension
+ # as we want broadcast tgt dimensions to be mapped last.
+ tgt_key_func = lambda dim: (tgt_max - tgt_shape[dim], dim)
- for tgt_dim in sorted(
- tgt_free, key=lambda dim: (tgt_max - tgt_shape[dim], dim)
- ):
- src_dim = assign_mapping(
+ for tgt_dim in sorted(tgt_free, key=tgt_key_func):
+ src_dim = _assign_mapping(
tgt_shape[tgt_dim], src_unmapped_local_items
)
if src_dim is not None:
@@ -758,6 +1008,17 @@ def _pop(item, items):
logger.debug(f"mapping free dimensions gives, mapping={self.mapping}")
def _metadata_coverage(self):
+ """
+ Using the pre-categorised metadata of the cubes, determine the dimensions
+ covered by their associated dimension and auxiliary coordinates, and which
+ dimensions are free of metadata coverage.
+
+ This coverage analysis clarifies how the dimensions covered by common
+ metadata are related, thus establishing a dimensional mapping between
+ the cubes. It also identifies the dimensions covered by metadata that
+ is local to each cube, and indeed which dimensions are free of metadata.
+
+ """
# Determine the common dim coordinate metadata coverage.
common_dim_metadata = [
item.metadata for item in self.category_common.items_dim
@@ -798,6 +1059,37 @@ def _metadata_coverage(self):
)
def _metadata_mapping(self):
+ """
+ Ensure that each ``src`` :class:`~iris.cube.Cube` dimension is mapped to an associated
+ ``tgt`` :class:`~iris.cube.Cube` dimension using the common dim and aux coordinate metadata.
+
+ If the common metadata does not result in a full mapping of ``src`` to ``tgt`` dimensions
+ then free dimensions are analysed to determine whether the mapping can be completed.
+
+ Once the ``src`` has been mapped to the ``tgt``, the cubes are checked to ensure that they
+ will successfully broadcast, and the ``src`` :class:`~iris.cube.Cube` is transposed appropriately,
+ if necessary.
+
+ The :attr:`~iris.common.resolve.Resolve._broadcast_shape` is set, along with the
+ :attr:`~iris.common.resolve.Resolve._src_cube_resolved` and :attr:`~iris.common.resolve.Resolve._tgt_cube_resolved`,
+ which are the broadcast/transposed ``src`` and ``tgt``.
+
+ .. note::
+
+ An exception will be raised if a ``src`` dimension cannot be mapped to a ``tgt`` dimension.
+
+ .. note::
+
+ An exception will be raised if the full mapped ``src`` :class:`~iris.cube.Cube` cannot be
+ broadcast or transposed with the ``tgt`` :class:`~iris.cube.Cube`.
+
+ .. note::
+
+ The ``src`` and ``tgt`` may be swapped in the case where they both have equal dimensionality and
+ the ``tgt`` does have the same shape as the resolved broadcast shape (and the ``src`` does not) or
+ the ``tgt`` has more free dimensions than the ``src``.
+
+ """
# Initialise the state.
self.mapping = {}
@@ -819,7 +1111,9 @@ def _metadata_mapping(self):
# Use the dim coordinates to fully map the
# src cube dimensions to the tgt cube dimensions.
- self._dim_mapping(src_dim_coverage, tgt_dim_coverage)
+ self.mapping.update(
+ self._dim_mapping(src_dim_coverage, tgt_dim_coverage)
+ )
logger.debug(
f"mapping common dim coordinates gives, mapping={self.mapping}"
)
@@ -827,7 +1121,9 @@ def _metadata_mapping(self):
# If necessary, use the aux coordinates to fully map the
# src cube dimensions to the tgt cube dimensions.
if not self.mapped:
- self._aux_mapping(src_aux_coverage, tgt_aux_coverage)
+ self.mapping.update(
+ self._aux_mapping(src_aux_coverage, tgt_aux_coverage)
+ )
logger.debug(
f"mapping common aux coordinates, mapping={self.mapping}"
)
@@ -886,6 +1182,12 @@ def _metadata_mapping(self):
self._as_compatible_cubes()
def _metadata_prepare(self):
+ """
+ Populate the :attr:`~iris.common.resolve.Resolve.prepared_category` and
+ :attr:`~iris.common.resolve.Resolve.prepared_factories` with the necessary metadata to be constructed
+ and attached to the resulting resolved :class:`~iris.cube.Cube`.
+
+ """
# Initialise the state.
self.prepared_category = _CategoryItems(
items_dim=[], items_aux=[], items_scalar=[]
@@ -1053,6 +1355,41 @@ def _prepare_common_aux_payload(
prepared_items,
ignore_mismatch=None,
):
+ """
+ Populate the ``prepared_items`` with a :class:`~iris.common.resolve._PreparedItem` containing
+ the necessary metadata for each auxiliary coordinate to be constructed and attached to the
+ resulting resolved :class:`~iris.cube.Cube`.
+
+ .. note::
+
+ For mixed ``src`` and ``tgt`` coordinate types with matching metadata, an
+ :class:`~iris.coords.AuxCoord` will be nominated for construction.
+
+ Args:
+
+ * src_common_items:
+ The list of :attr:`~iris.common.resolve._AuxCoverage.common_items_aux` metadata
+ for the ``src`` :class:`~iris.cube.Cube`.
+
+ * tgt_common_items:
+ The list of :attr:`~iris.common.resolve._AuxCoverage.common_items_aux` metadata
+ for the ``tgt`` :class:`~iris.cube.Cube`.
+
+ * prepared_items:
+ The list of :class:`~iris.common.resolve._PreparedItem` metadata that will be used
+ to construct the auxiliary coordinates that will be attached to the resulting
+ resolved :class:`~iris.cube.Cube`.
+
+ Kwargs:
+
+ * ignore_mismatch:
+ When ``False``, an exception will be raised if a difference is detected between corresponding
+ ``src`` and ``tgt`` coordinate ``points`` and/or ``bounds``.
+ When ``True``, the coverage metadata is ignored i.e., a coordinate will not be constructed and
+ added to the resulting resolved :class:`~iris.cube.Cube`.
+ Defaults to ``False``.
+
+ """
from iris.coords import AuxCoord
if ignore_mismatch is None:
@@ -1115,6 +1452,30 @@ def _prepare_common_aux_payload(
def _prepare_common_dim_payload(
self, src_coverage, tgt_coverage, ignore_mismatch=None
):
+ """
+ Populate the ``items_dim`` member of :attr:`~iris.common.resolve.Resolve.prepared_category_items`
+ with a :class:`~iris.common.resolve._PreparedItem` containing the necessary metadata for
+ each :class:`~iris.coords.DimCoord` to be constructed and attached to the resulting resolved
+ :class:`~iris.cube.Cube`.
+
+ Args:
+
+ * src_coverage:
+ The :class:`~iris.common.resolve._DimCoverage` metadata for the ``src`` :class:`~iris.cube.Cube`.
+
+ * tgt_coverage:
+ The :class:`~iris.common.resolve._DimCoverage` metadata for the ``tgt`` :class:`~iris.cube.Cube`.
+
+ Kwargs:
+
+ * ignore_mismatch:
+ When ``False``, an exception will be raised if a difference is detected between corresponding
+ ``src`` and ``tgt`` :class:`~iris.coords.DimCoord` ``points`` and/or ``bounds``.
+ When ``True``, the coverage metadata is ignored i.e., a :class:`~iris.coords.DimCoord` will not
+ be constructed and added to the resulting resolved :class:`~iris.cube.Cube`.
+ Defaults to ``False``.
+
+ """
from iris.coords import DimCoord
if ignore_mismatch is None:
@@ -1153,55 +1514,123 @@ def _prepare_common_dim_payload(
)
self.prepared_category.items_dim.append(prepared_item)
- def _prepare_factory_payload(self, cube, category_local, from_src=True):
- def _get_prepared_item(metadata, from_src=True, from_local=False):
- result = None
- if from_local:
- category = category_local
- match = lambda item: item.metadata == metadata
+ def _get_prepared_item(
+ self, metadata, category_local, from_src=True, from_local=False
+ ):
+ """
+ Find the :attr:`~iris.common.resolve._PreparedItem` from the
+ :attr:`~iris.common.resolve.Resolve.prepared_category` that matches the provided ``metadata``.
+
+ Alternatively, the ``category_local`` is searched to find a :class:`~iris.common.resolve._Item`
+ with matching ``metadata`` from either the local ``src`` or ``tgt`` :class:`~iris.cube.Cube`.
+ If a match is found, then a new `~iris.common.resolve._PreparedItem` is created and added to
+ :attr:`~iris.common.resolve.Resolve.prepared_category` and returned. See ``from_local``.
+
+ Args:
+
+ * metadata:
+ The target metadata of the prepared (or local) item to retrieve.
+
+ * category_local:
+ The :class:`~iris.common.resolve._CategoryItems` containing the
+ local metadata of either the ``src`` or ``tgt`` :class:`~iris.cube.Cube`.
+ See ``from_local``.
+
+ Kwargs:
+
+ * from_src:
+ Boolean stating whether the ``metadata`` is from the ``src`` (``True``)
+ or ``tgt`` :class:`~iris.cube.Cube`.
+ Defaults to ``True``.
+
+ * from_local:
+ Boolean controlling whether the ``metadata`` is used to search the
+ ``category_local`` (``True``) or the :attr:`~iris.common.resolve.Resolve.prepared_category`.
+ Defaults to ``False``.
+
+ Returns:
+ The :class:`~iris.common.resolve._PreparedItem` matching the provided ``metadata``.
+
+ """
+ result = None
+
+ if from_local:
+ category = category_local
+ match = lambda item: item.metadata == metadata
+ else:
+ category = self.prepared_category
+ if from_src:
+ match = lambda item: item.metadata.src == metadata
else:
- category = self.prepared_category
- if from_src:
- match = lambda item: item.metadata.src == metadata
+ match = lambda item: item.metadata.tgt == metadata
+
+ for member in category._fields:
+ category_items = getattr(category, member)
+ matched_items = tuple(filter(match, category_items))
+ if matched_items:
+ if len(matched_items) > 1:
+ dmsg = (
+ f"ignoring factory dependency {metadata}, multiple {'src' if from_src else 'tgt'} "
+ f"{'local' if from_local else 'prepared'} metadata matches"
+ )
+ logger.debug(dmsg)
else:
- match = lambda item: item.metadata.tgt == metadata
- for member in category._fields:
- category_items = getattr(category, member)
- matched_items = tuple(filter(match, category_items))
- if matched_items:
- if len(matched_items) > 1:
- dmsg = (
- f"ignoring factory dependency {metadata}, multiple {'src' if from_src else 'tgt'} "
- f"{'local' if from_local else 'prepared'} metadata matches"
- )
- logger.debug(dmsg)
- else:
- (item,) = matched_items
- if from_local:
- src = tgt = None
- if from_src:
- src = item.metadata
- dims = tuple(
- [self.mapping[dim] for dim in item.dims]
- )
- else:
- tgt = item.metadata
- dims = item.dims
- result = self._create_prepared_item(
- item.coord, dims, src=src, tgt=tgt
- )
- getattr(self.prepared_category, member).append(
- result
+ (item,) = matched_items
+ if from_local:
+ src = tgt = None
+ if from_src:
+ src = item.metadata
+ dims = tuple(
+ [self.mapping[dim] for dim in item.dims]
)
else:
- result = item
- break
- return result
+ tgt = item.metadata
+ dims = item.dims
+ result = self._create_prepared_item(
+ item.coord,
+ dims,
+ src_metadata=src,
+ tgt_metadata=tgt,
+ )
+ getattr(self.prepared_category, member).append(result)
+ else:
+ result = item
+ break
+ return result
+
+ def _prepare_factory_payload(self, cube, category_local, from_src=True):
+ """
+ Populate the :attr:`~iris.common.resolve.Resolve.prepared_factories` with a :class:`~iris.common.resolve._PreparedFactory`
+ containing the necessary metadata for each ``src`` and/or ``tgt`` auxiliary factory to be constructed and
+ attached to the resulting resolved :class:`~iris.cube.Cube`.
+
+ .. note::
+
+ The required dependencies of an auxiliary factory may not all be available in the
+ :attr:`~iris.common.resolve.Resolve.prepared_category` and therefore this is a legitimate
+ reason to add the associated metadata of the local dependency to the ``prepared_category``.
+
+ Args:
+
+ * cube:
+ The :class:`~iris.cube.Cube` that may contain an auxiliary factory to be prepared.
+
+ * category_local:
+ The :class:`~iris.common.resolve._CategoryItems` of all metadata local to the provided ``cube``.
+ Kwargs:
+
+ * from_src:
+ Boolean stating whether the provided ``cube`` is either a ``src`` or ``tgt``
+ :class:`~iris.cube.Cube` - used to retrieve the appropriate metadata from a
+ :class:`~iris.common.resolve._PreparedMetadata`.
+
+ """
for factory in cube.aux_factories:
container = type(factory)
dependencies = {}
prepared_item = None
+ found = True
if tuple(
filter(
@@ -1222,18 +1651,24 @@ def _get_prepared_item(metadata, from_src=True, from_local=False):
dependency_coord,
) in factory.dependencies.items():
metadata = dependency_coord.metadata
- prepared_item = _get_prepared_item(metadata, from_src=from_src)
+ prepared_item = self._get_prepared_item(
+ metadata, category_local, from_src=from_src
+ )
if prepared_item is None:
- prepared_item = _get_prepared_item(
- metadata, from_src=from_src, from_local=True
+ prepared_item = self._get_prepared_item(
+ metadata,
+ category_local,
+ from_src=from_src,
+ from_local=True,
)
if prepared_item is None:
dmsg = f"cannot find matching {metadata} for {container} dependency {dependency_name}"
logger.debug(dmsg)
+ found = False
break
dependencies[dependency_name] = prepared_item.metadata
- if prepared_item is not None:
+ if found and prepared_item is not None:
prepared_factory = _PreparedFactory(
container=container, dependencies=dependencies
)
@@ -1243,6 +1678,29 @@ def _get_prepared_item(metadata, from_src=True, from_local=False):
logger.debug(dmsg)
def _prepare_local_payload_aux(self, src_aux_coverage, tgt_aux_coverage):
+ """
+ Populate the ``items_aux`` member of :attr:`~iris.common.resolve.Resolve.prepared_category_items`
+ with a :class:`~iris.common.resolve._PreparedItem` containing the necessary metadata for each
+ ``src`` or ``tgt`` local auxiliary coordinate to be constructed and attached to the resulting
+ resolved :class:`~iris.cube.Cube`.
+
+ .. note::
+
+ In general, lenient behaviour subscribes to the philosophy that it is easier to remove
+ metadata than it is to find then add metadata. To those ends, lenient behaviour supports
+ metadata richness by adding both local ``src`` and ``tgt`` auxiliary coordinates.
+ Alternatively, strict behaviour will only add a ``tgt`` local auxiliary coordinate that
+ spans dimensions not mapped to by the ``src`` e.g., extra ``tgt`` dimensions.
+
+ Args:
+
+ * src_aux_coverage:
+ The :class:`~iris.common.resolve.Resolve._AuxCoverage` for the ``src`` :class:`~iris.cube.Cube`.
+
+ * tgt_aux_coverage:
+ The :class:`~iris.common.resolve.Resolve._AuxCoverage` for the ``tgt`` :class:`~iris.cube.Cube`.
+
+ """
# Determine whether there are tgt dimensions not mapped to by an
# associated src dimension, and thus may be covered by any local
# tgt aux coordinates.
@@ -1259,7 +1717,7 @@ def _prepare_local_payload_aux(self, src_aux_coverage, tgt_aux_coverage):
if all([dim in mapped_src_dims for dim in item.dims]):
tgt_dims = tuple([self.mapping[dim] for dim in item.dims])
prepared_item = self._create_prepared_item(
- item.coord, tgt_dims, src=item.metadata
+ item.coord, tgt_dims, src_metadata=item.metadata
)
self.prepared_category.items_aux.append(prepared_item)
else:
@@ -1281,7 +1739,7 @@ def _prepare_local_payload_aux(self, src_aux_coverage, tgt_aux_coverage):
[dim in extra_tgt_dims for dim in tgt_dims]
):
prepared_item = self._create_prepared_item(
- item.coord, tgt_dims, tgt=item.metadata
+ item.coord, tgt_dims, tgt_metadata=item.metadata
)
self.prepared_category.items_aux.append(prepared_item)
else:
@@ -1293,6 +1751,28 @@ def _prepare_local_payload_aux(self, src_aux_coverage, tgt_aux_coverage):
logger.debug(dmsg)
def _prepare_local_payload_dim(self, src_dim_coverage, tgt_dim_coverage):
+ """
+ Populate the ``items_dim`` member of :attr:`~iris.common.resolve.Resolve.prepared_category_items`
+ with a :class:`~iris.common.resolve._PreparedItem` containing the necessary metadata for each
+ ``src`` or ``tgt`` local :class:`~iris.coords.DimCoord` to be constructed and attached to the
+ resulting resolved :class:`~iris.cube.Cube`.
+
+ .. note::
+
+ In general, a local coordinate will only be added if there is no other metadata competing
+ to describe the same dimension/s on the ``tgt`` :class:`~iris.cube.Cube`. Lenient behaviour
+ is more liberal, whereas strict behaviour will only add a local ``tgt`` coordinate covering
+ an unmapped "extra" ``tgt`` dimension/s.
+
+ Args:
+
+ * src_dim_coverage:
+ The :class:`~iris.common.resolve.Resolve._DimCoverage` for the ``src`` :class:`~iris.cube.Cube`.
+
+ * tgt_dim_coverage:
+ The :class:`~iris.common.resolve.Resolve._DimCoverage` for the ``tgt`` :class:`~iris.cube.Cube`.
+
+ """
mapped_tgt_dims = self.mapping.values()
# Determine whether there are tgt dimensions not mapped to by an
@@ -1314,7 +1794,7 @@ def _prepare_local_payload_dim(self, src_dim_coverage, tgt_dim_coverage):
metadata = src_dim_coverage.metadata[src_dim]
coord = src_dim_coverage.coords[src_dim]
prepared_item = self._create_prepared_item(
- coord, tgt_dim, src=metadata
+ coord, tgt_dim, src_metadata=metadata
)
self.prepared_category.items_dim.append(prepared_item)
else:
@@ -1347,13 +1827,36 @@ def _prepare_local_payload_dim(self, src_dim_coverage, tgt_dim_coverage):
if metadata is not None:
coord = tgt_dim_coverage.coords[tgt_dim]
prepared_item = self._create_prepared_item(
- coord, tgt_dim, tgt=metadata
+ coord, tgt_dim, tgt_metadata=metadata
)
self.prepared_category.items_dim.append(prepared_item)
def _prepare_local_payload_scalar(
self, src_aux_coverage, tgt_aux_coverage
):
+ """
+ Populate the ``items_scalar`` member of :attr:`~iris.common.resolve.Resolve.prepared_category_items`
+ with a :class:`~iris.common.resolve._PreparedItem` containing the necessary metadata for each
+ ``src`` or ``tgt`` local scalar coordinate to be constructed and attached to the resulting
+ resolved :class:`~iris.cube.Cube`.
+
+ .. note::
+
+ In general, lenient behaviour subscribes to the philosophy that it is easier to remove
+ metadata than it is to find then add metadata. To those ends, lenient behaviour supports
+ metadata richness by adding both local ``src`` and ``tgt`` scalar coordinates.
+ Alternatively, strict behaviour will only add a ``tgt`` local scalar coordinate when the
+ ``src`` is a scalar :class:`~iris.cube.Cube` with no local scalar coordinates.
+
+ Args:
+
+ * src_aux_coverage:
+ The :class:`~iris.common.resolve.Resolve._AuxCoverage` for the ``src`` :class:`~iris.cube.Cube`.
+
+ * tgt_aux_coverage:
+ The :class:`~iris.common.resolve.Resolve._AuxCoverage` for the ``tgt`` :class:`~iris.cube.Cube`.
+
+ """
# Add all local tgt scalar coordinates iff the src cube is a
# scalar cube with no local src scalar coordinates.
# Only for strict maths.
@@ -1367,14 +1870,14 @@ def _prepare_local_payload_scalar(
# Add any local src scalar coordinates, if available.
for item in src_aux_coverage.local_items_scalar:
prepared_item = self._create_prepared_item(
- item.coord, item.dims, src=item.metadata
+ item.coord, item.dims, src_metadata=item.metadata
)
self.prepared_category.items_scalar.append(prepared_item)
# Add any local tgt scalar coordinates, if available.
for item in tgt_aux_coverage.local_items_scalar:
prepared_item = self._create_prepared_item(
- item.coord, item.dims, tgt=item.metadata
+ item.coord, item.dims, tgt_metadata=item.metadata
)
self.prepared_category.items_scalar.append(prepared_item)
@@ -1385,6 +1888,27 @@ def _prepare_local_payload(
tgt_dim_coverage,
tgt_aux_coverage,
):
+ """
+ Populate the :attr:`~iris.common.resolve.Resolve.prepared_category_items` with a
+ :class:`~iris.common.resolve._PreparedItem` containing the necessary metadata from the ``src``
+ and/or ``tgt`` :class:`~iris.cube.Cube` for each coordinate to be constructed and attached
+ to the resulting resolved :class:`~iris.cube.Cube`.
+
+ Args:
+
+ * src_dim_coverage:
+ The :class:`~iris.common.resolve.Resolve._DimCoverage` for the ``src`` :class:`~iris.cube.Cube`.
+
+ * src_aux_coverage:
+ The :class:`~iris.common.resolve.Resolve._AuxCoverage` for the ``src`` :class:`~iris.cube.Cube`.
+
+ * tgt_dim_coverage:
+ The :class:`~iris.common.resolve.Resolve._DimCoverage` for the ``tgt`` :class:`~iris.cube.Cube`.
+
+ * tgt_aux_coverage:
+ The :class:`~iris.common.resolve.Resolve._AuxCoverage` for the ``tgt`` :class:`~iris.cube.Cube`.
+
+ """
# Add local src/tgt dim coordinates.
self._prepare_local_payload_dim(src_dim_coverage, tgt_dim_coverage)
@@ -1397,6 +1921,47 @@ def _prepare_local_payload(
def _prepare_points_and_bounds(
self, src_coord, tgt_coord, src_dims, tgt_dims, ignore_mismatch=None
):
+ """
+ Compare the points and bounds of the ``src`` and ``tgt`` coordinates to ensure
+ that they are equivalent, taking into account broadcasting when appropriate.
+
+ .. note::
+
+ An exception will be raised if the ``src`` and ``tgt`` coordinates cannot
+ be broadcast.
+
+ .. note::
+
+ An exception will be raised if either the points or bounds are different,
+ however appropriate lenient behaviour concessions are applied.
+
+ Args:
+
+ * src_coord:
+ The ``src`` :class:`~iris.cube.Cube` coordinate with metadata matching
+ the ``tgt_coord``.
+
+ * tgt_coord:
+ The ``tgt`` :class:`~iris.cube.Cube` coordinate with metadata matching
+ the ``src_coord``.
+
+ * src_dims:
+ The dimension/s of the ``src_coord`` attached to the ``src`` :class:`~iris.cube.Cube`.
+
+ * tgt_dims:
+ The dimension/s of the ``tgt_coord`` attached to the ``tgt`` :class:`~iris.cube.Cube`.
+
+ Kwargs:
+
+ * ignore_mismatch:
+ For lenient behaviour only, don't raise an exception if there is a difference between
+ the ``src`` and ``tgt`` coordinate points or bounds.
+ Defaults to ``False``.
+
+ Returns:
+ Tuple of equivalent ``points`` and ``bounds``, otherwise ``None``.
+
+ """
from iris.util import array_equal
if ignore_mismatch is None:
@@ -1443,6 +2008,7 @@ def _prepare_points_and_bounds(
tgt_broadcasting = tgt_shape != tgt_shape_broadcast
if src_broadcasting and tgt_broadcasting:
+ # TBD: Extend capability to support attempting to broadcast two-way multi-dimensional coordinates.
emsg = (
f"Cannot broadcast the coordinate {src_coord.name()!r} on "
f"{self._src_cube_position} cube {self._src_cube.name()!r} and "
diff --git a/lib/iris/coords.py b/lib/iris/coords.py
index 76ca83cd96..cfeb24cdcb 100644
--- a/lib/iris/coords.py
+++ b/lib/iris/coords.py
@@ -12,7 +12,6 @@
from collections import namedtuple
from collections.abc import Iterator
import copy
-from functools import wraps
from itertools import chain, zip_longest
import operator
import warnings
@@ -1272,7 +1271,7 @@ def contains_point(self, point):
class Coord(_DimensionalMetadata):
"""
- Superclass for coordinates.
+ Abstract base class for coordinates.
"""
@@ -1291,7 +1290,7 @@ def __init__(
):
"""
- Constructs a single coordinate.
+ Coordinate abstract base class. As of ``v3.0.0`` you **cannot** create an instance of :class:`Coord`.
Args:
@@ -1313,17 +1312,17 @@ def __init__(
* bounds
An array of values describing the bounds of each cell. Given n
bounds for each cell, the shape of the bounds array should be
- points.shape + (n,). For example, a 1d coordinate with 100 points
+ points.shape + (n,). For example, a 1D coordinate with 100 points
and two bounds per cell would have a bounds array of shape
(100, 2)
Note if the data is a climatology, `climatological`
should be set.
* attributes
- A dictionary containing other cf and user-defined attributes.
+ A dictionary containing other CF and user-defined attributes.
* coord_system
A :class:`~iris.coord_systems.CoordSystem` representing the
coordinate system of the coordinate,
- e.g. a :class:`~iris.coord_systems.GeogCS` for a longitude Coord.
+ e.g., a :class:`~iris.coord_systems.GeogCS` for a longitude coordinate.
* climatological (bool):
When True: the coordinate is a NetCDF climatological time axis.
When True: saving in NetCDF will give the coordinate variable a
@@ -2250,7 +2249,8 @@ def _xml_id_extra(self, unique_value):
class DimCoord(Coord):
"""
- A coordinate that is 1D, numeric, and strictly monotonic.
+ A coordinate that is 1D, and numeric, with values that have a strict monotonic ordering. Missing values are not
+ permitted in a :class:`DimCoord`.
"""
@@ -2275,7 +2275,7 @@ def from_regular(
optionally bounds.
The majority of the arguments are defined as for
- :meth:`Coord.__init__`, but those which differ are defined below.
+ :class:`Coord`, but those which differ are defined below.
Args:
@@ -2336,8 +2336,9 @@ def __init__(
climatological=False,
):
"""
- Create a 1D, numeric, and strictly monotonic :class:`Coord` with
- read-only points and bounds.
+ Create a 1D, numeric, and strictly monotonic coordinate with **immutable** points and bounds.
+
+ Missing values are not permitted.
Args:
@@ -2369,11 +2370,11 @@ def __init__(
Note if the data is a climatology, `climatological`
should be set.
* attributes:
- A dictionary containing other cf and user-defined attributes.
+ A dictionary containing other CF and user-defined attributes.
* coord_system:
A :class:`~iris.coord_systems.CoordSystem` representing the
coordinate system of the coordinate,
- e.g. a :class:`~iris.coord_systems.GeogCS` for a longitude Coord.
+ e.g., a :class:`~iris.coord_systems.GeogCS` for a longitude coordinate.
* circular (bool):
Whether the coordinate wraps by the :attr:`~iris.coords.DimCoord.units.modulus`
i.e., the longitude coordinate wraps around the full great circle.
@@ -2624,15 +2625,54 @@ class AuxCoord(Coord):
"""
A CF auxiliary coordinate.
- .. note::
-
- There are currently no specific properties of :class:`AuxCoord`,
- everything is inherited from :class:`Coord`.
-
"""
- @wraps(Coord.__init__, assigned=("__doc__",), updated=())
def __init__(self, *args, **kwargs):
+ """
+ Create a coordinate with **mutable** points and bounds.
+
+ Args:
+
+ * points:
+ The values (or value in the case of a scalar coordinate) for each
+ cell of the coordinate.
+
+ Kwargs:
+
+ * standard_name:
+ CF standard name of the coordinate.
+ * long_name:
+ Descriptive name of the coordinate.
+ * var_name:
+ The netCDF variable name for the coordinate.
+ * units
+ The :class:`~cf_units.Unit` of the coordinate's values.
+ Can be a string, which will be converted to a Unit object.
+ * bounds
+ An array of values describing the bounds of each cell. Given n
+ bounds for each cell, the shape of the bounds array should be
+ points.shape + (n,). For example, a 1D coordinate with 100 points
+ and two bounds per cell would have a bounds array of shape
+ (100, 2)
+ Note if the data is a climatology, `climatological`
+ should be set.
+ * attributes
+ A dictionary containing other CF and user-defined attributes.
+ * coord_system
+ A :class:`~iris.coord_systems.CoordSystem` representing the
+ coordinate system of the coordinate,
+ e.g., a :class:`~iris.coord_systems.GeogCS` for a longitude coordinate.
+ * climatological (bool):
+ When True: the coordinate is a NetCDF climatological time axis.
+ When True: saving in NetCDF will give the coordinate variable a
+ 'climatology' attribute and will create a boundary variable called
+ '_climatology' in place of a standard bounds
+ attribute and bounds variable.
+ Will set to True when a climatological time axis is loaded
+ from NetCDF.
+ Always False if no bounds exist.
+
+ """
super().__init__(*args, **kwargs)
# Logically, :class:`Coord` is an abstract class and all actual coords must
diff --git a/lib/iris/cube.py b/lib/iris/cube.py
index bb631cae73..7c7d6c58e9 100644
--- a/lib/iris/cube.py
+++ b/lib/iris/cube.py
@@ -981,9 +981,7 @@ def convert_units(self, unit):
celsius and subtract 273.15 from each value in
:attr:`~iris.cube.Cube.data`.
- .. warning::
- Calling this method will trigger any deferred loading, causing
- the cube's data array to be loaded into memory.
+ This operation preserves lazy data.
"""
# If the cube has units convert the data.
@@ -2186,23 +2184,20 @@ def _summary_coord_extra(self, coord, indent):
extra = ""
similar_coords = self.coords(coord.name())
if len(similar_coords) > 1:
- # Find all the attribute keys
- keys = set()
- for similar_coord in similar_coords:
- keys.update(similar_coord.attributes.keys())
- # Look for any attributes that vary
+ similar_coords.remove(coord)
+ # Look for any attributes that vary.
vary = set()
- attributes = {}
- for key in keys:
+ for key, value in coord.attributes.items():
for similar_coord in similar_coords:
if key not in similar_coord.attributes:
vary.add(key)
break
- value = similar_coord.attributes[key]
- if attributes.setdefault(key, value) != value:
+ if not np.array_equal(
+ similar_coord.attributes[key], value
+ ):
vary.add(key)
break
- keys = sorted(vary & set(coord.attributes.keys()))
+ keys = sorted(vary)
bits = [
"{}={!r}".format(key, coord.attributes[key]) for key in keys
]
@@ -3923,10 +3918,15 @@ def collapsed(self, coords, aggregator, **kwargs):
# on the cube lazy array.
# NOTE: do not reform the data in this case, as 'lazy_aggregate'
# accepts multiple axes (unlike 'aggregate').
- collapse_axis = list(dims_to_collapse)
+ collapse_axes = list(dims_to_collapse)
+ if len(collapse_axes) == 1:
+ # Replace a "list of one axis" with just a number: this single-axis form is *required* by functions
+ # like da.average (and np.average) when a 1-d weights array is specified.
+ collapse_axes = collapse_axes[0]
+
try:
data_result = aggregator.lazy_aggregate(
- self.lazy_data(), axis=collapse_axis, **kwargs
+ self.lazy_data(), axis=collapse_axes, **kwargs
)
except TypeError:
# TypeError - when unexpected keywords passed through (such as
@@ -3950,8 +3950,10 @@ def collapsed(self, coords, aggregator, **kwargs):
unrolled_data = np.transpose(self.data, dims).reshape(new_shape)
# Perform the same operation on the weights if applicable
- if kwargs.get("weights") is not None:
- weights = kwargs["weights"].view()
+ weights = kwargs.get("weights")
+ if weights is not None and weights.ndim > 1:
+ # Note: *don't* adjust 1-d weights arrays; these have a special meaning for statistics functions.
+ weights = weights.view()
kwargs["weights"] = np.transpose(weights, dims).reshape(
new_shape
)
diff --git a/lib/iris/fileformats/netcdf.py b/lib/iris/fileformats/netcdf.py
index 98f712a970..bb7a870d58 100644
--- a/lib/iris/fileformats/netcdf.py
+++ b/lib/iris/fileformats/netcdf.py
@@ -719,6 +719,9 @@ def coord_from_term(term):
warnings.warn(msg)
coord_a = coord_from_term("a")
if coord_a is not None:
+ if coord_a.units.is_unknown():
+ # Be graceful, and promote unknown to dimensionless units.
+ coord_a.units = "1"
delta = coord_a * coord_p0.points[0]
delta.units = coord_a.units * coord_p0.units
delta.rename("vertical pressure")
diff --git a/lib/iris/tests/unit/aux_factory/test_HybridPressureFactory.py b/lib/iris/tests/unit/aux_factory/test_HybridPressureFactory.py
index 14944891f2..32091c7d63 100644
--- a/lib/iris/tests/unit/aux_factory/test_HybridPressureFactory.py
+++ b/lib/iris/tests/unit/aux_factory/test_HybridPressureFactory.py
@@ -113,6 +113,15 @@ def test_factory_metadata(self):
self.assertIsNone(factory.coord_system)
self.assertEqual(factory.attributes, {})
+ def test_promote_sigma_units_unknown_to_dimensionless(self):
+ sigma = mock.Mock(units=cf_units.Unit("unknown"), nbounds=0)
+ factory = HybridPressureFactory(
+ delta=self.delta,
+ sigma=sigma,
+ surface_air_pressure=self.surface_air_pressure,
+ )
+ self.assertEqual("1", factory.dependencies["sigma"].units)
+
class Test_dependencies(tests.IrisTest):
def setUp(self):
diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSFactory.py b/lib/iris/tests/unit/aux_factory/test_OceanSFactory.py
index caf9d303c6..6e8e40cd1b 100644
--- a/lib/iris/tests/unit/aux_factory/test_OceanSFactory.py
+++ b/lib/iris/tests/unit/aux_factory/test_OceanSFactory.py
@@ -137,6 +137,12 @@ def test_depth_incompatible_units(self):
with self.assertRaises(ValueError):
OceanSFactory(**self.kwargs)
+ def test_promote_s_units_unknown_to_dimensionless(self):
+ s = mock.Mock(units=Unit("unknown"), nbounds=0)
+ self.kwargs["s"] = s
+ factory = OceanSFactory(**self.kwargs)
+ self.assertEqual("1", factory.dependencies["s"].units)
+
class Test_dependencies(tests.IrisTest):
def setUp(self):
diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSg1Factory.py b/lib/iris/tests/unit/aux_factory/test_OceanSg1Factory.py
index 99a4fe1732..238df2f073 100644
--- a/lib/iris/tests/unit/aux_factory/test_OceanSg1Factory.py
+++ b/lib/iris/tests/unit/aux_factory/test_OceanSg1Factory.py
@@ -121,6 +121,15 @@ def test_depth_incompatible_units(self):
with self.assertRaises(ValueError):
OceanSg1Factory(**self.kwargs)
+ def test_promote_c_and_s_units_unknown_to_dimensionless(self):
+ c = mock.Mock(units=Unit("unknown"), nbounds=0)
+ s = mock.Mock(units=Unit("unknown"), nbounds=0)
+ self.kwargs["c"] = c
+ self.kwargs["s"] = s
+ factory = OceanSg1Factory(**self.kwargs)
+ self.assertEqual("1", factory.dependencies["c"].units)
+ self.assertEqual("1", factory.dependencies["s"].units)
+
class Test_dependencies(tests.IrisTest):
def setUp(self):
diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSg2Factory.py b/lib/iris/tests/unit/aux_factory/test_OceanSg2Factory.py
index 387f0e48d1..fb3ada382e 100644
--- a/lib/iris/tests/unit/aux_factory/test_OceanSg2Factory.py
+++ b/lib/iris/tests/unit/aux_factory/test_OceanSg2Factory.py
@@ -121,6 +121,15 @@ def test_depth_incompatible_units(self):
with self.assertRaises(ValueError):
OceanSg2Factory(**self.kwargs)
+ def test_promote_c_and_s_units_unknown_to_dimensionless(self):
+ c = mock.Mock(units=Unit("unknown"), nbounds=0)
+ s = mock.Mock(units=Unit("unknown"), nbounds=0)
+ self.kwargs["c"] = c
+ self.kwargs["s"] = s
+ factory = OceanSg2Factory(**self.kwargs)
+ self.assertEqual("1", factory.dependencies["c"].units)
+ self.assertEqual("1", factory.dependencies["s"].units)
+
class Test_dependencies(tests.IrisTest):
def setUp(self):
diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSigmaFactory.py b/lib/iris/tests/unit/aux_factory/test_OceanSigmaFactory.py
index 07c970ad7e..69a8a32c6e 100644
--- a/lib/iris/tests/unit/aux_factory/test_OceanSigmaFactory.py
+++ b/lib/iris/tests/unit/aux_factory/test_OceanSigmaFactory.py
@@ -59,6 +59,12 @@ def test_depth_incompatible_units(self):
with self.assertRaises(ValueError):
OceanSigmaFactory(**self.kwargs)
+ def test_promote_sigma_units_unknown_to_dimensionless(self):
+ sigma = mock.Mock(units=Unit("unknown"), nbounds=0)
+ self.kwargs["sigma"] = sigma
+ factory = OceanSigmaFactory(**self.kwargs)
+ self.assertEqual("1", factory.dependencies["sigma"].units)
+
class Test_dependencies(tests.IrisTest):
def setUp(self):
diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSigmaZFactory.py b/lib/iris/tests/unit/aux_factory/test_OceanSigmaZFactory.py
index 6f1e8cd57a..4a4e30b9ca 100644
--- a/lib/iris/tests/unit/aux_factory/test_OceanSigmaZFactory.py
+++ b/lib/iris/tests/unit/aux_factory/test_OceanSigmaZFactory.py
@@ -138,6 +138,12 @@ def test_depth_incompatible_units(self):
with self.assertRaises(ValueError):
OceanSigmaZFactory(**self.kwargs)
+ def test_promote_sigma_units_unknown_to_dimensionless(self):
+ sigma = mock.Mock(units=Unit("unknown"), nbounds=0)
+ self.kwargs["sigma"] = sigma
+ factory = OceanSigmaZFactory(**self.kwargs)
+ self.assertEqual("1", factory.dependencies["sigma"].units)
+
class Test_dependencies(tests.IrisTest):
def setUp(self):
diff --git a/lib/iris/tests/unit/common/resolve/__init__.py b/lib/iris/tests/unit/common/resolve/__init__.py
new file mode 100644
index 0000000000..d0b189e59d
--- /dev/null
+++ b/lib/iris/tests/unit/common/resolve/__init__.py
@@ -0,0 +1,6 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
+"""Unit tests for the :mod:`iris.common.resolve` package."""
diff --git a/lib/iris/tests/unit/common/resolve/test_Resolve.py b/lib/iris/tests/unit/common/resolve/test_Resolve.py
new file mode 100644
index 0000000000..94ec48de88
--- /dev/null
+++ b/lib/iris/tests/unit/common/resolve/test_Resolve.py
@@ -0,0 +1,4795 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
+"""
+Unit tests for the :class:`iris.common.resolve.Resolve`.
+
+"""
+
+# Import iris.tests first so that some things can be initialised before
+# importing anything else.
+import iris.tests as tests
+
+from collections import namedtuple
+from copy import deepcopy
+
+from cf_units import Unit
+import numpy as np
+import unittest.mock as mock
+from unittest.mock import sentinel
+
+from iris.common.lenient import LENIENT
+from iris.common.metadata import CubeMetadata
+from iris.common.resolve import (
+ Resolve,
+ _AuxCoverage,
+ _CategoryItems,
+ _DimCoverage,
+ _Item,
+ _PreparedItem,
+ _PreparedFactory,
+ _PreparedMetadata,
+)
+from iris.coords import DimCoord
+from iris.cube import Cube
+
+
+class Test___init__(tests.IrisTest):
+ def setUp(self):
+ target = "iris.common.resolve.Resolve.__call__"
+ self.m_call = mock.MagicMock(return_value=sentinel.return_value)
+ _ = self.patch(target, new=self.m_call)
+
+ def _assert_members_none(self, resolve):
+ self.assertIsNone(resolve.lhs_cube_resolved)
+ self.assertIsNone(resolve.rhs_cube_resolved)
+ self.assertIsNone(resolve.lhs_cube_category)
+ self.assertIsNone(resolve.rhs_cube_category)
+ self.assertIsNone(resolve.lhs_cube_category_local)
+ self.assertIsNone(resolve.rhs_cube_category_local)
+ self.assertIsNone(resolve.category_common)
+ self.assertIsNone(resolve.lhs_cube_dim_coverage)
+ self.assertIsNone(resolve.lhs_cube_aux_coverage)
+ self.assertIsNone(resolve.rhs_cube_dim_coverage)
+ self.assertIsNone(resolve.rhs_cube_aux_coverage)
+ self.assertIsNone(resolve.map_rhs_to_lhs)
+ self.assertIsNone(resolve.mapping)
+ self.assertIsNone(resolve.prepared_category)
+ self.assertIsNone(resolve.prepared_factories)
+ self.assertIsNone(resolve._broadcast_shape)
+
+ def test_lhs_rhs_default(self):
+ resolve = Resolve()
+ self.assertIsNone(resolve.lhs_cube)
+ self.assertIsNone(resolve.rhs_cube)
+ self._assert_members_none(resolve)
+ self.assertEqual(0, self.m_call.call_count)
+
+ def test_lhs_rhs_provided(self):
+ m_lhs = sentinel.lhs
+ m_rhs = sentinel.rhs
+ resolve = Resolve(lhs=m_lhs, rhs=m_rhs)
+ # The lhs_cube and rhs_cube are only None due
+ # to __call__ being mocked. See Test___call__
+ # for appropriate test coverage.
+ self.assertIsNone(resolve.lhs_cube)
+ self.assertIsNone(resolve.rhs_cube)
+ self._assert_members_none(resolve)
+ self.assertEqual(1, self.m_call.call_count)
+ call_args = mock.call(m_lhs, m_rhs)
+ self.assertEqual(call_args, self.m_call.call_args)
+
+
+class Test___call__(tests.IrisTest):
+ def setUp(self):
+ self.m_lhs = mock.MagicMock(spec=Cube)
+ self.m_rhs = mock.MagicMock(spec=Cube)
+ target = "iris.common.resolve.Resolve.{method}"
+ method = target.format(method="_metadata_resolve")
+ self.m_metadata_resolve = self.patch(method)
+ method = target.format(method="_metadata_coverage")
+ self.m_metadata_coverage = self.patch(method)
+ method = target.format(method="_metadata_mapping")
+ self.m_metadata_mapping = self.patch(method)
+ method = target.format(method="_metadata_prepare")
+ self.m_metadata_prepare = self.patch(method)
+
+ def test_lhs_not_cube(self):
+ emsg = "'LHS' argument to be a 'Cube'"
+ with self.assertRaisesRegex(TypeError, emsg):
+ _ = Resolve(rhs=self.m_rhs)
+
+ def test_rhs_not_cube(self):
+ emsg = "'RHS' argument to be a 'Cube'"
+ with self.assertRaisesRegex(TypeError, emsg):
+ _ = Resolve(lhs=self.m_lhs)
+
+ def _assert_called_metadata_methods(self):
+ call_args = mock.call()
+ self.assertEqual(1, self.m_metadata_resolve.call_count)
+ self.assertEqual(call_args, self.m_metadata_resolve.call_args)
+ self.assertEqual(1, self.m_metadata_coverage.call_count)
+ self.assertEqual(call_args, self.m_metadata_coverage.call_args)
+ self.assertEqual(1, self.m_metadata_mapping.call_count)
+ self.assertEqual(call_args, self.m_metadata_mapping.call_args)
+ self.assertEqual(1, self.m_metadata_prepare.call_count)
+ self.assertEqual(call_args, self.m_metadata_prepare.call_args)
+
+ def test_map_rhs_to_lhs__less_than(self):
+ self.m_lhs.ndim = 2
+ self.m_rhs.ndim = 1
+ resolve = Resolve(lhs=self.m_lhs, rhs=self.m_rhs)
+ self.assertEqual(self.m_lhs, resolve.lhs_cube)
+ self.assertEqual(self.m_rhs, resolve.rhs_cube)
+ self.assertTrue(resolve.map_rhs_to_lhs)
+ self._assert_called_metadata_methods()
+
+ def test_map_rhs_to_lhs__equal(self):
+ self.m_lhs.ndim = 2
+ self.m_rhs.ndim = 2
+ resolve = Resolve(lhs=self.m_lhs, rhs=self.m_rhs)
+ self.assertEqual(self.m_lhs, resolve.lhs_cube)
+ self.assertEqual(self.m_rhs, resolve.rhs_cube)
+ self.assertTrue(resolve.map_rhs_to_lhs)
+ self._assert_called_metadata_methods()
+
+ def test_map_lhs_to_rhs(self):
+ self.m_lhs.ndim = 2
+ self.m_rhs.ndim = 3
+ resolve = Resolve(lhs=self.m_lhs, rhs=self.m_rhs)
+ self.assertEqual(self.m_lhs, resolve.lhs_cube)
+ self.assertEqual(self.m_rhs, resolve.rhs_cube)
+ self.assertFalse(resolve.map_rhs_to_lhs)
+ self._assert_called_metadata_methods()
+
+
+class Test__categorise_items(tests.IrisTest):
+ def setUp(self):
+ self.coord_dims = {}
+ # configure dim coords
+ coord = mock.Mock(metadata=sentinel.dim_metadata1)
+ self.dim_coords = [coord]
+ self.coord_dims[coord] = sentinel.dims1
+ # configure aux and scalar coords
+ self.aux_coords = []
+ pairs = [
+ (sentinel.aux_metadata2, sentinel.dims2),
+ (sentinel.aux_metadata3, sentinel.dims3),
+ (sentinel.scalar_metadata4, None),
+ (sentinel.scalar_metadata5, None),
+ (sentinel.scalar_metadata6, None),
+ ]
+ for metadata, dims in pairs:
+ coord = mock.Mock(metadata=metadata)
+ self.aux_coords.append(coord)
+ self.coord_dims[coord] = dims
+ func = lambda coord: self.coord_dims[coord]
+ self.cube = mock.Mock(
+ aux_coords=self.aux_coords,
+ dim_coords=self.dim_coords,
+ coord_dims=func,
+ )
+
+ def test(self):
+ result = Resolve._categorise_items(self.cube)
+ self.assertIsInstance(result, _CategoryItems)
+ self.assertEqual(1, len(result.items_dim))
+ # check dim coords
+ for item in result.items_dim:
+ self.assertIsInstance(item, _Item)
+ (coord,) = self.dim_coords
+ dims = self.coord_dims[coord]
+ expected = [_Item(metadata=coord.metadata, coord=coord, dims=dims)]
+ self.assertEqual(expected, result.items_dim)
+ # check aux coords
+ self.assertEqual(2, len(result.items_aux))
+ for item in result.items_aux:
+ self.assertIsInstance(item, _Item)
+ expected_aux, expected_scalar = [], []
+ for coord in self.aux_coords:
+ dims = self.coord_dims[coord]
+ item = _Item(metadata=coord.metadata, coord=coord, dims=dims)
+ if dims:
+ expected_aux.append(item)
+ else:
+ expected_scalar.append(item)
+ self.assertEqual(expected_aux, result.items_aux)
+ # check scalar coords
+ self.assertEqual(3, len(result.items_scalar))
+ for item in result.items_scalar:
+ self.assertIsInstance(item, _Item)
+ self.assertEqual(expected_scalar, result.items_scalar)
+
+
+class Test__metadata_resolve(tests.IrisTest):
+ def setUp(self):
+ self.target = "iris.common.resolve.Resolve._categorise_items"
+ self.m_lhs_cube = sentinel.lhs_cube
+ self.m_rhs_cube = sentinel.rhs_cube
+
+ @staticmethod
+ def _create_items(pairs):
+ # This wrapper (hack) is necessary to support mocking the "name"
+ # method (callable) of the metadata, as "name" is already part of
+ # the mock API - this is always troublesome in mock-world.
+ Wrapper = namedtuple("Wrapper", ("name", "value"))
+ result = []
+ for name, dims in pairs:
+ metadata = Wrapper(name=lambda: str(name), value=name)
+ coord = mock.Mock(metadata=metadata)
+ item = _Item(metadata=metadata, coord=coord, dims=dims)
+ result.append(item)
+ return result
+
+ def test_metadata_same(self):
+ category = _CategoryItems(items_dim=[], items_aux=[], items_scalar=[])
+ # configure dim coords
+ pairs = [(sentinel.dim_metadata1, sentinel.dims1)]
+ category.items_dim.extend(self._create_items(pairs))
+ # configure aux coords
+ pairs = [
+ (sentinel.aux_metadata1, sentinel.dims2),
+ (sentinel.aux_metadata2, sentinel.dims3),
+ ]
+ category.items_aux.extend(self._create_items(pairs))
+ # configure scalar coords
+ pairs = [
+ (sentinel.scalar_metadata1, None),
+ (sentinel.scalar_metadata2, None),
+ (sentinel.scalar_metadata3, None),
+ ]
+ category.items_scalar.extend(self._create_items(pairs))
+
+ side_effect = (category, category)
+ mocker = self.patch(self.target, side_effect=side_effect)
+
+ resolve = Resolve()
+ self.assertIsNone(resolve.lhs_cube)
+ self.assertIsNone(resolve.rhs_cube)
+ self.assertIsNone(resolve.lhs_cube_category)
+ self.assertIsNone(resolve.rhs_cube_category)
+ self.assertIsNone(resolve.lhs_cube_category_local)
+ self.assertIsNone(resolve.rhs_cube_category_local)
+ self.assertIsNone(resolve.category_common)
+
+ # require to explicitly configure cubes
+ resolve.lhs_cube = self.m_lhs_cube
+ resolve.rhs_cube = self.m_rhs_cube
+ resolve._metadata_resolve()
+
+ self.assertEqual(mocker.call_count, 2)
+ calls = [mock.call(self.m_lhs_cube), mock.call(self.m_rhs_cube)]
+ self.assertEqual(calls, mocker.call_args_list)
+
+ self.assertEqual(category, resolve.lhs_cube_category)
+ self.assertEqual(category, resolve.rhs_cube_category)
+ expected = _CategoryItems(items_dim=[], items_aux=[], items_scalar=[])
+ self.assertEqual(expected, resolve.lhs_cube_category_local)
+ self.assertEqual(expected, resolve.rhs_cube_category_local)
+ self.assertEqual(category, resolve.category_common)
+
+ def test_metadata_overlap(self):
+ # configure the lhs cube category
+ category_lhs = _CategoryItems(
+ items_dim=[], items_aux=[], items_scalar=[]
+ )
+ # configure dim coords
+ pairs = [
+ (sentinel.dim_metadata1, sentinel.dims1),
+ (sentinel.dim_metadata2, sentinel.dims2),
+ ]
+ category_lhs.items_dim.extend(self._create_items(pairs))
+ # configure aux coords
+ pairs = [
+ (sentinel.aux_metadata1, sentinel.dims3),
+ (sentinel.aux_metadata2, sentinel.dims4),
+ ]
+ category_lhs.items_aux.extend(self._create_items(pairs))
+ # configure scalar coords
+ pairs = [
+ (sentinel.scalar_metadata1, None),
+ (sentinel.scalar_metadata2, None),
+ ]
+ category_lhs.items_scalar.extend(self._create_items(pairs))
+
+ # configure the rhs cube category
+ category_rhs = _CategoryItems(
+ items_dim=[], items_aux=[], items_scalar=[]
+ )
+ # configure dim coords
+ category_rhs.items_dim.append(category_lhs.items_dim[0])
+ pairs = [(sentinel.dim_metadata200, sentinel.dims2)]
+ category_rhs.items_dim.extend(self._create_items(pairs))
+ # configure aux coords
+ category_rhs.items_aux.append(category_lhs.items_aux[0])
+ pairs = [(sentinel.aux_metadata200, sentinel.dims4)]
+ category_rhs.items_aux.extend(self._create_items(pairs))
+ # configure scalar coords
+ category_rhs.items_scalar.append(category_lhs.items_scalar[0])
+ pairs = [(sentinel.scalar_metadata200, None)]
+ category_rhs.items_scalar.extend(self._create_items(pairs))
+
+ side_effect = (category_lhs, category_rhs)
+ mocker = self.patch(self.target, side_effect=side_effect)
+
+ resolve = Resolve()
+ self.assertIsNone(resolve.lhs_cube)
+ self.assertIsNone(resolve.rhs_cube)
+ self.assertIsNone(resolve.lhs_cube_category)
+ self.assertIsNone(resolve.rhs_cube_category)
+ self.assertIsNone(resolve.lhs_cube_category_local)
+ self.assertIsNone(resolve.rhs_cube_category_local)
+ self.assertIsNone(resolve.category_common)
+
+ # require to explicitly configure cubes
+ resolve.lhs_cube = self.m_lhs_cube
+ resolve.rhs_cube = self.m_rhs_cube
+ resolve._metadata_resolve()
+
+ self.assertEqual(2, mocker.call_count)
+ calls = [mock.call(self.m_lhs_cube), mock.call(self.m_rhs_cube)]
+ self.assertEqual(calls, mocker.call_args_list)
+
+ self.assertEqual(category_lhs, resolve.lhs_cube_category)
+ self.assertEqual(category_rhs, resolve.rhs_cube_category)
+
+ items_dim = [category_lhs.items_dim[1]]
+ items_aux = [category_lhs.items_aux[1]]
+ items_scalar = [category_lhs.items_scalar[1]]
+ expected = _CategoryItems(
+ items_dim=items_dim, items_aux=items_aux, items_scalar=items_scalar
+ )
+ self.assertEqual(expected, resolve.lhs_cube_category_local)
+
+ items_dim = [category_rhs.items_dim[1]]
+ items_aux = [category_rhs.items_aux[1]]
+ items_scalar = [category_rhs.items_scalar[1]]
+ expected = _CategoryItems(
+ items_dim=items_dim, items_aux=items_aux, items_scalar=items_scalar
+ )
+ self.assertEqual(expected, resolve.rhs_cube_category_local)
+
+ items_dim = [category_lhs.items_dim[0]]
+ items_aux = [category_lhs.items_aux[0]]
+ items_scalar = [category_lhs.items_scalar[0]]
+ expected = _CategoryItems(
+ items_dim=items_dim, items_aux=items_aux, items_scalar=items_scalar
+ )
+ self.assertEqual(expected, resolve.category_common)
+
+ def test_metadata_different(self):
+ # configure the lhs cube category
+ category_lhs = _CategoryItems(
+ items_dim=[], items_aux=[], items_scalar=[]
+ )
+ # configure dim coords
+ pairs = [
+ (sentinel.dim_metadata1, sentinel.dims1),
+ (sentinel.dim_metadata2, sentinel.dims2),
+ ]
+ category_lhs.items_dim.extend(self._create_items(pairs))
+ # configure aux coords
+ pairs = [
+ (sentinel.aux_metadata1, sentinel.dims3),
+ (sentinel.aux_metadata2, sentinel.dims4),
+ ]
+ category_lhs.items_aux.extend(self._create_items(pairs))
+ # configure scalar coords
+ pairs = [
+ (sentinel.scalar_metadata1, None),
+ (sentinel.scalar_metadata2, None),
+ ]
+ category_lhs.items_scalar.extend(self._create_items(pairs))
+
+ # configure the rhs cube category
+ category_rhs = _CategoryItems(
+ items_dim=[], items_aux=[], items_scalar=[]
+ )
+ # configure dim coords
+ pairs = [
+ (sentinel.dim_metadata100, sentinel.dims1),
+ (sentinel.dim_metadata200, sentinel.dims2),
+ ]
+ category_rhs.items_dim.extend(self._create_items(pairs))
+ # configure aux coords
+ pairs = [
+ (sentinel.aux_metadata100, sentinel.dims3),
+ (sentinel.aux_metadata200, sentinel.dims4),
+ ]
+ category_rhs.items_aux.extend(self._create_items(pairs))
+ # configure scalar coords
+ pairs = [
+ (sentinel.scalar_metadata100, None),
+ (sentinel.scalar_metadata200, None),
+ ]
+ category_rhs.items_scalar.extend(self._create_items(pairs))
+
+ side_effect = (category_lhs, category_rhs)
+ mocker = self.patch(self.target, side_effect=side_effect)
+
+ resolve = Resolve()
+ self.assertIsNone(resolve.lhs_cube)
+ self.assertIsNone(resolve.rhs_cube)
+ self.assertIsNone(resolve.lhs_cube_category)
+ self.assertIsNone(resolve.rhs_cube_category)
+ self.assertIsNone(resolve.lhs_cube_category_local)
+ self.assertIsNone(resolve.rhs_cube_category_local)
+ self.assertIsNone(resolve.category_common)
+
+ # first, explicitly configure the lhs/rhs cubes
+ resolve.lhs_cube = self.m_lhs_cube
+ resolve.rhs_cube = self.m_rhs_cube
+ resolve._metadata_resolve()
+
+ self.assertEqual(2, mocker.call_count)
+ calls = [mock.call(self.m_lhs_cube), mock.call(self.m_rhs_cube)]
+ self.assertEqual(calls, mocker.call_args_list)
+
+ self.assertEqual(category_lhs, resolve.lhs_cube_category)
+ self.assertEqual(category_rhs, resolve.rhs_cube_category)
+ self.assertEqual(category_lhs, resolve.lhs_cube_category_local)
+ self.assertEqual(category_rhs, resolve.rhs_cube_category_local)
+ expected = _CategoryItems(items_dim=[], items_aux=[], items_scalar=[])
+ self.assertEqual(expected, resolve.category_common)
+
+
+class Test__dim_coverage(tests.IrisTest):
+ def setUp(self):
+ self.ndim = 4
+ self.cube = mock.Mock(ndim=self.ndim)
+ self.items = []
+ parts = [
+ (sentinel.metadata0, sentinel.coord0, (0,)),
+ (sentinel.metadata1, sentinel.coord1, (1,)),
+ (sentinel.metadata2, sentinel.coord2, (2,)),
+ (sentinel.metadata3, sentinel.coord3, (3,)),
+ ]
+ column_parts = [x for x in zip(*parts)]
+ self.metadata, self.coords, self.dims = [list(x) for x in column_parts]
+ self.dims = [dim for dim, in self.dims]
+ for metadata, coord, dims in parts:
+ item = _Item(metadata=metadata, coord=coord, dims=dims)
+ self.items.append(item)
+
+ def test_coverage_no_local_no_common_all_free(self):
+ items = []
+ common = []
+ result = Resolve._dim_coverage(self.cube, items, common)
+ self.assertIsInstance(result, _DimCoverage)
+ self.assertEqual(self.cube, result.cube)
+ expected = [None] * self.ndim
+ self.assertEqual(expected, result.metadata)
+ self.assertEqual(expected, result.coords)
+ self.assertEqual([], result.dims_common)
+ self.assertEqual([], result.dims_local)
+ expected = list(range(self.ndim))
+ self.assertEqual(expected, result.dims_free)
+
+ def test_coverage_all_local_no_common_no_free(self):
+ common = []
+ result = Resolve._dim_coverage(self.cube, self.items, common)
+ self.assertIsInstance(result, _DimCoverage)
+ self.assertEqual(self.cube, result.cube)
+ self.assertEqual(self.metadata, result.metadata)
+ self.assertEqual(self.coords, result.coords)
+ self.assertEqual([], result.dims_common)
+ self.assertEqual(self.dims, result.dims_local)
+ self.assertEqual([], result.dims_free)
+
+ def test_coverage_no_local_all_common_no_free(self):
+ result = Resolve._dim_coverage(self.cube, self.items, self.metadata)
+ self.assertIsInstance(result, _DimCoverage)
+ self.assertEqual(self.cube, result.cube)
+ self.assertEqual(self.metadata, result.metadata)
+ self.assertEqual(self.coords, result.coords)
+ self.assertEqual(self.dims, result.dims_common)
+ self.assertEqual([], result.dims_local)
+ self.assertEqual([], result.dims_free)
+
+ def test_coverage_mixed(self):
+ common = [self.items[1].metadata, self.items[2].metadata]
+ self.items.pop(0)
+ self.items.pop(-1)
+ metadata, coord, dims = sentinel.metadata100, sentinel.coord100, (0,)
+ self.items.append(_Item(metadata=metadata, coord=coord, dims=dims))
+ result = Resolve._dim_coverage(self.cube, self.items, common)
+ self.assertIsInstance(result, _DimCoverage)
+ self.assertEqual(self.cube, result.cube)
+ expected = [
+ metadata,
+ self.items[0].metadata,
+ self.items[1].metadata,
+ None,
+ ]
+ self.assertEqual(expected, result.metadata)
+ expected = [coord, self.items[0].coord, self.items[1].coord, None]
+ self.assertEqual(expected, result.coords)
+ self.assertEqual([1, 2], result.dims_common)
+ self.assertEqual([0], result.dims_local)
+ self.assertEqual([3], result.dims_free)
+
+
+class Test__aux_coverage(tests.IrisTest):
+ def setUp(self):
+ self.ndim = 4
+ self.cube = mock.Mock(ndim=self.ndim)
+ # configure aux coords
+ self.items_aux = []
+ aux_parts = [
+ (sentinel.aux_metadata0, sentinel.aux_coord0, (0,)),
+ (sentinel.aux_metadata1, sentinel.aux_coord1, (1,)),
+ (sentinel.aux_metadata23, sentinel.aux_coord23, (2, 3)),
+ ]
+ column_aux_parts = [x for x in zip(*aux_parts)]
+ self.aux_metadata, self.aux_coords, self.aux_dims = [
+ list(x) for x in column_aux_parts
+ ]
+ for metadata, coord, dims in aux_parts:
+ item = _Item(metadata=metadata, coord=coord, dims=dims)
+ self.items_aux.append(item)
+ # configure scalar coords
+ self.items_scalar = []
+ scalar_parts = [
+ (sentinel.scalar_metadata0, sentinel.scalar_coord0, ()),
+ (sentinel.scalar_metadata1, sentinel.scalar_coord1, ()),
+ (sentinel.scalar_metadata2, sentinel.scalar_coord2, ()),
+ ]
+ column_scalar_parts = [x for x in zip(*scalar_parts)]
+ self.scalar_metadata, self.scalar_coords, self.scalar_dims = [
+ list(x) for x in column_scalar_parts
+ ]
+ for metadata, coord, dims in scalar_parts:
+ item = _Item(metadata=metadata, coord=coord, dims=dims)
+ self.items_scalar.append(item)
+
+ def test_coverage_no_local_no_common_all_free(self):
+ items_aux, items_scalar = [], []
+ common_aux, common_scalar = [], []
+ result = Resolve._aux_coverage(
+ self.cube, items_aux, items_scalar, common_aux, common_scalar
+ )
+ self.assertIsInstance(result, _AuxCoverage)
+ self.assertEqual(self.cube, result.cube)
+ self.assertEqual([], result.common_items_aux)
+ self.assertEqual([], result.common_items_scalar)
+ self.assertEqual([], result.local_items_aux)
+ self.assertEqual([], result.local_items_scalar)
+ self.assertEqual([], result.dims_common)
+ self.assertEqual([], result.dims_local)
+ expected = list(range(self.ndim))
+ self.assertEqual(expected, result.dims_free)
+
+ def test_coverage_all_local_no_common_no_free(self):
+ common_aux, common_scalar = [], []
+ result = Resolve._aux_coverage(
+ self.cube,
+ self.items_aux,
+ self.items_scalar,
+ common_aux,
+ common_scalar,
+ )
+ self.assertIsInstance(result, _AuxCoverage)
+ self.assertEqual(self.cube, result.cube)
+ expected = []
+ self.assertEqual(expected, result.common_items_aux)
+ self.assertEqual(expected, result.common_items_scalar)
+ self.assertEqual(self.items_aux, result.local_items_aux)
+ self.assertEqual(self.items_scalar, result.local_items_scalar)
+ self.assertEqual([], result.dims_common)
+ expected = list(range(self.ndim))
+ self.assertEqual(expected, result.dims_local)
+ self.assertEqual([], result.dims_free)
+
+ def test_coverage_no_local_all_common_no_free(self):
+ result = Resolve._aux_coverage(
+ self.cube,
+ self.items_aux,
+ self.items_scalar,
+ self.aux_metadata,
+ self.scalar_metadata,
+ )
+ self.assertIsInstance(result, _AuxCoverage)
+ self.assertEqual(self.cube, result.cube)
+ self.assertEqual(self.items_aux, result.common_items_aux)
+ self.assertEqual(self.items_scalar, result.common_items_scalar)
+ self.assertEqual([], result.local_items_aux)
+ self.assertEqual([], result.local_items_scalar)
+ expected = list(range(self.ndim))
+ self.assertEqual(expected, result.dims_common)
+ self.assertEqual([], result.dims_local)
+ self.assertEqual([], result.dims_free)
+
+ def test_coverage_mixed(self):
+ common_aux = [self.items_aux[-1].metadata]
+ common_scalar = [self.items_scalar[1].metadata]
+ self.items_aux.pop(1)
+ result = Resolve._aux_coverage(
+ self.cube,
+ self.items_aux,
+ self.items_scalar,
+ common_aux,
+ common_scalar,
+ )
+ self.assertIsInstance(result, _AuxCoverage)
+ self.assertEqual(self.cube, result.cube)
+ expected = [self.items_aux[-1]]
+ self.assertEqual(expected, result.common_items_aux)
+ expected = [self.items_scalar[1]]
+ self.assertEqual(expected, result.common_items_scalar)
+ expected = [self.items_aux[0]]
+ self.assertEqual(expected, result.local_items_aux)
+ expected = [self.items_scalar[0], self.items_scalar[2]]
+ self.assertEqual(expected, result.local_items_scalar)
+ self.assertEqual([2, 3], result.dims_common)
+ self.assertEqual([0], result.dims_local)
+ self.assertEqual([1], result.dims_free)
+
+
class Test__metadata_coverage(tests.IrisTest):
    """Unit tests for `Resolve._metadata_coverage`.

    Checks that the method delegates once per cube to the (patched)
    `_dim_coverage` and `_aux_coverage` helpers, and stores each result on
    the matching `Resolve` attribute.
    """

    def setUp(self):
        # A Resolve primed with sentinel LHS/RHS cubes.
        self.resolve = Resolve()
        self.m_lhs_cube = sentinel.lhs_cube
        self.resolve.lhs_cube = self.m_lhs_cube
        self.m_rhs_cube = sentinel.rhs_cube
        self.resolve.rhs_cube = self.m_rhs_cube
        # The common category: one mock item of each flavour, each exposing
        # a sentinel "metadata" attribute.
        self.m_items_dim_metadata = sentinel.items_dim_metadata
        self.m_items_aux_metadata = sentinel.items_aux_metadata
        self.m_items_scalar_metadata = sentinel.items_scalar_metadata
        items_dim = [mock.Mock(metadata=self.m_items_dim_metadata)]
        items_aux = [mock.Mock(metadata=self.m_items_aux_metadata)]
        items_scalar = [mock.Mock(metadata=self.m_items_scalar_metadata)]
        category = _CategoryItems(
            items_dim=items_dim, items_aux=items_aux, items_scalar=items_scalar
        )
        self.resolve.category_common = category
        # The per-cube categories: plain sentinels, shared by LHS and RHS.
        self.m_items_dim = sentinel.items_dim
        self.m_items_aux = sentinel.items_aux
        self.m_items_scalar = sentinel.items_scalar
        category = _CategoryItems(
            items_dim=self.m_items_dim,
            items_aux=self.m_items_aux,
            items_scalar=self.m_items_scalar,
        )
        self.resolve.lhs_cube_category = category
        self.resolve.rhs_cube_category = category
        # Patch _dim_coverage to return a distinct sentinel per call
        # (first call -> LHS, second call -> RHS).
        target = "iris.common.resolve.Resolve._dim_coverage"
        self.m_lhs_cube_dim_coverage = sentinel.lhs_cube_dim_coverage
        self.m_rhs_cube_dim_coverage = sentinel.rhs_cube_dim_coverage
        side_effect = (
            self.m_lhs_cube_dim_coverage,
            self.m_rhs_cube_dim_coverage,
        )
        self.mocker_dim_coverage = self.patch(target, side_effect=side_effect)
        # Likewise for _aux_coverage.
        target = "iris.common.resolve.Resolve._aux_coverage"
        self.m_lhs_cube_aux_coverage = sentinel.lhs_cube_aux_coverage
        self.m_rhs_cube_aux_coverage = sentinel.rhs_cube_aux_coverage
        side_effect = (
            self.m_lhs_cube_aux_coverage,
            self.m_rhs_cube_aux_coverage,
        )
        self.mocker_aux_coverage = self.patch(target, side_effect=side_effect)

    def test(self):
        self.resolve._metadata_coverage()
        # _dim_coverage is called once per cube, with the per-cube dim items
        # and the list of common dim metadata.
        self.assertEqual(2, self.mocker_dim_coverage.call_count)
        calls = [
            mock.call(
                self.m_lhs_cube, self.m_items_dim, [self.m_items_dim_metadata]
            ),
            mock.call(
                self.m_rhs_cube, self.m_items_dim, [self.m_items_dim_metadata]
            ),
        ]
        self.assertEqual(calls, self.mocker_dim_coverage.call_args_list)
        # _aux_coverage is called once per cube, with the per-cube aux and
        # scalar items plus the common aux/scalar metadata lists.
        self.assertEqual(2, self.mocker_aux_coverage.call_count)
        calls = [
            mock.call(
                self.m_lhs_cube,
                self.m_items_aux,
                self.m_items_scalar,
                [self.m_items_aux_metadata],
                [self.m_items_scalar_metadata],
            ),
            mock.call(
                self.m_rhs_cube,
                self.m_items_aux,
                self.m_items_scalar,
                [self.m_items_aux_metadata],
                [self.m_items_scalar_metadata],
            ),
        ]
        self.assertEqual(calls, self.mocker_aux_coverage.call_args_list)
        # Each coverage result lands on its matching Resolve attribute.
        self.assertEqual(
            self.m_lhs_cube_dim_coverage, self.resolve.lhs_cube_dim_coverage
        )
        self.assertEqual(
            self.m_rhs_cube_dim_coverage, self.resolve.rhs_cube_dim_coverage
        )
        self.assertEqual(
            self.m_lhs_cube_aux_coverage, self.resolve.lhs_cube_aux_coverage
        )
        self.assertEqual(
            self.m_rhs_cube_aux_coverage, self.resolve.rhs_cube_aux_coverage
        )
+
+
class Test__dim_mapping(tests.IrisTest):
    """Unit tests for the static method `Resolve._dim_mapping`.

    The method maps src cube dimensions to tgt cube dimensions by matching
    dim coordinate metadata over the tgt "common" dimensions.
    """

    def setUp(self):
        self.ndim = 3
        # Minimal stand-in for a cube: only a name() method is required.
        Wrapper = namedtuple("Wrapper", ("name",))
        cube = Wrapper(name=lambda: sentinel.name)
        self.src_coverage = _DimCoverage(
            cube=cube,
            metadata=[],
            coords=None,
            dims_common=None,
            dims_local=None,
            dims_free=None,
        )
        self.tgt_coverage = _DimCoverage(
            cube=cube,
            metadata=[],
            coords=None,
            dims_common=[],
            dims_local=None,
            dims_free=None,
        )
        self.metadata = [
            sentinel.metadata_0,
            sentinel.metadata_1,
            sentinel.metadata_2,
        ]
        # Deliberately non-matching metadata.
        self.dummy = [sentinel.dummy_0, sentinel.dummy_1, sentinel.dummy_2]

    def test_no_mapping(self):
        # Disjoint metadata and no common tgt dims -> empty mapping.
        self.src_coverage.metadata.extend(self.metadata)
        self.tgt_coverage.metadata.extend(self.dummy)
        result = Resolve._dim_mapping(self.src_coverage, self.tgt_coverage)
        self.assertEqual(dict(), result)

    def test_full_mapping(self):
        # Identical metadata order over all common dims -> identity map.
        self.src_coverage.metadata.extend(self.metadata)
        self.tgt_coverage.metadata.extend(self.metadata)
        dims_common = list(range(self.ndim))
        self.tgt_coverage.dims_common.extend(dims_common)
        result = Resolve._dim_mapping(self.src_coverage, self.tgt_coverage)
        expected = {0: 0, 1: 1, 2: 2}
        self.assertEqual(expected, result)

    def test_transpose_mapping(self):
        # Reversed src metadata -> reversed src->tgt mapping.
        self.src_coverage.metadata.extend(self.metadata[::-1])
        self.tgt_coverage.metadata.extend(self.metadata)
        dims_common = list(range(self.ndim))
        self.tgt_coverage.dims_common.extend(dims_common)
        result = Resolve._dim_mapping(self.src_coverage, self.tgt_coverage)
        expected = {0: 2, 1: 1, 2: 0}
        self.assertEqual(expected, result)

    def test_partial_mapping__transposed(self):
        # tgt dim 1 carries unmatched metadata and is not common; only the
        # outer dims map (transposed).
        self.src_coverage.metadata.extend(self.metadata)
        self.metadata[1] = sentinel.nope
        self.tgt_coverage.metadata.extend(self.metadata[::-1])
        dims_common = [0, 2]
        self.tgt_coverage.dims_common.extend(dims_common)
        result = Resolve._dim_mapping(self.src_coverage, self.tgt_coverage)
        expected = {0: 2, 2: 0}
        self.assertEqual(expected, result)

    def test_bad_metadata_mapping(self):
        # A common tgt dim whose metadata src does not have is an error.
        self.src_coverage.metadata.extend(self.metadata)
        self.metadata[0] = sentinel.bad
        self.tgt_coverage.metadata.extend(self.metadata)
        dims_common = [0]
        self.tgt_coverage.dims_common.extend(dims_common)
        emsg = "Failed to map common dim coordinate metadata"
        with self.assertRaisesRegex(ValueError, emsg):
            _ = Resolve._dim_mapping(self.src_coverage, self.tgt_coverage)
+
+
class Test__aux_mapping(tests.IrisTest):
    """Unit tests for the static method `Resolve._aux_mapping`.

    The method maps src cube dimensions to tgt cube dimensions by matching
    the metadata of the "common" aux coordinate items on each side.
    """

    def setUp(self):
        self.ndim = 3
        # Minimal stand-in for a cube: only a name() method is required.
        Wrapper = namedtuple("Wrapper", ("name",))
        cube = Wrapper(name=lambda: sentinel.name)
        self.src_coverage = _AuxCoverage(
            cube=cube,
            common_items_aux=[],
            common_items_scalar=None,
            local_items_aux=None,
            local_items_scalar=None,
            dims_common=None,
            dims_local=None,
            dims_free=None,
        )
        self.tgt_coverage = _AuxCoverage(
            cube=cube,
            common_items_aux=[],
            common_items_scalar=None,
            local_items_aux=None,
            local_items_scalar=None,
            dims_common=None,
            dims_local=None,
            dims_free=None,
        )
        # Three 1-D aux items, one per dimension.
        self.items = [
            _Item(
                metadata=sentinel.metadata0, coord=sentinel.coord0, dims=[0]
            ),
            _Item(
                metadata=sentinel.metadata1, coord=sentinel.coord1, dims=[1]
            ),
            _Item(
                metadata=sentinel.metadata2, coord=sentinel.coord2, dims=[2]
            ),
        ]

    def _copy(self, items):
        # Due to a bug in python 3.6.x, performing a deepcopy of a mock.sentinel
        # will yield an object that is not equivalent to its parent, so this
        # is a work-around until we drop support for python 3.6.x.
        import sys

        version = sys.version_info
        major, minor = version.major, version.minor
        result = deepcopy(items)
        if major == 3 and minor <= 6:
            # Restore the original (shared) sentinel metadata on each copy.
            for i, item in enumerate(items):
                result[i] = result[i]._replace(metadata=item.metadata)
        return result

    def test_no_mapping(self):
        # No common aux items at all -> empty mapping.
        result = Resolve._aux_mapping(self.src_coverage, self.tgt_coverage)
        self.assertEqual(dict(), result)

    def test_full_mapping(self):
        # Identical items on both sides -> identity mapping.
        self.src_coverage.common_items_aux.extend(self.items)
        self.tgt_coverage.common_items_aux.extend(self.items)
        result = Resolve._aux_mapping(self.src_coverage, self.tgt_coverage)
        expected = {0: 0, 1: 1, 2: 2}
        self.assertEqual(expected, result)

    def test_transpose_mapping(self):
        # Same metadata but tgt dims 0 and 2 swapped.
        self.src_coverage.common_items_aux.extend(self.items)
        items = self._copy(self.items)
        items[0].dims[0] = 2
        items[2].dims[0] = 0
        self.tgt_coverage.common_items_aux.extend(items)
        result = Resolve._aux_mapping(self.src_coverage, self.tgt_coverage)
        expected = {0: 2, 1: 1, 2: 0}
        self.assertEqual(expected, result)

    def test_partial_mapping__transposed(self):
        # Drop the middle item; the remaining pair maps transposed.
        _ = self.items.pop(1)
        self.src_coverage.common_items_aux.extend(self.items)
        items = self._copy(self.items)
        items[0].dims[0] = 2
        items[1].dims[0] = 0
        self.tgt_coverage.common_items_aux.extend(items)
        result = Resolve._aux_mapping(self.src_coverage, self.tgt_coverage)
        expected = {0: 2, 2: 0}
        self.assertEqual(expected, result)

    def test_mapping__match_multiple_src_metadata(self):
        # Two tgt items share the same metadata; mapping still succeeds for
        # the unambiguous dims 0 and 2.
        items = self._copy(self.items)
        _ = self.items.pop(1)
        self.src_coverage.common_items_aux.extend(self.items)
        items[1] = items[0]
        self.tgt_coverage.common_items_aux.extend(items)
        result = Resolve._aux_mapping(self.src_coverage, self.tgt_coverage)
        expected = {0: 0, 2: 2}
        self.assertEqual(expected, result)

    def test_mapping__skip_match_multiple_src_metadata(self):
        # Two src items share the same metadata on different dims; that
        # ambiguous match is skipped, so only dim 2 maps.
        items = self._copy(self.items)
        _ = self.items.pop(1)
        self.tgt_coverage.common_items_aux.extend(self.items)
        items[1] = items[0]._replace(dims=[1])
        self.src_coverage.common_items_aux.extend(items)
        result = Resolve._aux_mapping(self.src_coverage, self.tgt_coverage)
        expected = {2: 2}
        self.assertEqual(expected, result)

    def test_mapping__skip_different_rank(self):
        # A tgt item spanning two dims cannot match a 1-D src item.
        items = self._copy(self.items)
        self.src_coverage.common_items_aux.extend(self.items)
        items[2] = items[2]._replace(dims=[1, 2])
        self.tgt_coverage.common_items_aux.extend(items)
        result = Resolve._aux_mapping(self.src_coverage, self.tgt_coverage)
        expected = {0: 0, 1: 1}
        self.assertEqual(expected, result)

    def test_bad_metadata_mapping(self):
        # A common tgt item with metadata unknown to src is an error.
        self.src_coverage.common_items_aux.extend(self.items)
        items = self._copy(self.items)
        items[0] = items[0]._replace(metadata=sentinel.bad)
        self.tgt_coverage.common_items_aux.extend(items)
        emsg = "Failed to map common aux coordinate metadata"
        with self.assertRaisesRegex(ValueError, emsg):
            _ = Resolve._aux_mapping(self.src_coverage, self.tgt_coverage)
+
+
class Test_mapped(tests.IrisTest):
    """Unit tests for the ``Resolve.mapped`` property."""

    @staticmethod
    def _make_resolve(lhs_ndim, rhs_ndim, map_rhs_to_lhs, mapping):
        # Build a Resolve with mock cubes of the given dimensionalities,
        # the given mapping direction, and the given src->tgt dim mapping.
        resolve = Resolve()
        resolve.lhs_cube = mock.Mock(ndim=lhs_ndim)
        resolve.rhs_cube = mock.Mock(ndim=rhs_ndim)
        resolve.map_rhs_to_lhs = map_rhs_to_lhs
        resolve.mapping = mapping
        return resolve

    def test_mapping_none(self):
        # With no mapping at all, "mapped" is indeterminate (None).
        resolve = Resolve()
        self.assertIsNone(resolve.mapping)
        self.assertIsNone(resolve.mapped)

    def test_mapped__src_cube_lhs(self):
        # src is the 2-d LHS cube; both of its dims are mapped.
        resolve = self._make_resolve(
            lhs_ndim=2, rhs_ndim=3, map_rhs_to_lhs=False, mapping={0: 0, 1: 1}
        )
        self.assertTrue(resolve.mapped)

    def test_mapped__src_cube_rhs(self):
        # src is the 2-d RHS cube; both of its dims are mapped.
        resolve = self._make_resolve(
            lhs_ndim=3, rhs_ndim=2, map_rhs_to_lhs=True, mapping={0: 0, 1: 1}
        )
        self.assertTrue(resolve.mapped)

    def test_partial_mapping(self):
        # Only one of the two src dims is mapped -> not fully mapped.
        resolve = self._make_resolve(
            lhs_ndim=3, rhs_ndim=2, map_rhs_to_lhs=True, mapping={0: 0}
        )
        self.assertFalse(resolve.mapped)
+
+
class Test__free_mapping(tests.IrisTest):
    """Unit tests for `Resolve._free_mapping`.

    The method extends an existing (partial) src->tgt dimension mapping by
    pairing up the remaining "free" dimensions, preferring dimensions with
    matching extents and allowing broadcasting against length-1 dimensions.
    Each test documents its scenario with an ASCII table (key within each).
    """

    def setUp(self):
        # Only name/ndim/shape are needed from a cube here.
        self.Cube = namedtuple("Wrapper", ("name", "ndim", "shape"))
        # Coverage kwargs are held as mutable dicts so each test can tweak
        # them before _make_args builds the namedtuple instances.
        self.src_dim_coverage = dict(
            cube=None,
            metadata=None,
            coords=None,
            dims_common=None,
            dims_local=None,
            dims_free=[],
        )
        self.tgt_dim_coverage = deepcopy(self.src_dim_coverage)
        self.src_aux_coverage = dict(
            cube=None,
            common_items_aux=None,
            common_items_scalar=None,
            local_items_aux=None,
            local_items_scalar=None,
            dims_common=None,
            dims_local=None,
            dims_free=[],
        )
        self.tgt_aux_coverage = deepcopy(self.src_aux_coverage)
        self.resolve = Resolve()
        self.resolve.map_rhs_to_lhs = True
        self.resolve.mapping = {}

    def _make_args(self):
        # Materialise the coverage namedtuples from the mutable dicts.
        args = dict(
            src_dim_coverage=_DimCoverage(**self.src_dim_coverage),
            tgt_dim_coverage=_DimCoverage(**self.tgt_dim_coverage),
            src_aux_coverage=_AuxCoverage(**self.src_aux_coverage),
            tgt_aux_coverage=_AuxCoverage(**self.tgt_aux_coverage),
        )
        return args

    def test_mapping_no_dims_free(self):
        # With no free dimensions on either side there is nothing left to
        # extend the mapping with, which is an error.
        ndim = 4
        shape = tuple(range(ndim))
        cube = self.Cube(name=lambda: "name", ndim=ndim, shape=shape)
        self.src_dim_coverage["cube"] = cube
        self.tgt_dim_coverage["cube"] = cube
        args = self._make_args()
        emsg = "Insufficient matching coordinate metadata"
        with self.assertRaisesRegex(ValueError, emsg):
            self.resolve._free_mapping(**args)

    def _make_coverage(self, name, shape, dims_free):
        # Populate the src or tgt coverage dicts with a cube of the given
        # shape and the given free dimensions.
        if name == "src":
            dim_coverage = self.src_dim_coverage
            aux_coverage = self.src_aux_coverage
        else:
            dim_coverage = self.tgt_dim_coverage
            aux_coverage = self.tgt_aux_coverage
        ndim = len(shape)
        cube = self.Cube(name=lambda: name, ndim=ndim, shape=shape)
        dim_coverage["cube"] = cube
        dim_coverage["dims_free"].extend(dims_free)
        aux_coverage["cube"] = cube
        aux_coverage["dims_free"].extend(dims_free)

    def test_mapping_src_free_to_tgt_local(self):
        # key: (state) c=common, f=free, l=local
        #      (coord) a=aux, d=dim
        #
        # tgt:            <- src:
        #   dims  0 1 2 3    dims  0 1 2
        #   shape 2 4 3 2    shape 2 3 4
        #   state f l c l    state f c f
        #   coord d d d a    coord a d d
        #
        # src-to-tgt mapping:
        #   before      1->2
        #   after  0->3 1->2 2->1
        src_shape = (2, 3, 4)
        src_free = [0, 2]
        self._make_coverage("src", src_shape, src_free)
        tgt_shape = (2, 4, 3, 2)
        tgt_free = [0]
        self._make_coverage("tgt", tgt_shape, tgt_free)
        self.resolve.mapping = {1: 2}
        args = self._make_args()
        self.resolve._free_mapping(**args)
        expected = {0: 3, 1: 2, 2: 1}
        self.assertEqual(expected, self.resolve.mapping)

    def test_mapping_src_free_to_tgt_local__broadcast_src_first(self):
        # key: (state) c=common, f=free, l=local
        #      (coord) a=aux, d=dim
        #
        # tgt:            <- src:
        #   dims  0 1 2 3    dims  0 1 2
        #   shape 2 4 3 2    shape 1 3 4
        #   state f l c l    state f c f
        #   coord d d d a    coord a d d
        #                    bcast ^
        #
        # src-to-tgt mapping:
        #   before      1->2
        #   after  0->3 1->2 2->1
        src_shape = (1, 3, 4)
        src_free = [0, 2]
        self._make_coverage("src", src_shape, src_free)
        tgt_shape = (2, 4, 3, 2)
        tgt_free = [0]
        self._make_coverage("tgt", tgt_shape, tgt_free)
        self.resolve.mapping = {1: 2}
        args = self._make_args()
        self.resolve._free_mapping(**args)
        expected = {0: 3, 1: 2, 2: 1}
        self.assertEqual(expected, self.resolve.mapping)

    def test_mapping_src_free_to_tgt_local__broadcast_src_last(self):
        # key: (state) c=common, f=free, l=local
        #      (coord) a=aux, d=dim
        #
        # tgt:            <- src:
        #   dims  0 1 2 3    dims  0 1 2
        #   shape 2 4 3 2    shape 2 3 1
        #   state f l c l    state f c f
        #   coord d d d a    coord a d d
        #                    bcast     ^
        #
        # src-to-tgt mapping:
        #   before      1->2
        #   after  0->3 1->2 2->1
        src_shape = (2, 3, 1)
        src_free = [0, 2]
        self._make_coverage("src", src_shape, src_free)
        tgt_shape = (2, 4, 3, 2)
        tgt_free = [0]
        self._make_coverage("tgt", tgt_shape, tgt_free)
        self.resolve.mapping = {1: 2}
        args = self._make_args()
        self.resolve._free_mapping(**args)
        expected = {0: 3, 1: 2, 2: 1}
        self.assertEqual(expected, self.resolve.mapping)

    def test_mapping_src_free_to_tgt_local__broadcast_src_both(self):
        # key: (state) c=common, f=free, l=local
        #      (coord) a=aux, d=dim
        #
        # tgt:            <- src:
        #   dims  0 1 2 3    dims  0 1 2
        #   shape 2 4 3 2    shape 1 3 1
        #   state f l c l    state f c f
        #   coord d d d a    coord a d d
        #                    bcast ^   ^
        #
        # src-to-tgt mapping:
        #   before      1->2
        #   after  0->1 1->2 2->3
        src_shape = (1, 3, 1)
        src_free = [0, 2]
        self._make_coverage("src", src_shape, src_free)
        tgt_shape = (2, 4, 3, 2)
        tgt_free = [0]
        self._make_coverage("tgt", tgt_shape, tgt_free)
        self.resolve.mapping = {1: 2}
        args = self._make_args()
        self.resolve._free_mapping(**args)
        expected = {0: 1, 1: 2, 2: 3}
        self.assertEqual(expected, self.resolve.mapping)

    def test_mapping_src_free_to_tgt_free(self):
        # key: (state) c=common, f=free, l=local
        #      (coord) a=aux, d=dim
        #
        # tgt:            <- src:
        #   dims  0 1 2 3    dims  0 1 2
        #   shape 2 4 3 2    shape 2 3 4
        #   state f f c f    state f c f
        #   coord d d d a    coord a d d
        #
        # src-to-tgt mapping:
        #   before      1->2
        #   after  0->0 1->2 2->1
        src_shape = (2, 3, 4)
        src_free = [0, 2]
        self._make_coverage("src", src_shape, src_free)
        tgt_shape = (2, 4, 3, 2)
        tgt_free = [0, 1, 3]
        self._make_coverage("tgt", tgt_shape, tgt_free)
        self.resolve.mapping = {1: 2}
        args = self._make_args()
        self.resolve._free_mapping(**args)
        expected = {0: 0, 1: 2, 2: 1}
        self.assertEqual(expected, self.resolve.mapping)

    def test_mapping_src_free_to_tgt_free__broadcast_src_first(self):
        # key: (state) c=common, f=free, l=local
        #      (coord) a=aux, d=dim
        #
        # tgt:            <- src:
        #   dims  0 1 2 3    dims  0 1 2
        #   shape 2 4 3 2    shape 1 3 4
        #   state f f c f    state f c f
        #   coord d d d a    coord a d d
        #                    bcast ^
        #
        # src-to-tgt mapping:
        #   before      1->2
        #   after  0->0 1->2 2->1
        src_shape = (1, 3, 4)
        src_free = [0, 2]
        self._make_coverage("src", src_shape, src_free)
        tgt_shape = (2, 4, 3, 2)
        tgt_free = [0, 1, 3]
        self._make_coverage("tgt", tgt_shape, tgt_free)
        self.resolve.mapping = {1: 2}
        args = self._make_args()
        self.resolve._free_mapping(**args)
        expected = {0: 0, 1: 2, 2: 1}
        self.assertEqual(expected, self.resolve.mapping)

    def test_mapping_src_free_to_tgt_free__broadcast_src_last(self):
        # key: (state) c=common, f=free, l=local
        #      (coord) a=aux, d=dim
        #
        # tgt:            <- src:
        #   dims  0 1 2 3    dims  0 1 2
        #   shape 2 4 3 2    shape 2 3 1
        #   state f f c f    state f c f
        #   coord d d d a    coord a d d
        #                    bcast     ^
        #
        # src-to-tgt mapping:
        #   before      1->2
        #   after  0->0 1->2 2->1
        src_shape = (2, 3, 1)
        src_free = [0, 2]
        self._make_coverage("src", src_shape, src_free)
        tgt_shape = (2, 4, 3, 2)
        tgt_free = [0, 1, 3]
        self._make_coverage("tgt", tgt_shape, tgt_free)
        self.resolve.mapping = {1: 2}
        args = self._make_args()
        self.resolve._free_mapping(**args)
        expected = {0: 0, 1: 2, 2: 1}
        self.assertEqual(expected, self.resolve.mapping)

    def test_mapping_src_free_to_tgt_free__broadcast_src_both(self):
        # key: (state) c=common, f=free, l=local
        #      (coord) a=aux, d=dim
        #
        # tgt:            <- src:
        #   dims  0 1 2 3    dims  0 1 2
        #   shape 2 4 3 2    shape 1 3 1
        #   state f f c f    state f c f
        #   coord d d d a    coord a d d
        #                    bcast ^   ^
        #
        # src-to-tgt mapping:
        #   before      1->2
        #   after  0->0 1->2 2->1
        src_shape = (1, 3, 1)
        src_free = [0, 2]
        self._make_coverage("src", src_shape, src_free)
        tgt_shape = (2, 4, 3, 2)
        tgt_free = [0, 1, 3]
        self._make_coverage("tgt", tgt_shape, tgt_free)
        self.resolve.mapping = {1: 2}
        args = self._make_args()
        self.resolve._free_mapping(**args)
        expected = {0: 0, 1: 2, 2: 1}
        self.assertEqual(expected, self.resolve.mapping)

    def test_mapping_src_free_to_tgt__fail(self):
        # key: (state) c=common, f=free, l=local
        #      (coord) a=aux, d=dim
        #
        # tgt:            <- src:
        #   dims  0 1 2 3    dims  0 1 2
        #   shape 2 4 3 2    shape 2 3 5
        #   state f f c f    state f c f
        #   coord d d d a    coord a d d
        #                    fail      ^
        #
        # src-to-tgt mapping:
        #   before      1->2
        #   after  0->0 1->2 2->?
        src_shape = (2, 3, 5)
        src_free = [0, 2]
        self._make_coverage("src", src_shape, src_free)
        tgt_shape = (2, 4, 3, 2)
        tgt_free = [0, 1, 3]
        self._make_coverage("tgt", tgt_shape, tgt_free)
        self.resolve.mapping = {1: 2}
        args = self._make_args()
        emsg = "Insufficient matching coordinate metadata to resolve cubes"
        with self.assertRaisesRegex(ValueError, emsg):
            self.resolve._free_mapping(**args)

    def test_mapping_tgt_free_to_src_local(self):
        # key: (state) c=common, f=free, l=local
        #      (coord) a=aux, d=dim
        #
        # tgt:            -> src:
        #   dims  0 1 2 3    dims  0 1 2
        #   shape 2 4 3 2    shape 2 3 4
        #   state l f c f    state l c l
        #   coord d d d a    coord a d d
        #
        # src-to-tgt mapping:
        #   before      1->2
        #   after  0->3 1->2 2->1
        src_shape = (2, 3, 4)
        src_free = []
        self._make_coverage("src", src_shape, src_free)
        tgt_shape = (2, 4, 3, 2)
        tgt_free = [1, 3]
        self._make_coverage("tgt", tgt_shape, tgt_free)
        self.resolve.mapping = {1: 2}
        args = self._make_args()
        self.resolve._free_mapping(**args)
        expected = {0: 3, 1: 2, 2: 1}
        self.assertEqual(expected, self.resolve.mapping)

    def test_mapping_tgt_free_to_src_local__broadcast_tgt_first(self):
        # key: (state) c=common, f=free, l=local
        #      (coord) a=aux, d=dim
        #
        # tgt:            -> src:
        #   dims  0 1 2 3    dims  0 1 2
        #   shape 2 1 3 2    shape 2 3 4
        #   state l f c f    state l c l
        #   coord d d d a    coord a d d
        #   bcast   ^
        #
        # src-to-tgt mapping:
        #   before      1->2
        #   after  0->3 1->2 2->1
        src_shape = (2, 3, 4)
        src_free = []
        self._make_coverage("src", src_shape, src_free)
        tgt_shape = (2, 1, 3, 2)
        tgt_free = [1, 3]
        self._make_coverage("tgt", tgt_shape, tgt_free)
        self.resolve.mapping = {1: 2}
        args = self._make_args()
        self.resolve._free_mapping(**args)
        expected = {0: 3, 1: 2, 2: 1}
        self.assertEqual(expected, self.resolve.mapping)

    def test_mapping_tgt_free_to_src_local__broadcast_tgt_last(self):
        # key: (state) c=common, f=free, l=local
        #      (coord) a=aux, d=dim
        #
        # tgt:            -> src:
        #   dims  0 1 2 3    dims  0 1 2
        #   shape 2 4 3 1    shape 2 3 4
        #   state l f c f    state l c l
        #   coord d d d a    coord a d d
        #   bcast       ^
        #
        # src-to-tgt mapping:
        #   before      1->2
        #   after  0->3 1->2 2->1
        src_shape = (2, 3, 4)
        src_free = []
        self._make_coverage("src", src_shape, src_free)
        tgt_shape = (2, 4, 3, 1)
        tgt_free = [1, 3]
        self._make_coverage("tgt", tgt_shape, tgt_free)
        self.resolve.mapping = {1: 2}
        args = self._make_args()
        self.resolve._free_mapping(**args)
        expected = {0: 3, 1: 2, 2: 1}
        self.assertEqual(expected, self.resolve.mapping)

    def test_mapping_tgt_free_to_src_local__broadcast_tgt_both(self):
        # key: (state) c=common, f=free, l=local
        #      (coord) a=aux, d=dim
        #
        # tgt:            -> src:
        #   dims  0 1 2 3    dims  0 1 2
        #   shape 2 1 3 1    shape 2 3 4
        #   state l f c f    state l c l
        #   coord d d d a    coord a d d
        #   bcast   ^   ^
        #
        # src-to-tgt mapping:
        #   before      1->2
        #   after  0->1 1->2 2->3
        src_shape = (2, 3, 4)
        src_free = []
        self._make_coverage("src", src_shape, src_free)
        tgt_shape = (2, 1, 3, 1)
        tgt_free = [1, 3]
        self._make_coverage("tgt", tgt_shape, tgt_free)
        self.resolve.mapping = {1: 2}
        args = self._make_args()
        self.resolve._free_mapping(**args)
        expected = {0: 1, 1: 2, 2: 3}
        self.assertEqual(expected, self.resolve.mapping)

    def test_mapping_tgt_free_to_src_no_free__fail(self):
        # key: (state) c=common, f=free, l=local
        #      (coord) a=aux, d=dim
        #
        # tgt:            -> src:
        #   dims  0 1 2 3    dims  0 1 2
        #   shape 2 4 3 5    shape 2 3 4
        #   state l f c f    state l c l
        #   coord d d d a    coord a d d
        #   fail        ^
        #
        # src-to-tgt mapping:
        #   before      1->2
        #   after  0->0 1->2 2->?
        src_shape = (2, 3, 4)
        src_free = []
        self._make_coverage("src", src_shape, src_free)
        tgt_shape = (2, 4, 3, 5)
        tgt_free = [1, 3]
        self._make_coverage("tgt", tgt_shape, tgt_free)
        self.resolve.mapping = {1: 2}
        args = self._make_args()
        emsg = "Insufficient matching coordinate metadata to resolve cubes"
        with self.assertRaisesRegex(ValueError, emsg):
            self.resolve._free_mapping(**args)
+
+
class Test__src_cube(tests.IrisTest):
    """Unit tests for the ``Resolve._src_cube`` property."""

    def setUp(self):
        self.resolve = Resolve()
        self.expected = sentinel.cube

    def _check(self, map_rhs_to_lhs):
        # The source cube is the RHS when mapping rhs->lhs, else the LHS.
        self.resolve.map_rhs_to_lhs = map_rhs_to_lhs
        if map_rhs_to_lhs:
            self.resolve.rhs_cube = self.expected
        else:
            self.resolve.lhs_cube = self.expected
        self.assertEqual(self.expected, self.resolve._src_cube)

    def test_rhs_cube(self):
        self._check(map_rhs_to_lhs=True)

    def test_lhs_cube(self):
        self._check(map_rhs_to_lhs=False)

    def test_fail__no_map_rhs_to_lhs(self):
        # Without the mapping direction set, the property asserts.
        with self.assertRaises(AssertionError):
            self.resolve._src_cube
+
+
class Test__src_cube_position(tests.IrisTest):
    """Unit tests for the ``Resolve._src_cube_position`` property."""

    def setUp(self):
        self.resolve = Resolve()

    def _check(self, map_rhs_to_lhs, expected):
        # The source position is "RHS" when mapping rhs->lhs, else "LHS".
        self.resolve.map_rhs_to_lhs = map_rhs_to_lhs
        self.assertEqual(expected, self.resolve._src_cube_position)

    def test_rhs_cube(self):
        self._check(map_rhs_to_lhs=True, expected="RHS")

    def test_lhs_cube(self):
        self._check(map_rhs_to_lhs=False, expected="LHS")

    def test_fail__no_map_rhs_to_lhs(self):
        # Without the mapping direction set, the property asserts.
        with self.assertRaises(AssertionError):
            self.resolve._src_cube_position
+
+
class Test__src_cube_resolved__getter(tests.IrisTest):
    """Unit tests for reading the ``Resolve._src_cube_resolved`` property."""

    def setUp(self):
        self.resolve = Resolve()
        self.expected = sentinel.cube

    def _check(self, map_rhs_to_lhs, attribute):
        # The getter proxies the resolved cube of the source side.
        self.resolve.map_rhs_to_lhs = map_rhs_to_lhs
        setattr(self.resolve, attribute, self.expected)
        self.assertEqual(self.expected, self.resolve._src_cube_resolved)

    def test_rhs_cube(self):
        self._check(map_rhs_to_lhs=True, attribute="rhs_cube_resolved")

    def test_lhs_cube(self):
        self._check(map_rhs_to_lhs=False, attribute="lhs_cube_resolved")

    def test_fail__no_map_rhs_to_lhs(self):
        # Without the mapping direction set, the property asserts.
        with self.assertRaises(AssertionError):
            self.resolve._src_cube_resolved
+
+
class Test__src_cube_resolved__setter(tests.IrisTest):
    """Unit tests for writing the ``Resolve._src_cube_resolved`` property."""

    def setUp(self):
        self.resolve = Resolve()
        self.expected = sentinel.cube

    def _check(self, map_rhs_to_lhs, attribute):
        # The setter stores onto the resolved cube of the source side.
        self.resolve.map_rhs_to_lhs = map_rhs_to_lhs
        self.resolve._src_cube_resolved = self.expected
        self.assertEqual(self.expected, getattr(self.resolve, attribute))

    def test_rhs_cube(self):
        self._check(map_rhs_to_lhs=True, attribute="rhs_cube_resolved")

    def test_lhs_cube(self):
        self._check(map_rhs_to_lhs=False, attribute="lhs_cube_resolved")

    def test_fail__no_map_rhs_to_lhs(self):
        # Without the mapping direction set, the setter asserts.
        with self.assertRaises(AssertionError):
            self.resolve._src_cube_resolved = self.expected
+
+
class Test__tgt_cube(tests.IrisTest):
    """Unit tests for the ``Resolve._tgt_cube`` property."""

    def setUp(self):
        self.resolve = Resolve()
        self.expected = sentinel.cube

    def _check(self, map_rhs_to_lhs):
        # The target cube is the LHS when mapping rhs->lhs, else the RHS.
        self.resolve.map_rhs_to_lhs = map_rhs_to_lhs
        if map_rhs_to_lhs:
            self.resolve.lhs_cube = self.expected
        else:
            self.resolve.rhs_cube = self.expected
        self.assertEqual(self.expected, self.resolve._tgt_cube)

    def test_rhs_cube(self):
        self._check(map_rhs_to_lhs=False)

    def test_lhs_cube(self):
        self._check(map_rhs_to_lhs=True)

    def test_fail__no_map_rhs_to_lhs(self):
        # Without the mapping direction set, the property asserts.
        with self.assertRaises(AssertionError):
            self.resolve._tgt_cube
+
+
class Test__tgt_cube_position(tests.IrisTest):
    """Unit tests for the ``Resolve._tgt_cube_position`` property."""

    def setUp(self):
        self.resolve = Resolve()

    def _check(self, map_rhs_to_lhs, expected):
        # The target position is "LHS" when mapping rhs->lhs, else "RHS".
        self.resolve.map_rhs_to_lhs = map_rhs_to_lhs
        self.assertEqual(expected, self.resolve._tgt_cube_position)

    def test_rhs_cube(self):
        self._check(map_rhs_to_lhs=False, expected="RHS")

    def test_lhs_cube(self):
        self._check(map_rhs_to_lhs=True, expected="LHS")

    def test_fail__no_map_rhs_to_lhs(self):
        # Without the mapping direction set, the property asserts.
        with self.assertRaises(AssertionError):
            self.resolve._tgt_cube_position
+
+
class Test__tgt_cube_resolved__getter(tests.IrisTest):
    """Unit tests for reading the ``Resolve._tgt_cube_resolved`` property."""

    def setUp(self):
        self.resolve = Resolve()
        self.expected = sentinel.cube

    def _check(self, map_rhs_to_lhs, attribute):
        # The getter proxies the resolved cube of the target side.
        self.resolve.map_rhs_to_lhs = map_rhs_to_lhs
        setattr(self.resolve, attribute, self.expected)
        self.assertEqual(self.expected, self.resolve._tgt_cube_resolved)

    def test_rhs_cube(self):
        self._check(map_rhs_to_lhs=False, attribute="rhs_cube_resolved")

    def test_lhs_cube(self):
        self._check(map_rhs_to_lhs=True, attribute="lhs_cube_resolved")

    def test_fail__no_map_rhs_to_lhs(self):
        # Without the mapping direction set, the property asserts.
        with self.assertRaises(AssertionError):
            self.resolve._tgt_cube_resolved
+
+
class Test__tgt_cube_resolved__setter(tests.IrisTest):
    """Unit tests for writing the ``Resolve._tgt_cube_resolved`` property."""

    def setUp(self):
        self.resolve = Resolve()
        self.expected = sentinel.cube

    def _check(self, map_rhs_to_lhs, attribute):
        # The setter stores onto the resolved cube of the target side.
        self.resolve.map_rhs_to_lhs = map_rhs_to_lhs
        self.resolve._tgt_cube_resolved = self.expected
        self.assertEqual(self.expected, getattr(self.resolve, attribute))

    def test_rhs_cube(self):
        self._check(map_rhs_to_lhs=False, attribute="rhs_cube_resolved")

    def test_lhs_cube(self):
        self._check(map_rhs_to_lhs=True, attribute="lhs_cube_resolved")

    def test_fail__no_map_rhs_to_lhs(self):
        # Without the mapping direction set, the setter asserts.
        with self.assertRaises(AssertionError):
            self.resolve._tgt_cube_resolved = self.expected
+
+
class Test_shape(tests.IrisTest):
    """Unit tests for the ``Resolve.shape`` property."""

    def test_no_shape(self):
        # A fresh Resolve has no broadcast shape yet.
        self.assertIsNone(Resolve().shape)

    def test_shape(self):
        # The property simply exposes the private _broadcast_shape.
        resolve = Resolve()
        resolve._broadcast_shape = sentinel.shape
        self.assertEqual(sentinel.shape, resolve.shape)
+
+
+class Test__as_compatible_cubes(tests.IrisTest):
+ def setUp(self):
+ self.Cube = namedtuple(
+ "Wrapper",
+ (
+ "name",
+ "ndim",
+ "shape",
+ "metadata",
+ "core_data",
+ "coord_dims",
+ "dim_coords",
+ "aux_coords",
+ "aux_factories",
+ ),
+ )
+ self.resolve = Resolve()
+ self.resolve.map_rhs_to_lhs = True
+ self.resolve.mapping = {}
+ self.mocker = self.patch("iris.cube.Cube")
+ self.args = dict(
+ name=None,
+ ndim=None,
+ shape=None,
+ metadata=None,
+ core_data=None,
+ coord_dims=None,
+ dim_coords=None,
+ aux_coords=None,
+ aux_factories=None,
+ )
+
+ def _make_cube(self, name, shape, transpose_shape=None):
+ self.args["name"] = lambda: name
+ ndim = len(shape)
+ self.args["ndim"] = ndim
+ self.args["shape"] = shape
+ if name == "src":
+ self.args["metadata"] = sentinel.metadata
+ self.reshape = sentinel.reshape
+ m_reshape = mock.Mock(return_value=self.reshape)
+ self.transpose = mock.Mock(
+ shape=transpose_shape, reshape=m_reshape
+ )
+ m_transpose = mock.Mock(return_value=self.transpose)
+ self.data = mock.Mock(
+ shape=shape, transpose=m_transpose, reshape=m_reshape
+ )
+ m_copy = mock.Mock(return_value=self.data)
+ m_core_data = mock.Mock(copy=m_copy)
+ self.args["core_data"] = mock.Mock(return_value=m_core_data)
+ self.args["coord_dims"] = mock.Mock(side_effect=([0], [ndim - 1]))
+ self.dim_coord = sentinel.dim_coord
+ self.aux_coord = sentinel.aux_coord
+ self.aux_factory = sentinel.aux_factory
+ self.args["dim_coords"] = [self.dim_coord]
+ self.args["aux_coords"] = [self.aux_coord]
+ self.args["aux_factories"] = [self.aux_factory]
+ cube = self.Cube(**self.args)
+ self.resolve.rhs_cube = cube
+ self.cube = mock.Mock()
+ self.mocker.return_value = self.cube
+ else:
+ cube = self.Cube(**self.args)
+ self.resolve.lhs_cube = cube
+
+ def test_incomplete_src_to_tgt_mapping__fail(self):
+ src_shape = (1, 2)
+ self._make_cube("src", src_shape)
+ tgt_shape = (3, 4)
+ self._make_cube("tgt", tgt_shape)
+ with self.assertRaises(AssertionError):
+ self.resolve._as_compatible_cubes()
+
+ def test_incompatible_shapes__fail(self):
+ # key: (state) c=common, f=free
+ #
+ # tgt: <- src:
+ # dims 0 1 2 3 dims 0 1 2
+ # shape 2 2 3 4 shape 2 3 5
+ # state f c c c state c c c
+ # fail ^ fail ^
+ #
+ # src-to-tgt mapping:
+ # 0->1, 1->2, 2->3
+ src_shape = (2, 3, 5)
+ self._make_cube("src", src_shape)
+ tgt_shape = (2, 2, 3, 4)
+ self._make_cube("tgt", tgt_shape)
+ self.resolve.mapping = {0: 1, 1: 2, 2: 3}
+ emsg = "Cannot resolve cubes"
+ with self.assertRaisesRegex(ValueError, emsg):
+ self.resolve._as_compatible_cubes()
+
+ def test_incompatible_shapes__fail_broadcast(self):
+ # key: (state) c=common, f=free
+ #
+ # tgt: <- src:
+ # dims 0 1 2 3 dims 0 1 2
+ # shape 2 4 3 2 shape 2 3 5
+ # state f c c c state c c c
+ # fail ^ fail ^
+ #
+ # src-to-tgt mapping:
+ # 0->3, 1->2, 2->1
+ src_shape = (2, 3, 5)
+ self._make_cube("src", src_shape)
+ tgt_shape = (2, 4, 3, 2)
+ self._make_cube("tgt", tgt_shape)
+ self.resolve.mapping = {0: 3, 1: 2, 2: 1}
+ emsg = "Cannot resolve cubes"
+ with self.assertRaisesRegex(ValueError, emsg):
+ self.resolve._as_compatible_cubes()
+
+ def _check_compatible(self, broadcast_shape):
+ self.assertEqual(
+ self.resolve.lhs_cube, self.resolve._tgt_cube_resolved
+ )
+ self.assertEqual(self.cube, self.resolve._src_cube_resolved)
+ self.assertEqual(broadcast_shape, self.resolve._broadcast_shape)
+ self.assertEqual(1, self.mocker.call_count)
+ self.assertEqual(self.args["metadata"], self.cube.metadata)
+ self.assertEqual(2, self.resolve.rhs_cube.coord_dims.call_count)
+ self.assertEqual(
+ [mock.call(self.dim_coord), mock.call(self.aux_coord)],
+ self.resolve.rhs_cube.coord_dims.call_args_list,
+ )
+ self.assertEqual(1, self.cube.add_dim_coord.call_count)
+ self.assertEqual(
+ [mock.call(self.dim_coord, [self.resolve.mapping[0]])],
+ self.cube.add_dim_coord.call_args_list,
+ )
+ self.assertEqual(1, self.cube.add_aux_coord.call_count)
+ self.assertEqual(
+ [mock.call(self.aux_coord, [self.resolve.mapping[2]])],
+ self.cube.add_aux_coord.call_args_list,
+ )
+ self.assertEqual(1, self.cube.add_aux_factory.call_count)
+ self.assertEqual(
+ [mock.call(self.aux_factory)],
+ self.cube.add_aux_factory.call_args_list,
+ )
+
+ def test_compatible(self):
+ # key: (state) c=common, f=free
+ # (coord) a=aux, d=dim
+ #
+ # tgt: <- src:
+ # dims 0 1 2 dims 0 1 2
+ # shape 4 3 2 shape 4 3 2
+ # state c c c state c c c
+ # coord d a
+ #
+ # src-to-tgt mapping:
+ # 0->0, 1->1, 2->2
+ src_shape = (4, 3, 2)
+ self._make_cube("src", src_shape)
+ tgt_shape = (4, 3, 2)
+ self._make_cube("tgt", tgt_shape)
+ mapping = {0: 0, 1: 1, 2: 2}
+ self.resolve.mapping = mapping
+ self.resolve._as_compatible_cubes()
+ self._check_compatible(broadcast_shape=tgt_shape)
+ self.assertEqual([mock.call(self.data)], self.mocker.call_args_list)
+
+ def test_compatible__transpose(self):
+ # key: (state) c=common, f=free
+ # (coord) a=aux, d=dim
+ #
+ # tgt: <- src:
+ # dims 0 1 2 dims 0 1 2
+ # shape 4 3 2 shape 2 3 4
+ # state c c c state c c c
+ # coord d a
+ #
+ # src-to-tgt mapping:
+ # 0->2, 1->1, 2->0
+ src_shape = (2, 3, 4)
+ self._make_cube("src", src_shape, transpose_shape=(4, 3, 2))
+ tgt_shape = (4, 3, 2)
+ self._make_cube("tgt", tgt_shape)
+ mapping = {0: 2, 1: 1, 2: 0}
+ self.resolve.mapping = mapping
+ self.resolve._as_compatible_cubes()
+ self._check_compatible(broadcast_shape=tgt_shape)
+ self.assertEqual(1, self.data.transpose.call_count)
+ self.assertEqual(
+ [mock.call([2, 1, 0])], self.data.transpose.call_args_list
+ )
+ self.assertEqual(
+ [mock.call(self.transpose)], self.mocker.call_args_list
+ )
+
+ def test_compatible__reshape(self):
+ # key: (state) c=common, f=free
+ # (coord) a=aux, d=dim
+ #
+ # tgt: <- src:
+ # dims 0 1 2 3 dims 0 1 2
+ # shape 5 4 3 2 shape 4 3 2
+ # state f c c c state c c c
+ # coord d a
+ #
+ # src-to-tgt mapping:
+ # 0->1, 1->2, 2->3
+ src_shape = (4, 3, 2)
+ self._make_cube("src", src_shape)
+ tgt_shape = (5, 4, 3, 2)
+ self._make_cube("tgt", tgt_shape)
+ mapping = {0: 1, 1: 2, 2: 3}
+ self.resolve.mapping = mapping
+ self.resolve._as_compatible_cubes()
+ self._check_compatible(broadcast_shape=tgt_shape)
+ self.assertEqual(1, self.data.reshape.call_count)
+ self.assertEqual(
+ [mock.call((1,) + src_shape)], self.data.reshape.call_args_list
+ )
+ self.assertEqual([mock.call(self.reshape)], self.mocker.call_args_list)
+
+ def test_compatible__transpose_reshape(self):
+ # key: (state) c=common, f=free
+ # (coord) a=aux, d=dim
+ #
+ # tgt: <- src:
+ # dims 0 1 2 3 dims 0 1 2
+ # shape 5 4 3 2 shape 2 3 4
+ # state f c c c state c c c
+ # coord d a
+ #
+ # src-to-tgt mapping:
+ # 0->3, 1->2, 2->1
+ src_shape = (2, 3, 4)
+ transpose_shape = (4, 3, 2)
+ self._make_cube("src", src_shape, transpose_shape=transpose_shape)
+ tgt_shape = (5, 4, 3, 2)
+ self._make_cube("tgt", tgt_shape)
+ mapping = {0: 3, 1: 2, 2: 1}
+ self.resolve.mapping = mapping
+ self.resolve._as_compatible_cubes()
+ self._check_compatible(broadcast_shape=tgt_shape)
+ self.assertEqual(1, self.data.transpose.call_count)
+ self.assertEqual(
+ [mock.call([2, 1, 0])], self.data.transpose.call_args_list
+ )
+ self.assertEqual(1, self.data.reshape.call_count)
+ self.assertEqual(
+ [mock.call((1,) + transpose_shape)],
+ self.data.reshape.call_args_list,
+ )
+ self.assertEqual([mock.call(self.reshape)], self.mocker.call_args_list)
+
+ def test_compatible__broadcast(self):
+ # key: (state) c=common, f=free
+ # (coord) a=aux, d=dim
+ #
+ # tgt: <- src:
+ # dims 0 1 2 dims 0 1 2
+ # shape 1 3 2 shape 4 1 2
+ # state c c c state c c c
+ # coord d a
+ # bcast ^ bcast ^
+ #
+ # src-to-tgt mapping:
+ # 0->0, 1->1, 2->2
+ src_shape = (4, 1, 2)
+ self._make_cube("src", src_shape)
+ tgt_shape = (1, 3, 2)
+ self._make_cube("tgt", tgt_shape)
+ mapping = {0: 0, 1: 1, 2: 2}
+ self.resolve.mapping = mapping
+ self.resolve._as_compatible_cubes()
+ self._check_compatible(broadcast_shape=(4, 3, 2))
+ self.assertEqual([mock.call(self.data)], self.mocker.call_args_list)
+
+ def test_compatible__broadcast_transpose_reshape(self):
+ # key: (state) c=common, f=free
+ # (coord) a=aux, d=dim
+ #
+ # tgt: <- src:
+ # dims 0 1 2 3 dims 0 1 2
+ # shape 5 1 3 2 shape 2 1 4
+ # state f c c c state c c c
+ # coord d a
+ # bcast ^ bcast ^
+ #
+ # src-to-tgt mapping:
+ # 0->3, 1->2, 2->1
+ src_shape = (2, 1, 4)
+ transpose_shape = (4, 1, 2)
+ self._make_cube("src", src_shape)
+ tgt_shape = (5, 1, 3, 2)
+ self._make_cube("tgt", tgt_shape)
+ mapping = {0: 3, 1: 2, 2: 1}
+ self.resolve.mapping = mapping
+ self.resolve._as_compatible_cubes()
+ self._check_compatible(broadcast_shape=(5, 4, 3, 2))
+ self.assertEqual(1, self.data.transpose.call_count)
+ self.assertEqual(
+ [mock.call([2, 1, 0])], self.data.transpose.call_args_list
+ )
+ self.assertEqual(1, self.data.reshape.call_count)
+ self.assertEqual(
+ [mock.call((1,) + transpose_shape)],
+ self.data.reshape.call_args_list,
+ )
+ self.assertEqual([mock.call(self.reshape)], self.mocker.call_args_list)
+
+
+class Test__metadata_mapping(tests.IrisTest):
+ def setUp(self):
+ self.ndim = sentinel.ndim
+ self.src_cube = mock.Mock(ndim=self.ndim)
+ self.src_dim_coverage = mock.Mock(dims_free=[])
+ self.src_aux_coverage = mock.Mock(dims_free=[])
+ self.tgt_cube = mock.Mock(ndim=self.ndim)
+ self.tgt_dim_coverage = mock.Mock(dims_free=[])
+ self.tgt_aux_coverage = mock.Mock(dims_free=[])
+ self.resolve = Resolve()
+ self.map_rhs_to_lhs = True
+ self.resolve.map_rhs_to_lhs = self.map_rhs_to_lhs
+ self.resolve.rhs_cube = self.src_cube
+ self.resolve.rhs_cube_dim_coverage = self.src_dim_coverage
+ self.resolve.rhs_cube_aux_coverage = self.src_aux_coverage
+ self.resolve.lhs_cube = self.tgt_cube
+ self.resolve.lhs_cube_dim_coverage = self.tgt_dim_coverage
+ self.resolve.lhs_cube_aux_coverage = self.tgt_aux_coverage
+ self.resolve.mapping = {}
+ self.shape = sentinel.shape
+ self.resolve._broadcast_shape = self.shape
+ self.resolve._src_cube_resolved = mock.Mock(shape=self.shape)
+ self.resolve._tgt_cube_resolved = mock.Mock(shape=self.shape)
+ self.m_dim_mapping = self.patch(
+ "iris.common.resolve.Resolve._dim_mapping", return_value={}
+ )
+ self.m_aux_mapping = self.patch(
+ "iris.common.resolve.Resolve._aux_mapping", return_value={}
+ )
+ self.m_free_mapping = self.patch(
+ "iris.common.resolve.Resolve._free_mapping"
+ )
+ self.m_as_compatible_cubes = self.patch(
+ "iris.common.resolve.Resolve._as_compatible_cubes"
+ )
+ self.mapping = {0: 1, 1: 2, 2: 3}
+
+ def test_mapped__dim_coords(self):
+ # key: (state) c=common, f=free
+ # (coord) a=aux, d=dim
+ #
+ # tgt: <- src:
+ # dims 0 1 2 3 dims 0 1 2
+ # shape 5 4 3 2 shape 4 3 2
+ # state f c c c state c c c
+ # coord d d d coord d d d
+ #
+ # src-to-tgt mapping:
+ # 0->1, 1->2, 2->3
+ self.src_cube.ndim = 3
+ self.m_dim_mapping.return_value = self.mapping
+ self.resolve._metadata_mapping()
+ self.assertEqual(self.mapping, self.resolve.mapping)
+ self.assertEqual(1, self.m_dim_mapping.call_count)
+ expected = [mock.call(self.src_dim_coverage, self.tgt_dim_coverage)]
+ self.assertEqual(expected, self.m_dim_mapping.call_args_list)
+ self.assertEqual(0, self.m_aux_mapping.call_count)
+ self.assertEqual(0, self.m_free_mapping.call_count)
+ self.assertEqual(1, self.m_as_compatible_cubes.call_count)
+
+ def test_mapped__aux_coords(self):
+ # key: (state) c=common, f=free
+ # (coord) a=aux, d=dim
+ #
+ # tgt: <- src:
+ # dims 0 1 2 3 dims 0 1 2
+ # shape 5 4 3 2 shape 4 3 2
+ # state f c c c state c c c
+ # coord a a a coord a a a
+ #
+ # src-to-tgt mapping:
+ # 0->1, 1->2, 2->3
+ self.src_cube.ndim = 3
+ self.m_aux_mapping.return_value = self.mapping
+ self.resolve._metadata_mapping()
+ self.assertEqual(self.mapping, self.resolve.mapping)
+ self.assertEqual(1, self.m_dim_mapping.call_count)
+ expected = [mock.call(self.src_dim_coverage, self.tgt_dim_coverage)]
+ self.assertEqual(expected, self.m_dim_mapping.call_args_list)
+ self.assertEqual(1, self.m_aux_mapping.call_count)
+ expected = [mock.call(self.src_aux_coverage, self.tgt_aux_coverage)]
+ self.assertEqual(expected, self.m_aux_mapping.call_args_list)
+ self.assertEqual(0, self.m_free_mapping.call_count)
+ self.assertEqual(1, self.m_as_compatible_cubes.call_count)
+
+ def test_mapped__dim_and_aux_coords(self):
+ # key: (state) c=common, f=free
+ # (coord) a=aux, d=dim
+ #
+ # tgt: <- src:
+ # dims 0 1 2 3 dims 0 1 2
+ # shape 5 4 3 2 shape 4 3 2
+ # state f c c c state c c c
+ # coord d a d coord d a d
+ #
+ # src-to-tgt mapping:
+ # 0->1, 1->2, 2->3
+ dim_mapping = {0: 1, 2: 3}
+ aux_mapping = {1: 2}
+ self.src_cube.ndim = 3
+ self.m_dim_mapping.return_value = dim_mapping
+ self.m_aux_mapping.return_value = aux_mapping
+ self.resolve._metadata_mapping()
+ self.assertEqual(self.mapping, self.resolve.mapping)
+ self.assertEqual(1, self.m_dim_mapping.call_count)
+ expected = [mock.call(self.src_dim_coverage, self.tgt_dim_coverage)]
+ self.assertEqual(expected, self.m_dim_mapping.call_args_list)
+ self.assertEqual(1, self.m_aux_mapping.call_count)
+ expected = [mock.call(self.src_aux_coverage, self.tgt_aux_coverage)]
+ self.assertEqual(expected, self.m_aux_mapping.call_args_list)
+ self.assertEqual(0, self.m_free_mapping.call_count)
+ self.assertEqual(1, self.m_as_compatible_cubes.call_count)
+
+ def test_mapped__dim_coords_and_free_dims(self):
+ # key: (state) c=common, f=free, l=local
+ # (coord) a=aux, d=dim
+ #
+ # tgt: <- src:
+ # dims 0 1 2 3 dims 0 1 2
+ # shape 5 4 3 2 shape 4 3 2
+ # state l f c c state f c c
+ # coord d d d coord d d
+ #
+ # src-to-tgt mapping:
+ # 0->1, 1->2, 2->3
+ dim_mapping = {1: 2, 2: 3}
+ free_mapping = {0: 1}
+ self.src_cube.ndim = 3
+ self.m_dim_mapping.return_value = dim_mapping
+ side_effect = lambda a, b, c, d: self.resolve.mapping.update(
+ free_mapping
+ )
+ self.m_free_mapping.side_effect = side_effect
+ self.resolve._metadata_mapping()
+ self.assertEqual(self.mapping, self.resolve.mapping)
+ self.assertEqual(1, self.m_dim_mapping.call_count)
+ expected = [mock.call(self.src_dim_coverage, self.tgt_dim_coverage)]
+ self.assertEqual(expected, self.m_dim_mapping.call_args_list)
+ self.assertEqual(1, self.m_aux_mapping.call_count)
+ expected = [mock.call(self.src_aux_coverage, self.tgt_aux_coverage)]
+ self.assertEqual(expected, self.m_aux_mapping.call_args_list)
+ self.assertEqual(1, self.m_free_mapping.call_count)
+ expected = [
+ mock.call(
+ self.src_dim_coverage,
+ self.tgt_dim_coverage,
+ self.src_aux_coverage,
+ self.tgt_aux_coverage,
+ )
+ ]
+ self.assertEqual(expected, self.m_free_mapping.call_args_list)
+ self.assertEqual(1, self.m_as_compatible_cubes.call_count)
+
+ def test_mapped__dim_coords_with_broadcast_flip(self):
+ # key: (state) c=common, f=free
+ # (coord) a=aux, d=dim
+ #
+ # tgt: <- src:
+        #     dims  0 1 2 3   dims  0 1 2 3
+ # shape 1 4 3 2 shape 5 4 3 2
+ # state c c c c state c c c c
+ # coord d d d d coord d d d d
+ #
+ # src-to-tgt mapping:
+ # 0->0, 1->1, 2->2, 3->3
+ mapping = {0: 0, 1: 1, 2: 2, 3: 3}
+ self.src_cube.ndim = 4
+ self.tgt_cube.ndim = 4
+ self.m_dim_mapping.return_value = mapping
+ broadcast_shape = (5, 4, 3, 2)
+ self.resolve._broadcast_shape = broadcast_shape
+ self.resolve._src_cube_resolved.shape = broadcast_shape
+ self.resolve._tgt_cube_resolved.shape = (1, 4, 3, 2)
+ self.resolve._metadata_mapping()
+ self.assertEqual(mapping, self.resolve.mapping)
+ self.assertEqual(1, self.m_dim_mapping.call_count)
+ expected = [mock.call(self.src_dim_coverage, self.tgt_dim_coverage)]
+ self.assertEqual(expected, self.m_dim_mapping.call_args_list)
+ self.assertEqual(0, self.m_aux_mapping.call_count)
+ self.assertEqual(0, self.m_free_mapping.call_count)
+ self.assertEqual(2, self.m_as_compatible_cubes.call_count)
+ self.assertEqual(not self.map_rhs_to_lhs, self.resolve.map_rhs_to_lhs)
+
+ def test_mapped__dim_coords_free_flip_with_free_flip(self):
+ # key: (state) c=common, f=free, l=local
+ # (coord) a=aux, d=dim
+ #
+ # tgt: <- src:
+ # dims 0 1 2 dims 0 1 2
+ # shape 4 3 2 shape 4 3 2
+ # state f f c state l l c
+ # coord d coord d d d
+ #
+ # src-to-tgt mapping:
+ # 0->0, 1->1, 2->2
+ dim_mapping = {2: 2}
+ free_mapping = {0: 0, 1: 1}
+ mapping = {0: 0, 1: 1, 2: 2}
+ self.src_cube.ndim = 3
+ self.tgt_cube.ndim = 3
+ self.m_dim_mapping.return_value = dim_mapping
+ side_effect = lambda a, b, c, d: self.resolve.mapping.update(
+ free_mapping
+ )
+ self.m_free_mapping.side_effect = side_effect
+ self.tgt_dim_coverage.dims_free = [0, 1]
+ self.tgt_aux_coverage.dims_free = [0, 1]
+ self.resolve._metadata_mapping()
+ self.assertEqual(mapping, self.resolve.mapping)
+ self.assertEqual(1, self.m_dim_mapping.call_count)
+ expected = [mock.call(self.src_dim_coverage, self.tgt_dim_coverage)]
+ self.assertEqual(expected, self.m_dim_mapping.call_args_list)
+ self.assertEqual(1, self.m_aux_mapping.call_count)
+ expected = [mock.call(self.src_aux_coverage, self.tgt_aux_coverage)]
+ self.assertEqual(expected, self.m_aux_mapping.call_args_list)
+ self.assertEqual(1, self.m_free_mapping.call_count)
+ expected = [
+ mock.call(
+ self.src_dim_coverage,
+ self.tgt_dim_coverage,
+ self.src_aux_coverage,
+ self.tgt_aux_coverage,
+ )
+ ]
+ self.assertEqual(expected, self.m_free_mapping.call_args_list)
+ self.assertEqual(2, self.m_as_compatible_cubes.call_count)
+
+
+class Test__prepare_common_dim_payload(tests.IrisTest):
+ def setUp(self):
+ # key: (state) c=common, f=free
+ # (coord) a=aux, d=dim
+ #
+ # tgt: <- src:
+ # dims 0 1 2 3 dims 0 1 2
+ # shape 5 4 3 2 shape 4 3 2
+ # state l c c c state c c c
+ # coord d d d coord d d d
+ #
+ # src-to-tgt mapping:
+ # 0->1, 1->2, 2->3
+ self.points = (sentinel.points_0, sentinel.points_1, sentinel.points_2)
+ self.bounds = (sentinel.bounds_0, sentinel.bounds_1, sentinel.bounds_2)
+ self.pb_0 = (
+ mock.Mock(copy=mock.Mock(return_value=self.points[0])),
+ mock.Mock(copy=mock.Mock(return_value=self.bounds[0])),
+ )
+ self.pb_1 = (
+ mock.Mock(copy=mock.Mock(return_value=self.points[1])),
+ None,
+ )
+ self.pb_2 = (
+ mock.Mock(copy=mock.Mock(return_value=self.points[2])),
+ mock.Mock(copy=mock.Mock(return_value=self.bounds[2])),
+ )
+ side_effect = (self.pb_0, self.pb_1, self.pb_2)
+ self.m_prepare_points_and_bounds = self.patch(
+ "iris.common.resolve.Resolve._prepare_points_and_bounds",
+ side_effect=side_effect,
+ )
+ self.resolve = Resolve()
+ self.resolve.prepared_category = _CategoryItems(
+ items_dim=[], items_aux=[], items_scalar=[]
+ )
+ self.mapping = {0: 1, 1: 2, 2: 3}
+ self.resolve.mapping = self.mapping
+ self.metadata_combined = (
+ sentinel.combined_0,
+ sentinel.combined_1,
+ sentinel.combined_2,
+ )
+ self.src_metadata = mock.Mock(
+ combine=mock.Mock(side_effect=self.metadata_combined)
+ )
+ metadata = [self.src_metadata] * len(self.mapping)
+ self.src_coords = [
+ sentinel.src_coord_0,
+ sentinel.src_coord_1,
+ sentinel.src_coord_2,
+ ]
+ self.src_dims_common = [0, 1, 2]
+ self.container = DimCoord
+ self.src_dim_coverage = _DimCoverage(
+ cube=None,
+ metadata=metadata,
+ coords=self.src_coords,
+ dims_common=self.src_dims_common,
+ dims_local=[],
+ dims_free=[],
+ )
+ self.tgt_metadata = [
+ sentinel.tgt_metadata_0,
+ sentinel.tgt_metadata_1,
+ sentinel.tgt_metadata_2,
+ sentinel.tgt_metadata_3,
+ ]
+ self.tgt_coords = [
+ sentinel.tgt_coord_0,
+ sentinel.tgt_coord_1,
+ sentinel.tgt_coord_2,
+ sentinel.tgt_coord_3,
+ ]
+ self.tgt_dims_common = [1, 2, 3]
+ self.tgt_dim_coverage = _DimCoverage(
+ cube=None,
+ metadata=self.tgt_metadata,
+ coords=self.tgt_coords,
+ dims_common=self.tgt_dims_common,
+ dims_local=[],
+ dims_free=[],
+ )
+
+ def _check(self, ignore_mismatch=None, bad_points=None):
+ if bad_points is None:
+ bad_points = False
+ self.resolve._prepare_common_dim_payload(
+ self.src_dim_coverage,
+ self.tgt_dim_coverage,
+ ignore_mismatch=ignore_mismatch,
+ )
+ self.assertEqual(0, len(self.resolve.prepared_category.items_aux))
+ self.assertEqual(0, len(self.resolve.prepared_category.items_scalar))
+ if not bad_points:
+ self.assertEqual(3, len(self.resolve.prepared_category.items_dim))
+ expected = [
+ _PreparedItem(
+ metadata=_PreparedMetadata(
+ combined=self.metadata_combined[0],
+ src=self.src_metadata,
+ tgt=self.tgt_metadata[self.mapping[0]],
+ ),
+ points=self.points[0],
+ bounds=self.bounds[0],
+ dims=(self.mapping[0],),
+ container=self.container,
+ ),
+ _PreparedItem(
+ metadata=_PreparedMetadata(
+ combined=self.metadata_combined[1],
+ src=self.src_metadata,
+ tgt=self.tgt_metadata[self.mapping[1]],
+ ),
+ points=self.points[1],
+ bounds=None,
+ dims=(self.mapping[1],),
+ container=self.container,
+ ),
+ _PreparedItem(
+ metadata=_PreparedMetadata(
+ combined=self.metadata_combined[2],
+ src=self.src_metadata,
+ tgt=self.tgt_metadata[self.mapping[2]],
+ ),
+ points=self.points[2],
+ bounds=self.bounds[2],
+ dims=(self.mapping[2],),
+ container=self.container,
+ ),
+ ]
+ self.assertEqual(
+ expected, self.resolve.prepared_category.items_dim
+ )
+ else:
+ self.assertEqual(0, len(self.resolve.prepared_category.items_dim))
+ self.assertEqual(3, self.m_prepare_points_and_bounds.call_count)
+ if ignore_mismatch is None:
+ ignore_mismatch = False
+ expected = [
+ mock.call(
+ self.src_coords[0],
+ self.tgt_coords[self.mapping[0]],
+ 0,
+ 1,
+ ignore_mismatch=ignore_mismatch,
+ ),
+ mock.call(
+ self.src_coords[1],
+ self.tgt_coords[self.mapping[1]],
+ 1,
+ 2,
+ ignore_mismatch=ignore_mismatch,
+ ),
+ mock.call(
+ self.src_coords[2],
+ self.tgt_coords[self.mapping[2]],
+ 2,
+ 3,
+ ignore_mismatch=ignore_mismatch,
+ ),
+ ]
+ self.assertEqual(
+ expected, self.m_prepare_points_and_bounds.call_args_list
+ )
+ if not bad_points:
+ self.assertEqual(3, self.src_metadata.combine.call_count)
+ expected = [
+ mock.call(metadata) for metadata in self.tgt_metadata[1:]
+ ]
+ self.assertEqual(
+ expected, self.src_metadata.combine.call_args_list
+ )
+
+ def test__default_ignore_mismatch(self):
+ self._check()
+
+ def test__not_ignore_mismatch(self):
+ self._check(ignore_mismatch=False)
+
+ def test__ignore_mismatch(self):
+ self._check(ignore_mismatch=True)
+
+ def test__bad_points(self):
+ side_effect = [(None, None)] * len(self.mapping)
+ self.m_prepare_points_and_bounds.side_effect = side_effect
+ self._check(bad_points=True)
+
+
+class Test__prepare_common_aux_payload(tests.IrisTest):
+ def setUp(self):
+ # key: (state) c=common, f=free
+ # (coord) a=aux, d=dim
+ #
+ # tgt: <- src:
+ # dims 0 1 2 3 dims 0 1 2
+ # shape 5 4 3 2 shape 4 3 2
+ # state l c c c state c c c
+ # coord a a a coord a a a
+ #
+ # src-to-tgt mapping:
+ # 0->1, 1->2, 2->3
+ self.points = (sentinel.points_0, sentinel.points_1, sentinel.points_2)
+ self.bounds = (sentinel.bounds_0, sentinel.bounds_1, sentinel.bounds_2)
+ self.pb_0 = (
+ mock.Mock(copy=mock.Mock(return_value=self.points[0])),
+ mock.Mock(copy=mock.Mock(return_value=self.bounds[0])),
+ )
+ self.pb_1 = (
+ mock.Mock(copy=mock.Mock(return_value=self.points[1])),
+ None,
+ )
+ self.pb_2 = (
+ mock.Mock(copy=mock.Mock(return_value=self.points[2])),
+ mock.Mock(copy=mock.Mock(return_value=self.bounds[2])),
+ )
+ side_effect = (self.pb_0, self.pb_1, self.pb_2)
+ self.m_prepare_points_and_bounds = self.patch(
+ "iris.common.resolve.Resolve._prepare_points_and_bounds",
+ side_effect=side_effect,
+ )
+ self.resolve = Resolve()
+ self.resolve.prepared_category = _CategoryItems(
+ items_dim=[], items_aux=[], items_scalar=[]
+ )
+ self.mapping = {0: 1, 1: 2, 2: 3}
+ self.resolve.mapping = self.mapping
+ self.resolve.map_rhs_to_lhs = True
+ self.metadata_combined = (
+ sentinel.combined_0,
+ sentinel.combined_1,
+ sentinel.combined_2,
+ )
+ self.src_metadata = [
+ mock.Mock(
+ combine=mock.Mock(return_value=self.metadata_combined[0])
+ ),
+ mock.Mock(
+ combine=mock.Mock(return_value=self.metadata_combined[1])
+ ),
+ mock.Mock(
+ combine=mock.Mock(return_value=self.metadata_combined[2])
+ ),
+ ]
+ self.src_coords = [
+ sentinel.src_coord_0,
+ sentinel.src_coord_1,
+ sentinel.src_coord_2,
+ ]
+ self.src_dims = [(dim,) for dim in self.mapping.keys()]
+ self.src_common_items = [
+ _Item(*item)
+ for item in zip(self.src_metadata, self.src_coords, self.src_dims)
+ ]
+ self.tgt_metadata = [sentinel.tgt_metadata_0] + self.src_metadata
+ self.tgt_coords = [
+ sentinel.tgt_coord_0,
+ sentinel.tgt_coord_1,
+ sentinel.tgt_coord_2,
+ sentinel.tgt_coord_3,
+ ]
+ self.tgt_dims = [None] + [(dim,) for dim in self.mapping.values()]
+ self.tgt_common_items = [
+ _Item(*item)
+ for item in zip(self.tgt_metadata, self.tgt_coords, self.tgt_dims)
+ ]
+ self.container = type(self.src_coords[0])
+
+ def _check(self, ignore_mismatch=None, bad_points=None):
+ if bad_points is None:
+ bad_points = False
+ prepared_items = []
+ self.resolve._prepare_common_aux_payload(
+ self.src_common_items,
+ self.tgt_common_items,
+ prepared_items,
+ ignore_mismatch=ignore_mismatch,
+ )
+ if not bad_points:
+ self.assertEqual(3, len(prepared_items))
+ expected = [
+ _PreparedItem(
+ metadata=_PreparedMetadata(
+ combined=self.metadata_combined[0],
+ src=self.src_metadata[0],
+ tgt=self.tgt_metadata[self.mapping[0]],
+ ),
+ points=self.points[0],
+ bounds=self.bounds[0],
+ dims=self.tgt_dims[self.mapping[0]],
+ container=self.container,
+ ),
+ _PreparedItem(
+ metadata=_PreparedMetadata(
+ combined=self.metadata_combined[1],
+ src=self.src_metadata[1],
+ tgt=self.tgt_metadata[self.mapping[1]],
+ ),
+ points=self.points[1],
+ bounds=None,
+ dims=self.tgt_dims[self.mapping[1]],
+ container=self.container,
+ ),
+ _PreparedItem(
+ metadata=_PreparedMetadata(
+ combined=self.metadata_combined[2],
+ src=self.src_metadata[2],
+ tgt=self.tgt_metadata[self.mapping[2]],
+ ),
+ points=self.points[2],
+ bounds=self.bounds[2],
+ dims=self.tgt_dims[self.mapping[2]],
+ container=self.container,
+ ),
+ ]
+ self.assertEqual(expected, prepared_items)
+ else:
+ self.assertEqual(0, len(prepared_items))
+ self.assertEqual(3, self.m_prepare_points_and_bounds.call_count)
+ if ignore_mismatch is None:
+ ignore_mismatch = False
+ expected = [
+ mock.call(
+ self.src_coords[0],
+ self.tgt_coords[self.mapping[0]],
+ self.src_dims[0],
+ self.tgt_dims[self.mapping[0]],
+ ignore_mismatch=ignore_mismatch,
+ ),
+ mock.call(
+ self.src_coords[1],
+ self.tgt_coords[self.mapping[1]],
+ self.src_dims[1],
+ self.tgt_dims[self.mapping[1]],
+ ignore_mismatch=ignore_mismatch,
+ ),
+ mock.call(
+ self.src_coords[2],
+ self.tgt_coords[self.mapping[2]],
+ self.src_dims[2],
+ self.tgt_dims[self.mapping[2]],
+ ignore_mismatch=ignore_mismatch,
+ ),
+ ]
+ self.assertEqual(
+ expected, self.m_prepare_points_and_bounds.call_args_list
+ )
+ if not bad_points:
+ for src_metadata, tgt_metadata in zip(
+ self.src_metadata, self.tgt_metadata[1:]
+ ):
+ self.assertEqual(1, src_metadata.combine.call_count)
+ expected = [mock.call(tgt_metadata)]
+ self.assertEqual(expected, src_metadata.combine.call_args_list)
+
+ def test__default_ignore_mismatch(self):
+ self._check()
+
+ def test__not_ignore_mismatch(self):
+ self._check(ignore_mismatch=False)
+
+ def test__ignore_mismatch(self):
+ self._check(ignore_mismatch=True)
+
+ def test__bad_points(self):
+ side_effect = [(None, None)] * len(self.mapping)
+ self.m_prepare_points_and_bounds.side_effect = side_effect
+ self._check(bad_points=True)
+
+ def test__no_tgt_metadata_match(self):
+ item = self.tgt_common_items[0]
+ tgt_common_items = [item] * len(self.tgt_common_items)
+ prepared_items = []
+ self.resolve._prepare_common_aux_payload(
+ self.src_common_items, tgt_common_items, prepared_items
+ )
+ self.assertEqual(0, len(prepared_items))
+
+ def test__multi_tgt_metadata_match(self):
+ item = self.tgt_common_items[1]
+ tgt_common_items = [item] * len(self.tgt_common_items)
+ prepared_items = []
+ self.resolve._prepare_common_aux_payload(
+ self.src_common_items, tgt_common_items, prepared_items
+ )
+ self.assertEqual(0, len(prepared_items))
+
+
+class Test__prepare_points_and_bounds(tests.IrisTest):
+ def setUp(self):
+ self.Coord = namedtuple(
+ "Coord",
+ [
+ "name",
+ "points",
+ "bounds",
+ "metadata",
+ "ndim",
+ "shape",
+ "has_bounds",
+ ],
+ )
+ self.Cube = namedtuple("Cube", ["name", "shape"])
+ self.resolve = Resolve()
+ self.resolve.map_rhs_to_lhs = True
+ self.src_name = sentinel.src_name
+ self.src_points = sentinel.src_points
+ self.src_bounds = sentinel.src_bounds
+ self.src_metadata = sentinel.src_metadata
+ self.src_items = dict(
+ name=lambda: self.src_name,
+ points=self.src_points,
+ bounds=self.src_bounds,
+ metadata=self.src_metadata,
+ ndim=None,
+ shape=None,
+ has_bounds=None,
+ )
+ self.tgt_name = sentinel.tgt_name
+ self.tgt_points = sentinel.tgt_points
+ self.tgt_bounds = sentinel.tgt_bounds
+ self.tgt_metadata = sentinel.tgt_metadata
+ self.tgt_items = dict(
+ name=lambda: self.tgt_name,
+ points=self.tgt_points,
+ bounds=self.tgt_bounds,
+ metadata=self.tgt_metadata,
+ ndim=None,
+ shape=None,
+ has_bounds=None,
+ )
+ self.m_array_equal = self.patch(
+ "iris.util.array_equal", side_effect=(True, True)
+ )
+
+ def test_coord_ndim_unequal__tgt_ndim_greater(self):
+ self.src_items["ndim"] = 1
+ src_coord = self.Coord(**self.src_items)
+ self.tgt_items["ndim"] = 10
+ tgt_coord = self.Coord(**self.tgt_items)
+ points, bounds = self.resolve._prepare_points_and_bounds(
+ src_coord, tgt_coord, src_dims=None, tgt_dims=None
+ )
+ self.assertEqual(self.tgt_points, points)
+ self.assertEqual(self.tgt_bounds, bounds)
+
+ def test_coord_ndim_unequal__src_ndim_greater(self):
+ self.src_items["ndim"] = 10
+ src_coord = self.Coord(**self.src_items)
+ self.tgt_items["ndim"] = 1
+ tgt_coord = self.Coord(**self.tgt_items)
+ points, bounds = self.resolve._prepare_points_and_bounds(
+ src_coord, tgt_coord, src_dims=None, tgt_dims=None
+ )
+ self.assertEqual(self.src_points, points)
+ self.assertEqual(self.src_bounds, bounds)
+
+ def test_coord_ndim_equal__shape_unequal_with_src_broadcasting(self):
+ # key: (state) c=common, f=free
+ # (coord) x=coord
+ #
+ # tgt: <- src:
+ # dims 0 1 dims 0 1
+ # shape 9 9 shape 1 9
+ # state c c state c c
+ # coord x-x coord x-x
+ # bcast ^
+ #
+ # src-to-tgt mapping:
+ # 0->0, 1->1
+ mapping = {0: 0, 1: 1}
+ broadcast_shape = (9, 9)
+ ndim = len(broadcast_shape)
+ self.resolve.mapping = mapping
+ self.resolve._broadcast_shape = broadcast_shape
+ src_shape = (1, 9)
+ src_dims = tuple(mapping.keys())
+ self.resolve.rhs_cube = self.Cube(name=None, shape=src_shape)
+ self.src_items["ndim"] = ndim
+ self.src_items["shape"] = src_shape
+ src_coord = self.Coord(**self.src_items)
+ tgt_shape = broadcast_shape
+ tgt_dims = tuple(mapping.values())
+ self.resolve.lhs_cube = self.Cube(name=None, shape=tgt_shape)
+ self.tgt_items["ndim"] = ndim
+ self.tgt_items["shape"] = tgt_shape
+ tgt_coord = self.Coord(**self.tgt_items)
+ points, bounds = self.resolve._prepare_points_and_bounds(
+ src_coord, tgt_coord, src_dims, tgt_dims
+ )
+ self.assertEqual(self.tgt_points, points)
+ self.assertEqual(self.tgt_bounds, bounds)
+
+ def test_coord_ndim_equal__shape_unequal_with_tgt_broadcasting(self):
+ # key: (state) c=common, f=free
+ # (coord) x=coord
+ #
+ # tgt: <- src:
+ # dims 0 1 dims 0 1
+ # shape 1 9 shape 9 9
+ # state c c state c c
+ # coord x-x coord x-x
+ # bcast ^
+ #
+ # src-to-tgt mapping:
+ # 0->0, 1->1
+ mapping = {0: 0, 1: 1}
+ broadcast_shape = (9, 9)
+ ndim = len(broadcast_shape)
+ self.resolve.mapping = mapping
+ self.resolve._broadcast_shape = broadcast_shape
+ src_shape = broadcast_shape
+ src_dims = tuple(mapping.keys())
+ self.resolve.rhs_cube = self.Cube(name=None, shape=src_shape)
+ self.src_items["ndim"] = ndim
+ self.src_items["shape"] = src_shape
+ src_coord = self.Coord(**self.src_items)
+ tgt_shape = (1, 9)
+ tgt_dims = tuple(mapping.values())
+ self.resolve.lhs_cube = self.Cube(name=None, shape=tgt_shape)
+ self.tgt_items["ndim"] = ndim
+ self.tgt_items["shape"] = tgt_shape
+ tgt_coord = self.Coord(**self.tgt_items)
+ points, bounds = self.resolve._prepare_points_and_bounds(
+ src_coord, tgt_coord, src_dims, tgt_dims
+ )
+ self.assertEqual(self.src_points, points)
+ self.assertEqual(self.src_bounds, bounds)
+
+ def test_coord_ndim_equal__shape_unequal_with_unsupported_broadcasting(
+ self,
+ ):
+ # key: (state) c=common, f=free
+ # (coord) x=coord
+ #
+ # tgt: <- src:
+ # dims 0 1 dims 0 1
+ # shape 1 9 shape 9 1
+ # state c c state c c
+ # coord x-x coord x-x
+ # bcast ^ bcast ^
+ #
+ # src-to-tgt mapping:
+ # 0->0, 1->1
+ mapping = {0: 0, 1: 1}
+ broadcast_shape = (9, 9)
+ ndim = len(broadcast_shape)
+ self.resolve.mapping = mapping
+ self.resolve._broadcast_shape = broadcast_shape
+ src_shape = (9, 1)
+ src_dims = tuple(mapping.keys())
+ self.resolve.rhs_cube = self.Cube(
+ name=lambda: sentinel.src_cube, shape=src_shape
+ )
+ self.src_items["ndim"] = ndim
+ self.src_items["shape"] = src_shape
+ src_coord = self.Coord(**self.src_items)
+ tgt_shape = (1, 9)
+ tgt_dims = tuple(mapping.values())
+ self.resolve.lhs_cube = self.Cube(
+ name=lambda: sentinel.tgt_cube, shape=tgt_shape
+ )
+ self.tgt_items["ndim"] = ndim
+ self.tgt_items["shape"] = tgt_shape
+ tgt_coord = self.Coord(**self.tgt_items)
+ emsg = "Cannot broadcast"
+ with self.assertRaisesRegex(ValueError, emsg):
+ _ = self.resolve._prepare_points_and_bounds(
+ src_coord, tgt_coord, src_dims, tgt_dims
+ )
+
+ def _populate(
+ self, src_points, tgt_points, src_bounds=None, tgt_bounds=None
+ ):
+ # key: (state) c=common, f=free
+ # (coord) x=coord
+ #
+ # tgt: <- src:
+ # dims 0 1 dims 0 1
+ # shape 2 3 shape 2 3
+ # state f c state f c
+ # coord x coord x
+ #
+ # src-to-tgt mapping:
+ # 0->0, 1->1
+ shape = (2, 3)
+ mapping = {0: 0, 1: 1}
+ self.resolve.mapping = mapping
+ self.resolve.map_rhs_to_lhs = True
+ self.resolve.rhs_cube = self.Cube(
+ name=lambda: sentinel.src_cube, shape=None
+ )
+ self.resolve.lhs_cube = self.Cube(
+ name=lambda: sentinel.tgt_cube, shape=None
+ )
+ ndim = 1
+ src_dims = 1
+ self.src_items["ndim"] = ndim
+ self.src_items["shape"] = (shape[src_dims],)
+ self.src_items["points"] = src_points
+ self.src_items["bounds"] = src_bounds
+ self.src_items["has_bounds"] = lambda: src_bounds is not None
+ src_coord = self.Coord(**self.src_items)
+ tgt_dims = 1
+ self.tgt_items["ndim"] = ndim
+ self.tgt_items["shape"] = (shape[mapping[tgt_dims]],)
+ self.tgt_items["points"] = tgt_points
+ self.tgt_items["bounds"] = tgt_bounds
+ self.tgt_items["has_bounds"] = lambda: tgt_bounds is not None
+ tgt_coord = self.Coord(**self.tgt_items)
+ args = dict(
+ src_coord=src_coord,
+ tgt_coord=tgt_coord,
+ src_dims=src_dims,
+ tgt_dims=tgt_dims,
+ )
+ return args
+
+ def test_coord_ndim_and_shape_equal__points_equal_with_no_bounds(self):
+ args = self._populate(self.src_points, self.src_points)
+ points, bounds = self.resolve._prepare_points_and_bounds(**args)
+ self.assertEqual(self.src_points, points)
+ self.assertIsNone(bounds)
+ self.assertEqual(1, self.m_array_equal.call_count)
+ expected = [mock.call(self.src_points, self.src_points, withnans=True)]
+ self.assertEqual(expected, self.m_array_equal.call_args_list)
+
+ def test_coord_ndim_and_shape_equal__points_equal_with_src_bounds_only(
+ self,
+ ):
+ args = self._populate(
+ self.src_points, self.src_points, src_bounds=self.src_bounds
+ )
+ points, bounds = self.resolve._prepare_points_and_bounds(**args)
+ self.assertEqual(self.src_points, points)
+ self.assertEqual(self.src_bounds, bounds)
+ self.assertEqual(1, self.m_array_equal.call_count)
+ expected = [mock.call(self.src_points, self.src_points, withnans=True)]
+ self.assertEqual(expected, self.m_array_equal.call_args_list)
+
+ def test_coord_ndim_and_shape_equal__points_equal_with_tgt_bounds_only(
+ self,
+ ):
+ args = self._populate(
+ self.src_points, self.src_points, tgt_bounds=self.tgt_bounds
+ )
+ points, bounds = self.resolve._prepare_points_and_bounds(**args)
+ self.assertEqual(self.src_points, points)
+ self.assertEqual(self.tgt_bounds, bounds)
+ self.assertEqual(1, self.m_array_equal.call_count)
+ expected = [mock.call(self.src_points, self.src_points, withnans=True)]
+ self.assertEqual(expected, self.m_array_equal.call_args_list)
+
+ def test_coord_ndim_and_shape_equal__points_equal_with_src_bounds_only_strict(
+ self,
+ ):
+ args = self._populate(
+ self.src_points, self.src_points, src_bounds=self.src_bounds
+ )
+ with LENIENT.context(maths=False):
+ emsg = f"Coordinate {self.src_name} has bounds"
+ with self.assertRaisesRegex(ValueError, emsg):
+ _ = self.resolve._prepare_points_and_bounds(**args)
+
+ def test_coord_ndim_and_shape_equal__points_equal_with_tgt_bounds_only_strict(
+ self,
+ ):
+ args = self._populate(
+ self.src_points, self.src_points, tgt_bounds=self.tgt_bounds
+ )
+ with LENIENT.context(maths=False):
+ emsg = f"Coordinate {self.tgt_name} has bounds"
+ with self.assertRaisesRegex(ValueError, emsg):
+ _ = self.resolve._prepare_points_and_bounds(**args)
+
+ def test_coord_ndim_and_shape_equal__points_equal_with_bounds_equal(self):
+ args = self._populate(
+ self.src_points,
+ self.src_points,
+ src_bounds=self.src_bounds,
+ tgt_bounds=self.src_bounds,
+ )
+ points, bounds = self.resolve._prepare_points_and_bounds(**args)
+ self.assertEqual(self.src_points, points)
+ self.assertEqual(self.src_bounds, bounds)
+ self.assertEqual(2, self.m_array_equal.call_count)
+ expected = [
+ mock.call(self.src_points, self.src_points, withnans=True),
+ mock.call(self.src_bounds, self.src_bounds, withnans=True),
+ ]
+ self.assertEqual(expected, self.m_array_equal.call_args_list)
+
+ def test_coord_ndim_and_shape_equal__points_equal_with_bounds_different(
+ self,
+ ):
+ self.m_array_equal.side_effect = (True, False)
+ args = self._populate(
+ self.src_points,
+ self.src_points,
+ src_bounds=self.src_bounds,
+ tgt_bounds=self.tgt_bounds,
+ )
+ emsg = f"Coordinate {self.src_name} has different bounds"
+ with self.assertRaisesRegex(ValueError, emsg):
+ _ = self.resolve._prepare_points_and_bounds(**args)
+
+ def test_coord_ndim_and_shape_equal__points_equal_with_bounds_different_ignore_mismatch(
+ self,
+ ):
+ self.m_array_equal.side_effect = (True, False)
+ args = self._populate(
+ self.src_points,
+ self.src_points,
+ src_bounds=self.src_bounds,
+ tgt_bounds=self.tgt_bounds,
+ )
+ points, bounds = self.resolve._prepare_points_and_bounds(
+ **args, ignore_mismatch=True
+ )
+ self.assertEqual(self.src_points, points)
+ self.assertIsNone(bounds)
+ self.assertEqual(2, self.m_array_equal.call_count)
+ expected = [
+ mock.call(self.src_points, self.src_points, withnans=True),
+ mock.call(self.src_bounds, self.tgt_bounds, withnans=True),
+ ]
+ self.assertEqual(expected, self.m_array_equal.call_args_list)
+
+ def test_coord_ndim_and_shape_equal__points_equal_with_bounds_different_strict(
+ self,
+ ):
+ self.m_array_equal.side_effect = (True, False)
+ args = self._populate(
+ self.src_points,
+ self.src_points,
+ src_bounds=self.src_bounds,
+ tgt_bounds=self.tgt_bounds,
+ )
+ with LENIENT.context(maths=False):
+ emsg = f"Coordinate {self.src_name} has different bounds"
+ with self.assertRaisesRegex(ValueError, emsg):
+ _ = self.resolve._prepare_points_and_bounds(**args)
+
+ def test_coord_ndim_and_shape_equal__points_different(self):
+ self.m_array_equal.side_effect = (False,)
+ args = self._populate(self.src_points, self.tgt_points)
+ emsg = f"Coordinate {self.src_name} has different points"
+ with self.assertRaisesRegex(ValueError, emsg):
+ _ = self.resolve._prepare_points_and_bounds(**args)
+
+ def test_coord_ndim_and_shape_equal__points_different_ignore_mismatch(
+ self,
+ ):
+ self.m_array_equal.side_effect = (False,)
+ args = self._populate(self.src_points, self.tgt_points)
+ points, bounds = self.resolve._prepare_points_and_bounds(
+ **args, ignore_mismatch=True
+ )
+ self.assertIsNone(points)
+ self.assertIsNone(bounds)
+
+ def test_coord_ndim_and_shape_equal__points_different_strict(self):
+ self.m_array_equal.side_effect = (False,)
+ args = self._populate(self.src_points, self.tgt_points)
+ with LENIENT.context(maths=False):
+ emsg = f"Coordinate {self.src_name} has different points"
+ with self.assertRaisesRegex(ValueError, emsg):
+ _ = self.resolve._prepare_points_and_bounds(**args)
+
+
+class Test__create_prepared_item(tests.IrisTest):
+ def setUp(self):
+ Coord = namedtuple("Coord", ["points", "bounds"])
+ self.points_value = sentinel.points
+ self.points = mock.Mock(copy=mock.Mock(return_value=self.points_value))
+ self.bounds_value = sentinel.bounds
+ self.bounds = mock.Mock(copy=mock.Mock(return_value=self.bounds_value))
+ self.coord = Coord(points=self.points, bounds=self.bounds)
+ self.container = type(self.coord)
+ self.combined = sentinel.combined
+ self.src = mock.Mock(combine=mock.Mock(return_value=self.combined))
+ self.tgt = sentinel.tgt
+
+ def _check(self, src=None, tgt=None):
+ dims = 0
+ if src is not None and tgt is not None:
+ combined = self.combined
+ else:
+ combined = src or tgt
+ result = Resolve._create_prepared_item(
+ self.coord, dims, src_metadata=src, tgt_metadata=tgt
+ )
+ self.assertIsInstance(result, _PreparedItem)
+ self.assertIsInstance(result.metadata, _PreparedMetadata)
+ expected = _PreparedMetadata(combined=combined, src=src, tgt=tgt)
+ self.assertEqual(expected, result.metadata)
+ self.assertEqual(self.points_value, result.points)
+ self.assertEqual(1, self.points.copy.call_count)
+ self.assertEqual([mock.call()], self.points.copy.call_args_list)
+ self.assertEqual(self.bounds_value, result.bounds)
+ self.assertEqual(1, self.bounds.copy.call_count)
+ self.assertEqual([mock.call()], self.bounds.copy.call_args_list)
+ self.assertEqual((dims,), result.dims)
+ self.assertEqual(self.container, result.container)
+
+ def test__no_metadata(self):
+ self._check()
+
+ def test__src_metadata_only(self):
+ self._check(src=self.src)
+
+ def test__tgt_metadata_only(self):
+ self._check(tgt=self.tgt)
+
+ def test__combine_metadata(self):
+ self._check(src=self.src, tgt=self.tgt)
+
+
+class Test__prepare_local_payload_dim(tests.IrisTest):
+ def setUp(self):
+ self.Cube = namedtuple("Cube", ["ndim"])
+ self.resolve = Resolve()
+ self.resolve.prepared_category = _CategoryItems(
+ items_dim=[], items_aux=[], items_scalar=[]
+ )
+ self.resolve.map_rhs_to_lhs = True
+ self.src_coverage = dict(
+ cube=None,
+ metadata=[],
+ coords=[],
+ dims_common=None,
+ dims_local=[],
+ dims_free=None,
+ )
+ self.tgt_coverage = deepcopy(self.src_coverage)
+ self.prepared_item = sentinel.prepared_item
+ self.m_create_prepared_item = self.patch(
+ "iris.common.resolve.Resolve._create_prepared_item",
+ return_value=self.prepared_item,
+ )
+
+ def test_src_no_local_with_tgt_no_local(self):
+ # key: (state) c=common, f=free, l=local
+ # (coord) d=dim
+ #
+ # tgt: <- src:
+ # dims 0 1 dims 0 1
+ # shape 2 3 shape 2 3
+ # state c c state c c
+ # coord d d coord d d
+ #
+ # src-to-tgt mapping:
+ # 0->0, 1->1
+ mapping = {0: 0, 1: 1}
+ self.resolve.mapping = mapping
+ src_coverage = _DimCoverage(**self.src_coverage)
+ self.tgt_coverage["cube"] = self.Cube(ndim=2)
+ tgt_coverage = _DimCoverage(**self.tgt_coverage)
+ self.resolve._prepare_local_payload_dim(src_coverage, tgt_coverage)
+ self.assertEqual(0, len(self.resolve.prepared_category.items_dim))
+
+ def test_src_no_local_with_tgt_no_local__strict(self):
+ # key: (state) c=common, f=free, l=local
+ # (coord) d=dim
+ #
+ # tgt: <- src:
+ # dims 0 1 dims 0 1
+ # shape 2 3 shape 2 3
+ # state c c state c c
+ # coord d d coord d d
+ #
+ # src-to-tgt mapping:
+ # 0->0, 1->1
+ mapping = {0: 0, 1: 1}
+ self.resolve.mapping = mapping
+ src_coverage = _DimCoverage(**self.src_coverage)
+ self.tgt_coverage["cube"] = self.Cube(ndim=2)
+ tgt_coverage = _DimCoverage(**self.tgt_coverage)
+ with LENIENT.context(maths=False):
+ self.resolve._prepare_local_payload_dim(src_coverage, tgt_coverage)
+ self.assertEqual(0, len(self.resolve.prepared_category.items_dim))
+
+ def test_src_local_with_tgt_local(self):
+ # key: (state) c=common, f=free, l=local
+ # (coord) d=dim
+ #
+ # tgt: <- src:
+ # dims 0 1 dims 0 1
+ # shape 2 3 shape 2 3
+ # state c l state c l
+ # coord d d coord d d
+ #
+ # src-to-tgt mapping:
+ # 0->0, 1->1
+ mapping = {0: 0, 1: 1}
+ self.resolve.mapping = mapping
+ self.src_coverage["dims_local"] = (1,)
+ src_coverage = _DimCoverage(**self.src_coverage)
+ self.tgt_coverage["dims_local"] = (1,)
+ self.tgt_coverage["cube"] = self.Cube(ndim=2)
+ tgt_coverage = _DimCoverage(**self.tgt_coverage)
+ self.resolve._prepare_local_payload_dim(src_coverage, tgt_coverage)
+ self.assertEqual(0, len(self.resolve.prepared_category.items_dim))
+
+ def test_src_local_with_tgt_local__strict(self):
+ # key: (state) c=common, f=free, l=local
+ # (coord) d=dim
+ #
+ # tgt: <- src:
+ # dims 0 1 dims 0 1
+ # shape 2 3 shape 2 3
+ # state c l state c l
+ # coord d d coord d d
+ #
+ # src-to-tgt mapping:
+ # 0->0, 1->1
+ mapping = {0: 0, 1: 1}
+ self.resolve.mapping = mapping
+ self.src_coverage["dims_local"] = (1,)
+ src_coverage = _DimCoverage(**self.src_coverage)
+ self.tgt_coverage["dims_local"] = (1,)
+ self.tgt_coverage["cube"] = self.Cube(ndim=2)
+ tgt_coverage = _DimCoverage(**self.tgt_coverage)
+ with LENIENT.context(maths=False):
+ self.resolve._prepare_local_payload_dim(src_coverage, tgt_coverage)
+ self.assertEqual(0, len(self.resolve.prepared_category.items_dim))
+
+ def test_src_local_with_tgt_free(self):
+ # key: (state) c=common, f=free, l=local
+ # (coord) d=dim
+ #
+ # tgt: <- src:
+ # dims 0 1 dims 0 1
+ # shape 2 3 shape 2 3
+ # state c f state c l
+ # coord d coord d d
+ #
+ # src-to-tgt mapping:
+ # 0->0, 1->1
+ mapping = {0: 0, 1: 1}
+ self.resolve.mapping = mapping
+ src_dim = 1
+ self.src_coverage["dims_local"] = (src_dim,)
+ src_metadata = sentinel.src_metadata
+ self.src_coverage["metadata"] = [None, src_metadata]
+ src_coord = sentinel.src_coord
+ self.src_coverage["coords"] = [None, src_coord]
+ src_coverage = _DimCoverage(**self.src_coverage)
+ self.tgt_coverage["cube"] = self.Cube(ndim=2)
+ tgt_coverage = _DimCoverage(**self.tgt_coverage)
+ self.resolve._prepare_local_payload_dim(src_coverage, tgt_coverage)
+ self.assertEqual(1, len(self.resolve.prepared_category.items_dim))
+ self.assertEqual(
+ self.prepared_item, self.resolve.prepared_category.items_dim[0]
+ )
+ self.assertEqual(1, self.m_create_prepared_item.call_count)
+ expected = [
+ mock.call(src_coord, mapping[src_dim], src_metadata=src_metadata)
+ ]
+ self.assertEqual(expected, self.m_create_prepared_item.call_args_list)
+
+ def test_src_local_with_tgt_free__strict(self):
+ # key: (state) c=common, f=free, l=local
+ # (coord) d=dim
+ #
+ # tgt: <- src:
+ # dims 0 1 dims 0 1
+ # shape 2 3 shape 2 3
+ # state c f state c l
+ # coord d coord d d
+ #
+ # src-to-tgt mapping:
+ # 0->0, 1->1
+ mapping = {0: 0, 1: 1}
+ self.resolve.mapping = mapping
+ src_dim = 1
+ self.src_coverage["dims_local"] = (src_dim,)
+ src_metadata = sentinel.src_metadata
+ self.src_coverage["metadata"] = [None, src_metadata]
+ src_coord = sentinel.src_coord
+ self.src_coverage["coords"] = [None, src_coord]
+ src_coverage = _DimCoverage(**self.src_coverage)
+ self.tgt_coverage["cube"] = self.Cube(ndim=2)
+ tgt_coverage = _DimCoverage(**self.tgt_coverage)
+ with LENIENT.context(maths=False):
+ self.resolve._prepare_local_payload_dim(src_coverage, tgt_coverage)
+ self.assertEqual(0, len(self.resolve.prepared_category.items_dim))
+
+ def test_src_free_with_tgt_local(self):
+ # key: (state) c=common, f=free, l=local
+ # (coord) d=dim
+ #
+ # tgt: <- src:
+ # dims 0 1 dims 0 1
+ # shape 2 3 shape 2 3
+ # state c l state c f
+ # coord d d coord d
+ #
+ # src-to-tgt mapping:
+ # 0->0, 1->1
+ mapping = {0: 0, 1: 1}
+ self.resolve.mapping = mapping
+ src_coverage = _DimCoverage(**self.src_coverage)
+ self.tgt_coverage["cube"] = self.Cube(ndim=2)
+ tgt_dim = 1
+ self.tgt_coverage["dims_local"] = (tgt_dim,)
+ tgt_metadata = sentinel.tgt_metadata
+ self.tgt_coverage["metadata"] = [None, tgt_metadata]
+ tgt_coord = sentinel.tgt_coord
+ self.tgt_coverage["coords"] = [None, tgt_coord]
+ tgt_coverage = _DimCoverage(**self.tgt_coverage)
+ self.resolve._prepare_local_payload_dim(src_coverage, tgt_coverage)
+ self.assertEqual(1, len(self.resolve.prepared_category.items_dim))
+ self.assertEqual(
+ self.prepared_item, self.resolve.prepared_category.items_dim[0]
+ )
+ self.assertEqual(1, self.m_create_prepared_item.call_count)
+ expected = [mock.call(tgt_coord, tgt_dim, tgt_metadata=tgt_metadata)]
+ self.assertEqual(expected, self.m_create_prepared_item.call_args_list)
+
+ def test_src_free_with_tgt_local__strict(self):
+ # key: (state) c=common, f=free, l=local
+ # (coord) d=dim
+ #
+ # tgt: <- src:
+ # dims 0 1 dims 0 1
+ # shape 2 3 shape 2 3
+ # state c l state c f
+ # coord d d coord d
+ #
+ # src-to-tgt mapping:
+ # 0->0, 1->1
+ mapping = {0: 0, 1: 1}
+ self.resolve.mapping = mapping
+ src_coverage = _DimCoverage(**self.src_coverage)
+ self.tgt_coverage["cube"] = self.Cube(ndim=2)
+ tgt_dim = 1
+ self.tgt_coverage["dims_local"] = (tgt_dim,)
+ tgt_metadata = sentinel.tgt_metadata
+ self.tgt_coverage["metadata"] = [None, tgt_metadata]
+ tgt_coord = sentinel.tgt_coord
+ self.tgt_coverage["coords"] = [None, tgt_coord]
+ tgt_coverage = _DimCoverage(**self.tgt_coverage)
+ with LENIENT.context(maths=False):
+ self.resolve._prepare_local_payload_dim(src_coverage, tgt_coverage)
+ self.assertEqual(0, len(self.resolve.prepared_category.items_dim))
+
+ def test_src_no_local_with_tgt_local__extra_dims(self):
+ # key: (state) c=common, f=free, l=local
+ # (coord) d=dim
+ #
+ # tgt: <- src:
+ # dims 0 1 2 dims 0 1
+ # shape 4 2 3 shape 2 3
+ # state l c c state c c
+ # coord d d d coord d d
+ #
+ # src-to-tgt mapping:
+ # 0->1, 1->2
+ mapping = {0: 1, 1: 2}
+ self.resolve.mapping = mapping
+ src_coverage = _DimCoverage(**self.src_coverage)
+ self.tgt_coverage["cube"] = self.Cube(ndim=3)
+ tgt_dim = 0
+ self.tgt_coverage["dims_local"] = (tgt_dim,)
+ tgt_metadata = sentinel.tgt_metadata
+ self.tgt_coverage["metadata"] = [tgt_metadata, None, None]
+ tgt_coord = sentinel.tgt_coord
+ self.tgt_coverage["coords"] = [tgt_coord, None, None]
+ tgt_coverage = _DimCoverage(**self.tgt_coverage)
+ self.resolve._prepare_local_payload_dim(src_coverage, tgt_coverage)
+ self.assertEqual(1, len(self.resolve.prepared_category.items_dim))
+ self.assertEqual(
+ self.prepared_item, self.resolve.prepared_category.items_dim[0]
+ )
+ self.assertEqual(1, self.m_create_prepared_item.call_count)
+ expected = [mock.call(tgt_coord, tgt_dim, tgt_metadata=tgt_metadata)]
+ self.assertEqual(expected, self.m_create_prepared_item.call_args_list)
+
+ def test_src_no_local_with_tgt_local__extra_dims_strict(self):
+ # key: (state) c=common, f=free, l=local
+ # (coord) d=dim
+ #
+ # tgt: <- src:
+ # dims 0 1 2 dims 0 1
+ # shape 4 2 3 shape 2 3
+ # state l c c state c c
+ # coord d d d coord d d
+ #
+ # src-to-tgt mapping:
+ # 0->1, 1->2
+ mapping = {0: 1, 1: 2}
+ self.resolve.mapping = mapping
+ src_coverage = _DimCoverage(**self.src_coverage)
+ self.tgt_coverage["cube"] = self.Cube(ndim=3)
+ tgt_dim = 0
+ self.tgt_coverage["dims_local"] = (tgt_dim,)
+ tgt_metadata = sentinel.tgt_metadata
+ self.tgt_coverage["metadata"] = [tgt_metadata, None, None]
+ tgt_coord = sentinel.tgt_coord
+ self.tgt_coverage["coords"] = [tgt_coord, None, None]
+ tgt_coverage = _DimCoverage(**self.tgt_coverage)
+ with LENIENT.context(maths=False):
+ self.resolve._prepare_local_payload_dim(src_coverage, tgt_coverage)
+ self.assertEqual(1, len(self.resolve.prepared_category.items_dim))
+ self.assertEqual(
+ self.prepared_item, self.resolve.prepared_category.items_dim[0]
+ )
+ self.assertEqual(1, self.m_create_prepared_item.call_count)
+ expected = [mock.call(tgt_coord, tgt_dim, tgt_metadata=tgt_metadata)]
+ self.assertEqual(expected, self.m_create_prepared_item.call_args_list)
+
+
+class Test__prepare_local_payload_aux(tests.IrisTest):
+ def setUp(self):
+ self.Cube = namedtuple("Cube", ["ndim"])
+ self.resolve = Resolve()
+ self.resolve.prepared_category = _CategoryItems(
+ items_dim=[], items_aux=[], items_scalar=[]
+ )
+ self.resolve.map_rhs_to_lhs = True
+ self.src_coverage = dict(
+ cube=None,
+ common_items_aux=None,
+ common_items_scalar=None,
+ local_items_aux=[],
+ local_items_scalar=None,
+ dims_common=None,
+ dims_local=[],
+ dims_free=None,
+ )
+ self.tgt_coverage = deepcopy(self.src_coverage)
+ self.src_prepared_item = sentinel.src_prepared_item
+ self.tgt_prepared_item = sentinel.tgt_prepared_item
+ self.m_create_prepared_item = self.patch(
+ "iris.common.resolve.Resolve._create_prepared_item",
+ side_effect=(self.src_prepared_item, self.tgt_prepared_item),
+ )
+
+ def test_src_no_local_with_tgt_no_local(self):
+ # key: (state) c=common, f=free, l=local
+ # (coord) d=dim
+ #
+ # tgt: <- src:
+ # dims 0 1 dims 0 1
+ # shape 2 3 shape 2 3
+ # state c c state c c
+ # coord a a coord a a
+ #
+ # src-to-tgt mapping:
+ # 0->0, 1->1
+ mapping = {0: 0, 1: 1}
+ self.resolve.mapping = mapping
+ src_coverage = _AuxCoverage(**self.src_coverage)
+ self.tgt_coverage["cube"] = self.Cube(ndim=2)
+ tgt_coverage = _AuxCoverage(**self.tgt_coverage)
+ self.resolve._prepare_local_payload_aux(src_coverage, tgt_coverage)
+ self.assertEqual(0, len(self.resolve.prepared_category.items_aux))
+
+ def test_src_no_local_with_tgt_no_local__strict(self):
+ # key: (state) c=common, f=free, l=local
+ # (coord) d=dim
+ #
+ # tgt: <- src:
+ # dims 0 1 dims 0 1
+ # shape 2 3 shape 2 3
+ # state c c state c c
+ # coord a a coord a a
+ #
+ # src-to-tgt mapping:
+ # 0->0, 1->1
+ mapping = {0: 0, 1: 1}
+ self.resolve.mapping = mapping
+ src_coverage = _AuxCoverage(**self.src_coverage)
+ self.tgt_coverage["cube"] = self.Cube(ndim=2)
+ tgt_coverage = _AuxCoverage(**self.tgt_coverage)
+ with LENIENT.context(maths=False):
+ self.resolve._prepare_local_payload_aux(src_coverage, tgt_coverage)
+ self.assertEqual(0, len(self.resolve.prepared_category.items_aux))
+
+ def test_src_local_with_tgt_local(self):
+ # key: (state) c=common, f=free, l=local
+ # (coord) d=dim
+ #
+ # tgt: <- src:
+ # dims 0 1 dims 0 1
+ # shape 2 3 shape 2 3
+ # state c l state c l
+ # coord a a coord a a
+ #
+ # src-to-tgt mapping:
+ # 0->0, 1->1
+ mapping = {0: 0, 1: 1}
+ self.resolve.mapping = mapping
+ src_metadata = sentinel.src_metadata
+ src_coord = sentinel.src_coord
+ src_dims = (1,)
+ src_item = _Item(metadata=src_metadata, coord=src_coord, dims=src_dims)
+ self.src_coverage["local_items_aux"].append(src_item)
+ src_coverage = _AuxCoverage(**self.src_coverage)
+ self.tgt_coverage["cube"] = self.Cube(ndim=2)
+ tgt_metadata = sentinel.tgt_metadata
+ tgt_coord = sentinel.tgt_coord
+ tgt_dims = (1,)
+ tgt_item = _Item(metadata=tgt_metadata, coord=tgt_coord, dims=tgt_dims)
+ self.tgt_coverage["local_items_aux"].append(tgt_item)
+ tgt_coverage = _AuxCoverage(**self.tgt_coverage)
+ self.resolve._prepare_local_payload_aux(src_coverage, tgt_coverage)
+ self.assertEqual(2, len(self.resolve.prepared_category.items_aux))
+ expected = [self.src_prepared_item, self.tgt_prepared_item]
+ self.assertEqual(expected, self.resolve.prepared_category.items_aux)
+ expected = [
+ mock.call(src_coord, tgt_dims, src_metadata=src_metadata),
+ mock.call(tgt_coord, tgt_dims, tgt_metadata=tgt_metadata),
+ ]
+ self.assertEqual(expected, self.m_create_prepared_item.call_args_list)
+
+ def test_src_local_with_tgt_local__strict(self):
+ # key: (state) c=common, f=free, l=local
+ # (coord) d=dim
+ #
+ # tgt: <- src:
+ # dims 0 1 dims 0 1
+ # shape 2 3 shape 2 3
+ # state c l state c l
+ # coord a a coord a a
+ #
+ # src-to-tgt mapping:
+ # 0->0, 1->1
+ mapping = {0: 0, 1: 1}
+ self.resolve.mapping = mapping
+ src_metadata = sentinel.src_metadata
+ src_coord = sentinel.src_coord
+ src_dims = (1,)
+ src_item = _Item(metadata=src_metadata, coord=src_coord, dims=src_dims)
+ self.src_coverage["local_items_aux"].append(src_item)
+ src_coverage = _AuxCoverage(**self.src_coverage)
+ self.tgt_coverage["cube"] = self.Cube(ndim=2)
+ tgt_metadata = sentinel.tgt_metadata
+ tgt_coord = sentinel.tgt_coord
+ tgt_dims = (1,)
+ tgt_item = _Item(metadata=tgt_metadata, coord=tgt_coord, dims=tgt_dims)
+ self.tgt_coverage["local_items_aux"].append(tgt_item)
+ tgt_coverage = _AuxCoverage(**self.tgt_coverage)
+ with LENIENT.context(maths=False):
+ self.resolve._prepare_local_payload_aux(src_coverage, tgt_coverage)
+ self.assertEqual(0, len(self.resolve.prepared_category.items_aux))
+
+ def test_src_local_with_tgt_free(self):
+ # key: (state) c=common, f=free, l=local
+ # (coord) d=dim
+ #
+ # tgt: <- src:
+ # dims 0 1 dims 0 1
+ # shape 2 3 shape 2 3
+ # state c f state c l
+ # coord a coord a a
+ #
+ # src-to-tgt mapping:
+ # 0->0, 1->1
+ mapping = {0: 0, 1: 1}
+ self.resolve.mapping = mapping
+ src_metadata = sentinel.src_metadata
+ src_coord = sentinel.src_coord
+ src_dims = (1,)
+ src_item = _Item(metadata=src_metadata, coord=src_coord, dims=src_dims)
+ self.src_coverage["local_items_aux"].append(src_item)
+ self.src_coverage["dims_local"].extend(src_dims)
+ src_coverage = _AuxCoverage(**self.src_coverage)
+ self.tgt_coverage["cube"] = self.Cube(ndim=2)
+ tgt_coverage = _AuxCoverage(**self.tgt_coverage)
+ self.resolve._prepare_local_payload_aux(src_coverage, tgt_coverage)
+ self.assertEqual(1, len(self.resolve.prepared_category.items_aux))
+ expected = [self.src_prepared_item]
+ self.assertEqual(expected, self.resolve.prepared_category.items_aux)
+ expected = [mock.call(src_coord, src_dims, src_metadata=src_metadata)]
+ self.assertEqual(expected, self.m_create_prepared_item.call_args_list)
+
+ def test_src_local_with_tgt_free__strict(self):
+ # key: (state) c=common, f=free, l=local
+ # (coord) d=dim
+ #
+ # tgt: <- src:
+ # dims 0 1 dims 0 1
+ # shape 2 3 shape 2 3
+ # state c f state c l
+ # coord a coord a a
+ #
+ # src-to-tgt mapping:
+ # 0->0, 1->1
+ mapping = {0: 0, 1: 1}
+ self.resolve.mapping = mapping
+ src_metadata = sentinel.src_metadata
+ src_coord = sentinel.src_coord
+ src_dims = (1,)
+ src_item = _Item(metadata=src_metadata, coord=src_coord, dims=src_dims)
+ self.src_coverage["local_items_aux"].append(src_item)
+ self.src_coverage["dims_local"].extend(src_dims)
+ src_coverage = _AuxCoverage(**self.src_coverage)
+ self.tgt_coverage["cube"] = self.Cube(ndim=2)
+ tgt_coverage = _AuxCoverage(**self.tgt_coverage)
+ with LENIENT.context(maths=False):
+ self.resolve._prepare_local_payload_aux(src_coverage, tgt_coverage)
+ self.assertEqual(0, len(self.resolve.prepared_category.items_aux))
+
+ def test_src_free_with_tgt_local(self):
+ # key: (state) c=common, f=free, l=local
+ # (coord) d=dim
+ #
+ # tgt: <- src:
+ # dims 0 1 dims 0 1
+ # shape 2 3 shape 2 3
+ # state c l state c f
+ # coord a a coord a
+ #
+ # src-to-tgt mapping:
+ # 0->0, 1->1
+ self.m_create_prepared_item.side_effect = (self.tgt_prepared_item,)
+ mapping = {0: 0, 1: 1}
+ self.resolve.mapping = mapping
+ src_coverage = _AuxCoverage(**self.src_coverage)
+ self.tgt_coverage["cube"] = self.Cube(ndim=2)
+ tgt_metadata = sentinel.tgt_metadata
+ tgt_coord = sentinel.tgt_coord
+ tgt_dims = (1,)
+ tgt_item = _Item(metadata=tgt_metadata, coord=tgt_coord, dims=tgt_dims)
+ self.tgt_coverage["local_items_aux"].append(tgt_item)
+ self.tgt_coverage["dims_local"].extend(tgt_dims)
+ tgt_coverage = _AuxCoverage(**self.tgt_coverage)
+ self.resolve._prepare_local_payload_aux(src_coverage, tgt_coverage)
+ self.assertEqual(1, len(self.resolve.prepared_category.items_aux))
+ expected = [self.tgt_prepared_item]
+ self.assertEqual(expected, self.resolve.prepared_category.items_aux)
+ expected = [mock.call(tgt_coord, tgt_dims, tgt_metadata=tgt_metadata)]
+ self.assertEqual(expected, self.m_create_prepared_item.call_args_list)
+
+ def test_src_free_with_tgt_local__strict(self):
+ # key: (state) c=common, f=free, l=local
+ # (coord) d=dim
+ #
+ # tgt: <- src:
+ # dims 0 1 dims 0 1
+ # shape 2 3 shape 2 3
+ # state c l state c f
+ # coord a a coord a
+ #
+ # src-to-tgt mapping:
+ # 0->0, 1->1
+ self.m_create_prepared_item.side_effect = (self.tgt_prepared_item,)
+ mapping = {0: 0, 1: 1}
+ self.resolve.mapping = mapping
+ src_coverage = _AuxCoverage(**self.src_coverage)
+ self.tgt_coverage["cube"] = self.Cube(ndim=2)
+ tgt_metadata = sentinel.tgt_metadata
+ tgt_coord = sentinel.tgt_coord
+ tgt_dims = (1,)
+ tgt_item = _Item(metadata=tgt_metadata, coord=tgt_coord, dims=tgt_dims)
+ self.tgt_coverage["local_items_aux"].append(tgt_item)
+ self.tgt_coverage["dims_local"].extend(tgt_dims)
+ tgt_coverage = _AuxCoverage(**self.tgt_coverage)
+ with LENIENT.context(maths=False):
+ self.resolve._prepare_local_payload_aux(src_coverage, tgt_coverage)
+ self.assertEqual(0, len(self.resolve.prepared_category.items_aux))
+
+ def test_src_no_local_with_tgt_local__extra_dims(self):
+ # key: (state) c=common, f=free, l=local
+ # (coord) d=dim
+ #
+ # tgt: <- src:
+ # dims 0 1 2 dims 0 1
+ # shape 4 2 3 shape 2 3
+ # state l c c state c c
+ # coord a a a coord a a
+ #
+ # src-to-tgt mapping:
+ # 0->1, 1->2
+ self.m_create_prepared_item.side_effect = (self.tgt_prepared_item,)
+ mapping = {0: 1, 1: 2}
+ self.resolve.mapping = mapping
+ src_coverage = _AuxCoverage(**self.src_coverage)
+ self.tgt_coverage["cube"] = self.Cube(ndim=3)
+ tgt_metadata = sentinel.tgt_metadata
+ tgt_coord = sentinel.tgt_coord
+ tgt_dims = (0,)
+ tgt_item = _Item(metadata=tgt_metadata, coord=tgt_coord, dims=tgt_dims)
+ self.tgt_coverage["local_items_aux"].append(tgt_item)
+ self.tgt_coverage["dims_local"].extend(tgt_dims)
+ tgt_coverage = _AuxCoverage(**self.tgt_coverage)
+ self.resolve._prepare_local_payload_aux(src_coverage, tgt_coverage)
+ self.assertEqual(1, len(self.resolve.prepared_category.items_aux))
+ expected = [self.tgt_prepared_item]
+ self.assertEqual(expected, self.resolve.prepared_category.items_aux)
+ expected = [mock.call(tgt_coord, tgt_dims, tgt_metadata=tgt_metadata)]
+ self.assertEqual(expected, self.m_create_prepared_item.call_args_list)
+
+ def test_src_no_local_with_tgt_local__extra_dims_strict(self):
+ # key: (state) c=common, f=free, l=local
+ # (coord) d=dim
+ #
+ # tgt: <- src:
+ # dims 0 1 2 dims 0 1
+ # shape 4 2 3 shape 2 3
+ # state l c c state c c
+ # coord a a a coord a a
+ #
+ # src-to-tgt mapping:
+ # 0->1, 1->2
+ self.m_create_prepared_item.side_effect = (self.tgt_prepared_item,)
+ mapping = {0: 1, 1: 2}
+ self.resolve.mapping = mapping
+ src_coverage = _AuxCoverage(**self.src_coverage)
+ self.tgt_coverage["cube"] = self.Cube(ndim=3)
+ tgt_metadata = sentinel.tgt_metadata
+ tgt_coord = sentinel.tgt_coord
+ tgt_dims = (0,)
+ tgt_item = _Item(metadata=tgt_metadata, coord=tgt_coord, dims=tgt_dims)
+ self.tgt_coverage["local_items_aux"].append(tgt_item)
+ self.tgt_coverage["dims_local"].extend(tgt_dims)
+ tgt_coverage = _AuxCoverage(**self.tgt_coverage)
+ with LENIENT.context(maths=False):
+ self.resolve._prepare_local_payload_aux(src_coverage, tgt_coverage)
+ self.assertEqual(1, len(self.resolve.prepared_category.items_aux))
+ expected = [self.tgt_prepared_item]
+ self.assertEqual(expected, self.resolve.prepared_category.items_aux)
+ expected = [mock.call(tgt_coord, tgt_dims, tgt_metadata=tgt_metadata)]
+ self.assertEqual(expected, self.m_create_prepared_item.call_args_list)
+
+
+class Test__prepare_local_payload_scalar(tests.IrisTest):
+ def setUp(self):
+ self.Cube = namedtuple("Cube", ["ndim"])
+ self.resolve = Resolve()
+ self.resolve.prepared_category = _CategoryItems(
+ items_dim=[], items_aux=[], items_scalar=[]
+ )
+ self.src_coverage = dict(
+ cube=None,
+ common_items_aux=None,
+ common_items_scalar=None,
+ local_items_aux=None,
+ local_items_scalar=[],
+ dims_common=None,
+ dims_local=[],
+ dims_free=None,
+ )
+ self.tgt_coverage = deepcopy(self.src_coverage)
+ self.src_prepared_item = sentinel.src_prepared_item
+ self.tgt_prepared_item = sentinel.tgt_prepared_item
+ self.m_create_prepared_item = self.patch(
+ "iris.common.resolve.Resolve._create_prepared_item",
+ side_effect=(self.src_prepared_item, self.tgt_prepared_item),
+ )
+ self.src_dims = ()
+ self.tgt_dims = ()
+
+ def test_src_no_local_with_tgt_no_local(self):
+ ndim = 2
+ self.src_coverage["cube"] = self.Cube(ndim=ndim)
+ src_coverage = _AuxCoverage(**self.src_coverage)
+ tgt_coverage = _AuxCoverage(**self.tgt_coverage)
+ self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage)
+ self.assertEqual(0, len(self.resolve.prepared_category.items_scalar))
+
+ def test_src_no_local_with_tgt_no_local__strict(self):
+ ndim = 2
+ self.src_coverage["cube"] = self.Cube(ndim=ndim)
+ src_coverage = _AuxCoverage(**self.src_coverage)
+ tgt_coverage = _AuxCoverage(**self.tgt_coverage)
+ with LENIENT.context(maths=False):
+ self.resolve._prepare_local_payload_scalar(
+ src_coverage, tgt_coverage
+ )
+ self.assertEqual(0, len(self.resolve.prepared_category.items_scalar))
+
+ def test_src_no_local_with_tgt_no_local__src_scalar_cube(self):
+ ndim = 0
+ self.src_coverage["cube"] = self.Cube(ndim=ndim)
+ src_coverage = _AuxCoverage(**self.src_coverage)
+ tgt_coverage = _AuxCoverage(**self.tgt_coverage)
+ self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage)
+ self.assertEqual(0, len(self.resolve.prepared_category.items_scalar))
+
+ def test_src_no_local_with_tgt_no_local__src_scalar_cube_strict(self):
+ ndim = 0
+ self.src_coverage["cube"] = self.Cube(ndim=ndim)
+ src_coverage = _AuxCoverage(**self.src_coverage)
+ tgt_coverage = _AuxCoverage(**self.tgt_coverage)
+ with LENIENT.context(maths=False):
+ self.resolve._prepare_local_payload_scalar(
+ src_coverage, tgt_coverage
+ )
+ self.assertEqual(0, len(self.resolve.prepared_category.items_scalar))
+
+ def test_src_local_with_tgt_no_local(self):
+ ndim = 2
+ self.src_coverage["cube"] = self.Cube(ndim=ndim)
+ src_metadata = sentinel.src_metadata
+ src_coord = sentinel.src_coord
+ src_item = _Item(
+ metadata=src_metadata, coord=src_coord, dims=self.src_dims
+ )
+ self.src_coverage["local_items_scalar"].append(src_item)
+ src_coverage = _AuxCoverage(**self.src_coverage)
+ tgt_coverage = _AuxCoverage(**self.tgt_coverage)
+ self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage)
+ self.assertEqual(1, len(self.resolve.prepared_category.items_scalar))
+ expected = [self.src_prepared_item]
+ self.assertEqual(expected, self.resolve.prepared_category.items_scalar)
+ expected = [
+ mock.call(src_coord, self.src_dims, src_metadata=src_metadata)
+ ]
+ self.assertEqual(expected, self.m_create_prepared_item.call_args_list)
+
+ def test_src_local_with_tgt_no_local__strict(self):
+ ndim = 2
+ self.src_coverage["cube"] = self.Cube(ndim=ndim)
+ src_metadata = sentinel.src_metadata
+ src_coord = sentinel.src_coord
+ src_item = _Item(
+ metadata=src_metadata, coord=src_coord, dims=self.src_dims
+ )
+ self.src_coverage["local_items_scalar"].append(src_item)
+ src_coverage = _AuxCoverage(**self.src_coverage)
+ tgt_coverage = _AuxCoverage(**self.tgt_coverage)
+ with LENIENT.context(maths=False):
+ self.resolve._prepare_local_payload_scalar(
+ src_coverage, tgt_coverage
+ )
+ self.assertEqual(0, len(self.resolve.prepared_category.items_scalar))
+
+ def test_src_local_with_tgt_no_local__src_scalar_cube(self):
+ ndim = 0
+ self.src_coverage["cube"] = self.Cube(ndim=ndim)
+ src_metadata = sentinel.src_metadata
+ src_coord = sentinel.src_coord
+ src_item = _Item(
+ metadata=src_metadata, coord=src_coord, dims=self.src_dims
+ )
+ self.src_coverage["local_items_scalar"].append(src_item)
+ src_coverage = _AuxCoverage(**self.src_coverage)
+ tgt_coverage = _AuxCoverage(**self.tgt_coverage)
+ self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage)
+ self.assertEqual(1, len(self.resolve.prepared_category.items_scalar))
+ expected = [self.src_prepared_item]
+ self.assertEqual(expected, self.resolve.prepared_category.items_scalar)
+ expected = [
+ mock.call(src_coord, self.src_dims, src_metadata=src_metadata)
+ ]
+ self.assertEqual(expected, self.m_create_prepared_item.call_args_list)
+
+ def test_src_local_with_tgt_no_local__src_scalar_cube_strict(self):
+ ndim = 0
+ self.src_coverage["cube"] = self.Cube(ndim=ndim)
+ src_metadata = sentinel.src_metadata
+ src_coord = sentinel.src_coord
+ src_item = _Item(
+ metadata=src_metadata, coord=src_coord, dims=self.src_dims
+ )
+ self.src_coverage["local_items_scalar"].append(src_item)
+ src_coverage = _AuxCoverage(**self.src_coverage)
+ tgt_coverage = _AuxCoverage(**self.tgt_coverage)
+ with LENIENT.context(maths=False):
+ self.resolve._prepare_local_payload_scalar(
+ src_coverage, tgt_coverage
+ )
+ self.assertEqual(0, len(self.resolve.prepared_category.items_scalar))
+
+ def test_src_no_local_with_tgt_local(self):
+ self.m_create_prepared_item.side_effect = (self.tgt_prepared_item,)
+ ndim = 2
+ self.src_coverage["cube"] = self.Cube(ndim=ndim)
+ src_coverage = _AuxCoverage(**self.src_coverage)
+ tgt_metadata = sentinel.tgt_metadata
+ tgt_coord = sentinel.tgt_coord
+ tgt_item = _Item(
+ metadata=tgt_metadata, coord=tgt_coord, dims=self.tgt_dims
+ )
+ self.tgt_coverage["local_items_scalar"].append(tgt_item)
+ tgt_coverage = _AuxCoverage(**self.tgt_coverage)
+ self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage)
+ self.assertEqual(1, len(self.resolve.prepared_category.items_scalar))
+ expected = [self.tgt_prepared_item]
+ self.assertEqual(expected, self.resolve.prepared_category.items_scalar)
+ expected = [
+ mock.call(tgt_coord, self.tgt_dims, tgt_metadata=tgt_metadata)
+ ]
+ self.assertEqual(expected, self.m_create_prepared_item.call_args_list)
+
+ def test_src_no_local_with_tgt_local__strict(self):
+ self.m_create_prepared_item.side_effect = (self.tgt_prepared_item,)
+ ndim = 2
+ self.src_coverage["cube"] = self.Cube(ndim=ndim)
+ src_coverage = _AuxCoverage(**self.src_coverage)
+ tgt_metadata = sentinel.tgt_metadata
+ tgt_coord = sentinel.tgt_coord
+ tgt_item = _Item(
+ metadata=tgt_metadata, coord=tgt_coord, dims=self.tgt_dims
+ )
+ self.tgt_coverage["local_items_scalar"].append(tgt_item)
+ tgt_coverage = _AuxCoverage(**self.tgt_coverage)
+ with LENIENT.context(maths=False):
+ self.resolve._prepare_local_payload_scalar(
+ src_coverage, tgt_coverage
+ )
+ self.assertEqual(0, len(self.resolve.prepared_category.items_scalar))
+
+ def test_src_no_local_with_tgt_local__src_scalar_cube(self):
+ self.m_create_prepared_item.side_effect = (self.tgt_prepared_item,)
+ ndim = 0
+ self.src_coverage["cube"] = self.Cube(ndim=ndim)
+ src_coverage = _AuxCoverage(**self.src_coverage)
+ tgt_metadata = sentinel.tgt_metadata
+ tgt_coord = sentinel.tgt_coord
+ tgt_item = _Item(
+ metadata=tgt_metadata, coord=tgt_coord, dims=self.tgt_dims
+ )
+ self.tgt_coverage["local_items_scalar"].append(tgt_item)
+ tgt_coverage = _AuxCoverage(**self.tgt_coverage)
+ self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage)
+ self.assertEqual(1, len(self.resolve.prepared_category.items_scalar))
+ expected = [self.tgt_prepared_item]
+ self.assertEqual(expected, self.resolve.prepared_category.items_scalar)
+ expected = [
+ mock.call(tgt_coord, self.tgt_dims, tgt_metadata=tgt_metadata)
+ ]
+ self.assertEqual(expected, self.m_create_prepared_item.call_args_list)
+
+ def test_src_no_local_with_tgt_local__src_scalar_cube_strict(self):
+ self.m_create_prepared_item.side_effect = (self.tgt_prepared_item,)
+ ndim = 0
+ self.src_coverage["cube"] = self.Cube(ndim=ndim)
+ src_coverage = _AuxCoverage(**self.src_coverage)
+ tgt_metadata = sentinel.tgt_metadata
+ tgt_coord = sentinel.tgt_coord
+ tgt_item = _Item(
+ metadata=tgt_metadata, coord=tgt_coord, dims=self.tgt_dims
+ )
+ self.tgt_coverage["local_items_scalar"].append(tgt_item)
+ tgt_coverage = _AuxCoverage(**self.tgt_coverage)
+ with LENIENT.context(maths=False):
+ self.resolve._prepare_local_payload_scalar(
+ src_coverage, tgt_coverage
+ )
+ self.assertEqual(1, len(self.resolve.prepared_category.items_scalar))
+ expected = [self.tgt_prepared_item]
+ self.assertEqual(expected, self.resolve.prepared_category.items_scalar)
+ expected = [
+ mock.call(tgt_coord, self.tgt_dims, tgt_metadata=tgt_metadata)
+ ]
+ self.assertEqual(expected, self.m_create_prepared_item.call_args_list)
+
+ def test_src_local_with_tgt_local(self):
+ ndim = 2
+ self.src_coverage["cube"] = self.Cube(ndim=ndim)
+ src_metadata = sentinel.src_metadata
+ src_coord = sentinel.src_coord
+ src_item = _Item(
+ metadata=src_metadata, coord=src_coord, dims=self.src_dims
+ )
+ self.src_coverage["local_items_scalar"].append(src_item)
+ src_coverage = _AuxCoverage(**self.src_coverage)
+ tgt_metadata = sentinel.tgt_metadata
+ tgt_coord = sentinel.tgt_coord
+ tgt_item = _Item(
+ metadata=tgt_metadata, coord=tgt_coord, dims=self.tgt_dims
+ )
+ self.tgt_coverage["local_items_scalar"].append(tgt_item)
+ tgt_coverage = _AuxCoverage(**self.tgt_coverage)
+ self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage)
+ self.assertEqual(2, len(self.resolve.prepared_category.items_scalar))
+ expected = [self.src_prepared_item, self.tgt_prepared_item]
+ self.assertEqual(expected, self.resolve.prepared_category.items_scalar)
+ expected = [
+ mock.call(src_coord, self.src_dims, src_metadata=src_metadata),
+ mock.call(tgt_coord, self.tgt_dims, tgt_metadata=tgt_metadata),
+ ]
+ self.assertEqual(expected, self.m_create_prepared_item.call_args_list)
+
+ def test_src_local_with_tgt_local__strict(self):
+ ndim = 2
+ self.src_coverage["cube"] = self.Cube(ndim=ndim)
+ src_metadata = sentinel.src_metadata
+ src_coord = sentinel.src_coord
+ src_item = _Item(
+ metadata=src_metadata, coord=src_coord, dims=self.src_dims
+ )
+ self.src_coverage["local_items_scalar"].append(src_item)
+ src_coverage = _AuxCoverage(**self.src_coverage)
+ tgt_metadata = sentinel.tgt_metadata
+ tgt_coord = sentinel.tgt_coord
+ tgt_item = _Item(
+ metadata=tgt_metadata, coord=tgt_coord, dims=self.tgt_dims
+ )
+ self.tgt_coverage["local_items_scalar"].append(tgt_item)
+ tgt_coverage = _AuxCoverage(**self.tgt_coverage)
+ with LENIENT.context(maths=False):
+ self.resolve._prepare_local_payload_scalar(
+ src_coverage, tgt_coverage
+ )
+ self.assertEqual(0, len(self.resolve.prepared_category.items_scalar))
+
+ def test_src_local_with_tgt_local__src_scalar_cube(self):
+ ndim = 0
+ self.src_coverage["cube"] = self.Cube(ndim=ndim)
+ src_metadata = sentinel.src_metadata
+ src_coord = sentinel.src_coord
+ src_item = _Item(
+ metadata=src_metadata, coord=src_coord, dims=self.src_dims
+ )
+ self.src_coverage["local_items_scalar"].append(src_item)
+ src_coverage = _AuxCoverage(**self.src_coverage)
+ tgt_metadata = sentinel.tgt_metadata
+ tgt_coord = sentinel.tgt_coord
+ tgt_item = _Item(
+ metadata=tgt_metadata, coord=tgt_coord, dims=self.tgt_dims
+ )
+ self.tgt_coverage["local_items_scalar"].append(tgt_item)
+ tgt_coverage = _AuxCoverage(**self.tgt_coverage)
+ self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage)
+ self.assertEqual(2, len(self.resolve.prepared_category.items_scalar))
+ expected = [self.src_prepared_item, self.tgt_prepared_item]
+ self.assertEqual(expected, self.resolve.prepared_category.items_scalar)
+ expected = [
+ mock.call(src_coord, self.src_dims, src_metadata=src_metadata),
+ mock.call(tgt_coord, self.tgt_dims, tgt_metadata=tgt_metadata),
+ ]
+ self.assertEqual(expected, self.m_create_prepared_item.call_args_list)
+
+ def test_src_local_with_tgt_local__src_scalar_cube_strict(self):
+ ndim = 0
+ self.src_coverage["cube"] = self.Cube(ndim=ndim)
+ src_metadata = sentinel.src_metadata
+ src_coord = sentinel.src_coord
+ src_item = _Item(
+ metadata=src_metadata, coord=src_coord, dims=self.src_dims
+ )
+ self.src_coverage["local_items_scalar"].append(src_item)
+ src_coverage = _AuxCoverage(**self.src_coverage)
+ tgt_metadata = sentinel.tgt_metadata
+ tgt_coord = sentinel.tgt_coord
+ tgt_item = _Item(
+ metadata=tgt_metadata, coord=tgt_coord, dims=self.tgt_dims
+ )
+ self.tgt_coverage["local_items_scalar"].append(tgt_item)
+ tgt_coverage = _AuxCoverage(**self.tgt_coverage)
+ with LENIENT.context(maths=False):
+ self.resolve._prepare_local_payload_scalar(
+ src_coverage, tgt_coverage
+ )
+ self.assertEqual(0, len(self.resolve.prepared_category.items_scalar))
+
+
+class Test__prepare_local_payload(tests.IrisTest):
+ def test(self):
+ src_dim_coverage = sentinel.src_dim_coverage
+ src_aux_coverage = sentinel.src_aux_coverage
+ tgt_dim_coverage = sentinel.tgt_dim_coverage
+ tgt_aux_coverage = sentinel.tgt_aux_coverage
+ root = "iris.common.resolve.Resolve"
+ m_prepare_dim = self.patch(f"{root}._prepare_local_payload_dim")
+ m_prepare_aux = self.patch(f"{root}._prepare_local_payload_aux")
+ m_prepare_scalar = self.patch(f"{root}._prepare_local_payload_scalar")
+ resolve = Resolve()
+ resolve._prepare_local_payload(
+ src_dim_coverage,
+ src_aux_coverage,
+ tgt_dim_coverage,
+ tgt_aux_coverage,
+ )
+ self.assertEqual(1, m_prepare_dim.call_count)
+ expected = [mock.call(src_dim_coverage, tgt_dim_coverage)]
+ self.assertEqual(expected, m_prepare_dim.call_args_list)
+ self.assertEqual(1, m_prepare_aux.call_count)
+ expected = [mock.call(src_aux_coverage, tgt_aux_coverage)]
+ self.assertEqual(expected, m_prepare_aux.call_args_list)
+ self.assertEqual(1, m_prepare_scalar.call_count)
+ expected = [mock.call(src_aux_coverage, tgt_aux_coverage)]
+ self.assertEqual(expected, m_prepare_scalar.call_args_list)
+
+
+class Test__metadata_prepare(tests.IrisTest):
+ def setUp(self):
+ self.src_cube = sentinel.src_cube
+ self.src_category_local = sentinel.src_category_local
+ self.src_dim_coverage = sentinel.src_dim_coverage
+ self.src_aux_coverage = mock.Mock(
+ common_items_aux=sentinel.src_aux_coverage_common_items_aux,
+ common_items_scalar=sentinel.src_aux_coverage_common_items_scalar,
+ )
+ self.tgt_cube = sentinel.tgt_cube
+ self.tgt_category_local = sentinel.tgt_category_local
+ self.tgt_dim_coverage = sentinel.tgt_dim_coverage
+ self.tgt_aux_coverage = mock.Mock(
+ common_items_aux=sentinel.tgt_aux_coverage_common_items_aux,
+ common_items_scalar=sentinel.tgt_aux_coverage_common_items_scalar,
+ )
+ self.resolve = Resolve()
+ root = "iris.common.resolve.Resolve"
+ self.m_prepare_common_dim_payload = self.patch(
+ f"{root}._prepare_common_dim_payload"
+ )
+ self.m_prepare_common_aux_payload = self.patch(
+ f"{root}._prepare_common_aux_payload"
+ )
+ self.m_prepare_local_payload = self.patch(
+ f"{root}._prepare_local_payload"
+ )
+ self.m_prepare_factory_payload = self.patch(
+ f"{root}._prepare_factory_payload"
+ )
+
+ def _check(self):
+ self.assertIsNone(self.resolve.prepared_category)
+ self.assertIsNone(self.resolve.prepared_factories)
+ self.resolve._metadata_prepare()
+ expected = _CategoryItems(items_dim=[], items_aux=[], items_scalar=[])
+ self.assertEqual(expected, self.resolve.prepared_category)
+ self.assertEqual([], self.resolve.prepared_factories)
+ self.assertEqual(1, self.m_prepare_common_dim_payload.call_count)
+ expected = [mock.call(self.src_dim_coverage, self.tgt_dim_coverage)]
+ self.assertEqual(
+ expected, self.m_prepare_common_dim_payload.call_args_list
+ )
+ self.assertEqual(2, self.m_prepare_common_aux_payload.call_count)
+ expected = [
+ mock.call(
+ self.src_aux_coverage.common_items_aux,
+ self.tgt_aux_coverage.common_items_aux,
+ [],
+ ),
+ mock.call(
+ self.src_aux_coverage.common_items_scalar,
+ self.tgt_aux_coverage.common_items_scalar,
+ [],
+ ignore_mismatch=True,
+ ),
+ ]
+ self.assertEqual(
+ expected, self.m_prepare_common_aux_payload.call_args_list
+ )
+ self.assertEqual(1, self.m_prepare_local_payload.call_count)
+ expected = [
+ mock.call(
+ self.src_dim_coverage,
+ self.src_aux_coverage,
+ self.tgt_dim_coverage,
+ self.tgt_aux_coverage,
+ )
+ ]
+ self.assertEqual(expected, self.m_prepare_local_payload.call_args_list)
+ self.assertEqual(2, self.m_prepare_factory_payload.call_count)
+ expected = [
+ mock.call(self.tgt_cube, self.tgt_category_local, from_src=False),
+ mock.call(self.src_cube, self.src_category_local),
+ ]
+ self.assertEqual(
+ expected, self.m_prepare_factory_payload.call_args_list
+ )
+
+ def test_map_rhs_to_lhs__true(self):
+ self.resolve.map_rhs_to_lhs = True
+ self.resolve.rhs_cube = self.src_cube
+ self.resolve.rhs_cube_category_local = self.src_category_local
+ self.resolve.rhs_cube_dim_coverage = self.src_dim_coverage
+ self.resolve.rhs_cube_aux_coverage = self.src_aux_coverage
+ self.resolve.lhs_cube = self.tgt_cube
+ self.resolve.lhs_cube_category_local = self.tgt_category_local
+ self.resolve.lhs_cube_dim_coverage = self.tgt_dim_coverage
+ self.resolve.lhs_cube_aux_coverage = self.tgt_aux_coverage
+ self._check()
+
+ def test_map_rhs_to_lhs__false(self):
+ self.resolve.map_rhs_to_lhs = False
+ self.resolve.lhs_cube = self.src_cube
+ self.resolve.lhs_cube_category_local = self.src_category_local
+ self.resolve.lhs_cube_dim_coverage = self.src_dim_coverage
+ self.resolve.lhs_cube_aux_coverage = self.src_aux_coverage
+ self.resolve.rhs_cube = self.tgt_cube
+ self.resolve.rhs_cube_category_local = self.tgt_category_local
+ self.resolve.rhs_cube_dim_coverage = self.tgt_dim_coverage
+ self.resolve.rhs_cube_aux_coverage = self.tgt_aux_coverage
+ self._check()
+
+
+class Test__prepare_factory_payload(tests.IrisTest):
+ def setUp(self):
+ self.Cube = namedtuple("Cube", ["aux_factories"])
+ self.Coord = namedtuple("Coord", ["metadata"])
+ self.Factory_T1 = namedtuple(
+ "Factory_T1", ["dependencies"]
+ ) # dummy factory type
+ self.container_T1 = type(self.Factory_T1(None))
+ self.Factory_T2 = namedtuple(
+ "Factory_T2", ["dependencies"]
+ ) # dummy factory type
+ self.container_T2 = type(self.Factory_T2(None))
+ self.resolve = Resolve()
+ self.resolve.map_rhs_to_lhs = True
+ self.resolve.prepared_factories = []
+ self.m_get_prepared_item = self.patch(
+ "iris.common.resolve.Resolve._get_prepared_item"
+ )
+ self.category_local = sentinel.category_local
+ self.from_src = sentinel.from_src
+
+ def test_no_factory(self):
+ cube = self.Cube(aux_factories=[])
+ self.resolve._prepare_factory_payload(cube, self.category_local)
+ self.assertEqual(0, len(self.resolve.prepared_factories))
+
+ def test_skip_factory__already_prepared(self):
+ aux_factory = self.Factory_T1(dependencies=None)
+ aux_factories = [aux_factory]
+ cube = self.Cube(aux_factories=aux_factories)
+ prepared_factories = [
+ _PreparedFactory(container=self.container_T1, dependencies=None),
+ _PreparedFactory(container=self.container_T2, dependencies=None),
+ ]
+ self.resolve.prepared_factories.extend(prepared_factories)
+ self.resolve._prepare_factory_payload(cube, self.category_local)
+ self.assertEqual(prepared_factories, self.resolve.prepared_factories)
+
+ def test_factory__dependency_already_prepared(self):
+ coord_a = self.Coord(metadata=sentinel.coord_a_metadata)
+ coord_b = self.Coord(metadata=sentinel.coord_b_metadata)
+ coord_c = self.Coord(metadata=sentinel.coord_c_metadata)
+ side_effect = (coord_a, coord_b, coord_c)
+ self.m_get_prepared_item.side_effect = side_effect
+ dependencies = dict(name_a=coord_a, name_b=coord_b, name_c=coord_c)
+ aux_factory = self.Factory_T1(dependencies=dependencies)
+ aux_factories = [aux_factory]
+ cube = self.Cube(aux_factories=aux_factories)
+ self.resolve._prepare_factory_payload(
+ cube, self.category_local, from_src=self.from_src
+ )
+ self.assertEqual(1, len(self.resolve.prepared_factories))
+ prepared_dependencies = {
+ name: coord.metadata for name, coord in dependencies.items()
+ }
+ expected = [
+ _PreparedFactory(
+ container=self.container_T1, dependencies=prepared_dependencies
+ )
+ ]
+ self.assertEqual(expected, self.resolve.prepared_factories)
+ self.assertEqual(len(side_effect), self.m_get_prepared_item.call_count)
+ expected = [
+ mock.call(
+ coord_a.metadata, self.category_local, from_src=self.from_src
+ ),
+ mock.call(
+ coord_b.metadata, self.category_local, from_src=self.from_src
+ ),
+ mock.call(
+ coord_c.metadata, self.category_local, from_src=self.from_src
+ ),
+ ]
+ actual = self.m_get_prepared_item.call_args_list
+ for call in expected:
+ self.assertIn(call, actual)
+
+ def test_factory__dependency_local_not_prepared(self):
+ coord_a = self.Coord(metadata=sentinel.coord_a_metadata)
+ coord_b = self.Coord(metadata=sentinel.coord_b_metadata)
+ coord_c = self.Coord(metadata=sentinel.coord_c_metadata)
+ side_effect = (None, coord_a, None, coord_b, None, coord_c)
+ self.m_get_prepared_item.side_effect = side_effect
+ dependencies = dict(name_a=coord_a, name_b=coord_b, name_c=coord_c)
+ aux_factory = self.Factory_T1(dependencies=dependencies)
+ aux_factories = [aux_factory]
+ cube = self.Cube(aux_factories=aux_factories)
+ self.resolve._prepare_factory_payload(
+ cube, self.category_local, from_src=self.from_src
+ )
+ self.assertEqual(1, len(self.resolve.prepared_factories))
+ prepared_dependencies = {
+ name: coord.metadata for name, coord in dependencies.items()
+ }
+ expected = [
+ _PreparedFactory(
+ container=self.container_T1, dependencies=prepared_dependencies
+ )
+ ]
+ self.assertEqual(expected, self.resolve.prepared_factories)
+ self.assertEqual(len(side_effect), self.m_get_prepared_item.call_count)
+ expected = [
+ mock.call(
+ coord_a.metadata, self.category_local, from_src=self.from_src
+ ),
+ mock.call(
+ coord_b.metadata, self.category_local, from_src=self.from_src
+ ),
+ mock.call(
+ coord_c.metadata, self.category_local, from_src=self.from_src
+ ),
+ mock.call(
+ coord_a.metadata,
+ self.category_local,
+ from_src=self.from_src,
+ from_local=True,
+ ),
+ mock.call(
+ coord_b.metadata,
+ self.category_local,
+ from_src=self.from_src,
+ from_local=True,
+ ),
+ mock.call(
+ coord_c.metadata,
+ self.category_local,
+ from_src=self.from_src,
+ from_local=True,
+ ),
+ ]
+ actual = self.m_get_prepared_item.call_args_list
+ for call in expected:
+ self.assertIn(call, actual)
+
+ def test_factory__dependency_not_found(self):
+ coord_a = self.Coord(metadata=sentinel.coord_a_metadata)
+ coord_b = self.Coord(metadata=sentinel.coord_b_metadata)
+ coord_c = self.Coord(metadata=sentinel.coord_c_metadata)
+ side_effect = (None, None)
+ self.m_get_prepared_item.side_effect = side_effect
+ dependencies = dict(name_a=coord_a, name_b=coord_b, name_c=coord_c)
+ aux_factory = self.Factory_T1(dependencies=dependencies)
+ aux_factories = [aux_factory]
+ cube = self.Cube(aux_factories=aux_factories)
+ self.resolve._prepare_factory_payload(
+ cube, self.category_local, from_src=self.from_src
+ )
+ self.assertEqual(0, len(self.resolve.prepared_factories))
+ self.assertEqual(len(side_effect), self.m_get_prepared_item.call_count)
+ expected = [
+ mock.call(
+ coord_a.metadata, self.category_local, from_src=self.from_src
+ ),
+ mock.call(
+ coord_b.metadata, self.category_local, from_src=self.from_src
+ ),
+ mock.call(
+ coord_c.metadata, self.category_local, from_src=self.from_src
+ ),
+ mock.call(
+ coord_a.metadata,
+ self.category_local,
+ from_src=self.from_src,
+ from_local=True,
+ ),
+ mock.call(
+ coord_b.metadata,
+ self.category_local,
+ from_src=self.from_src,
+ from_local=True,
+ ),
+ mock.call(
+ coord_c.metadata,
+ self.category_local,
+ from_src=self.from_src,
+ from_local=True,
+ ),
+ ]
+ actual = self.m_get_prepared_item.call_args_list
+ for call in actual:
+ self.assertIn(call, expected)
+
+
+class Test__get_prepared_item(tests.IrisTest):
+ def setUp(self):
+ PreparedItem = namedtuple("PreparedItem", ["metadata"])
+ self.resolve = Resolve()
+ self.prepared_dim_metadata_src = sentinel.prepared_dim_metadata_src
+ self.prepared_dim_metadata_tgt = sentinel.prepared_dim_metadata_tgt
+ self.prepared_items_dim = PreparedItem(
+ metadata=_PreparedMetadata(
+ combined=None,
+ src=self.prepared_dim_metadata_src,
+ tgt=self.prepared_dim_metadata_tgt,
+ )
+ )
+ self.prepared_aux_metadata_src = sentinel.prepared_aux_metadata_src
+ self.prepared_aux_metadata_tgt = sentinel.prepared_aux_metadata_tgt
+ self.prepared_items_aux = PreparedItem(
+ metadata=_PreparedMetadata(
+ combined=None,
+ src=self.prepared_aux_metadata_src,
+ tgt=self.prepared_aux_metadata_tgt,
+ )
+ )
+ self.prepared_scalar_metadata_src = (
+ sentinel.prepared_scalar_metadata_src
+ )
+ self.prepared_scalar_metadata_tgt = (
+ sentinel.prepared_scalar_metadata_tgt
+ )
+ self.prepared_items_scalar = PreparedItem(
+ metadata=_PreparedMetadata(
+ combined=None,
+ src=self.prepared_scalar_metadata_src,
+ tgt=self.prepared_scalar_metadata_tgt,
+ )
+ )
+ self.resolve.prepared_category = _CategoryItems(
+ items_dim=[self.prepared_items_dim],
+ items_aux=[self.prepared_items_aux],
+ items_scalar=[self.prepared_items_scalar],
+ )
+ self.resolve.mapping = {0: 10}
+ self.m_create_prepared_item = self.patch(
+ "iris.common.resolve.Resolve._create_prepared_item"
+ )
+ self.local_dim_metadata = sentinel.local_dim_metadata
+ self.local_aux_metadata = sentinel.local_aux_metadata
+ self.local_scalar_metadata = sentinel.local_scalar_metadata
+ self.local_coord = sentinel.local_coord
+ self.local_coord_dims = (0,)
+ self.local_items_dim = _Item(
+ metadata=self.local_dim_metadata,
+ coord=self.local_coord,
+ dims=self.local_coord_dims,
+ )
+ self.local_items_aux = _Item(
+ metadata=self.local_aux_metadata,
+ coord=self.local_coord,
+ dims=self.local_coord_dims,
+ )
+ self.local_items_scalar = _Item(
+ metadata=self.local_scalar_metadata,
+ coord=self.local_coord,
+ dims=self.local_coord_dims,
+ )
+ self.category_local = _CategoryItems(
+ items_dim=[self.local_items_dim],
+ items_aux=[self.local_items_aux],
+ items_scalar=[self.local_items_scalar],
+ )
+
+ def test_missing_prepared_coord__from_src(self):
+ metadata = sentinel.missing
+ category_local = None
+ result = self.resolve._get_prepared_item(metadata, category_local)
+ self.assertIsNone(result)
+
+ def test_missing_prepared_coord__from_tgt(self):
+ metadata = sentinel.missing
+ category_local = None
+ result = self.resolve._get_prepared_item(
+ metadata, category_local, from_src=False
+ )
+ self.assertIsNone(result)
+
+ def test_get_prepared_dim_coord__from_src(self):
+ metadata = self.prepared_dim_metadata_src
+ category_local = None
+ result = self.resolve._get_prepared_item(metadata, category_local)
+ self.assertEqual(self.prepared_items_dim, result)
+
+ def test_get_prepared_dim_coord__from_tgt(self):
+ metadata = self.prepared_dim_metadata_tgt
+ category_local = None
+ result = self.resolve._get_prepared_item(
+ metadata, category_local, from_src=False
+ )
+ self.assertEqual(self.prepared_items_dim, result)
+
+ def test_get_prepared_aux_coord__from_src(self):
+ metadata = self.prepared_aux_metadata_src
+ category_local = None
+ result = self.resolve._get_prepared_item(metadata, category_local)
+ self.assertEqual(self.prepared_items_aux, result)
+
+ def test_get_prepared_aux_coord__from_tgt(self):
+ metadata = self.prepared_aux_metadata_tgt
+ category_local = None
+ result = self.resolve._get_prepared_item(
+ metadata, category_local, from_src=False
+ )
+ self.assertEqual(self.prepared_items_aux, result)
+
+ def test_get_prepared_scalar_coord__from_src(self):
+ metadata = self.prepared_scalar_metadata_src
+ category_local = None
+ result = self.resolve._get_prepared_item(metadata, category_local)
+ self.assertEqual(self.prepared_items_scalar, result)
+
+ def test_get_prepared_scalar_coord__from_tgt(self):
+ metadata = self.prepared_scalar_metadata_tgt
+ category_local = None
+ result = self.resolve._get_prepared_item(
+ metadata, category_local, from_src=False
+ )
+ self.assertEqual(self.prepared_items_scalar, result)
+
+ def test_missing_local_coord__from_src(self):
+ metadata = sentinel.missing
+ result = self.resolve._get_prepared_item(
+ metadata, self.category_local, from_local=True
+ )
+ self.assertIsNone(result)
+
+ def test_missing_local_coord__from_tgt(self):
+ metadata = sentinel.missing
+ result = self.resolve._get_prepared_item(
+ metadata, self.category_local, from_src=False, from_local=True
+ )
+ self.assertIsNone(result)
+
+ def test_get_local_dim_coord__from_src(self):
+ created_local_item = sentinel.created_local_item
+ self.m_create_prepared_item.return_value = created_local_item
+ metadata = self.local_dim_metadata
+ result = self.resolve._get_prepared_item(
+ metadata, self.category_local, from_local=True
+ )
+ expected = created_local_item
+ self.assertEqual(expected, result)
+ self.assertEqual(2, len(self.resolve.prepared_category.items_dim))
+ self.assertEqual(expected, self.resolve.prepared_category.items_dim[1])
+ self.assertEqual(1, self.m_create_prepared_item.call_count)
+ dims = (self.resolve.mapping[self.local_coord_dims[0]],)
+ expected = [
+ mock.call(
+ self.local_coord,
+ dims,
+ src_metadata=metadata,
+ tgt_metadata=None,
+ )
+ ]
+ self.assertEqual(expected, self.m_create_prepared_item.call_args_list)
+
+ def test_get_local_dim_coord__from_tgt(self):
+ created_local_item = sentinel.created_local_item
+ self.m_create_prepared_item.return_value = created_local_item
+ metadata = self.local_dim_metadata
+ result = self.resolve._get_prepared_item(
+ metadata, self.category_local, from_src=False, from_local=True
+ )
+ expected = created_local_item
+ self.assertEqual(expected, result)
+ self.assertEqual(2, len(self.resolve.prepared_category.items_dim))
+ self.assertEqual(expected, self.resolve.prepared_category.items_dim[1])
+ self.assertEqual(1, self.m_create_prepared_item.call_count)
+ dims = self.local_coord_dims
+ expected = [
+ mock.call(
+ self.local_coord,
+ dims,
+ src_metadata=None,
+ tgt_metadata=metadata,
+ )
+ ]
+ self.assertEqual(expected, self.m_create_prepared_item.call_args_list)
+
+ def test_get_local_aux_coord__from_src(self):
+ created_local_item = sentinel.created_local_item
+ self.m_create_prepared_item.return_value = created_local_item
+ metadata = self.local_aux_metadata
+ result = self.resolve._get_prepared_item(
+ metadata, self.category_local, from_local=True
+ )
+ expected = created_local_item
+ self.assertEqual(expected, result)
+ self.assertEqual(2, len(self.resolve.prepared_category.items_aux))
+ self.assertEqual(expected, self.resolve.prepared_category.items_aux[1])
+ self.assertEqual(1, self.m_create_prepared_item.call_count)
+ dims = (self.resolve.mapping[self.local_coord_dims[0]],)
+ expected = [
+ mock.call(
+ self.local_coord,
+ dims,
+ src_metadata=metadata,
+ tgt_metadata=None,
+ )
+ ]
+ self.assertEqual(expected, self.m_create_prepared_item.call_args_list)
+
+ def test_get_local_aux_coord__from_tgt(self):
+ created_local_item = sentinel.created_local_item
+ self.m_create_prepared_item.return_value = created_local_item
+ metadata = self.local_aux_metadata
+ result = self.resolve._get_prepared_item(
+ metadata, self.category_local, from_src=False, from_local=True
+ )
+ expected = created_local_item
+ self.assertEqual(expected, result)
+ self.assertEqual(2, len(self.resolve.prepared_category.items_aux))
+ self.assertEqual(expected, self.resolve.prepared_category.items_aux[1])
+ self.assertEqual(1, self.m_create_prepared_item.call_count)
+ dims = self.local_coord_dims
+ expected = [
+ mock.call(
+ self.local_coord,
+ dims,
+ src_metadata=None,
+ tgt_metadata=metadata,
+ )
+ ]
+ self.assertEqual(expected, self.m_create_prepared_item.call_args_list)
+
+ def test_get_local_scalar_coord__from_src(self):
+ created_local_item = sentinel.created_local_item
+ self.m_create_prepared_item.return_value = created_local_item
+ metadata = self.local_scalar_metadata
+ result = self.resolve._get_prepared_item(
+ metadata, self.category_local, from_local=True
+ )
+ expected = created_local_item
+ self.assertEqual(expected, result)
+ self.assertEqual(2, len(self.resolve.prepared_category.items_scalar))
+ self.assertEqual(
+ expected, self.resolve.prepared_category.items_scalar[1]
+ )
+ self.assertEqual(1, self.m_create_prepared_item.call_count)
+ dims = (self.resolve.mapping[self.local_coord_dims[0]],)
+ expected = [
+ mock.call(
+ self.local_coord,
+ dims,
+ src_metadata=metadata,
+ tgt_metadata=None,
+ )
+ ]
+ self.assertEqual(expected, self.m_create_prepared_item.call_args_list)
+
+ def test_get_local_scalar_coord__from_tgt(self):
+ created_local_item = sentinel.created_local_item
+ self.m_create_prepared_item.return_value = created_local_item
+ metadata = self.local_scalar_metadata
+ result = self.resolve._get_prepared_item(
+ metadata, self.category_local, from_src=False, from_local=True
+ )
+ expected = created_local_item
+ self.assertEqual(expected, result)
+ self.assertEqual(2, len(self.resolve.prepared_category.items_scalar))
+ self.assertEqual(
+ expected, self.resolve.prepared_category.items_scalar[1]
+ )
+ self.assertEqual(1, self.m_create_prepared_item.call_count)
+ dims = self.local_coord_dims
+ expected = [
+ mock.call(
+ self.local_coord,
+ dims,
+ src_metadata=None,
+ tgt_metadata=metadata,
+ )
+ ]
+ self.assertEqual(expected, self.m_create_prepared_item.call_args_list)
+
+
+class Test_cube(tests.IrisTest):
+ def setUp(self):
+ self.shape = (2, 3)
+ self.data = np.zeros(np.multiply(*self.shape), dtype=np.int8).reshape(
+ self.shape
+ )
+ self.bad_data = np.zeros(np.multiply(*self.shape), dtype=np.int8)
+ self.resolve = Resolve()
+ self.resolve.map_rhs_to_lhs = True
+ self.resolve._broadcast_shape = self.shape
+ self.cube_metadata = CubeMetadata(
+ standard_name="air_temperature",
+ long_name="air temp",
+ var_name="airT",
+ units=Unit("K"),
+ attributes={},
+ cell_methods=(),
+ )
+ lhs_cube = Cube(self.data)
+ lhs_cube.metadata = self.cube_metadata
+ self.resolve.lhs_cube = lhs_cube
+ rhs_cube = Cube(self.data)
+ rhs_cube.metadata = self.cube_metadata
+ self.resolve.rhs_cube = rhs_cube
+ self.m_add_dim_coord = self.patch("iris.cube.Cube.add_dim_coord")
+ self.m_add_aux_coord = self.patch("iris.cube.Cube.add_aux_coord")
+ self.m_add_aux_factory = self.patch("iris.cube.Cube.add_aux_factory")
+ self.m_coord = self.patch("iris.cube.Cube.coord")
+ #
+ # prepared coordinates
+ #
+ prepared_category = _CategoryItems(
+ items_dim=[], items_aux=[], items_scalar=[]
+ )
+ # prepared dim coordinates
+ self.prepared_dim_0_metadata = _PreparedMetadata(
+ combined=sentinel.prepared_dim_0_metadata_combined,
+ src=None,
+ tgt=None,
+ )
+ self.prepared_dim_0_points = sentinel.prepared_dim_0_points
+ self.prepared_dim_0_bounds = sentinel.prepared_dim_0_bounds
+ self.prepared_dim_0_dims = (0,)
+ self.prepared_dim_0_coord = mock.Mock(metadata=None)
+ self.prepared_dim_0_container = mock.Mock(
+ return_value=self.prepared_dim_0_coord
+ )
+ self.prepared_dim_0 = _PreparedItem(
+ metadata=self.prepared_dim_0_metadata,
+ points=self.prepared_dim_0_points,
+ bounds=self.prepared_dim_0_bounds,
+ dims=self.prepared_dim_0_dims,
+ container=self.prepared_dim_0_container,
+ )
+ prepared_category.items_dim.append(self.prepared_dim_0)
+ self.prepared_dim_1_metadata = _PreparedMetadata(
+ combined=sentinel.prepared_dim_1_metadata_combined,
+ src=None,
+ tgt=None,
+ )
+ self.prepared_dim_1_points = sentinel.prepared_dim_1_points
+ self.prepared_dim_1_bounds = sentinel.prepared_dim_1_bounds
+ self.prepared_dim_1_dims = (1,)
+ self.prepared_dim_1_coord = mock.Mock(metadata=None)
+ self.prepared_dim_1_container = mock.Mock(
+ return_value=self.prepared_dim_1_coord
+ )
+ self.prepared_dim_1 = _PreparedItem(
+ metadata=self.prepared_dim_1_metadata,
+ points=self.prepared_dim_1_points,
+ bounds=self.prepared_dim_1_bounds,
+ dims=self.prepared_dim_1_dims,
+ container=self.prepared_dim_1_container,
+ )
+ prepared_category.items_dim.append(self.prepared_dim_1)
+
+ # prepared auxiliary coordinates
+ self.prepared_aux_0_metadata = _PreparedMetadata(
+ combined=sentinel.prepared_aux_0_metadata_combined,
+ src=None,
+ tgt=None,
+ )
+ self.prepared_aux_0_points = sentinel.prepared_aux_0_points
+ self.prepared_aux_0_bounds = sentinel.prepared_aux_0_bounds
+ self.prepared_aux_0_dims = (0,)
+ self.prepared_aux_0_coord = mock.Mock(metadata=None)
+ self.prepared_aux_0_container = mock.Mock(
+ return_value=self.prepared_aux_0_coord
+ )
+ self.prepared_aux_0 = _PreparedItem(
+ metadata=self.prepared_aux_0_metadata,
+ points=self.prepared_aux_0_points,
+ bounds=self.prepared_aux_0_bounds,
+ dims=self.prepared_aux_0_dims,
+ container=self.prepared_aux_0_container,
+ )
+ prepared_category.items_aux.append(self.prepared_aux_0)
+ self.prepared_aux_1_metadata = _PreparedMetadata(
+ combined=sentinel.prepared_aux_1_metadata_combined,
+ src=None,
+ tgt=None,
+ )
+ self.prepared_aux_1_points = sentinel.prepared_aux_1_points
+ self.prepared_aux_1_bounds = sentinel.prepared_aux_1_bounds
+ self.prepared_aux_1_dims = (1,)
+ self.prepared_aux_1_coord = mock.Mock(metadata=None)
+ self.prepared_aux_1_container = mock.Mock(
+ return_value=self.prepared_aux_1_coord
+ )
+ self.prepared_aux_1 = _PreparedItem(
+ metadata=self.prepared_aux_1_metadata,
+ points=self.prepared_aux_1_points,
+ bounds=self.prepared_aux_1_bounds,
+ dims=self.prepared_aux_1_dims,
+ container=self.prepared_aux_1_container,
+ )
+ prepared_category.items_aux.append(self.prepared_aux_1)
+
+ # prepare scalar coordinates
+ self.prepared_scalar_0_metadata = _PreparedMetadata(
+ combined=sentinel.prepared_scalar_0_metadata_combined,
+ src=None,
+ tgt=None,
+ )
+ self.prepared_scalar_0_points = sentinel.prepared_scalar_0_points
+ self.prepared_scalar_0_bounds = sentinel.prepared_scalar_0_bounds
+ self.prepared_scalar_0_dims = ()
+ self.prepared_scalar_0_coord = mock.Mock(metadata=None)
+ self.prepared_scalar_0_container = mock.Mock(
+ return_value=self.prepared_scalar_0_coord
+ )
+ self.prepared_scalar_0 = _PreparedItem(
+ metadata=self.prepared_scalar_0_metadata,
+ points=self.prepared_scalar_0_points,
+ bounds=self.prepared_scalar_0_bounds,
+ dims=self.prepared_scalar_0_dims,
+ container=self.prepared_scalar_0_container,
+ )
+ prepared_category.items_scalar.append(self.prepared_scalar_0)
+ self.prepared_scalar_1_metadata = _PreparedMetadata(
+ combined=sentinel.prepared_scalar_1_metadata_combined,
+ src=None,
+ tgt=None,
+ )
+ self.prepared_scalar_1_points = sentinel.prepared_scalar_1_points
+ self.prepared_scalar_1_bounds = sentinel.prepared_scalar_1_bounds
+ self.prepared_scalar_1_dims = ()
+ self.prepared_scalar_1_coord = mock.Mock(metadata=None)
+ self.prepared_scalar_1_container = mock.Mock(
+ return_value=self.prepared_scalar_1_coord
+ )
+ self.prepared_scalar_1 = _PreparedItem(
+ metadata=self.prepared_scalar_1_metadata,
+ points=self.prepared_scalar_1_points,
+ bounds=self.prepared_scalar_1_bounds,
+ dims=self.prepared_scalar_1_dims,
+ container=self.prepared_scalar_1_container,
+ )
+ prepared_category.items_scalar.append(self.prepared_scalar_1)
+ #
+ # prepared factories
+ #
+ prepared_factories = []
+ self.aux_factory = sentinel.aux_factory
+ self.prepared_factory_container = mock.Mock(
+ return_value=self.aux_factory
+ )
+ self.prepared_factory_metadata_a = _PreparedMetadata(
+ combined=sentinel.prepared_factory_metadata_a_combined,
+ src=None,
+ tgt=None,
+ )
+ self.prepared_factory_metadata_b = _PreparedMetadata(
+ combined=sentinel.prepared_factory_metadata_b_combined,
+ src=None,
+ tgt=None,
+ )
+ self.prepared_factory_metadata_c = _PreparedMetadata(
+ combined=sentinel.prepared_factory_metadata_c_combined,
+ src=None,
+ tgt=None,
+ )
+ self.prepared_factory_dependencies = dict(
+ name_a=self.prepared_factory_metadata_a,
+ name_b=self.prepared_factory_metadata_b,
+ name_c=self.prepared_factory_metadata_c,
+ )
+ self.prepared_factory = _PreparedFactory(
+ container=self.prepared_factory_container,
+ dependencies=self.prepared_factory_dependencies,
+ )
+ prepared_factories.append(self.prepared_factory)
+ self.prepared_factory_side_effect = (
+ sentinel.prepared_factory_coord_a,
+ sentinel.prepared_factory_coord_b,
+ sentinel.prepared_factory_coord_c,
+ )
+ self.m_coord.side_effect = self.prepared_factory_side_effect
+ self.resolve.prepared_category = prepared_category
+ self.resolve.prepared_factories = prepared_factories
+
+ def test_no_resolved_shape(self):
+ self.resolve._broadcast_shape = None
+ data = None
+ emsg = "Cannot resolve resultant cube, as no candidate cubes have been provided"
+ with self.assertRaisesRegex(ValueError, emsg):
+ _ = self.resolve.cube(data)
+
+ def test_bad_data_shape(self):
+ emsg = "Cannot resolve resultant cube, as the provided data must have shape"
+ with self.assertRaisesRegex(ValueError, emsg):
+ _ = self.resolve.cube(self.bad_data)
+
+ def test_bad_data_shape__inplace(self):
+ self.resolve.lhs_cube = Cube(self.bad_data)
+ emsg = "Cannot resolve resultant cube in-place"
+ with self.assertRaisesRegex(ValueError, emsg):
+ _ = self.resolve.cube(self.data, in_place=True)
+
+ def _check(self):
+ # check dim coordinate 0
+ self.assertEqual(1, self.prepared_dim_0.container.call_count)
+ expected = [
+ mock.call(
+ self.prepared_dim_0_points, bounds=self.prepared_dim_0_bounds
+ )
+ ]
+ self.assertEqual(
+ expected, self.prepared_dim_0.container.call_args_list
+ )
+ self.assertEqual(
+ self.prepared_dim_0_coord.metadata,
+ self.prepared_dim_0_metadata.combined,
+ )
+ # check dim coordinate 1
+ self.assertEqual(1, self.prepared_dim_1.container.call_count)
+ expected = [
+ mock.call(
+ self.prepared_dim_1_points, bounds=self.prepared_dim_1_bounds
+ )
+ ]
+ self.assertEqual(
+ expected, self.prepared_dim_1.container.call_args_list
+ )
+ self.assertEqual(
+ self.prepared_dim_1_coord.metadata,
+ self.prepared_dim_1_metadata.combined,
+ )
+ # check add_dim_coord
+ self.assertEqual(2, self.m_add_dim_coord.call_count)
+ expected = [
+ mock.call(self.prepared_dim_0_coord, self.prepared_dim_0_dims),
+ mock.call(self.prepared_dim_1_coord, self.prepared_dim_1_dims),
+ ]
+ self.assertEqual(expected, self.m_add_dim_coord.call_args_list)
+
+ # check aux coordinate 0
+ self.assertEqual(1, self.prepared_aux_0.container.call_count)
+ expected = [
+ mock.call(
+ self.prepared_aux_0_points, bounds=self.prepared_aux_0_bounds
+ )
+ ]
+ self.assertEqual(
+ expected, self.prepared_aux_0.container.call_args_list
+ )
+ self.assertEqual(
+ self.prepared_aux_0_coord.metadata,
+ self.prepared_aux_0_metadata.combined,
+ )
+ # check aux coordinate 1
+ self.assertEqual(1, self.prepared_aux_1.container.call_count)
+ expected = [
+ mock.call(
+ self.prepared_aux_1_points, bounds=self.prepared_aux_1_bounds
+ )
+ ]
+ self.assertEqual(
+ expected, self.prepared_aux_1.container.call_args_list
+ )
+ self.assertEqual(
+ self.prepared_aux_1_coord.metadata,
+ self.prepared_aux_1_metadata.combined,
+ )
+ # check scalar coordinate 0
+ self.assertEqual(1, self.prepared_scalar_0.container.call_count)
+ expected = [
+ mock.call(
+ self.prepared_scalar_0_points,
+ bounds=self.prepared_scalar_0_bounds,
+ )
+ ]
+ self.assertEqual(
+ expected, self.prepared_scalar_0.container.call_args_list
+ )
+ self.assertEqual(
+ self.prepared_scalar_0_coord.metadata,
+ self.prepared_scalar_0_metadata.combined,
+ )
+ # check scalar coordinate 1
+ self.assertEqual(1, self.prepared_scalar_1.container.call_count)
+ expected = [
+ mock.call(
+ self.prepared_scalar_1_points,
+ bounds=self.prepared_scalar_1_bounds,
+ )
+ ]
+ self.assertEqual(
+ expected, self.prepared_scalar_1.container.call_args_list
+ )
+ self.assertEqual(
+ self.prepared_scalar_1_coord.metadata,
+ self.prepared_scalar_1_metadata.combined,
+ )
+ # check add_aux_coord
+ self.assertEqual(4, self.m_add_aux_coord.call_count)
+ expected = [
+ mock.call(self.prepared_aux_0_coord, self.prepared_aux_0_dims),
+ mock.call(self.prepared_aux_1_coord, self.prepared_aux_1_dims),
+ mock.call(
+ self.prepared_scalar_0_coord, self.prepared_scalar_0_dims
+ ),
+ mock.call(
+ self.prepared_scalar_1_coord, self.prepared_scalar_1_dims
+ ),
+ ]
+ self.assertEqual(expected, self.m_add_aux_coord.call_args_list)
+
+ # check auxiliary factories
+ self.assertEqual(1, self.m_add_aux_factory.call_count)
+ expected = [mock.call(self.aux_factory)]
+ self.assertEqual(expected, self.m_add_aux_factory.call_args_list)
+ self.assertEqual(1, self.prepared_factory_container.call_count)
+ expected = [
+ mock.call(
+ **{
+ name: value
+ for name, value in zip(
+ sorted(self.prepared_factory_dependencies.keys()),
+ self.prepared_factory_side_effect,
+ )
+ }
+ )
+ ]
+ self.assertEqual(
+ expected, self.prepared_factory_container.call_args_list
+ )
+ self.assertEqual(3, self.m_coord.call_count)
+ expected = [
+ mock.call(self.prepared_factory_metadata_a.combined),
+ mock.call(self.prepared_factory_metadata_b.combined),
+ mock.call(self.prepared_factory_metadata_c.combined),
+ ]
+ self.assertEqual(expected, self.m_coord.call_args_list)
+
+ def test_resolve(self):
+ result = self.resolve.cube(self.data)
+ self.assertEqual(self.cube_metadata, result.metadata)
+ self._check()
+ self.assertIsNot(self.resolve.lhs_cube, result)
+
+ def test_resolve__inplace(self):
+ result = self.resolve.cube(self.data, in_place=True)
+ self.assertEqual(self.cube_metadata, result.metadata)
+ self._check()
+ self.assertIs(self.resolve.lhs_cube, result)
+
+
+if __name__ == "__main__":
+ tests.main()
diff --git a/lib/iris/tests/unit/cube/test_Cube.py b/lib/iris/tests/unit/cube/test_Cube.py
index 63553ac821..ded401cab3 100644
--- a/lib/iris/tests/unit/cube/test_Cube.py
+++ b/lib/iris/tests/unit/cube/test_Cube.py
@@ -336,6 +336,108 @@ def test_non_lazy_aggregator(self):
self.assertArrayEqual(result.data, np.mean(self.data, axis=1))
+class Test_collapsed__multidim_weighted(tests.IrisTest):
+ def setUp(self):
+ self.data = np.arange(6.0).reshape((2, 3))
+ self.lazydata = as_lazy_data(self.data)
+ # Test cubes with (same-valued) real and lazy data
+ cube_real = Cube(self.data)
+ for i_dim, name in enumerate(("y", "x")):
+ npts = cube_real.shape[i_dim]
+ coord = DimCoord(np.arange(npts), long_name=name)
+ cube_real.add_dim_coord(coord, i_dim)
+ self.cube_real = cube_real
+ self.cube_lazy = cube_real.copy(data=self.lazydata)
+ # Test weights and expected result for a y-collapse
+ self.y_weights = np.array([0.3, 0.5])
+ self.full_weights_y = np.broadcast_to(
+ self.y_weights.reshape((2, 1)), cube_real.shape
+ )
+ self.expected_result_y = np.array([1.875, 2.875, 3.875])
+ # Test weights and expected result for an x-collapse
+ self.x_weights = np.array([0.7, 0.4, 0.6])
+ self.full_weights_x = np.broadcast_to(
+ self.x_weights.reshape((1, 3)), cube_real.shape
+ )
+ self.expected_result_x = np.array([0.941176, 3.941176])
+
+ def test_weighted_fullweights_real_y(self):
+ # Supplying full-shape weights for collapsing over a single dimension.
+ cube_collapsed = self.cube_real.collapsed(
+ "y", MEAN, weights=self.full_weights_y
+ )
+ self.assertArrayAlmostEqual(
+ cube_collapsed.data, self.expected_result_y
+ )
+
+ def test_weighted_fullweights_lazy_y(self):
+ # Full-shape weights, lazy data : Check lazy result, same values as real calc.
+ cube_collapsed = self.cube_lazy.collapsed(
+ "y", MEAN, weights=self.full_weights_y
+ )
+ self.assertTrue(cube_collapsed.has_lazy_data())
+ self.assertArrayAlmostEqual(
+ cube_collapsed.data, self.expected_result_y
+ )
+
+ def test_weighted_1dweights_real_y(self):
+ # 1-D weights, real data : Check same results as full-shape.
+ cube_collapsed = self.cube_real.collapsed(
+ "y", MEAN, weights=self.y_weights
+ )
+ self.assertArrayAlmostEqual(
+ cube_collapsed.data, self.expected_result_y
+ )
+
+ def test_weighted_1dweights_lazy_y(self):
+ # 1-D weights, lazy data : Check lazy result, same values as real calc.
+ cube_collapsed = self.cube_lazy.collapsed(
+ "y", MEAN, weights=self.y_weights
+ )
+ self.assertTrue(cube_collapsed.has_lazy_data())
+ self.assertArrayAlmostEqual(
+ cube_collapsed.data, self.expected_result_y
+ )
+
+ def test_weighted_fullweights_real_x(self):
+ # Full weights, real data, ** collapse X ** : as for 'y' case above
+ cube_collapsed = self.cube_real.collapsed(
+ "x", MEAN, weights=self.full_weights_x
+ )
+ self.assertArrayAlmostEqual(
+ cube_collapsed.data, self.expected_result_x
+ )
+
+ def test_weighted_fullweights_lazy_x(self):
+ # Full weights, lazy data, ** collapse X ** : as for 'y' case above
+ cube_collapsed = self.cube_lazy.collapsed(
+ "x", MEAN, weights=self.full_weights_x
+ )
+ self.assertTrue(cube_collapsed.has_lazy_data())
+ self.assertArrayAlmostEqual(
+ cube_collapsed.data, self.expected_result_x
+ )
+
+ def test_weighted_1dweights_real_x(self):
+ # 1-D weights, real data, ** collapse X ** : as for 'y' case above
+ cube_collapsed = self.cube_real.collapsed(
+ "x", MEAN, weights=self.x_weights
+ )
+ self.assertArrayAlmostEqual(
+ cube_collapsed.data, self.expected_result_x
+ )
+
+ def test_weighted_1dweights_lazy_x(self):
+ # 1-D weights, lazy data, ** collapse X ** : as for 'y' case above
+ cube_collapsed = self.cube_lazy.collapsed(
+ "x", MEAN, weights=self.x_weights
+ )
+ self.assertTrue(cube_collapsed.has_lazy_data())
+ self.assertArrayAlmostEqual(
+ cube_collapsed.data, self.expected_result_x
+ )
+
+
class Test_collapsed__cellmeasure_ancils(tests.IrisTest):
def setUp(self):
cube = Cube(np.arange(6.0).reshape((2, 3)))
@@ -484,6 +586,16 @@ def test_ancillary_variable(self):
)
self.assertEqual(cube.summary(), expected_summary)
+ def test_similar_coords(self):
+ coord1 = AuxCoord(
+ 42, long_name="foo", attributes=dict(bar=np.array([2, 5]))
+ )
+ coord2 = coord1.copy()
+ coord2.attributes = dict(bar="baz")
+ for coord in [coord1, coord2]:
+ self.cube.add_aux_coord(coord)
+ self.assertIn("baz", self.cube.summary())
+
class Test_is_compatible(tests.IrisTest):
def setUp(self):
diff --git a/lib/iris/tests/unit/fileformats/netcdf/test__load_aux_factory.py b/lib/iris/tests/unit/fileformats/netcdf/test__load_aux_factory.py
index 48cc9c0d1a..c8f9460e0f 100644
--- a/lib/iris/tests/unit/fileformats/netcdf/test__load_aux_factory.py
+++ b/lib/iris/tests/unit/fileformats/netcdf/test__load_aux_factory.py
@@ -53,8 +53,44 @@ def test_formula_terms_ap(self):
self.assertEqual(factory.surface_air_pressure, self.ps)
def test_formula_terms_a_p0(self):
- coord_a = DimCoord(np.arange(5), units="Pa")
- coord_p0 = DimCoord(10, units="1")
+ coord_a = DimCoord(np.arange(5), units="1")
+ coord_p0 = DimCoord(10, units="Pa")
+ coord_expected = DimCoord(
+ np.arange(5) * 10,
+ units="Pa",
+ long_name="vertical pressure",
+ var_name="ap",
+ )
+ self.cube_parts["coordinates"].extend(
+ [(coord_a, "a"), (coord_p0, "p0")]
+ )
+ self.requires["formula_terms"] = dict(a="a", b="b", ps="ps", p0="p0")
+ _load_aux_factory(self.engine, self.cube)
+ # Check cube.coord_dims method.
+ self.assertEqual(self.cube.coord_dims.call_count, 1)
+ args, _ = self.cube.coord_dims.call_args
+ self.assertEqual(len(args), 1)
+ self.assertIs(args[0], coord_a)
+ # Check cube.add_aux_coord method.
+ self.assertEqual(self.cube.add_aux_coord.call_count, 1)
+ args, _ = self.cube.add_aux_coord.call_args
+ self.assertEqual(len(args), 2)
+ self.assertEqual(args[0], coord_expected)
+ self.assertIsInstance(args[1], mock.Mock)
+ # Check cube.add_aux_factory method.
+ self.assertEqual(self.cube.add_aux_factory.call_count, 1)
+ args, _ = self.cube.add_aux_factory.call_args
+ self.assertEqual(len(args), 1)
+ factory = args[0]
+ self.assertEqual(factory.delta, coord_expected)
+ self.assertEqual(factory.sigma, mock.sentinel.b)
+ self.assertEqual(factory.surface_air_pressure, self.ps)
+
+ def test_formula_terms_a_p0__promote_a_units_unknown_to_dimensionless(
+ self,
+ ):
+ coord_a = DimCoord(np.arange(5), units="unknown")
+ coord_p0 = DimCoord(10, units="Pa")
coord_expected = DimCoord(
np.arange(5) * 10,
units="Pa",
@@ -71,6 +107,7 @@ def test_formula_terms_a_p0(self):
args, _ = self.cube.coord_dims.call_args
self.assertEqual(len(args), 1)
self.assertIs(args[0], coord_a)
+ self.assertEqual("1", args[0].units)
# Check cube.add_aux_coord method.
self.assertEqual(self.cube.add_aux_coord.call_count, 1)
args, _ = self.cube.add_aux_coord.call_args
diff --git a/noxfile.py b/noxfile.py
index cd97e8ef8b..7bfcc73dd7 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -19,7 +19,7 @@
PACKAGE = str("lib" / Path("iris"))
#: Cirrus-CI environment variable hook.
-PY_VER = os.environ.get("PY_VER", "3.7")
+PY_VER = os.environ.get("PY_VER", ["3.6", "3.7"])
#: Default cartopy cache directory.
CARTOPY_CACHE_DIR = os.environ.get("HOME") / Path(".local/share/cartopy")
@@ -41,7 +41,7 @@ def venv_cached(session):
"""
result = False
- yml = Path(f"requirements/ci/py{PY_VER.replace('.', '')}.yml")
+ yml = Path(f"requirements/ci/py{session.python.replace('.', '')}.yml")
tmp_dir = Path(session.create_tmp())
cache = tmp_dir / yml.name
if cache.is_file():
@@ -66,7 +66,7 @@ def cache_venv(session):
A `nox.sessions.Session` object.
"""
- yml = Path(f"requirements/ci/py{PY_VER.replace('.', '')}.yml")
+ yml = Path(f"requirements/ci/py{session.python.replace('.', '')}.yml")
with open(yml, "rb") as fi:
hexdigest = hashlib.sha256(fi.read()).hexdigest()
tmp_dir = Path(session.create_tmp())
@@ -131,7 +131,7 @@ def black(session):
session.run("black", "--check", __file__)
-@nox.session(python=[PY_VER], venv_backend="conda")
+@nox.session(python=PY_VER, venv_backend="conda")
def tests(session):
"""
Perform iris system, integration and unit tests.
@@ -150,7 +150,7 @@ def tests(session):
"""
if not venv_cached(session):
# Determine the conda requirements yaml file.
- fname = f"requirements/ci/py{PY_VER.replace('.', '')}.yml"
+ fname = f"requirements/ci/py{session.python.replace('.', '')}.yml"
# Back-door approach to force nox to use "conda env update".
command = (
"conda",
@@ -164,7 +164,7 @@ def tests(session):
cache_venv(session)
cache_cartopy(session)
- session.run("python", "setup.py", "develop")
+ session.install("--no-deps", "--editable", ".")
session.run(
"python",
"-m",
@@ -174,7 +174,7 @@ def tests(session):
)
-@nox.session(python=[PY_VER], venv_backend="conda")
+@nox.session(python=PY_VER, venv_backend="conda")
def gallery(session):
"""
Perform iris gallery doc-tests.
@@ -193,7 +193,7 @@ def gallery(session):
"""
if not venv_cached(session):
# Determine the conda requirements yaml file.
- fname = f"requirements/ci/py{PY_VER.replace('.', '')}.yml"
+ fname = f"requirements/ci/py{session.python.replace('.', '')}.yml"
# Back-door approach to force nox to use "conda env update".
command = (
"conda",
@@ -207,7 +207,7 @@ def gallery(session):
cache_venv(session)
cache_cartopy(session)
- session.run("python", "setup.py", "develop")
+ session.install("--no-deps", "--editable", ".")
session.run(
"python",
"-m",
@@ -216,7 +216,7 @@ def gallery(session):
)
-@nox.session(python=[PY_VER], venv_backend="conda")
+@nox.session(python=PY_VER, venv_backend="conda")
def doctest(session):
"""
Perform iris doc-tests.
@@ -235,7 +235,7 @@ def doctest(session):
"""
if not venv_cached(session):
# Determine the conda requirements yaml file.
- fname = f"requirements/ci/py{PY_VER.replace('.', '')}.yml"
+ fname = f"requirements/ci/py{session.python.replace('.', '')}.yml"
# Back-door approach to force nox to use "conda env update".
command = (
"conda",
@@ -249,7 +249,7 @@ def doctest(session):
cache_venv(session)
cache_cartopy(session)
- session.run("python", "setup.py", "develop")
+ session.install("--no-deps", "--editable", ".")
session.cd("docs/iris")
session.run(
"make",
@@ -264,7 +264,7 @@ def doctest(session):
)
-@nox.session(python=[PY_VER], venv_backend="conda")
+@nox.session(python=PY_VER, venv_backend="conda")
def linkcheck(session):
"""
Perform iris doc link check.
@@ -283,7 +283,7 @@ def linkcheck(session):
"""
if not venv_cached(session):
# Determine the conda requirements yaml file.
- fname = f"requirements/ci/py{PY_VER.replace('.', '')}.yml"
+ fname = f"requirements/ci/py{session.python.replace('.', '')}.yml"
# Back-door approach to force nox to use "conda env update".
command = (
"conda",
@@ -297,7 +297,7 @@ def linkcheck(session):
cache_venv(session)
cache_cartopy(session)
- session.run("python", "setup.py", "develop")
+ session.install("--no-deps", "--editable", ".")
session.cd("docs/iris")
session.run(
"make",
diff --git a/requirements/ci/py36.yml b/requirements/ci/py36.yml
index 2b40fbad4e..4d9d25d7c6 100644
--- a/requirements/ci/py36.yml
+++ b/requirements/ci/py36.yml
@@ -44,11 +44,8 @@ dependencies:
# Documentation dependencies.
- sphinx
+ - sphinxcontrib-napoleon
- sphinx-copybutton
- sphinx-gallery
+ - sphinx-panels
- sphinx_rtd_theme
- - pip
- - pip:
- - sphinxcontrib-napoleon
- - sphinx-panels
-
diff --git a/requirements/ci/py37.yml b/requirements/ci/py37.yml
index 0f01f0ef75..bdb097796a 100644
--- a/requirements/ci/py37.yml
+++ b/requirements/ci/py37.yml
@@ -44,10 +44,8 @@ dependencies:
# Documentation dependencies.
- sphinx
+ - sphinxcontrib-napoleon
- sphinx-copybutton
- sphinx-gallery
+ - sphinx-panels
- sphinx_rtd_theme
- - pip
- - pip:
- - sphinxcontrib-napoleon
- - sphinx-panels