diff --git a/.cirrus.yml b/.cirrus.yml new file mode 100644 index 00000000000..007bab403e3 --- /dev/null +++ b/.cirrus.yml @@ -0,0 +1,227 @@ +# Reference: +# - https://cirrus-ci.org/guide/writing-tasks/ +# - https://cirrus-ci.org/guide/tips-and-tricks/#sharing-configuration-between-tasks +# - https://cirrus-ci.org/guide/linux/ +# - https://cirrus-ci.org/guide/macOS/ +# - https://cirrus-ci.org/guide/windows/ +# - https://hub.docker.com/_/gcc/ +# - https://hub.docker.com/_/python/ + +# +# Global defaults. +# +container: + image: python:3.8 + cpu: 2 + memory: 4G + + +env: + # Maximum cache period (in weeks) before forcing a new cache upload. + CACHE_PERIOD: "2" + # Increment the build number to force new cartopy cache upload. + CARTOPY_CACHE_BUILD: "0" + # Increment the build number to force new conda cache upload. + CONDA_CACHE_BUILD: "0" + # Increment the build number to force new nox cache upload. + NOX_CACHE_BUILD: "0" + # Increment the build number to force new pip cache upload. + PIP_CACHE_BUILD: "0" + # Pip package to be upgraded/installed. + PIP_CACHE_PACKAGES: "pip setuptools wheel nox" + # Git commit hash for iris test data. + IRIS_TEST_DATA_REF: "fffb9b14b9cb472c5eb2ebb7fd19acb7f6414a30" + # Base directory for the iris-test-data. + IRIS_TEST_DATA_DIR: ${HOME}/iris-test-data + + +# +# Linting +# +lint_task: + auto_cancellation: true + name: "${CIRRUS_OS}: flake8 and black" + pip_cache: + folder: ~/.cache/pip + fingerprint_script: + - echo "${CIRRUS_TASK_NAME}" + - echo "$(date +%Y).$(($(date +%U) / ${CACHE_PERIOD})):${PIP_CACHE_BUILD} ${PIP_CACHE_PACKAGES}" + lint_script: + - pip list + - python -m pip install --retries 3 --upgrade ${PIP_CACHE_PACKAGES} + - pip list + - nox --session flake8 + - nox --session black + + +# +# YAML alias for common linux test infra-structure. 
+# +linux_task_template: &LINUX_TASK_TEMPLATE + auto_cancellation: true + env: + IRIS_REPO_DIR: ${CIRRUS_WORKING_DIR} + PATH: ${HOME}/miniconda/bin:${PATH} + SITE_CFG: ${CIRRUS_WORKING_DIR}/lib/iris/etc/site.cfg + conda_cache: + folder: ${HOME}/miniconda + fingerprint_script: + - wget --quiet https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh + - echo "${CIRRUS_OS} $(sha256sum miniconda.sh)" + - echo "$(date +%Y).$(($(date +%U) / ${CACHE_PERIOD})):${CONDA_CACHE_BUILD}" + populate_script: + - bash miniconda.sh -b -p ${HOME}/miniconda + - conda config --set always_yes yes --set changeps1 no + - conda config --set show_channel_urls True + - conda config --add channels conda-forge + - conda update --quiet --name base conda + - conda install --quiet --name base nox pip + cartopy_cache: + folder: ${HOME}/.local/share/cartopy + fingerprint_script: + - echo "${CIRRUS_OS}" + - echo "$(date +%Y).$(($(date +%U) / ${CACHE_PERIOD})):${CARTOPY_CACHE_BUILD}" + nox_cache: + folder: ${CIRRUS_WORKING_DIR}/.nox + fingerprint_script: + - echo "${CIRRUS_TASK_NAME}" + - echo "$(date +%Y).$(($(date +%U) / ${CACHE_PERIOD})):${NOX_CACHE_BUILD}" + - sha256sum ${CIRRUS_WORKING_DIR}/requirements/ci/py$(echo ${PY_VER} | tr -d ".").yml + + +# +# Testing Minimal (Linux) +# +linux_minimal_task: + matrix: + env: + PY_VER: 3.6 + env: + PY_VER: 3.7 + name: "${CIRRUS_OS}: py${PY_VER} tests (minimal)" + container: + image: gcc:latest + cpu: 2 + memory: 4G + << : *LINUX_TASK_TEMPLATE + tests_script: + - echo "[Resources]" > ${SITE_CFG} + - echo "doc_dir = ${CIRRUS_WORKING_DIR}/docs" >> ${SITE_CFG} + - nox --session tests -- --verbose + + +# +# Testing Full (Linux) +# +linux_task: + matrix: + env: + PY_VER: 3.6 + env: + PY_VER: 3.7 + name: "${CIRRUS_OS}: py${PY_VER} tests (full)" + container: + image: gcc:latest + cpu: 6 + memory: 8G + data_cache: + folder: ${IRIS_TEST_DATA_DIR} + fingerprint_script: + - echo "${IRIS_TEST_DATA_REF}" + populate_script: + - wget --quiet 
https://github.com/SciTools/iris-test-data/archive/${IRIS_TEST_DATA_REF}.zip -O iris-test-data.zip + - unzip -q iris-test-data.zip + - mv iris-test-data-$(echo "${IRIS_TEST_DATA_REF}" | sed "s/^v//") ${IRIS_TEST_DATA_DIR} + << : *LINUX_TASK_TEMPLATE + tests_script: + - echo "[Resources]" > ${SITE_CFG} + - echo "test_data_dir = ${IRIS_TEST_DATA_DIR}/test_data" >> ${SITE_CFG} + - echo "doc_dir = ${CIRRUS_WORKING_DIR}/docs" >> ${SITE_CFG} + - nox --session tests -- --verbose + + +# +# Testing Documentation Gallery (Linux) +# +gallery_task: + matrix: + env: + PY_VER: 3.6 + env: + PY_VER: 3.7 + name: "${CIRRUS_OS}: py${PY_VER} doc tests (gallery)" + container: + image: gcc:latest + cpu: 2 + memory: 4G + data_cache: + folder: ${IRIS_TEST_DATA_DIR} + fingerprint_script: + - echo "${IRIS_TEST_DATA_REF}" + populate_script: + - wget --quiet https://github.com/SciTools/iris-test-data/archive/${IRIS_TEST_DATA_REF}.zip -O iris-test-data.zip + - unzip -q iris-test-data.zip + - mv iris-test-data-$(echo "${IRIS_TEST_DATA_REF}" | sed "s/^v//") ${IRIS_TEST_DATA_DIR} + << : *LINUX_TASK_TEMPLATE + tests_script: + - echo "[Resources]" > ${SITE_CFG} + - echo "test_data_dir = ${IRIS_TEST_DATA_DIR}/test_data" >> ${SITE_CFG} + - echo "doc_dir = ${CIRRUS_WORKING_DIR}/docs" >> ${SITE_CFG} + - nox --session gallery -- --verbose + + +# +# Testing Documentation (Linux) +# +doctest_task: + matrix: + env: + PY_VER: 3.7 + name: "${CIRRUS_OS}: py${PY_VER} doc tests" + container: + image: gcc:latest + cpu: 2 + memory: 4G + env: + MPL_RC_DIR: ${HOME}/.config/matplotlib + MPL_RC_FILE: ${HOME}/.config/matplotlib/matplotlibrc + data_cache: + folder: ${IRIS_TEST_DATA_DIR} + fingerprint_script: + - echo "${IRIS_TEST_DATA_REF}" + populate_script: + - wget --quiet https://github.com/SciTools/iris-test-data/archive/${IRIS_TEST_DATA_REF}.zip -O iris-test-data.zip + - unzip -q iris-test-data.zip + - mv iris-test-data-$(echo "${IRIS_TEST_DATA_REF}" | sed "s/^v//") ${IRIS_TEST_DATA_DIR} + << : 
*LINUX_TASK_TEMPLATE + tests_script: + - echo "[Resources]" > ${SITE_CFG} + - echo "test_data_dir = ${IRIS_TEST_DATA_DIR}/test_data" >> ${SITE_CFG} + - echo "doc_dir = ${CIRRUS_WORKING_DIR}/docs" >> ${SITE_CFG} + - mkdir -p ${MPL_RC_DIR} + - echo "backend : agg" > ${MPL_RC_FILE} + - echo "image.cmap : viridis" >> ${MPL_RC_FILE} + - nox --session doctest -- --verbose + + +# +# Testing Documentation Link Check (Linux) +# +link_task: + matrix: + env: + PY_VER: 3.7 + name: "${CIRRUS_OS}: py${PY_VER} doc link check" + container: + image: gcc:latest + cpu: 2 + memory: 4G + env: + MPL_RC_DIR: ${HOME}/.config/matplotlib + MPL_RC_FILE: ${HOME}/.config/matplotlib/matplotlibrc + << : *LINUX_TASK_TEMPLATE + tests_script: + - mkdir -p ${MPL_RC_DIR} + - echo "backend : agg" > ${MPL_RC_FILE} + - echo "image.cmap : viridis" >> ${MPL_RC_FILE} + - nox --session linkcheck -- --verbose diff --git a/.flake8 b/.flake8 index e313fc2ac5d..807e8c0de14 100644 --- a/.flake8 +++ b/.flake8 @@ -30,7 +30,7 @@ exclude = .eggs, build, compiled_krb, - docs/iris/src/sphinxext/*, + docs/src/sphinxext/*, tools/*, # # ignore auto-generated files diff --git a/.gitignore b/.gitignore index d589c306fea..4a589524d26 100644 --- a/.gitignore +++ b/.gitignore @@ -15,6 +15,7 @@ var sdist develop-eggs .installed.cfg +.nox # Installer logs pip-log.txt @@ -55,11 +56,11 @@ lib/iris/tests/results/imagerepo.lock *.cover # Auto generated documentation files -docs/iris/src/_build/* -docs/iris/src/generated +docs/src/_build/* +docs/src/generated # Example test results -docs/iris/iris_image_test_output/ +docs/iris_image_test_output/ # Created by editiors *~ diff --git a/.readthedocs.yml b/.readthedocs.yml index bfc8cfa72b4..b54b0f065b3 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -7,7 +7,7 @@ conda: environment: requirements/ci/readthedocs.yml sphinx: - configuration: docs/iris/src/conf.py + configuration: docs/src/conf.py fail_on_warning: false python: diff --git a/.stickler.yml b/.stickler.yml deleted file 
mode 100644 index 6edee0f6a50..00000000000 --- a/.stickler.yml +++ /dev/null @@ -1,4 +0,0 @@ -linters: - flake8: - python: 3 - config: ./.flake8 \ No newline at end of file diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index adbe7774fb5..00000000000 --- a/.travis.yml +++ /dev/null @@ -1,161 +0,0 @@ -# Please update the test data git references below if appropriate. -# -# Note: Contrary to the travis documentation, -# http://about.travis-ci.org/docs/user/languages/python/#Travis-CI-Uses-Isolated-virtualenvs -# we will use conda to give us a much faster setup time. - -language: minimal -dist: xenial - -env: - global: - # The decryption key for the encrypted .github/deploy_key.scitools-docs.enc. - - secure: "N9/qBUT5CqfC7KQBDy5mIWZcGNuUJk3e/qmKJpotWYV+zwOI4GghJsRce6nFnlRiwl65l5oBEcvf3+sBvUfbZqh7U0MdHpw2tHhr2FSCmMB3bkvARZblh9M37f4da9G9VmRkqnyBM5G5TImXtoq4dusvNWKvLW0qETciaipq7ws=" - matrix: -# - PYTHON_VERSION='36' TEST_TARGET='default' TEST_MINIMAL=true -# - PYTHON_VERSION='36' TEST_TARGET='default' TEST_BLACK=true -# - PYTHON_VERSION='36' TEST_TARGET='gallery' -# - PYTHON_VERSION='37' TEST_TARGET='default' TEST_MINIMAL=true -# - PYTHON_VERSION='37' TEST_TARGET='default' TEST_BLACK=true -# - PYTHON_VERSION='37' TEST_TARGET='gallery' -# - PYTHON_VERSION='37' TEST_TARGET='doctest' PUSH_BUILT_DOCS=true - - PYTHON_VERSION='37' TEST_TARGET='checkdocs' - -git: - # We need a deep clone so that we can compute the age of the files using their git history. - depth: 10000 - -# Need to to install enchant as it is used by the pip packatge pyenchant. 
-# pyenchant is used by sphinxcontrib-spelling (docs spell checker) -before_install: - - sudo apt-get -y install enchant - -install: - - > - export IRIS_TEST_DATA_REF="fffb9b14b9cb472c5eb2ebb7fd19acb7f6414a30"; - export IRIS_TEST_DATA_SUFFIX=$(echo "${IRIS_TEST_DATA_REF}" | sed "s/^v//"); - - # Install miniconda - # ----------------- - - > - echo 'Installing miniconda'; - export CONDA_BASE="https://repo.continuum.io/miniconda/Miniconda"; - wget --quiet ${CONDA_BASE}3-latest-Linux-x86_64.sh -O miniconda.sh; - bash miniconda.sh -b -p ${HOME}/miniconda; - export PATH="${HOME}/miniconda/bin:${PATH}"; - - # Create the testing environment - # ------------------------------ - # Explicitly add defaults channel, see https://github.com/conda/conda/issues/2675 - - > - echo 'Configure conda and create an environment'; - conda config --set always_yes yes --set changeps1 no; - conda config --set show_channel_urls True; - conda config --add channels conda-forge; - conda update --quiet conda; - export ENV_NAME='iris-dev'; - ENV_FILE="requirements/ci/py${PYTHON_VERSION}.yml"; - cat ${ENV_FILE}; - conda env create --quiet --file=${ENV_FILE}; - source activate ${ENV_NAME}; - export PREFIX="${CONDA_PREFIX}"; - - # Output debug info - - > - conda list -n ${ENV_NAME}; - conda list -n ${ENV_NAME} --explicit; - conda info -a; - -# Pre-load Natural Earth data to avoid multiple, overlapping downloads. -# i.e. There should be no DownloadWarning reports in the log. 
- - python -c 'import cartopy; cartopy.io.shapereader.natural_earth()' - -# iris test data - - > - if [[ "${TEST_MINIMAL}" != true ]]; then - wget --quiet -O iris-test-data.zip https://github.com/SciTools/iris-test-data/archive/${IRIS_TEST_DATA_REF}.zip; - unzip -q iris-test-data.zip; - mv "iris-test-data-${IRIS_TEST_DATA_SUFFIX}" iris-test-data; - fi - -# set config paths - - > - SITE_CFG="lib/iris/etc/site.cfg"; - echo "[Resources]" > ${SITE_CFG}; - echo "test_data_dir = $(pwd)/iris-test-data/test_data" >> ${SITE_CFG}; - echo "doc_dir = $(pwd)/docs/iris" >> ${SITE_CFG}; - echo "[System]" >> ${SITE_CFG}; - echo "udunits2_path = ${PREFIX}/lib/libudunits2.so" >> ${SITE_CFG}; - - - python setup.py --quiet install - -script: - # Capture install-dir: As a test command must be last for get Travis to check - # the RC, so it's best to start each operation with an absolute cd. - - export INSTALL_DIR=$(pwd) - - - > - if [[ "${TEST_BLACK}" == 'true' ]]; then - echo $(black --version); - rm ${INSTALL_DIR}/.gitignore; - black --check ${INSTALL_DIR}; - fi - - - > - if [[ "${TEST_TARGET}" == 'default' ]]; then - export IRIS_REPO_DIR=${INSTALL_DIR}; - python -m iris.tests.runner --default-tests --system-tests; - fi - - - > - if [[ "${TEST_TARGET}" == 'gallery' ]]; then - python -m iris.tests.runner --gallery-tests; - fi - - # Build the docs. 
- - > - if [[ "${TEST_TARGET}" == 'doctest' ]]; then - MPL_RC_DIR="${HOME}/.config/matplotlib"; - mkdir -p ${MPL_RC_DIR}; - echo 'backend : agg' > ${MPL_RC_DIR}/matplotlibrc; - echo 'image.cmap : viridis' >> ${MPL_RC_DIR}/matplotlibrc; - cd ${INSTALL_DIR}/docs/iris; - make clean html && make doctest; - fi - - # check the links and spelling in the docs - # make clean && make linkcheck && make spelling; - - > - export PYENCHANT_VERBOSE_FIND=1; - python -c 'import enchant'; - - if [[ "${TEST_TARGET}" == 'checkdocs' ]]; then - MPL_RC_DIR="${HOME}/.config/matplotlib"; - mkdir -p ${MPL_RC_DIR}; - echo 'backend : agg' > ${MPL_RC_DIR}/matplotlibrc; - echo 'image.cmap : viridis' >> ${MPL_RC_DIR}/matplotlibrc; - cd ${INSTALL_DIR}/docs/iris; - make spelling; - fi - - # Split the organisation out of the slug. See https://stackoverflow.com/a/5257398/741316 for description. - # NOTE: a *separate* "export" command appears to be necessary here : A command of the - # form "export ORG=.." failed to define ORG for the following command (?!) - - > - ORG=$(echo ${TRAVIS_REPO_SLUG} | cut -d/ -f1); - export ORG - - - echo "Travis job context ORG=${ORG}; TRAVIS_EVENT_TYPE=${TRAVIS_EVENT_TYPE}; PUSH_BUILT_DOCS=${PUSH_BUILT_DOCS}" - - # When we merge a change to SciTools/iris, we can push docs to github pages. - # At present, only the Python 3.7 "doctest" job does this. 
- # Results appear at https://scitools-docs.github.io/iris/<>/index.html - - if [[ "${ORG}" == 'SciTools' && "${TRAVIS_EVENT_TYPE}" == 'push' && "${PUSH_BUILT_DOCS}" == 'true' ]]; then - cd ${INSTALL_DIR}; - conda install --quiet -n ${ENV_NAME} pip; - pip install doctr; - doctr deploy --deploy-repo SciTools-docs/iris --built-docs docs/iris/src/_build/html - --key-path .github/deploy_key.scitools-docs.enc - --no-require-master - ${TRAVIS_BRANCH:-${TRAVIS_TAG}}; - fi diff --git a/CHANGES b/CHANGES index 2364de84a4e..cdb2b64f846 100644 --- a/CHANGES +++ b/CHANGES @@ -1,5 +1,5 @@ This file is no longer updated and is provided for historical purposes only. -Please see docs/iris/src/whatsnew/ for a changelog. +Please see docs/src/whatsnew/ for a changelog. Release 1.4 (14 June 2013) diff --git a/MANIFEST.in b/MANIFEST.in index 6f6ec445a22..99b801e8270 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -11,7 +11,7 @@ include requirements/*.txt # File required to build docs recursive-include docs Makefile *.js *.png *.py *.rst -prune docs/iris/build +prune docs/build # Files required to build std_names module include tools/generate_std_names.py diff --git a/README.md b/README.md index aeadb52d936..0ceac7e0890 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@

- Iris
+ Iris

@@ -10,13 +10,12 @@

- - -Travis-CI + +Cirrus-CI - Documentation Status +Documentation Status conda-forge downloads @@ -26,9 +25,6 @@ Latest version - -Stable docs Commits since last release diff --git a/docs/iris/Makefile b/docs/Makefile similarity index 100% rename from docs/iris/Makefile rename to docs/Makefile diff --git a/docs/iris/gallery_code/README.rst b/docs/gallery_code/README.rst similarity index 100% rename from docs/iris/gallery_code/README.rst rename to docs/gallery_code/README.rst diff --git a/docs/iris/gallery_code/general/README.rst b/docs/gallery_code/general/README.rst similarity index 100% rename from docs/iris/gallery_code/general/README.rst rename to docs/gallery_code/general/README.rst diff --git a/docs/iris/gallery_code/general/plot_SOI_filtering.py b/docs/gallery_code/general/plot_SOI_filtering.py similarity index 98% rename from docs/iris/gallery_code/general/plot_SOI_filtering.py rename to docs/gallery_code/general/plot_SOI_filtering.py index 116e819af7a..d7948ac9651 100644 --- a/docs/iris/gallery_code/general/plot_SOI_filtering.py +++ b/docs/gallery_code/general/plot_SOI_filtering.py @@ -1,5 +1,5 @@ """ -Applying a filter to a time-series +Applying a Filter to a Time-Series ================================== This example demonstrates low pass filtering a time-series by applying a diff --git a/docs/iris/gallery_code/general/plot_anomaly_log_colouring.py b/docs/gallery_code/general/plot_anomaly_log_colouring.py similarity index 98% rename from docs/iris/gallery_code/general/plot_anomaly_log_colouring.py rename to docs/gallery_code/general/plot_anomaly_log_colouring.py index b0cee818de5..778f92db1b8 100644 --- a/docs/iris/gallery_code/general/plot_anomaly_log_colouring.py +++ b/docs/gallery_code/general/plot_anomaly_log_colouring.py @@ -1,5 +1,5 @@ """ -Colouring anomaly data with logarithmic scaling +Colouring Anomaly Data With Logarithmic Scaling =============================================== In this example, we need to plot anomaly data where the values 
have a diff --git a/docs/iris/gallery_code/general/plot_coriolis.py b/docs/gallery_code/general/plot_coriolis.py similarity index 98% rename from docs/iris/gallery_code/general/plot_coriolis.py rename to docs/gallery_code/general/plot_coriolis.py index cc67d1267c3..77066d362af 100644 --- a/docs/iris/gallery_code/general/plot_coriolis.py +++ b/docs/gallery_code/general/plot_coriolis.py @@ -1,5 +1,5 @@ """ -Deriving the Coriolis frequency over the globe +Deriving the Coriolis Frequency Over the Globe ============================================== This code computes the Coriolis frequency and stores it in a cube with diff --git a/docs/iris/gallery_code/general/plot_cross_section.py b/docs/gallery_code/general/plot_cross_section.py similarity index 98% rename from docs/iris/gallery_code/general/plot_cross_section.py rename to docs/gallery_code/general/plot_cross_section.py index a4bc918fc7b..12f4bdb0dc4 100644 --- a/docs/iris/gallery_code/general/plot_cross_section.py +++ b/docs/gallery_code/general/plot_cross_section.py @@ -1,5 +1,5 @@ """ -Cross section plots +Cross Section Plots =================== This example demonstrates contour plots of a cross-sectioned multi-dimensional diff --git a/docs/iris/gallery_code/general/plot_custom_aggregation.py b/docs/gallery_code/general/plot_custom_aggregation.py similarity index 99% rename from docs/iris/gallery_code/general/plot_custom_aggregation.py rename to docs/gallery_code/general/plot_custom_aggregation.py index 9c847be7798..5fba3669b6e 100644 --- a/docs/iris/gallery_code/general/plot_custom_aggregation.py +++ b/docs/gallery_code/general/plot_custom_aggregation.py @@ -1,5 +1,5 @@ """ -Calculating a custom statistic +Calculating a Custom Statistic ============================== This example shows how to define and use a custom diff --git a/docs/iris/gallery_code/general/plot_custom_file_loading.py b/docs/gallery_code/general/plot_custom_file_loading.py similarity index 99% rename from 
docs/iris/gallery_code/general/plot_custom_file_loading.py rename to docs/gallery_code/general/plot_custom_file_loading.py index b96e152bf8b..6890650704a 100644 --- a/docs/iris/gallery_code/general/plot_custom_file_loading.py +++ b/docs/gallery_code/general/plot_custom_file_loading.py @@ -1,5 +1,5 @@ """ -Loading a cube from a custom file format +Loading a Cube From a Custom File Format ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ This example shows how a custom text file can be loaded using the standard Iris diff --git a/docs/iris/gallery_code/general/plot_global_map.py b/docs/gallery_code/general/plot_global_map.py similarity index 96% rename from docs/iris/gallery_code/general/plot_global_map.py rename to docs/gallery_code/general/plot_global_map.py index 41fd2269217..8d2bdee174c 100644 --- a/docs/iris/gallery_code/general/plot_global_map.py +++ b/docs/gallery_code/general/plot_global_map.py @@ -1,5 +1,5 @@ """ -Quickplot of a 2d cube on a map +Quickplot of a 2D Cube on a Map =============================== This example demonstrates a contour plot of global air temperature. 
The plot diff --git a/docs/iris/gallery_code/general/plot_inset.py b/docs/gallery_code/general/plot_inset.py similarity index 100% rename from docs/iris/gallery_code/general/plot_inset.py rename to docs/gallery_code/general/plot_inset.py diff --git a/docs/iris/gallery_code/general/plot_lineplot_with_legend.py b/docs/gallery_code/general/plot_lineplot_with_legend.py similarity index 96% rename from docs/iris/gallery_code/general/plot_lineplot_with_legend.py rename to docs/gallery_code/general/plot_lineplot_with_legend.py index 5641b9c4d00..78401817bab 100644 --- a/docs/iris/gallery_code/general/plot_lineplot_with_legend.py +++ b/docs/gallery_code/general/plot_lineplot_with_legend.py @@ -1,5 +1,5 @@ """ -Multi-line temperature profile plot +Multi-Line Temperature Profile Plot ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ """ diff --git a/docs/iris/gallery_code/general/plot_polar_stereo.py b/docs/gallery_code/general/plot_polar_stereo.py similarity index 92% rename from docs/iris/gallery_code/general/plot_polar_stereo.py rename to docs/gallery_code/general/plot_polar_stereo.py index bd4a11923d9..71c0f3b00ec 100644 --- a/docs/iris/gallery_code/general/plot_polar_stereo.py +++ b/docs/gallery_code/general/plot_polar_stereo.py @@ -1,5 +1,5 @@ """ -Example of a polar stereographic plot +Example of a Polar Stereographic Plot ===================================== Demonstrates plotting data that are defined on a polar stereographic diff --git a/docs/iris/gallery_code/general/plot_polynomial_fit.py b/docs/gallery_code/general/plot_polynomial_fit.py similarity index 98% rename from docs/iris/gallery_code/general/plot_polynomial_fit.py rename to docs/gallery_code/general/plot_polynomial_fit.py index 237f4044b64..5da5d50571b 100644 --- a/docs/iris/gallery_code/general/plot_polynomial_fit.py +++ b/docs/gallery_code/general/plot_polynomial_fit.py @@ -1,5 +1,5 @@ """ -Fitting a polynomial +Fitting a Polynomial ==================== This example demonstrates computing a polynomial fit to 1D 
data from an Iris diff --git a/docs/iris/gallery_code/general/plot_projections_and_annotations.py b/docs/gallery_code/general/plot_projections_and_annotations.py similarity index 99% rename from docs/iris/gallery_code/general/plot_projections_and_annotations.py rename to docs/gallery_code/general/plot_projections_and_annotations.py index e59bb236d7a..f93ac3714fa 100644 --- a/docs/iris/gallery_code/general/plot_projections_and_annotations.py +++ b/docs/gallery_code/general/plot_projections_and_annotations.py @@ -1,5 +1,5 @@ """ -Plotting in different projections +Plotting in Different Projections ================================= This example shows how to overlay data and graphics in different projections, diff --git a/docs/iris/gallery_code/general/plot_rotated_pole_mapping.py b/docs/gallery_code/general/plot_rotated_pole_mapping.py similarity index 98% rename from docs/iris/gallery_code/general/plot_rotated_pole_mapping.py rename to docs/gallery_code/general/plot_rotated_pole_mapping.py index 063fe93674a..8a0c80c7076 100644 --- a/docs/iris/gallery_code/general/plot_rotated_pole_mapping.py +++ b/docs/gallery_code/general/plot_rotated_pole_mapping.py @@ -1,5 +1,5 @@ """ -Rotated pole mapping +Rotated Pole Mapping ===================== This example uses several visualisation methods to achieve an array of diff --git a/docs/iris/gallery_code/meteorology/README.rst b/docs/gallery_code/meteorology/README.rst similarity index 100% rename from docs/iris/gallery_code/meteorology/README.rst rename to docs/gallery_code/meteorology/README.rst diff --git a/docs/iris/gallery_code/meteorology/plot_COP_1d.py b/docs/gallery_code/meteorology/plot_COP_1d.py similarity index 99% rename from docs/iris/gallery_code/meteorology/plot_COP_1d.py rename to docs/gallery_code/meteorology/plot_COP_1d.py index 2f93627b77a..bebbad4224a 100644 --- a/docs/iris/gallery_code/meteorology/plot_COP_1d.py +++ b/docs/gallery_code/meteorology/plot_COP_1d.py @@ -1,5 +1,5 @@ """ -Global average annual 
temperature plot +Global Average Annual Temperature Plot ====================================== Produces a time-series plot of North American temperature forecasts for 2 diff --git a/docs/iris/gallery_code/meteorology/plot_COP_maps.py b/docs/gallery_code/meteorology/plot_COP_maps.py similarity index 99% rename from docs/iris/gallery_code/meteorology/plot_COP_maps.py rename to docs/gallery_code/meteorology/plot_COP_maps.py index a8e6055a775..5555a0b85c5 100644 --- a/docs/iris/gallery_code/meteorology/plot_COP_maps.py +++ b/docs/gallery_code/meteorology/plot_COP_maps.py @@ -1,5 +1,5 @@ """ -Global average annual temperature maps +Global Average Annual Temperature Maps ====================================== Produces maps of global temperature forecasts from the A1B and E1 scenarios. diff --git a/docs/iris/gallery_code/meteorology/plot_TEC.py b/docs/gallery_code/meteorology/plot_TEC.py similarity index 97% rename from docs/iris/gallery_code/meteorology/plot_TEC.py rename to docs/gallery_code/meteorology/plot_TEC.py index df2e29ef19c..71a743a1612 100644 --- a/docs/iris/gallery_code/meteorology/plot_TEC.py +++ b/docs/gallery_code/meteorology/plot_TEC.py @@ -1,5 +1,5 @@ """ -Ionosphere space weather +Ionosphere Space Weather ======================== This space weather example plots a filled contour of rotated pole point diff --git a/docs/iris/gallery_code/meteorology/plot_deriving_phenomena.py b/docs/gallery_code/meteorology/plot_deriving_phenomena.py similarity index 100% rename from docs/iris/gallery_code/meteorology/plot_deriving_phenomena.py rename to docs/gallery_code/meteorology/plot_deriving_phenomena.py diff --git a/docs/iris/gallery_code/meteorology/plot_hovmoller.py b/docs/gallery_code/meteorology/plot_hovmoller.py similarity index 96% rename from docs/iris/gallery_code/meteorology/plot_hovmoller.py rename to docs/gallery_code/meteorology/plot_hovmoller.py index 9f18b8021e4..e9f8207a940 100644 --- a/docs/iris/gallery_code/meteorology/plot_hovmoller.py +++ 
b/docs/gallery_code/meteorology/plot_hovmoller.py @@ -1,5 +1,5 @@ """ -Hovmoller diagram of monthly surface temperature +Hovmoller Diagram of Monthly Surface Temperature ================================================ This example demonstrates the creation of a Hovmoller diagram with fine control diff --git a/docs/iris/gallery_code/meteorology/plot_lagged_ensemble.py b/docs/gallery_code/meteorology/plot_lagged_ensemble.py similarity index 56% rename from docs/iris/gallery_code/meteorology/plot_lagged_ensemble.py rename to docs/gallery_code/meteorology/plot_lagged_ensemble.py index cb82a663d49..5cd2752f39b 100644 --- a/docs/iris/gallery_code/meteorology/plot_lagged_ensemble.py +++ b/docs/gallery_code/meteorology/plot_lagged_ensemble.py @@ -1,5 +1,5 @@ """ -Seasonal ensemble model plots +Seasonal Ensemble Model Plots ============================= This example demonstrates the loading of a lagged ensemble dataset from the @@ -19,6 +19,7 @@ """ import matplotlib.pyplot as plt +import matplotlib.ticker import numpy as np import iris @@ -32,14 +33,11 @@ def realization_metadata(cube, field, fname): in the cube. """ - # add an ensemble member coordinate if one doesn't already exist + # Add an ensemble member coordinate if one doesn't already exist. if not cube.coords("realization"): - # the ensemble member is encoded in the filename as *_???.pp where ??? - # is the ensemble member + # The ensemble member is encoded in the filename as *_???.pp where ??? + # is the ensemble member. realization_number = fname[-6:-3] - - import iris.coords - realization_coord = iris.coords.AuxCoord( np.int32(realization_number), "realization", units="1" ) @@ -47,11 +45,16 @@ def realization_metadata(cube, field, fname): def main(): - # extract surface temperature cubes which have an ensemble member - # coordinate, adding appropriate lagged ensemble metadata + # Create a constraint to extract surface temperature cubes which have a + # "realization" coordinate. 
+ constraint = iris.Constraint( + "surface_temperature", realization=lambda value: True + ) + # Use this to load our ensemble. The callback ensures all our members + # have the "realization" coordinate and therefore they will all be loaded. surface_temp = iris.load_cube( iris.sample_data_path("GloSea4", "ensemble_???.pp"), - iris.Constraint("surface_temperature", realization=lambda value: True), + constraint, callback=realization_metadata, ) @@ -59,18 +62,19 @@ def main(): # Plot #1: Ensemble postage stamps # ------------------------------------------------------------------------- - # for the purposes of this example, take the last time element of the cube - last_timestep = surface_temp[:, -1, :, :] + # For the purposes of this example, take the last time element of the cube. + # First get hold of the last time by slicing the coordinate. + last_time_coord = surface_temp.coord("time")[-1] + last_timestep = surface_temp.subset(last_time_coord) - # Make 50 evenly spaced levels which span the dataset - contour_levels = np.linspace( - np.min(last_timestep.data), np.max(last_timestep.data), 50 - ) + # Find the maximum and minimum across the dataset. + data_min = np.min(last_timestep.data) + data_max = np.max(last_timestep.data) - # Create a wider than normal figure to support our many plots + # Create a wider than normal figure to support our many plots. plt.figure(figsize=(12, 6), dpi=100) - # Also manually adjust the spacings which are used when creating subplots + # Also manually adjust the spacings which are used when creating subplots. plt.gcf().subplots_adjust( hspace=0.05, wspace=0.05, @@ -80,46 +84,42 @@ def main(): right=0.925, ) - # iterate over all possible latitude longitude slices + # Iterate over all possible latitude longitude slices. for cube in last_timestep.slices(["latitude", "longitude"]): - # get the ensemble member number from the ensemble coordinate + # Get the ensemble member number from the ensemble coordinate. 
ens_member = cube.coord("realization").points[0] - # plot the data in a 4x4 grid, with each plot's position in the grid - # being determined by ensemble member number the special case for the - # 13th ensemble member is to have the plot at the bottom right + # Plot the data in a 4x4 grid, with each plot's position in the grid + # being determined by ensemble member number. The special case for the + # 13th ensemble member is to have the plot at the bottom right. if ens_member == 13: plt.subplot(4, 4, 16) else: plt.subplot(4, 4, ens_member + 1) - cf = iplt.contourf(cube, contour_levels) + # Plot with 50 evenly spaced contour levels (49 intervals). + cf = iplt.contourf(cube, 49, vmin=data_min, vmax=data_max) - # add coastlines + # Add coastlines. plt.gca().coastlines() - # make an axes to put the shared colorbar in + # Make an axes to put the shared colorbar in. colorbar_axes = plt.gcf().add_axes([0.35, 0.1, 0.3, 0.05]) colorbar = plt.colorbar(cf, colorbar_axes, orientation="horizontal") - colorbar.set_label("%s" % last_timestep.units) - - # limit the colorbar to 8 tick marks - import matplotlib.ticker + colorbar.set_label(last_timestep.units) + # Limit the colorbar to 8 tick marks. colorbar.locator = matplotlib.ticker.MaxNLocator(8) colorbar.update_ticks() - # get the time for the entire plot - time_coord = last_timestep.coord("time") - time = time_coord.units.num2date(time_coord.bounds[0, 0]) + # Get the time for the entire plot. + time = last_time_coord.units.num2date(last_time_coord.bounds[0, 0]) - # set a global title for the postage stamps with the date formated by - # "monthname year" - plt.suptitle( - "Surface temperature ensemble forecasts for %s" - % (time.strftime("%B %Y"),) - ) + # Set a global title for the postage stamps with the date formated by + # "monthname year". 
+ time_string = time.strftime("%B %Y") + plt.suptitle(f"Surface temperature ensemble forecasts for {time_string}") iplt.show() @@ -127,36 +127,25 @@ def main(): # Plot #2: ENSO plumes # ------------------------------------------------------------------------- - # Nino 3.4 lies between: 170W and 120W, 5N and 5S, so define a constraint - # which matches this - nino_3_4_constraint = iris.Constraint( - longitude=lambda v: -170 + 360 <= v <= -120 + 360, - latitude=lambda v: -5 <= v <= 5, + # Nino 3.4 lies between: 170W and 120W, 5N and 5S, so use the intersection + # method to restrict to this region. + nino_cube = surface_temp.intersection( + latitude=[-5, 5], longitude=[-170, -120] ) - nino_cube = surface_temp.extract(nino_3_4_constraint) - - # Subsetting a circular longitude coordinate always results in a circular - # coordinate, so set the coordinate to be non-circular - nino_cube.coord("longitude").circular = False - - # Calculate the horizontal mean for the nino region + # Calculate the horizontal mean for the nino region. mean = nino_cube.collapsed(["latitude", "longitude"], iris.analysis.MEAN) - # Calculate the ensemble mean of the horizontal mean. To do this, remove - # the "forecast_period" and "forecast_reference_time" coordinates which - # span both "relalization" and "time". - mean.remove_coord("forecast_reference_time") - mean.remove_coord("forecast_period") + # Calculate the ensemble mean of the horizontal mean. ensemble_mean = mean.collapsed("realization", iris.analysis.MEAN) - # take the ensemble mean from each ensemble member - mean -= ensemble_mean.data + # Take the ensemble mean from each ensemble member. + mean -= ensemble_mean plt.figure() for ensemble_member in mean.slices(["time"]): - # draw each ensemble member as a dashed line in black + # Draw each ensemble member as a dashed line in black. 
iplt.plot(ensemble_member, "--k") plt.title("Mean temperature anomaly for ENSO 3.4 region") diff --git a/docs/iris/gallery_code/meteorology/plot_wind_speed.py b/docs/gallery_code/meteorology/plot_wind_speed.py similarity index 95% rename from docs/iris/gallery_code/meteorology/plot_wind_speed.py rename to docs/gallery_code/meteorology/plot_wind_speed.py index 6844d3874cc..79be64ddd74 100644 --- a/docs/iris/gallery_code/meteorology/plot_wind_speed.py +++ b/docs/gallery_code/meteorology/plot_wind_speed.py @@ -1,6 +1,6 @@ """ -Plotting wind direction using quiver -=========================================================== +Plotting Wind Direction Using Quiver +==================================== This example demonstrates using quiver to plot wind speed contours and wind direction arrows from wind vector component input data. The vector components diff --git a/docs/iris/gallery_code/oceanography/README.rst b/docs/gallery_code/oceanography/README.rst similarity index 100% rename from docs/iris/gallery_code/oceanography/README.rst rename to docs/gallery_code/oceanography/README.rst diff --git a/docs/iris/gallery_code/oceanography/plot_atlantic_profiles.py b/docs/gallery_code/oceanography/plot_atlantic_profiles.py similarity index 90% rename from docs/iris/gallery_code/oceanography/plot_atlantic_profiles.py rename to docs/gallery_code/oceanography/plot_atlantic_profiles.py index a7e82c34f51..dc038ecffe5 100644 --- a/docs/iris/gallery_code/oceanography/plot_atlantic_profiles.py +++ b/docs/gallery_code/oceanography/plot_atlantic_profiles.py @@ -1,5 +1,5 @@ """ -Oceanographic profiles and T-S diagrams +Oceanographic Profiles and T-S Diagrams ======================================= This example demonstrates how to plot vertical profiles of different @@ -39,9 +39,8 @@ def main(): theta_1000m = theta.extract(depth_cons & lon_cons & lat_cons) salinity_1000m = salinity.extract(depth_cons & lon_cons & lat_cons) - # Plot these profiles on the same set of axes. 
In each case we call plot - # with two arguments, the cube followed by the depth coordinate. Putting - # them in this order places the depth coordinate on the y-axis. + # Plot these profiles on the same set of axes. Depth is automatically + # recognised as a vertical coordinate and placed on the y-axis. # The first plot is in the default axes. We'll use the same color for the # curve and its axes/tick labels. plt.figure(figsize=(5, 6)) @@ -49,7 +48,6 @@ def main(): ax1 = plt.gca() iplt.plot( theta_1000m, - theta_1000m.coord("depth"), linewidth=2, color=temperature_color, alpha=0.75, @@ -65,7 +63,6 @@ def main(): ax2 = plt.gca().twiny() iplt.plot( salinity_1000m, - salinity_1000m.coord("depth"), linewidth=2, color=salinity_color, alpha=0.75, diff --git a/docs/iris/gallery_code/oceanography/plot_load_nemo.py b/docs/gallery_code/oceanography/plot_load_nemo.py similarity index 97% rename from docs/iris/gallery_code/oceanography/plot_load_nemo.py rename to docs/gallery_code/oceanography/plot_load_nemo.py index 5f2b72c956f..c7ad5aaee4a 100644 --- a/docs/iris/gallery_code/oceanography/plot_load_nemo.py +++ b/docs/gallery_code/oceanography/plot_load_nemo.py @@ -1,5 +1,5 @@ """ -Load a time series of data from the NEMO model +Load a Time Series of Data From the NEMO Model ============================================== This example demonstrates how to load multiple files containing data output by diff --git a/docs/iris/gallery_code/oceanography/plot_orca_projection.py b/docs/gallery_code/oceanography/plot_orca_projection.py similarity index 100% rename from docs/iris/gallery_code/oceanography/plot_orca_projection.py rename to docs/gallery_code/oceanography/plot_orca_projection.py diff --git a/docs/iris/gallery_tests/__init__.py b/docs/gallery_tests/__init__.py similarity index 100% rename from docs/iris/gallery_tests/__init__.py rename to docs/gallery_tests/__init__.py diff --git a/docs/iris/gallery_tests/gallerytest_util.py b/docs/gallery_tests/gallerytest_util.py 
similarity index 100% rename from docs/iris/gallery_tests/gallerytest_util.py rename to docs/gallery_tests/gallerytest_util.py diff --git a/docs/iris/gallery_tests/test_plot_COP_1d.py b/docs/gallery_tests/test_plot_COP_1d.py similarity index 100% rename from docs/iris/gallery_tests/test_plot_COP_1d.py rename to docs/gallery_tests/test_plot_COP_1d.py diff --git a/docs/iris/gallery_tests/test_plot_COP_maps.py b/docs/gallery_tests/test_plot_COP_maps.py similarity index 100% rename from docs/iris/gallery_tests/test_plot_COP_maps.py rename to docs/gallery_tests/test_plot_COP_maps.py diff --git a/docs/iris/gallery_tests/test_plot_SOI_filtering.py b/docs/gallery_tests/test_plot_SOI_filtering.py similarity index 100% rename from docs/iris/gallery_tests/test_plot_SOI_filtering.py rename to docs/gallery_tests/test_plot_SOI_filtering.py diff --git a/docs/iris/gallery_tests/test_plot_TEC.py b/docs/gallery_tests/test_plot_TEC.py similarity index 100% rename from docs/iris/gallery_tests/test_plot_TEC.py rename to docs/gallery_tests/test_plot_TEC.py diff --git a/docs/iris/gallery_tests/test_plot_anomaly_log_colouring.py b/docs/gallery_tests/test_plot_anomaly_log_colouring.py similarity index 100% rename from docs/iris/gallery_tests/test_plot_anomaly_log_colouring.py rename to docs/gallery_tests/test_plot_anomaly_log_colouring.py diff --git a/docs/iris/gallery_tests/test_plot_atlantic_profiles.py b/docs/gallery_tests/test_plot_atlantic_profiles.py similarity index 100% rename from docs/iris/gallery_tests/test_plot_atlantic_profiles.py rename to docs/gallery_tests/test_plot_atlantic_profiles.py diff --git a/docs/iris/gallery_tests/test_plot_coriolis.py b/docs/gallery_tests/test_plot_coriolis.py similarity index 100% rename from docs/iris/gallery_tests/test_plot_coriolis.py rename to docs/gallery_tests/test_plot_coriolis.py diff --git a/docs/iris/gallery_tests/test_plot_cross_section.py b/docs/gallery_tests/test_plot_cross_section.py similarity index 100% rename from 
docs/iris/gallery_tests/test_plot_cross_section.py rename to docs/gallery_tests/test_plot_cross_section.py diff --git a/docs/iris/gallery_tests/test_plot_custom_aggregation.py b/docs/gallery_tests/test_plot_custom_aggregation.py similarity index 100% rename from docs/iris/gallery_tests/test_plot_custom_aggregation.py rename to docs/gallery_tests/test_plot_custom_aggregation.py diff --git a/docs/iris/gallery_tests/test_plot_custom_file_loading.py b/docs/gallery_tests/test_plot_custom_file_loading.py similarity index 100% rename from docs/iris/gallery_tests/test_plot_custom_file_loading.py rename to docs/gallery_tests/test_plot_custom_file_loading.py diff --git a/docs/iris/gallery_tests/test_plot_deriving_phenomena.py b/docs/gallery_tests/test_plot_deriving_phenomena.py similarity index 100% rename from docs/iris/gallery_tests/test_plot_deriving_phenomena.py rename to docs/gallery_tests/test_plot_deriving_phenomena.py diff --git a/docs/iris/gallery_tests/test_plot_global_map.py b/docs/gallery_tests/test_plot_global_map.py similarity index 100% rename from docs/iris/gallery_tests/test_plot_global_map.py rename to docs/gallery_tests/test_plot_global_map.py diff --git a/docs/iris/gallery_tests/test_plot_hovmoller.py b/docs/gallery_tests/test_plot_hovmoller.py similarity index 100% rename from docs/iris/gallery_tests/test_plot_hovmoller.py rename to docs/gallery_tests/test_plot_hovmoller.py diff --git a/docs/iris/gallery_tests/test_plot_inset.py b/docs/gallery_tests/test_plot_inset.py similarity index 100% rename from docs/iris/gallery_tests/test_plot_inset.py rename to docs/gallery_tests/test_plot_inset.py diff --git a/docs/iris/gallery_tests/test_plot_lagged_ensemble.py b/docs/gallery_tests/test_plot_lagged_ensemble.py similarity index 100% rename from docs/iris/gallery_tests/test_plot_lagged_ensemble.py rename to docs/gallery_tests/test_plot_lagged_ensemble.py diff --git a/docs/iris/gallery_tests/test_plot_lineplot_with_legend.py 
b/docs/gallery_tests/test_plot_lineplot_with_legend.py similarity index 100% rename from docs/iris/gallery_tests/test_plot_lineplot_with_legend.py rename to docs/gallery_tests/test_plot_lineplot_with_legend.py diff --git a/docs/iris/gallery_tests/test_plot_load_nemo.py b/docs/gallery_tests/test_plot_load_nemo.py similarity index 100% rename from docs/iris/gallery_tests/test_plot_load_nemo.py rename to docs/gallery_tests/test_plot_load_nemo.py diff --git a/docs/iris/gallery_tests/test_plot_orca_projection.py b/docs/gallery_tests/test_plot_orca_projection.py similarity index 100% rename from docs/iris/gallery_tests/test_plot_orca_projection.py rename to docs/gallery_tests/test_plot_orca_projection.py diff --git a/docs/iris/gallery_tests/test_plot_polar_stereo.py b/docs/gallery_tests/test_plot_polar_stereo.py similarity index 100% rename from docs/iris/gallery_tests/test_plot_polar_stereo.py rename to docs/gallery_tests/test_plot_polar_stereo.py diff --git a/docs/iris/gallery_tests/test_plot_polynomial_fit.py b/docs/gallery_tests/test_plot_polynomial_fit.py similarity index 100% rename from docs/iris/gallery_tests/test_plot_polynomial_fit.py rename to docs/gallery_tests/test_plot_polynomial_fit.py diff --git a/docs/iris/gallery_tests/test_plot_projections_and_annotations.py b/docs/gallery_tests/test_plot_projections_and_annotations.py similarity index 100% rename from docs/iris/gallery_tests/test_plot_projections_and_annotations.py rename to docs/gallery_tests/test_plot_projections_and_annotations.py diff --git a/docs/iris/gallery_tests/test_plot_rotated_pole_mapping.py b/docs/gallery_tests/test_plot_rotated_pole_mapping.py similarity index 100% rename from docs/iris/gallery_tests/test_plot_rotated_pole_mapping.py rename to docs/gallery_tests/test_plot_rotated_pole_mapping.py diff --git a/docs/iris/gallery_tests/test_plot_wind_speed.py b/docs/gallery_tests/test_plot_wind_speed.py similarity index 100% rename from docs/iris/gallery_tests/test_plot_wind_speed.py rename 
to docs/gallery_tests/test_plot_wind_speed.py diff --git a/docs/iris/src/developers_guide/ci_checks.png b/docs/iris/src/developers_guide/ci_checks.png deleted file mode 100644 index cf93239dea4..00000000000 Binary files a/docs/iris/src/developers_guide/ci_checks.png and /dev/null differ diff --git a/docs/iris/src/whatsnew/3.0.rst b/docs/iris/src/whatsnew/3.0.rst deleted file mode 100644 index 0caba69de8c..00000000000 --- a/docs/iris/src/whatsnew/3.0.rst +++ /dev/null @@ -1,419 +0,0 @@ -.. include:: ../common_links.inc - -v3.0 (01 Oct 2020) -****************** - -This document explains the changes made to Iris for this release -(:doc:`View all changes `.) - - -📢 Announcements -================ - -* Congratulations to `@bouweandela`_, `@jvegasbsc`_, and `@zklaus`_ who - recently became Iris core developers. They bring a wealth of expertise to the - team, and are using Iris to underpin `ESMValTool`_ - "*A community diagnostic - and performance metrics tool for routine evaluation of Earth system models - in CMIP*". Welcome aboard! 🎉 - - -✨ Features -=========== - -* `@MoseleyS`_ greatly enhanced the :mod:`~iris.fileformats.nimrod` - module to provide richer meta-data translation when loading ``Nimrod`` data - into cubes. This covers most known operational use-cases. (:pull:`3647`) - -* `@stephenworsley`_ improved the handling of - :class:`iris.coords.CellMeasure`\ s in the :class:`~iris.cube.Cube` - statistical operations :meth:`~iris.cube.Cube.collapsed`, - :meth:`~iris.cube.Cube.aggregated_by` and - :meth:`~iris.cube.Cube.rolling_window`. These previously removed every - :class:`~iris.coords.CellMeasure` attached to the cube. Now, a - :class:`~iris.coords.CellMeasure` will only be removed if it is associated - with an axis over which the statistic is being run. (:pull:`3549`) - -* `@stephenworsley`_, `@pp-mo`_ and `@abooton`_ added support for - `CF Ancillary Data`_ variables. 
These are created as - :class:`iris.coords.AncillaryVariable`, and appear as components of cubes - much like :class:`~iris.coords.AuxCoord`\ s, with the new - :class:`~iris.cube.Cube` methods - :meth:`~iris.cube.Cube.add_ancillary_variable`, - :meth:`~iris.cube.Cube.remove_ancillary_variable`, - :meth:`~iris.cube.Cube.ancillary_variable`, - :meth:`~iris.cube.Cube.ancillary_variables` and - :meth:`~iris.cube.Cube.ancillary_variable_dims`. - They are loaded from and saved to NetCDF-CF files. Special support for - `Quality Flags`_ is also provided, to ensure they load and save with - appropriate units. (:pull:`3800`) - -* `@bouweandela`_ implemented lazy regridding for the - :class:`~iris.analysis.Linear`, :class:`~iris.analysis.Nearest`, and - :class:`~iris.analysis.AreaWeighted` regridding schemes. (:pull:`3701`) - -* `@bjlittle`_ added `logging`_ support within :mod:`iris.analysis.maths`, - :mod:`iris.common.metadata`, and :mod:`iris.common.resolve`. Each module - defines a :class:`logging.Logger` instance called ``logger`` with a default - ``level`` of ``INFO``. To enable ``DEBUG`` logging use - ``logger.setLevel("DEBUG")``. (:pull:`3785`) - -* `@bjlittle`_ added the :mod:`iris.common.resolve` module, which provides - infrastructure to support the analysis, identification and combination - of metadata common between two :class:`~iris.cube.Cube` operands into a - single resultant :class:`~iris.cube.Cube` that will be auto-transposed, - and with the appropriate broadcast shape. (:pull:`3785`) - -* `@bjlittle`_ added the :ref:`common metadata API `, which provides - a unified treatment of metadata across Iris, and allows users to easily - manage and manipulate their metadata in a consistent way. (:pull:`3785`) - -* `@bjlittle`_ added :ref:`lenient metadata ` support, to - allow users to control **strict** or **lenient** metadata equivalence, - difference and combination. 
(:pull:`3785`) - -* `@bjlittle`_ added :ref:`lenient cube maths ` support and - resolved several long standing major issues with cube arithmetic regarding - a more robust treatment of cube broadcasting, cube dimension auto-transposition, - and preservation of common metadata and coordinates during cube math operations. - Resolves :issue:`1887`, :issue:`2765`, and :issue:`3478`. (:pull:`3785`) - - -🐛 Bugs Fixed -============= - -* `@stephenworsley`_ fixed :meth:`~iris.cube.Cube.remove_coord` to now also - remove derived coordinates by removing aux_factories. (:pull:`3641`) - -* `@jonseddon`_ fixed ``isinstance(cube, collections.Iterable)`` to now behave - as expected if a :class:`~iris.cube.Cube` is iterated over, while also - ensuring that ``TypeError`` is still raised. (Fixed by setting the - ``__iter__()`` method in :class:`~iris.cube.Cube` to ``None``). - (:pull:`3656`) - -* `@stephenworsley`_ enabled cube concatenation along an axis shared by cell - measures; these cell measures are now concatenated together in the resulting - cube. Such a scenario would previously cause concatenation to inappropriately - fail. (:pull:`3566`) - -* `@stephenworsley`_ newly included :class:`~iris.coords.CellMeasure`\ s in - :class:`~iris.cube.Cube` copy operations. Previously copying a - :class:`~iris.cube.Cube` would ignore any attached - :class:`~iris.coords.CellMeasure`. (:pull:`3546`) - -* `@bjlittle`_ set a :class:`~iris.coords.CellMeasure`'s - ``measure`` attribute to have a default value of ``area``. - Previously, the ``measure`` was provided as a keyword argument to - :class:`~iris.coords.CellMeasure` with a default value of ``None``, which - caused a ``TypeError`` when no ``measure`` was provided, since ``area`` or - ``volume`` are the only accepted values. 
(:pull:`3533`) - -* `@trexfeathers`_ set **all** plot types in :mod:`iris.plot` to now use - `matplotlib.dates.date2num`_ to format date/time coordinates for use on a plot - axis (previously :meth:`~iris.plot.pcolor` and :meth:`~iris.plot.pcolormesh` - did not include this behaviour). (:pull:`3762`) - -* `@trexfeathers`_ changed date/time axis labels in :mod:`iris.quickplot` to - now **always** be based on the ``epoch`` used in `matplotlib.dates.date2num`_ - (previously would take the unit from a time coordinate, if present, even - though the coordinate's value had been changed via ``date2num``). - (:pull:`3762`) - -* `@pp-mo`_ newly included attributes of cell measures in NETCDF-CF - file loading; they were previously being discarded. They are now available on - the :class:`~iris.coords.CellMeasure` in the loaded :class:`~iris.cube.Cube`. - (:pull:`3800`) - -* `@pp-mo`_ fixed the netcdf loader to now handle any grid-mapping - variables with missing ``false_easting`` and ``false_northing`` properties, - which was previously failing for some coordinate systems. See :issue:`3629`. - (:pull:`3804`) - -* `@stephenworsley`_ changed the way tick labels are assigned from string coords. - Previously, the first tick label would occasionally be duplicated. This also - removes the use of Matplotlib's deprecated ``IndexFormatter``. (:pull:`3857`) - - -💣 Incompatible Changes -======================= - -* `@pp-mo`_ rationalised :class:`~iris.cube.CubeList` extraction - methods: - - The former method ``iris.cube.CubeList.extract_strict``, and the ``strict`` - keyword of the :meth:`~iris.cube.CubeList.extract` method have been removed, - and are replaced by the new routines :meth:`~iris.cube.CubeList.extract_cube` - and :meth:`~iris.cube.CubeList.extract_cubes`. - The new routines perform the same operation, but in a style more like other - ``Iris`` functions such as :meth:`~iris.load_cube` and :meth:`~iris.load_cubes`. 
- Unlike ``strict`` extraction, the type of return value is now completely - consistent : :meth:`~iris.cube.CubeList.extract_cube` always returns a - :class:`~iris.cube.Cube`, and :meth:`~iris.cube.CubeList.extract_cubes` - always returns an :class:`iris.cube.CubeList` of a length equal to the - number of constraints. (:pull:`3715`) - -* `@pp-mo`_ removed the former function - ``iris.analysis.coord_comparison``. (:pull:`3562`) - -* `@bjlittle`_ moved the - :func:`iris.experimental.equalise_cubes.equalise_attributes` function from - the :mod:`iris.experimental` module into the :mod:`iris.util` module. Please - use the :func:`iris.util.equalise_attributes` function instead. - (:pull:`3527`) - -* `@bjlittle`_ removed the module ``iris.experimental.concatenate``. In - ``v1.6.0`` the experimental ``concatenate`` functionality was moved to the - :meth:`iris.cube.CubeList.concatenate` method. Since then, calling the - :func:`iris.experimental.concatenate.concatenate` function raised an - exception. (:pull:`3523`) - -* `@stephenworsley`_ changed Iris objects loaded from NetCDF-CF files to have - ``units='unknown'`` where the corresponding NetCDF variable has no ``units`` - property. Previously these cases defaulted to ``units='1'``. - This affects loading of coordinates whose file variable has no "units" - attribute (not valid, under `CF units rules`_): These will now have units - of `"unknown"`, rather than `"1"`, which **may prevent the creation of - a hybrid vertical coordinate**. While these cases used to "work", this was - never really correct behaviour. (:pull:`3795`) - -* `@SimonPeatman`_ added attribute ``var_name`` to coordinates created by the - :func:`iris.analysis.trajectory.interpolate` function. This prevents - duplicate coordinate errors in certain circumstances. 
(:pull:`3718`) - -* `@bjlittle`_ aligned the :func:`iris.analysis.maths.apply_ufunc` with the - rest of the :mod:`iris.analysis.maths` API by changing its keyword argument - from ``other_cube`` to ``other``. (:pull:`3785`) - -* `@bjlittle`_ changed the :meth:`iris.analysis.maths.IFunc.__call__` to ignore - any surplus ``other`` keyword argument for a ``data_func`` that requires - **only one** argument. This aligns the behaviour of - :meth:`iris.analysis.maths.IFunc.__call__` with - :func:`~iris.analysis.maths.apply_ufunc`. Previously a ``ValueError`` - exception was raised. (:pull:`3785`) - - -🔥 Deprecations -=============== - -* `@stephenworsley`_ removed the deprecated :class:`iris.Future` flags - ``cell_date_time_objects``, ``netcdf_promote``, ``netcdf_no_unlimited`` and - ``clip_latitudes``. (:pull:`3459`) - -* `@stephenworsley`_ changed :attr:`iris.fileformats.pp.PPField.lbproc` to be an - ``int``. The deprecated attributes ``flag1``, ``flag2`` etc. have been - removed from it. (:pull:`3461`) - -* `@bjlittle`_ deprecated :func:`~iris.util.as_compatible_shape` in preference - for :class:`~iris.common.resolve.Resolve` e.g., ``Resolve(src, tgt)(tgt.core_data())``. - The :func:`~iris.util.as_compatible_shape` function will be removed in a future - release of Iris. (:pull:`3892`) - - -🔗 Dependencies -=============== - -* `@stephenworsley`_, `@trexfeathers`_ and `@bjlittle`_ removed ``Python2`` - support, modernising the codebase by switching to exclusive ``Python3`` - support. (:pull:`3513`) - -* `@bjlittle`_ improved the developer set up process. Configuring Iris and - :ref:`installing_from_source` as a developer with all the required package - dependencies is now easier with our curated conda environment YAML files. - (:pull:`3812`) - -* `@stephenworsley`_ pinned Iris to require `Dask`_ ``>=2.0``. 
(:pull:`3460`) - -* `@stephenworsley`_ and `@trexfeathers`_ pinned Iris to require - `Cartopy`_ ``>=0.18``, in order to remain compatible with the latest version - of `Matplotlib`_. (:pull:`3762`) - -* `@bjlittle`_ unpinned Iris to use the latest version of `Matplotlib`_. - Supporting ``Iris`` for both ``Python2`` and ``Python3`` had resulted in - pinning our dependency on `Matplotlib`_ at ``v2.x``. But this is no longer - necessary now that ``Python2`` support has been dropped. (:pull:`3468`) - -* `@stephenworsley`_ and `@trexfeathers`_ unpinned Iris to use the latest version - of `Proj`_. (:pull:`3762`) - -* `@stephenworsley`_ and `@trexfeathers`_ removed GDAL from the extensions - dependency group. We no longer consider it to be an extension. (:pull:`3762`) - - -📚 Documentation -================ - -* `@tkknight`_ moved the - :ref:`sphx_glr_generated_gallery_oceanography_plot_orca_projection.py` - from the general part of the gallery to oceanography. (:pull:`3761`) - -* `@tkknight`_ updated documentation to use a modern sphinx theme and be - served from https://scitools-iris.readthedocs.io/en/latest/. (:pull:`3752`) - -* `@bjlittle`_ added support for the `black`_ code formatter. This is - now automatically checked on GitHub PRs, replacing the older, unittest-based - ``iris.tests.test_coding_standards.TestCodeFormat``. Black provides automatic - code format correction for most IDEs. See the new developer guide section on - :ref:`code_formatting`. (:pull:`3518`) - -* `@tkknight`_ and `@trexfeathers`_ refreshed the :ref:`whats_new_contributions` - for the :ref:`iris_whatsnew`. This includes always creating the ``latest`` - what's new page so it appears on the latest documentation at - https://scitools-iris.readthedocs.io/en/latest/whatsnew. This resolves - :issue:`2104`, :issue:`3451`, :issue:`3818`, :issue:`3837`. Also updated the - :ref:`iris_development_releases_steps` to follow when making a release. 
- (:pull:`3769`, :pull:`3838`, :pull:`3843`) - -* `@tkknight`_ enabled the PDF creation of the documentation on the - `Read the Docs`_ service. The PDF may be accessed by clicking on the version - at the bottom of the side bar, then selecting ``PDF`` from the ``Downloads`` - section. (:pull:`3765`) - -* `@stephenworsley`_ added a warning to the - :func:`iris.analysis.cartography.project` function regarding its behaviour on - projections with non-rectangular boundaries. (:pull:`3762`) - -* `@stephenworsley`_ added the :ref:`cube_maths_combining_units` section to the - user guide to clarify how ``Units`` are handled during cube arithmetic. - (:pull:`3803`) - -* `@tkknight`_ overhauled the :ref:`developers_guide` including information on - getting involved in becoming a contributor and general structure of the - guide. This resolves :issue:`2170`, :issue:`2331`, :issue:`3453`, - :issue:`314`, :issue:`2902`. (:pull:`3852`) - -* `@rcomer`_ added argument descriptions to the :class:`~iris.coords.DimCoord` - docstring. (:pull:`3681`) - -* `@tkknight`_ added two url's to be ignored for the ``make linkcheck``. This - will ensure the Iris github project is not repeatedly hit during the - linkcheck for issues and pull requests as it can result in connection - refused and thus travis-ci_ job failures. For more information on linkcheck, - see :ref:`contributing.documentation.testing`. (:pull:`3873`) - -* `@tkknight`_ enabled the napolean_ package that is used by sphinx_ to cater - for the existing google style docstrings and to also allow for `numpy`_ - docstrings. This resolves :issue:`3841`. (:pull:`3871`) - -* `@tkknight`_ configured ``sphinx-build`` to promote warnings to errors when - building the documentation via ``make html``. This will minimise technical - debt accruing for the documentation. (:pull:`3877`) - -* `@tkknight`_ updated :ref:`installing_iris` to include a reference to - Windows Subsystem for Linux. 
(:pull:`3885`) - -* `@tkknight`_ updated the :ref:`iris_docs` homepage to include panels so the - links are more visible to users. This uses the sphinx-panels_ extension. - (:pull:`3884`) - -* `@bjlittle`_ created the :ref:`Further topics ` section and - included documentation for :ref:`metadata`, :ref:`lenient metadata`, and - :ref:`lenient maths`. (:pull:`3890`) - - -💼 Internal -=========== - -* `@pp-mo`_ and `@lbdreyer`_ removed all Iris test dependencies on `iris-grib`_ - by transferring all relevant content to the `iris-grib`_ repository. (:pull:`3662`, - :pull:`3663`, :pull:`3664`, :pull:`3665`, :pull:`3666`, :pull:`3669`, - :pull:`3670`, :pull:`3671`, :pull:`3672`, :pull:`3742`, :pull:`3746`) - -* `@lbdreyer`_ and `@pp-mo`_ overhauled the handling of dimensional - metadata to remove duplication. (:pull:`3422`, :pull:`3551`) - -* `@trexfeathers`_ simplified the standard license header for all files, which - removes the need to repeatedly update year numbers in the header. - (:pull:`3489`) - -* `@stephenworsley`_ changed the numerical values in tests involving the - Robinson projection due to improvements made in - `Proj`_. (:pull:`3762`) (see also `Proj#1292`_ and `Proj#2151`_) - -* `@stephenworsley`_ changed tests to account for more detailed descriptions of - projections in `GDAL`_. (:pull:`3762`) (see also `GDAL#1185`_) - -* `@stephenworsley`_ changed tests to account for `GDAL`_ now saving fill values - for data without masked points. (:pull:`3762`) - -* `@trexfeathers`_ changed every graphics test that includes `Cartopy's coastlines`_ - to account for new adaptive coastline scaling. (:pull:`3762`) - (see also `Cartopy#1105`_) - -* `@trexfeathers`_ changed graphics tests to account for some new default - grid-line spacing in `Cartopy`_. (:pull:`3762`) (see also `Cartopy#1117`_) - -* `@trexfeathers`_ added additional acceptable graphics test targets to account - for very minor changes in `Matplotlib`_ version ``3.3`` (colormaps, fonts and - axes borders). 
(:pull:`3762`) - -* `@rcomer`_ corrected the Matplotlib backend in Iris tests to ignore - `matplotlib.rcdefaults`_, instead the tests will **always** use ``agg``. - (:pull:`3846`) - -* `@bjlittle`_ migrated the `black`_ support from ``19.10b0`` to ``20.8b1``. - (:pull:`3866`) - -* `@lbdreyer`_ updated the CF standard name table to the latest version: `v75`_. - (:pull:`3867`) - -* `@bjlittle`_ added :pep:`517` and :pep:`518` support for building and - installing Iris, in particular to handle the `PyKE`_ package dependency. - (:pull:`3812`) - -* `@bjlittle`_ added metadata support for comparing :attr:`~iris.cube.Cube.attributes` - dictionaries that contain `numpy`_ arrays using `xxHash`_, an extremely fast - non-cryptographic hash algorithm, running at RAM speed limits. - -* `@bjlittle`_ added the ``iris.tests.assertDictEqual`` method to override - :meth:`unittest.TestCase.assertDictEqual` in order to cope with testing - metadata :attr:`~iris.cube.Cube.attributes` dictionary comparison where - the value of a key may be a `numpy`_ array. (:pull:`3785`) - -* `@bjlittle`_ added the :func:`~iris.config.get_logger` function for creating - a generic :class:`logging.Logger` with a :class:`logging.StreamHandler` and - custom :class:`logging.Formatter`. (:pull:`3785`) - -* `@owena11`_ identified and optimised a bottleneck in ``FieldsFile`` header - loading due to the use of :func:`numpy.fromfile`. (:pull:`3791`) - - -.. _Read the Docs: https://scitools-iris.readthedocs.io/en/latest/ -.. _Matplotlib: https://matplotlib.org/ -.. _CF units rules: https://cfconventions.org/Data/cf-conventions/cf-conventions-1.8/cf-conventions.html#units -.. _CF Ancillary Data: https://cfconventions.org/Data/cf-conventions/cf-conventions-1.8/cf-conventions.html#ancillary-data -.. _Quality Flags: https://cfconventions.org/Data/cf-conventions/cf-conventions-1.8/cf-conventions.html#flags -.. _iris-grib: https://github.com/SciTools/iris-grib -.. _Cartopy: https://github.com/SciTools/cartopy -.. 
_Cartopy's coastlines: https://scitools.org.uk/cartopy/docs/latest/matplotlib/geoaxes.html?highlight=coastlines#cartopy.mpl.geoaxes.GeoAxes.coastlines -.. _Cartopy#1105: https://github.com/SciTools/cartopy/pull/1105 -.. _Cartopy#1117: https://github.com/SciTools/cartopy/pull/1117 -.. _Dask: https://github.com/dask/dask -.. _matplotlib.dates.date2num: https://matplotlib.org/api/dates_api.html#matplotlib.dates.date2num -.. _Proj: https://github.com/OSGeo/PROJ -.. _black: https://black.readthedocs.io/en/stable/ -.. _Proj#1292: https://github.com/OSGeo/PROJ/pull/1292 -.. _Proj#2151: https://github.com/OSGeo/PROJ/pull/2151 -.. _GDAL: https://github.com/OSGeo/gdal -.. _GDAL#1185: https://github.com/OSGeo/gdal/pull/1185 -.. _@MoseleyS: https://github.com/MoseleyS -.. _@stephenworsley: https://github.com/stephenworsley -.. _@pp-mo: https://github.com/pp-mo -.. _@abooton: https://github.com/abooton -.. _@bouweandela: https://github.com/bouweandela -.. _@bjlittle: https://github.com/bjlittle -.. _@trexfeathers: https://github.com/trexfeathers -.. _@jonseddon: https://github.com/jonseddon -.. _@tkknight: https://github.com/tkknight -.. _@lbdreyer: https://github.com/lbdreyer -.. _@SimonPeatman: https://github.com/SimonPeatman -.. _@rcomer: https://github.com/rcomer -.. _@jvegasbsc: https://github.com/jvegasbsc -.. _@zklaus: https://github.com/zklaus -.. _ESMValTool: https://github.com/ESMValGroup/ESMValTool -.. _v75: https://cfconventions.org/Data/cf-standard-names/75/build/cf-standard-name-table.html -.. _sphinx-panels: https://sphinx-panels.readthedocs.io/en/latest/ -.. _logging: https://docs.python.org/3/library/logging.html -.. _numpy: https://github.com/numpy/numpy -.. _xxHash: https://github.com/Cyan4973/xxHash -.. _PyKE: https://pypi.org/project/scitools-pyke/ -.. _matplotlib.rcdefaults: https://matplotlib.org/3.1.1/api/matplotlib_configuration_api.html?highlight=rcdefaults#matplotlib.rcdefaults -.. 
_@owena11: https://github.com/owena11 diff --git a/docs/iris/src/whatsnew/latest.rst b/docs/iris/src/whatsnew/latest.rst deleted file mode 100644 index 67518e539a5..00000000000 --- a/docs/iris/src/whatsnew/latest.rst +++ /dev/null @@ -1,55 +0,0 @@ -.. include:: ../common_links.inc - - -************ - -This document explains the changes made to Iris for this release -(:doc:`View all changes `.) - - -📢 Announcements -================ - -* N/A - - -✨ Features -=========== - -* N/A - - -🐛 Bugs Fixed -============= - -* N/A - - -💣 Incompatible Changes -======================= - -* N/A - - -🔥 Deprecations -=============== - -* N/A - - -🔗 Dependencies -=============== - -* N/A - - -📚 Documentation -================ - -* N/A - - -💼 Internal -=========== - -* N/A diff --git a/docs/iris/src/whatsnew/latest.rst.template b/docs/iris/src/whatsnew/latest.rst.template deleted file mode 100644 index 67518e539a5..00000000000 --- a/docs/iris/src/whatsnew/latest.rst.template +++ /dev/null @@ -1,55 +0,0 @@ -.. include:: ../common_links.inc - - -************ - -This document explains the changes made to Iris for this release -(:doc:`View all changes `.) 
- - -📢 Announcements -================ - -* N/A - - -✨ Features -=========== - -* N/A - - -🐛 Bugs Fixed -============= - -* N/A - - -💣 Incompatible Changes -======================= - -* N/A - - -🔥 Deprecations -=============== - -* N/A - - -🔗 Dependencies -=============== - -* N/A - - -📚 Documentation -================ - -* N/A - - -💼 Internal -=========== - -* N/A diff --git a/docs/iris/src/IEP/IEP001.adoc b/docs/src/IEP/IEP001.adoc similarity index 100% rename from docs/iris/src/IEP/IEP001.adoc rename to docs/src/IEP/IEP001.adoc diff --git a/docs/iris/src/Makefile b/docs/src/Makefile similarity index 100% rename from docs/iris/src/Makefile rename to docs/src/Makefile diff --git a/docs/iris/src/_static/Iris7_1_trim_100.png b/docs/src/_static/Iris7_1_trim_100.png similarity index 100% rename from docs/iris/src/_static/Iris7_1_trim_100.png rename to docs/src/_static/Iris7_1_trim_100.png diff --git a/docs/iris/src/_static/Iris7_1_trim_full.png b/docs/src/_static/Iris7_1_trim_full.png similarity index 100% rename from docs/iris/src/_static/Iris7_1_trim_full.png rename to docs/src/_static/Iris7_1_trim_full.png diff --git a/docs/iris/src/_static/favicon.ico b/docs/src/_static/favicon.ico similarity index 100% rename from docs/iris/src/_static/favicon.ico rename to docs/src/_static/favicon.ico diff --git a/docs/iris/src/_static/iris-logo-title.png b/docs/src/_static/iris-logo-title.png similarity index 100% rename from docs/iris/src/_static/iris-logo-title.png rename to docs/src/_static/iris-logo-title.png diff --git a/docs/iris/src/_static/iris-logo-title.svg b/docs/src/_static/iris-logo-title.svg similarity index 100% rename from docs/iris/src/_static/iris-logo-title.svg rename to docs/src/_static/iris-logo-title.svg diff --git a/docs/iris/src/_static/theme_override.css b/docs/src/_static/theme_override.css similarity index 100% rename from docs/iris/src/_static/theme_override.css rename to docs/src/_static/theme_override.css diff --git 
a/docs/src/_templates/footer.html b/docs/src/_templates/footer.html new file mode 100644 index 00000000000..1d5fb08b789 --- /dev/null +++ b/docs/src/_templates/footer.html @@ -0,0 +1,5 @@ +{% extends "!footer.html" %} +{% block extrafooter %} + Built using Python {{ python_version }}. + {{ super() }} +{% endblock %} diff --git a/docs/iris/src/_templates/layout.html b/docs/src/_templates/layout.html similarity index 100% rename from docs/iris/src/_templates/layout.html rename to docs/src/_templates/layout.html diff --git a/docs/iris/src/common_links.inc b/docs/src/common_links.inc similarity index 55% rename from docs/iris/src/common_links.inc rename to docs/src/common_links.inc index 94c2f3c92bd..9f6a57f5294 100644 --- a/docs/iris/src/common_links.inc +++ b/docs/src/common_links.inc @@ -1,27 +1,58 @@ -.. _SciTools: https://github.com/SciTools +.. comment + Common resources in alphabetical order: + +.. _.cirrus.yml: https://github.com/SciTools/iris/blob/master/.cirrus.yml +.. _.flake8.yml: https://github.com/SciTools/iris/blob/master/.flake8 +.. _cirrus-ci: https://cirrus-ci.com/github/SciTools/iris +.. _conda: https://docs.conda.io/en/latest/ +.. _contributor: https://github.com/SciTools/scitools.org.uk/blob/master/contributors.json +.. _core developers: https://github.com/SciTools/scitools.org.uk/blob/master/contributors.json +.. _generating sss keys for GitHub: https://docs.github.com/en/github/authenticating-to-github/adding-a-new-ssh-key-to-your-github-account +.. _GitHub Help Documentation: https://docs.github.com/en/github .. _Iris: https://github.com/SciTools/iris .. _Iris GitHub: https://github.com/SciTools/iris .. _iris mailing list: https://groups.google.com/forum/#!forum/scitools-iris +.. _iris-sample-data: https://github.com/SciTools/iris-sample-data +.. _iris-test-data: https://github.com/SciTools/iris-test-data .. _issue: https://github.com/SciTools/iris/issues .. _issues: https://github.com/SciTools/iris/issues +.. 
_legacy documentation: https://scitools.org.uk/iris/docs/v2.4.0/ +.. _matplotlib: https://matplotlib.org/ +.. _napolean: https://sphinxcontrib-napoleon.readthedocs.io/en/latest/sphinxcontrib.napoleon.html +.. _New Issue: https://github.com/scitools/iris/issues/new/choose .. _pull request: https://github.com/SciTools/iris/pulls .. _pull requests: https://github.com/SciTools/iris/pulls -.. _contributor: https://github.com/SciTools/scitools.org.uk/blob/master/contributors.json -.. _core developers: https://github.com/SciTools/scitools.org.uk/blob/master/contributors.json -.. _iris-test-data: https://github.com/SciTools/iris-test-data -.. _iris-sample-data: https://github.com/SciTools/iris-sample-data -.. _test-iris-imagehash: https://github.com/SciTools/test-iris-imagehash .. _readthedocs.yml: https://github.com/SciTools/iris/blob/master/requirements/ci/readthedocs.yml -.. _travis-ci: https://travis-ci.org/github/SciTools/iris -.. _.travis.yml: https://github.com/SciTools/iris/blob/master/.travis.yml -.. _.stickler.yml: https://github.com/SciTools/iris/blob/master/.stickler.yml -.. _.flake8.yml: https://github.com/SciTools/iris/blob/master/.flake8 -.. _GitHub Help Documentation: https://docs.github.com/en/github -.. _using git: https://docs.github.com/en/github/using-git -.. _generating sss keys for GitHub: https://docs.github.com/en/github/authenticating-to-github/adding-a-new-ssh-key-to-your-github-account -.. _New Issue: https://github.com/scitools/iris/issues/new/choose -.. _matplotlib: https://matplotlib.org/ -.. _conda: https://docs.conda.io/en/latest/ +.. _SciTools: https://github.com/SciTools .. _sphinx: https://www.sphinx-doc.org/en/master/ -.. _napolean: https://sphinxcontrib-napoleon.readthedocs.io/en/latest/sphinxcontrib.napoleon.html -.. _legacy documentation: https://scitools.org.uk/iris/docs/v2.4.0/ +.. _test-iris-imagehash: https://github.com/SciTools/test-iris-imagehash +.. _using git: https://docs.github.com/en/github/using-git + + +.. 
comment + Core developers (@github names) in alphabetical order: + +.. _@abooton: https://github.com/abooton +.. _@alastair-gemmell: https://github.com/alastair-gemmell +.. _@ajdawson: https://github.com/ajdawson +.. _@bjlittle: https://github.com/bjlittle +.. _@bouweandela: https://github.com/bouweandela +.. _@corinnebosley: https://github.com/corinnebosley +.. _@cpelley: https://github.com/cpelley +.. _@djkirkham: https://github.com/djkirkham +.. _@DPeterK: https://github.com/DPeterK +.. _@esc24: https://github.com/esc24 +.. _@jamesp: https://github.com/jamesp +.. _@jonseddon: https://github.com/jonseddon +.. _@jvegasbsc: https://github.com/jvegasbsc +.. _@lbdreyer: https://github.com/lbdreyer +.. _@marqh: https://github.com/marqh +.. _@pelson: https://github.com/pelson +.. _@pp-mo: https://github.com/pp-mo +.. _@QuLogic: https://github.com/QuLogic +.. _@rcomer: https://github.com/rcomer +.. _@rhattersley: https://github.com/rhattersley +.. _@stephenworsley: https://github.com/stephenworsley +.. _@tkknight: https://github.com/tkknight +.. _@trexfeathers: https://github.com/trexfeathers +.. _@zklaus: https://github.com/zklaus diff --git a/docs/iris/src/conf.py b/docs/src/conf.py similarity index 89% rename from docs/iris/src/conf.py rename to docs/src/conf.py index d564d153a13..960035213a5 100644 --- a/docs/iris/src/conf.py +++ b/docs/src/conf.py @@ -43,6 +43,7 @@ def autolog(message): for item, value in os.environ.items(): autolog("[READTHEDOCS] {} = {}".format(item, value)) + # -- Path setup -------------------------------------------------------------- # If extensions (or modules to document with autodoc) are in another directory, @@ -68,8 +69,8 @@ def autolog(message): # define the copyright information for latex builds. 
Note, for html builds, # the copyright exists directly inside "_templates/layout.html" -upper_copy_year = datetime.datetime.now().year -copyright = "Iris Contributors" +copyright_years = f"2010 - {datetime.datetime.now().year}" +copyright = f"{copyright_years}, Iris Contributors" author = "Iris Developers" # The version info for the project you're documenting, acts as replacement for @@ -82,8 +83,8 @@ def autolog(message): if iris.__version__ == "dev": version = "dev" else: - # major.feature(.minor)-dev -> major.minor - version = ".".join(iris.__version__.split("-")[0].split(".")[:2]) + # major.minor.patch-dev -> major.minor.patch + version = ".".join(iris.__version__.split("-")[0].split(".")[:3]) # The full version, including alpha/beta/rc tags. release = iris.__version__ @@ -92,13 +93,17 @@ def autolog(message): # -- General configuration --------------------------------------------------- -# Create a variable that can be insterted in the rst "|copyright_years|". -# You can add more vairables here if needed -rst_epilog = """ -.. |copyright_years| replace:: {year_range} -""".format( - year_range="2010 - {}".format(upper_copy_year) -) +# Create a variable that can be inserted in the rst "|copyright_years|". +# You can add more variables here if needed. + +build_python_version = ".".join([str(i) for i in sys.version_info[:3]]) + +rst_epilog = f""" +.. |copyright_years| replace:: {copyright_years} +.. |python_version| replace:: {build_python_version} +.. |iris_version| replace:: v{version} +.. |build_date| replace:: ({datetime.datetime.now().strftime('%d %b %Y')}) +""" # Add any Sphinx extension module names here, as strings. 
They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom @@ -224,14 +229,15 @@ def autolog(message): } html_context = { - "copyright_years": "2010 - {}".format(upper_copy_year), + "copyright_years": copyright_years, + "python_version": build_python_version, # menu_links and menu_links_name are used in _templates/layout.html # to include some nice icons. See http://fontawesome.io for a list of # icons (used in the sphinx_rtd_theme) "menu_links_name": "Support", "menu_links": [ ( - ' Source code', + ' Source Code', "https://github.com/SciTools/iris", ), ( @@ -243,11 +249,11 @@ def autolog(message): "https://groups.google.com/forum/#!forum/scitools-iris-dev", ), ( - ' StackOverflow for "How do I?"', + ' StackOverflow for "How Do I?"', "https://stackoverflow.com/questions/tagged/python-iris", ), ( - ' Legacy documentation', + ' Legacy Documentation', "https://scitools.org.uk/iris/docs/v2.4.0/index.html", ), ], @@ -262,13 +268,16 @@ def autolog(message): # url link checker. Some links work but report as broken, lets ignore them. 
# See https://www.sphinx-doc.org/en/1.2/config.html#options-for-the-linkcheck-builder linkcheck_ignore = [ - "https://github.com/SciTools/iris/commit/69597eb3d8501ff16ee3d56aef1f7b8f1c2bb316#diff-1680206bdc5cfaa83e14428f5ba0f848", - "http://www.wmo.int/pages/prog/www/DPFS/documents/485_Vol_I_en_colour.pdf", + "http://cfconventions.org", "http://code.google.com/p/msysgit/downloads/list", + "http://effbot.org", + "https://github.com", + "http://www.personal.psu.edu/cab38/ColorBrewer/ColorBrewer_updates.html", "http://schacon.github.com/git", - "https://github.com/SciTools/iris/pull", - "https://github.com/SciTools/iris/issue", - "http://cfconventions.org", + "http://scitools.github.com/cartopy", + "http://www.wmo.int/pages/prog/www/DPFS/documents/485_Vol_I_en_colour.pdf", + "https://software.ac.uk/how-cite-software", + "http://www.esrl.noaa.gov/psd/data/gridded/conventions/cdc_netcdf_standard.shtml", ] # list of sources to exclude from the build. diff --git a/docs/iris/src/copyright.rst b/docs/src/copyright.rst similarity index 94% rename from docs/iris/src/copyright.rst rename to docs/src/copyright.rst index 08a40e5a1e9..16ac07acb36 100644 --- a/docs/iris/src/copyright.rst +++ b/docs/src/copyright.rst @@ -1,8 +1,8 @@ -Iris copyright, licensing and contributors +Iris Copyright, Licensing and Contributors ========================================== -Iris code +Iris Code --------- All Iris source code, unless explicitly stated, is ``Copyright Iris @@ -20,7 +20,7 @@ You should find all source files with the following header: licensing details. 
-Iris documentation and examples +Iris Documentation and Examples ------------------------------- All documentation, examples and sample data found on this website and in source repository diff --git a/docs/src/developers_guide/ci_checks.png b/docs/src/developers_guide/ci_checks.png new file mode 100755 index 00000000000..e088e03a665 Binary files /dev/null and b/docs/src/developers_guide/ci_checks.png differ diff --git a/docs/iris/src/developers_guide/contributing_changes.rst b/docs/src/developers_guide/contributing_changes.rst similarity index 86% rename from docs/iris/src/developers_guide/contributing_changes.rst rename to docs/src/developers_guide/contributing_changes.rst index a752986ec4f..48357874a74 100644 --- a/docs/iris/src/developers_guide/contributing_changes.rst +++ b/docs/src/developers_guide/contributing_changes.rst @@ -1,7 +1,7 @@ .. _contributing.changes: -Contributing your changes +Contributing Your Changes ========================= .. toctree:: diff --git a/docs/iris/src/developers_guide/contributing_ci_tests.rst b/docs/src/developers_guide/contributing_ci_tests.rst similarity index 68% rename from docs/iris/src/developers_guide/contributing_ci_tests.rst rename to docs/src/developers_guide/contributing_ci_tests.rst index c7a041bcb2c..a6bdac4ae02 100644 --- a/docs/iris/src/developers_guide/contributing_ci_tests.rst +++ b/docs/src/developers_guide/contributing_ci_tests.rst @@ -10,8 +10,7 @@ automatically when a pull request is created, updated or merged against Iris **master**. The checks performed are: * :ref:`testing_cla` -* :ref:`testing_travis` -* :ref:`testing_stickler` +* :ref:`testing_cirrus` .. _testing_cla: @@ -24,33 +23,23 @@ A bot that checks the user who created the pull request has signed the please see https://scitools.org.uk/organisation.html#governance -.. _testing_stickler: +.. _testing_cirrus: -Stickler CI -*********** - -Automatically enforces coding standards. 
The configuration file named -`.stickler.yml`_ is in the Iris_ root directory. For more information see -https://stickler-ci.com/. - - -.. _testing_travis: - -Travis-CI +Cirrus-CI ********* The unit and integration tests in Iris are an essential mechanism to ensure that the Iris code base is working as expected. :ref:`developer_running_tests` may be run manually but to ensure the checks are performed a -continuous integration testing tool named `travis-ci`_ is used. +continuous integration testing tool named `cirrus-ci`_ is used. -A `travis-ci`_ configuration file named `.travis.yml`_ -is in the Iris repository which tells travis-ci what commands to run. The +A `cirrus-ci`_ configuration file named `.cirrus.yml`_ +is in the Iris repository which tells Cirrus-CI what commands to run. The commands include retrieving the Iris code base and associated test files using -conda and then running the tests. `travis-ci`_ allows for a matrix of tests to +conda and then running the tests. `cirrus-ci`_ allows for a matrix of tests to be performed to ensure that all expected variations test successfully. -The `travis-ci`_ tests are run automatically against the `Iris`_ master +The `cirrus-ci`_ tests are run automatically against the `Iris`_ master repository when a pull request is submitted, updated or merged. GitHub Checklist diff --git a/docs/iris/src/developers_guide/contributing_code_formatting.rst b/docs/src/developers_guide/contributing_code_formatting.rst similarity index 99% rename from docs/iris/src/developers_guide/contributing_code_formatting.rst rename to docs/src/developers_guide/contributing_code_formatting.rst index b3f23f655a7..6bf8dca7173 100644 --- a/docs/iris/src/developers_guide/contributing_code_formatting.rst +++ b/docs/src/developers_guide/contributing_code_formatting.rst @@ -2,7 +2,7 @@ .. 
_code_formatting: -Code formatting +Code Formatting =============== To ensure a consistent code format throughout Iris, we recommend using diff --git a/docs/iris/src/developers_guide/contributing_codebase_index.rst b/docs/src/developers_guide/contributing_codebase_index.rst similarity index 89% rename from docs/iris/src/developers_guide/contributing_codebase_index.rst rename to docs/src/developers_guide/contributing_codebase_index.rst index 8d7eed8c84f..88986c0c7a0 100644 --- a/docs/iris/src/developers_guide/contributing_codebase_index.rst +++ b/docs/src/developers_guide/contributing_codebase_index.rst @@ -1,6 +1,6 @@ .. _contributing.documentation.codebase: -Contributing to the code base +Contributing to the Code Base ============================= .. toctree:: diff --git a/docs/iris/src/developers_guide/contributing_deprecations.rst b/docs/src/developers_guide/contributing_deprecations.rst similarity index 97% rename from docs/iris/src/developers_guide/contributing_deprecations.rst rename to docs/src/developers_guide/contributing_deprecations.rst index c7a68889841..1ecafdca9f9 100644 --- a/docs/iris/src/developers_guide/contributing_deprecations.rst +++ b/docs/src/developers_guide/contributing_deprecations.rst @@ -10,12 +10,12 @@ one release, before removing/updating it in the next `major release `_. -Adding a deprecation +Adding a Deprecation ==================== .. _removing-a-public-api: -Removing a public API +Removing a Public API --------------------- The simplest form of deprecation occurs when you need to remove a public @@ -49,7 +49,7 @@ Under these circumstances the following points apply: - You should check the documentation for references to the deprecated API and update them as appropriate. -Changing a default +Changing a Default ------------------ When you need to change the default behaviour of a public API the @@ -74,7 +74,7 @@ API: deprecation warning and corresponding Sphinx deprecation directive. 
-Removing a deprecation +Removing a Deprecation ====================== When the time comes to make a new major release you should locate any @@ -83,7 +83,7 @@ minimum period described previously. Locating deprecated APIs can easily be done by searching for the Sphinx deprecation directives and/or deprecation warnings. -Removing a public API +Removing a Public API --------------------- The deprecated API should be removed and any corresponding documentation @@ -91,7 +91,7 @@ and/or example code should be removed/updated as appropriate. .. _iris_developer_future: -Changing a default +Changing a Default ------------------ - You should update the initial state of the relevant boolean attribute diff --git a/docs/iris/src/developers_guide/contributing_documentation.rst b/docs/src/developers_guide/contributing_documentation.rst similarity index 83% rename from docs/iris/src/developers_guide/contributing_documentation.rst rename to docs/src/developers_guide/contributing_documentation.rst index e083e56e4a3..1b2941822be 100644 --- a/docs/iris/src/developers_guide/contributing_documentation.rst +++ b/docs/src/developers_guide/contributing_documentation.rst @@ -1,7 +1,7 @@ .. _contributing.documentation: -Contributing to the documentation +Contributing to the Documentation --------------------------------- Documentation is important and we encourage any improvements that can be made. @@ -24,11 +24,14 @@ The documentation uses specific packages that need to be present. Please see Building ~~~~~~~~ -The build can be run from the documentation directory ``iris/docs/iris/src``. +This documentation was built using the latest Python version that Iris +supports. For more information see :ref:`installing_iris`. + +The build can be run from the documentation directory ``docs/src``. The build output for the html is found in the ``_build/html`` sub directory. 
When updating the documentation ensure the html build has *no errors* or -*warnings* otherwise it may fail the automated `travis-ci`_ build. +*warnings* otherwise it may fail the automated `cirrus-ci`_ build. Once the build is complete, if it is rerun it will only rebuild the impacted build artefacts so should take less time. @@ -50,7 +53,7 @@ This is useful for a final test before committing your changes. have been promoted to be **errors** to ensure they are addressed. This **only** applies when ``make html`` is run. -.. _travis-ci: https://travis-ci.org/github/SciTools/iris +.. _cirrus-ci: https://cirrus-ci.com/github/SciTools/iris .. _contributing.documentation.testing: @@ -58,8 +61,8 @@ Testing ~~~~~~~ There are a ways to test various aspects of the documentation. The -``make`` commands shown below can be run in the ``iris/docs/iris`` or -``iris/docs/iris/src`` directory. +``make`` commands shown below can be run in the ``docs`` or +``docs/src`` directory. Each :ref:`contributing.documentation.gallery` entry has a corresponding test. To run the tests:: @@ -98,7 +101,7 @@ or list of files is set in the `conf.py`_ using the string list ``spelling_word_list_filename``. -.. note:: In addition to the automated `travis-ci`_ build of all the +.. note:: In addition to the automated `cirrus-ci`_ build of all the documentation build options above, the https://readthedocs.org/ service is also used. The configuration of this held in a file in the root of the @@ -106,24 +109,24 @@ or list of files is set in the `conf.py`_ using the string list ``.readthedocs.yml``. -.. _conf.py: https://github.com/SciTools/iris/blob/master/docs/iris/src/conf.py +.. _conf.py: https://github.com/SciTools/iris/blob/master/docs/src/conf.py .. 
_contributing.documentation.api: -Generating API documentation +Generating API Documentation ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ In order to auto generate the API documentation based upon the docstrings a custom set of python scripts are used, these are located in the directory -``iris/docs/iris/src/sphinxext``. Once the ``make html`` command has been run, +``docs/src/sphinxext``. Once the ``make html`` command has been run, the output of these scripts can be found in -``iris/docs/iris/src/generated/api``. +``docs/src/generated/api``. If there is a particularly troublesome module that breaks the ``make html`` you can exclude the module from the API documentation. Add the entry to the ``exclude_modules`` tuple list in the -``iris/docs/iris/src/sphinxext/generate_package_rst.py`` file. +``docs/src/sphinxext/generate_package_rst.py`` file. .. _contributing.documentation.gallery: @@ -136,12 +139,12 @@ The Iris :ref:`sphx_glr_generated_gallery` uses a sphinx extension named that auto generates reStructuredText (rst) files based upon a gallery source directory that abides directory and filename convention. -The code for the gallery entries are in ``iris/docs/iris/gallery_code``. +The code for the gallery entries are in ``docs/gallery_code``. Each sub directory in this directory is a sub section of the gallery. The respective ``README.rst`` in each folder is included in the gallery output. For each gallery entry there must be a corresponding test script located in -``iris/docs/iris/gallery_tests``. +``docs/gallery_tests``. To add an entry to the gallery simple place your python code into the appropriate sub directory and name it with a prefix of ``plot_``. If your @@ -149,7 +152,7 @@ gallery entry does not fit into any existing sub directories then create a new directory and place it in there. The reStructuredText (rst) output of the gallery is located in -``iris/docs/iris/src/generated/gallery``. +``docs/src/generated/gallery``. 
For more information on the directory structure and options please see the `sphinx-gallery getting started diff --git a/docs/iris/src/developers_guide/contributing_getting_involved.rst b/docs/src/developers_guide/contributing_getting_involved.rst similarity index 98% rename from docs/iris/src/developers_guide/contributing_getting_involved.rst rename to docs/src/developers_guide/contributing_getting_involved.rst index 2ba09d740d6..7ce09f0999c 100644 --- a/docs/iris/src/developers_guide/contributing_getting_involved.rst +++ b/docs/src/developers_guide/contributing_getting_involved.rst @@ -2,7 +2,7 @@ .. _development_where_to_start: -Getting involved +Getting Involved ---------------- Iris_ is an Open Source project hosted on Github and as such anyone with a diff --git a/docs/iris/src/developers_guide/contributing_graphics_tests.rst b/docs/src/developers_guide/contributing_graphics_tests.rst similarity index 94% rename from docs/iris/src/developers_guide/contributing_graphics_tests.rst rename to docs/src/developers_guide/contributing_graphics_tests.rst index a276f520d69..81ec9c0344e 100644 --- a/docs/iris/src/developers_guide/contributing_graphics_tests.rst +++ b/docs/src/developers_guide/contributing_graphics_tests.rst @@ -2,7 +2,7 @@ .. _testing.graphics: -Graphics tests +Graphics Tests ************** Iris may be used to create various forms of graphical output; to ensure @@ -15,7 +15,7 @@ At present graphical tests are used in the following areas of Iris: * Module ``iris.tests.test_plot`` * Module ``iris.tests.test_quickplot`` * :ref:`sphx_glr_generated_gallery` plots contained in - ``docs/iris/gallery_tests``. + ``docs/gallery_tests``. Challenges @@ -31,10 +31,10 @@ known acceptable output may fail. The failure may also not be visually perceived as it may be a simple pixel shift. 
-Testing strategy +Testing Strategy ================ -The `Iris Travis matrix`_ defines multiple test runs that use +The `Iris Cirrus-CI matrix`_ defines multiple test runs that use different versions of Python to ensure Iris is working as expected. To make this manageable, the ``iris.tests.IrisTest_nometa.check_graphic`` test @@ -64,7 +64,7 @@ This consists of: against the existing accepted reference images, for each failing test. -Reviewing failing tests +Reviewing Failing Tests ======================= When you find that a graphics test in the Iris testing suite has failed, @@ -122,7 +122,7 @@ you should follow: happens, simply repeat the check-and-accept process until all tests pass. -Add your changes to Iris +Add Your Changes to Iris ======================== To add your changes to Iris, you need to make two pull requests (PR). @@ -155,7 +155,7 @@ To add your changes to Iris, you need to make two pull requests (PR). .. important:: - The Iris pull-request will not test successfully in Travis until the + The Iris pull-request will not test successfully in Cirrus-CI until the ``test-iris-imagehash`` pull request has been merged. This is because there is an Iris_ test which ensures the existence of the reference images (uris) for all the targets in the image results database. It will also fail @@ -163,4 +163,4 @@ To add your changes to Iris, you need to make two pull requests (PR). image-listing file in ``test-iris-imagehash``. -.. _Iris travis matrix: https://github.com/scitools/iris/blob/master/.travis.yml#L15 +.. 
_Iris Cirrus-CI matrix: https://github.com/scitools/iris/blob/master/.cirrus.yml diff --git a/docs/iris/src/developers_guide/contributing_pull_request_checklist.rst b/docs/src/developers_guide/contributing_pull_request_checklist.rst similarity index 95% rename from docs/iris/src/developers_guide/contributing_pull_request_checklist.rst rename to docs/src/developers_guide/contributing_pull_request_checklist.rst index b01f370ea2c..3e7a9f1ae38 100644 --- a/docs/iris/src/developers_guide/contributing_pull_request_checklist.rst +++ b/docs/src/developers_guide/contributing_pull_request_checklist.rst @@ -2,8 +2,8 @@ .. _pr_check: -Pull request check list -======================= +Pull Request Checklist +====================== All pull request will be reviewed by a core developer who will manage the process of merging. It is the responsibility of a developer submitting a @@ -38,7 +38,7 @@ is merged. Before submitting a pull request please consider this list. #. **Check the documentation builds without warnings or errors**. See :ref:`contributing.documentation.building` -#. **Check for any new dependencies in the** `.travis.yml`_ **config file.** +#. **Check for any new dependencies in the** `.cirrus.yml`_ **config file.** #. **Check for any new dependencies in the** `readthedocs.yml`_ **file**. This file is used to build the documentation that is served from diff --git a/docs/iris/src/developers_guide/contributing_running_tests.rst b/docs/src/developers_guide/contributing_running_tests.rst similarity index 55% rename from docs/iris/src/developers_guide/contributing_running_tests.rst rename to docs/src/developers_guide/contributing_running_tests.rst index cadf3710db3..0fd9fa8486d 100644 --- a/docs/iris/src/developers_guide/contributing_running_tests.rst +++ b/docs/src/developers_guide/contributing_running_tests.rst @@ -2,9 +2,14 @@ .. 
_developer_running_tests: -Running the tests +Running the Tests ***************** +Using setuptools for Testing Iris +================================= + +.. warning:: The `setuptools`_ ``test`` command was deprecated in `v41.5.0`_. See :ref:`using nox`. + A prerequisite of running the tests is to have the Python environment setup. For more information on this see :ref:`installing_from_source`. @@ -90,4 +95,101 @@ due to an experimental dependency not being present. All Python decorators that skip tests will be defined in ``lib/iris/tests/__init__.py`` with a function name with a prefix of - ``skip_``. \ No newline at end of file + ``skip_``. + + +.. _using nox: + +Using Nox for Testing Iris +========================== + +Iris has adopted the use of the `nox`_ tool for automated testing on `cirrus-ci`_ +and also locally on the command-line for developers. + +`nox`_ is similar to `tox`_, but instead leverages the expressiveness and power of a Python +configuration file rather than an `.ini` style file. As with `tox`_, `nox`_ can use `virtualenv`_ +to create isolated Python environments, but in addition also supports `conda`_ as a testing +environment backend. + + +Where is Nox Used? +------------------ + +Iris uses `nox`_ as a convenience to fully automate the process of executing the Iris tests, but also +automates the process of: + +* building the documentation and executing the doc-tests +* building the documentation gallery +* running the documentation URL link check +* linting the code-base +* ensuring the code-base style conforms to the `black`_ standard + + +You can perform all of these tasks manually yourself, however the onus is on you to first ensure +that all of the required package dependencies are installed and available in the testing environment. + +`Nox`_ has been configured to automatically do this for you, and provides a means to easily replicate +the remote testing behaviour of `cirrus-ci`_ locally for the developer. 
+ + +Installing Nox +-------------- + +We recommend installing `nox`_ using `conda`_. To install `nox`_ in a separate `conda`_ environment:: + + conda create -n nox -c conda-forge nox + conda activate nox + +To install `nox`_ in an existing active `conda`_ environment:: + + conda install -c conda-forge nox + +The `nox`_ package is also available on PyPI, however `nox`_ has been configured to use the `conda`_ +backend for Iris, so an installation of `conda`_ must always be available. + + +Testing with Nox +---------------- + +The `nox`_ configuration file `noxfile.py` is available in the root ``iris`` project directory, and +defines all the `nox`_ sessions (i.e., tasks) that may be performed. `nox`_ must always be executed +from the ``iris`` root directory. + +To list the configured `nox`_ sessions for Iris:: + + nox --list + +To run the Iris tests for all configured versions of Python:: + + nox --session tests + +To build the Iris documentation specifically for Python 3.7:: + + nox --session doctest-3.7 + +To run all the Iris `nox`_ sessions:: + + nox + +For further `nox`_ command-line options:: + + nox --help + +.. tip:: + For `nox`_ sessions that use the `conda`_ backend, you can use the ``-v`` or ``--verbose`` + flag to display the `nox`_ `conda`_ environment package details and environment info. + For example:: + + nox --session tests -- --verbose + + +.. note:: `nox`_ will cache its testing environments in the `.nox` root ``iris`` project directory. + + +.. _black: https://black.readthedocs.io/en/stable/ +.. _nox: https://nox.thea.codes/en/latest/ +.. _setuptools: https://setuptools.readthedocs.io/en/latest/ +.. _tox: https://tox.readthedocs.io/en/latest/ +.. _virtualenv: https://virtualenv.pypa.io/en/latest/ +.. _PyPI: https://pypi.org/project/nox/ +.. 
_v41.5.0: https://setuptools.readthedocs.io/en/latest/history.html#v41-5-0 diff --git a/docs/iris/src/developers_guide/contributing_testing.rst b/docs/src/developers_guide/contributing_testing.rst similarity index 98% rename from docs/iris/src/developers_guide/contributing_testing.rst rename to docs/src/developers_guide/contributing_testing.rst index 375ad570031..486af706d3c 100644 --- a/docs/iris/src/developers_guide/contributing_testing.rst +++ b/docs/src/developers_guide/contributing_testing.rst @@ -3,7 +3,7 @@ .. _developer_test_categories: -Test categories +Test Categories *************** There are two main categories of tests within Iris: @@ -20,7 +20,7 @@ feel free to submit a pull-request in any state and ask for assistance. .. _testing.unit_test: -Unit tests +Unit Tests ========== Code changes should be accompanied by enough unit tests to give a @@ -128,7 +128,7 @@ Within that file the tests might look something like: .. _testing.integration: -Integration tests +Integration Tests ================= Some code changes may require tests which exercise several units in @@ -141,4 +141,4 @@ tests. But folders and files must be created as required to help developers locate relevant tests. It is recommended they are named according to the capabilities under test, e.g. ``metadata/test_pp_preservation.py``, and not named according to the -module(s) under test. \ No newline at end of file +module(s) under test. 
diff --git a/docs/iris/src/developers_guide/contributing_testing_index.rst b/docs/src/developers_guide/contributing_testing_index.rst similarity index 100% rename from docs/iris/src/developers_guide/contributing_testing_index.rst rename to docs/src/developers_guide/contributing_testing_index.rst diff --git a/docs/iris/src/developers_guide/documenting/__init__.py b/docs/src/developers_guide/documenting/__init__.py similarity index 100% rename from docs/iris/src/developers_guide/documenting/__init__.py rename to docs/src/developers_guide/documenting/__init__.py diff --git a/docs/iris/src/developers_guide/documenting/docstrings.rst b/docs/src/developers_guide/documenting/docstrings.rst similarity index 96% rename from docs/iris/src/developers_guide/documenting/docstrings.rst rename to docs/src/developers_guide/documenting/docstrings.rst index 34ec790d033..8a06024ee23 100644 --- a/docs/iris/src/developers_guide/documenting/docstrings.rst +++ b/docs/src/developers_guide/documenting/docstrings.rst @@ -27,7 +27,7 @@ There are two forms of docstrings: **single-line** and **multi-line** docstrings. -Single-line docstrings +Single-Line Docstrings ====================== The single line docstring of an object must state the **purpose** of that @@ -35,7 +35,7 @@ object, known as the **purpose section**. This terse overview must be on one line and ideally no longer than 80 characters. -Multi-line docstrings +Multi-Line Docstrings ===================== Multi-line docstrings must consist of at least a purpose section akin to the @@ -53,7 +53,7 @@ not to document *argument* and *keyword argument* details. Such information should be documented in the following *arguments and keywords section*. 
-Sample multi-line docstring +Sample Multi-Line Docstring --------------------------- Here is a simple example of a standard docstring: @@ -75,7 +75,7 @@ Additionally, a summary can be extracted automatically, which would result in: documenting.docstrings_sample_routine.sample_routine -Documenting classes +Documenting Classes =================== The class constructor should be documented in the docstring for its @@ -90,7 +90,7 @@ superclass method and does not call the superclass method; use the verb (in addition to its own behaviour). -Attribute and property docstrings +Attribute and Property Docstrings --------------------------------- Here is a simple example of a class containing an attribute docstring and a diff --git a/docs/iris/src/developers_guide/documenting/docstrings_attribute.py b/docs/src/developers_guide/documenting/docstrings_attribute.py similarity index 100% rename from docs/iris/src/developers_guide/documenting/docstrings_attribute.py rename to docs/src/developers_guide/documenting/docstrings_attribute.py diff --git a/docs/iris/src/developers_guide/documenting/docstrings_sample_routine.py b/docs/src/developers_guide/documenting/docstrings_sample_routine.py similarity index 100% rename from docs/iris/src/developers_guide/documenting/docstrings_sample_routine.py rename to docs/src/developers_guide/documenting/docstrings_sample_routine.py diff --git a/docs/iris/src/developers_guide/documenting/rest_guide.rst b/docs/src/developers_guide/documenting/rest_guide.rst similarity index 98% rename from docs/iris/src/developers_guide/documenting/rest_guide.rst rename to docs/src/developers_guide/documenting/rest_guide.rst index bc34d16cd8d..4845132b159 100644 --- a/docs/iris/src/developers_guide/documenting/rest_guide.rst +++ b/docs/src/developers_guide/documenting/rest_guide.rst @@ -3,7 +3,7 @@ .. _reST_quick_start: ================ -reST quick start +reST Quick Start ================ `reST`_ is used to create the documentation for Iris_. 
It is used to author @@ -19,7 +19,7 @@ reST markup syntaxes, for the basics of reST the following links may be useful: Reference documentation for reST can be found at http://docutils.sourceforge.net/rst.html. -Creating links +Creating Links -------------- Basic links can be created with ```Text of the link `_`` which will look like `Text of the link `_ diff --git a/docs/iris/src/developers_guide/documenting/whats_new_contributions.rst b/docs/src/developers_guide/documenting/whats_new_contributions.rst similarity index 77% rename from docs/iris/src/developers_guide/documenting/whats_new_contributions.rst rename to docs/src/developers_guide/documenting/whats_new_contributions.rst index 856d9af0a9a..ebb553024bc 100644 --- a/docs/iris/src/developers_guide/documenting/whats_new_contributions.rst +++ b/docs/src/developers_guide/documenting/whats_new_contributions.rst @@ -1,7 +1,7 @@ .. _whats_new_contributions: ================================= -Contributing a "What's New" entry +Contributing a "What's New" Entry ================================= Iris uses a file named ``latest.rst`` to keep a draft of upcoming changes @@ -11,7 +11,7 @@ The contribution should be included as part of the Iris Pull Request that introduces the change. The ``latest.rst`` and the past release notes are kept in -``docs/iris/src/whatsnew/``. If you are writing the first contribution after +``docs/src/whatsnew/``. If you are writing the first contribution after an Iris release: **create the new** ``latest.rst`` by copying the content from ``latest.rst.template`` in the same directory. @@ -38,7 +38,7 @@ situation is thought likely (large PR, high repo activity etc.): * PR author: create the "What's New" pull request * PR reviewer: once the "What's New" PR is created, **merge the main PR**. 
- (this will fix any `travis-ci`_ linkcheck errors where the links in the + (this will fix any `cirrus-ci`_ linkcheck errors where the links in the "What's New" PR reference new features introduced in the main PR) * PR reviewer: review the "What's New" PR, merge once acceptable @@ -48,7 +48,7 @@ for the minimum time, minimising conflicts and minimising the need to rebase or merge from trunk. -Writing a contribution +Writing a Contribution ====================== As introduced above, a contribution is the description of a change to Iris @@ -59,16 +59,15 @@ what's new document. The appropriate contribution for a pull request might in fact be an addition or change to an existing "What's New" entry. -Each contribution will ideally be written as a single concise bullet point -in a reStructuredText format. Where possible do not exceed **column 80** and -ensure that any subsequent lines of the same bullet point are aligned with the -first. The content should target an Iris user as the audience. The required -content, in order, is as follows: +Each contribution will ideally be written as a single concise entry using a +reStructuredText auto-enumerated list ``#.`` directive. Where possible do not +exceed **column 80** and ensure that any subsequent lines of the same entry are +aligned with the first. The content should target an Iris user as the audience. +The required content, in order, is as follows: * Names of those who contributed the change. These should be their GitHub user name. Link the name to their GitHub profile. E.g. - ```@bjlittle `_ and - `@tkknight `_ changed...`` + ```@tkknight `_ changed...`` * The new/changed behaviour @@ -79,15 +78,14 @@ content, in order, is as follows: * Pull request references, bracketed, following the final period. E.g. 
``(:pull:`1111`, :pull:`9999`)`` -* A trailing blank line (standard reStructuredText bullet format) +* A trailing blank line (standard reStructuredText list format) For example:: - * `@bjlittle `_ and - `@tkknight `_ changed changed argument ``x`` - to be optional in :class:`~iris.module.class` and - :meth:`iris.module.method`. This allows greater flexibility as requested in - :issue:`9999`. (:pull:`1111`, :pull:`9999`) + #. `@tkknight `_ changed argument ``x`` + to be optional in :class:`~iris.module.class` and + :meth:`iris.module.method`. This allows greater flexibility as requested in + :issue:`9999`. (:pull:`1111`, :pull:`9999`) The above example also demonstrates some of the possible syntax for including @@ -96,14 +94,14 @@ examine past what's :ref:`iris_whatsnew` entries. .. note:: The reStructuredText syntax will be checked as part of building the documentation. Any warnings should be corrected. - `travis-ci`_ will automatically build the documentation when + `cirrus-ci`_ will automatically build the documentation when creating a pull request, however you can also manually :ref:`build ` the documentation. -.. _travis-ci: https://travis-ci.org/github/SciTools/iris +.. 
_cirrus-ci: https://cirrus-ci.com/github/SciTools/iris -Contribution categories +Contribution Categories ======================= The structure of the what's new release note should be easy to read by diff --git a/docs/iris/src/developers_guide/gitwash/LICENSE b/docs/src/developers_guide/gitwash/LICENSE similarity index 100% rename from docs/iris/src/developers_guide/gitwash/LICENSE rename to docs/src/developers_guide/gitwash/LICENSE diff --git a/docs/iris/src/developers_guide/gitwash/branch_dropdown.png b/docs/src/developers_guide/gitwash/branch_dropdown.png similarity index 100% rename from docs/iris/src/developers_guide/gitwash/branch_dropdown.png rename to docs/src/developers_guide/gitwash/branch_dropdown.png diff --git a/docs/iris/src/developers_guide/gitwash/configure_git.rst b/docs/src/developers_guide/gitwash/configure_git.rst similarity index 99% rename from docs/iris/src/developers_guide/gitwash/configure_git.rst rename to docs/src/developers_guide/gitwash/configure_git.rst index b958a683ee2..6fc288daf99 100644 --- a/docs/iris/src/developers_guide/gitwash/configure_git.rst +++ b/docs/src/developers_guide/gitwash/configure_git.rst @@ -3,7 +3,7 @@ .. _configure-git: ============= -Configure git +Configure Git ============= .. _git-config-basic: @@ -51,7 +51,7 @@ command:: To set up on another computer, you can copy your ``~/.gitconfig`` file, or run the commands above. -In detail +In Detail ========= user.name and user.email @@ -124,7 +124,7 @@ Or from the command line:: .. 
_fancy-log: -Fancy log output +Fancy Log Output ---------------- This is a very nice alias to get a fancy log output; it should go in the diff --git a/docs/iris/src/developers_guide/gitwash/development_workflow.rst b/docs/src/developers_guide/gitwash/development_workflow.rst similarity index 95% rename from docs/iris/src/developers_guide/gitwash/development_workflow.rst rename to docs/src/developers_guide/gitwash/development_workflow.rst index b67885e6bd6..f6144a05e97 100644 --- a/docs/iris/src/developers_guide/gitwash/development_workflow.rst +++ b/docs/src/developers_guide/gitwash/development_workflow.rst @@ -1,14 +1,14 @@ .. _development-workflow: #################### -Development workflow +Development Workflow #################### You already have your own forked copy of the `iris`_ repository, by following :ref:`forking`. You have :ref:`set-up-fork`. You have configured git by following :ref:`configure-git`. Now you are ready for some real work. -Workflow summary +Workflow Summary ================ In what follows we'll refer to the upstream iris ``master`` branch, as @@ -34,7 +34,7 @@ what you've done, and why you did it. See `linux git workflow`_ for some explanation. -Consider deleting your master branch +Consider Deleting Your Master Branch ==================================== It may sound strange, but deleting your own ``master`` branch can help reduce @@ -43,7 +43,7 @@ details. .. _update-mirror-trunk: -Update the mirror of trunk +Update the Mirror of Trunk ========================== First make sure you have done :ref:`linking-to-upstream`. @@ -59,7 +59,7 @@ you last checked, ``upstream/master`` will change after you do the fetch. .. _make-feature-branch: -Make a new feature branch +Make a New Feature Branch ========================= When you are ready to make some changes to the code, you should start a new @@ -99,7 +99,7 @@ From now on git will know that ``my-new-feature`` is related to the .. 
_edit-flow: -The editing workflow +The Editing Workflow ==================== Overview @@ -112,7 +112,7 @@ Overview git commit -am 'NF - some message' git push -In more detail +In More Detail -------------- #. Make some changes @@ -144,14 +144,14 @@ In more detail push`` (see `git push`_). -Testing your changes +Testing Your Changes ==================== Once you are happy with your changes, work thorough the :ref:`pr_check` and make sure your branch passes all the relevant tests. -Ask for your changes to be reviewed or merged +Ask for Your Changes to be Reviewed or Merged ============================================= When you are ready to ask for someone to review your code and consider a merge: @@ -175,10 +175,10 @@ When you are ready to ask for someone to review your code and consider a merge: pull request message. This is still a good way of getting some preliminary code review. -Some other things you might want to do +Some Other Things You Might Want to Do ====================================== -Delete a branch on github +Delete a Branch on GitHub ------------------------- :: @@ -193,7 +193,7 @@ Note the colon ``:`` before ``test-branch``. See also: http://github.com/guides/remove-a-remote-branch -Several people sharing a single repository +Several People Sharing a Single Repository ------------------------------------------ If you want to work on some stuff with other people, where you are all @@ -225,7 +225,7 @@ usual:: git commit -am 'ENH - much better code' git push origin master # pushes directly into your repo -Explore your repository +Explore Your Repository ----------------------- To see a graphical representation of the repository branches and @@ -243,7 +243,7 @@ graph of the repository. .. 
_rebase-on-trunk: -Rebasing on trunk +Rebasing on Trunk ----------------- For more information please see the diff --git a/docs/iris/src/developers_guide/gitwash/forking.rst b/docs/src/developers_guide/gitwash/forking.rst similarity index 89% rename from docs/iris/src/developers_guide/gitwash/forking.rst rename to docs/src/developers_guide/gitwash/forking.rst index e10b8f84ca1..161847ed793 100644 --- a/docs/iris/src/developers_guide/gitwash/forking.rst +++ b/docs/src/developers_guide/gitwash/forking.rst @@ -3,7 +3,7 @@ .. _forking: =================================== -Making your own copy (fork) of Iris +Making Your Own Copy (fork) of Iris =================================== You need to do this only once. The instructions here are very similar @@ -12,7 +12,7 @@ that page for more detail. We're repeating some of it here just to give the specifics for the `Iris`_ project, and to suggest some default names. -Set up and configure a github account +Set up and Configure a GitHub Account ===================================== If you don't have a github account, go to the github page, and make one. @@ -21,7 +21,7 @@ You then need to configure your account to allow write access, see the `generating sss keys for GitHub`_ help on `github help`_. -Create your own forked copy of Iris +Create Your Own Forked Copy of Iris =================================== #. Log into your github account. 
diff --git a/docs/iris/src/developers_guide/gitwash/forking_button.png b/docs/src/developers_guide/gitwash/forking_button.png similarity index 100% rename from docs/iris/src/developers_guide/gitwash/forking_button.png rename to docs/src/developers_guide/gitwash/forking_button.png diff --git a/docs/iris/src/developers_guide/gitwash/git_intro.rst b/docs/src/developers_guide/gitwash/git_intro.rst similarity index 100% rename from docs/iris/src/developers_guide/gitwash/git_intro.rst rename to docs/src/developers_guide/gitwash/git_intro.rst diff --git a/docs/iris/src/developers_guide/gitwash/git_links.inc b/docs/src/developers_guide/gitwash/git_links.inc similarity index 100% rename from docs/iris/src/developers_guide/gitwash/git_links.inc rename to docs/src/developers_guide/gitwash/git_links.inc diff --git a/docs/iris/src/developers_guide/gitwash/index.rst b/docs/src/developers_guide/gitwash/index.rst similarity index 84% rename from docs/iris/src/developers_guide/gitwash/index.rst rename to docs/src/developers_guide/gitwash/index.rst index d0e70597f1b..3cde6225831 100644 --- a/docs/iris/src/developers_guide/gitwash/index.rst +++ b/docs/src/developers_guide/gitwash/index.rst @@ -1,6 +1,6 @@ .. _using-git: -Working with Iris source code +Working With Iris Source Code ============================= .. 
toctree:: diff --git a/docs/iris/src/developers_guide/gitwash/links.inc b/docs/src/developers_guide/gitwash/links.inc similarity index 100% rename from docs/iris/src/developers_guide/gitwash/links.inc rename to docs/src/developers_guide/gitwash/links.inc diff --git a/docs/iris/src/developers_guide/gitwash/pull_button.png b/docs/src/developers_guide/gitwash/pull_button.png similarity index 100% rename from docs/iris/src/developers_guide/gitwash/pull_button.png rename to docs/src/developers_guide/gitwash/pull_button.png diff --git a/docs/iris/src/developers_guide/gitwash/set_up_fork.rst b/docs/src/developers_guide/gitwash/set_up_fork.rst similarity index 95% rename from docs/iris/src/developers_guide/gitwash/set_up_fork.rst rename to docs/src/developers_guide/gitwash/set_up_fork.rst index 9dc6618c64f..70d602c97c3 100644 --- a/docs/iris/src/developers_guide/gitwash/set_up_fork.rst +++ b/docs/src/developers_guide/gitwash/set_up_fork.rst @@ -3,7 +3,7 @@ .. _set-up-fork: ================ -Set up your fork +Set up Your Fork ================ First you follow the instructions for :ref:`forking`. @@ -17,10 +17,10 @@ Overview cd iris git remote add upstream git://github.com/SciTools/iris.git -In detail +In Detail ========= -Clone your fork +Clone Your Fork --------------- #. Clone your fork to the local computer with ``git clone @@ -42,7 +42,7 @@ Clone your fork .. 
_linking-to-upstream: -Linking your repository to the upstream repo +Linking Your Repository to the Upstream Repo -------------------------------------------- :: diff --git a/docs/iris/src/developers_guide/release.rst b/docs/src/developers_guide/release.rst similarity index 67% rename from docs/iris/src/developers_guide/release.rst rename to docs/src/developers_guide/release.rst index d71f1491861..56328f910f2 100644 --- a/docs/iris/src/developers_guide/release.rst +++ b/docs/src/developers_guide/release.rst @@ -3,17 +3,28 @@ Releases ======== -A release of Iris is a `tag on the SciTools/Iris`_ +A release of Iris is a `tag on the SciTools/Iris`_ Github repository. The summary below is of the main areas that constitute the release. The final section details the :ref:`iris_development_releases_steps` to take. -Release branch +Before Release -------------- -Once the features intended for the release are on master, a release branch +Deprecations +~~~~~~~~~~~~ + +Ensure that any behaviour which has been deprecated for the correct number of +previous releases is now finally changed. More detail, including the correct +number of releases, is in :ref:`iris_development_deprecations`. + + +Release Branch +-------------- + +Once the features intended for the release are on master, a release branch should be created, in the SciTools/Iris repository. This will have the name: :literal:`v{major release number}.{minor release number}.x` @@ -26,7 +37,7 @@ This branch shall be used to finalise the release details in preparation for the release candidate. -Release candidate +Release Candidate ----------------- Prior to a release, a release candidate tag may be created, marked as a @@ -35,12 +46,12 @@ number, e.g.: :literal:`v1.9.0rc1` -If created, the pre-release shall be available for a minimum of two weeks +If created, the pre-release shall be available for a minimum of two weeks prior to the release being cut. 
However a 4 week period should be the goal to allow user groups to be notified of the existence of the pre-release and encouraged to test the functionality. -A pre-release is expected for a minor release, but will not for a +A pre-release is expected for a major or minor release, but not for a point release. If new features are required for a release after a release candidate has been @@ -56,10 +67,10 @@ This content should be reviewed and adapted as required. Steps to achieve this can be found in the :ref:`iris_development_releases_steps`. -The release +The Release ----------- -The final steps are to change the version string in the source of +The final steps are to change the version string in the source of :literal:`Iris.__init__.py` and include the release date in the relevant what's new page within the documentation. @@ -67,17 +78,17 @@ Once all checks are complete, the release is cut by the creation of a new tag in the SciTools Iris repository. -Conda recipe +Conda Recipe ------------ Once a release is cut, the `Iris feedstock`_ for the conda recipe must be updated to build the latest release of Iris and push this artefact to -`conda forge`_. +`conda forge`_. .. _Iris feedstock: https://github.com/conda-forge/iris-feedstock/tree/master/recipe .. _conda forge: https://anaconda.org/conda-forge/iris -Merge back +Merge Back ---------- After the release is cut, the changes shall be merged back onto the @@ -90,7 +101,7 @@ pull request to master. This work flow ensures that the commit identifiers are consistent between the :literal:`.x` branch and :literal:`master`. -Point releases +Point Releases -------------- Bug fixes may be implemented and targeted as the :literal:`.x` branch. These @@ -102,64 +113,66 @@ New features shall not be included in a point release, these are for bug fixes. A point release does not require a release candidate, but the rest of the release process is to be followed, including the merge back of changes into -:literal:`master`. 
+:literal:`master`. .. _iris_development_releases_steps: -Maintainer steps +Maintainer Steps ---------------- -These steps assume a release for ``v1.9`` is to be created +These steps assume a release for ``v1.9`` is to be created. -Release steps +Release Steps ~~~~~~~~~~~~~ -#. Create the branch ``1.9.x`` on the main repo, not in a forked repo, for the - release candidate or release. The only exception is for a point/bugfix - release as it should already exist -#. Update the what's new for the release: - - * Copy ``docs/iris/src/whatsnew/latest.rst`` to a file named - ``v1.9.rst`` - * Delete the ``docs/iris/src/whatsnew/latest.rst`` file so it will not - cause an issue in the build - * In ``v1.9.rst`` update the page title (first line of the file) to show - the date and version in the format of ``v1.9 (DD MMM YYYY)``. For - example ``v1.9 (03 Aug 2020)`` +#. Create the release feature branch ``1.9.x`` on `SciTools/iris`_. + The only exception is for a point/bugfix release, as it should already exist +#. Update the ``iris.__init__.py`` version string e.g., to ``1.9.0`` +#. Update the what's new for the release: + + * Use git to rename ``docs/src/whatsnew/latest.rst`` to the release + version file ``v1.9.rst`` + * Use git to delete the ``docs/src/whatsnew/latest.rst.template`` file + * In ``v1.9.rst`` remove the ``[unreleased]`` caption from the page title. + Note that, the Iris version and release date are updated automatically + when the documentation is built * Review the file for correctness - * Add ``v1.9.rst`` to git and commit all changes, including removal of - ``latest.rst`` + * Work with the development team to populate the ``Release Highlights`` + dropdown at the top of the file, which provides extra detail on notable + changes + * Use git to add and commit all changes, including removal of + ``latest.rst.template`` -#. Update the what's new index ``docs/iris/src/whatsnew/index.rst`` +#. 
Update the what's new index ``docs/src/whatsnew/index.rst`` - * Temporarily remove reference to ``latest.rst`` + * Remove the reference to ``latest.rst`` * Add a reference to ``v1.9.rst`` to the top of the list -#. Update the ``Iris.__init__.py`` version string, to ``1.9.0`` -#. Check your changes by building the documentation and viewing the changes -#. Once all the above steps are complete, the release is cut, using +#. Check your changes by building the documentation and reviewing +#. Once all the above steps are complete, the release is cut, using the :guilabel:`Draft a new release` button on the `Iris release page `_ -Post release steps +Post Release Steps ~~~~~~~~~~~~~~~~~~ -#. Check the documentation has built on `Read The Docs`_. The build is +#. Check the documentation has built on `Read The Docs`_. The build is triggered by any commit to master. Additionally check that the versions available in the pop out menu in the bottom left corner include the new release version. If it is not present you will need to configure the versions available in the **admin** dashboard in Read The Docs -#. Copy ``docs/iris/src/whatsnew/latest.rst.template`` to - ``docs/iris/src/whatsnew/latest.rst``. This will reset +#. Copy ``docs/src/whatsnew/latest.rst.template`` to + ``docs/src/whatsnew/latest.rst``. This will reset the file with the ``unreleased`` heading and placeholders for the what's new headings -#. Add back in the reference to ``latest.rst`` to the what's new index - ``docs/iris/src/whatsnew/index.rst`` -#. Update ``Iris.__init__.py`` version string to show as ``1.10.dev0`` +#. Add back in the reference to ``latest.rst`` to the what's new index + ``docs/src/whatsnew/index.rst`` +#. Update ``iris.__init__.py`` version string to show as ``1.10.dev0`` #. Merge back to master .. _Read The Docs: https://readthedocs.org/projects/scitools-iris/builds/ +.. _SciTools/iris: https://github.com/SciTools/iris .. 
_tag on the SciTools/Iris: https://github.com/SciTools/iris/releases diff --git a/docs/iris/src/further_topics/index.rst b/docs/src/further_topics/index.rst similarity index 94% rename from docs/iris/src/further_topics/index.rst rename to docs/src/further_topics/index.rst index 8a4d95b6cd1..dc162d6a1e2 100644 --- a/docs/iris/src/further_topics/index.rst +++ b/docs/src/further_topics/index.rst @@ -5,7 +5,7 @@ Introduction Some specific areas of Iris may require further explanation or a deep dive into additional detail above and beyond that offered by the -:ref:`User guide `. +:ref:`User Guide `. This section provides a collection of additional material on focused topics that may be of interest to the more advanced or curious user. diff --git a/docs/iris/src/further_topics/lenient_maths.rst b/docs/src/further_topics/lenient_maths.rst similarity index 99% rename from docs/iris/src/further_topics/lenient_maths.rst rename to docs/src/further_topics/lenient_maths.rst index 6f139fd9bf2..4aad721780d 100644 --- a/docs/iris/src/further_topics/lenient_maths.rst +++ b/docs/src/further_topics/lenient_maths.rst @@ -1,6 +1,6 @@ .. _lenient maths: -Lenient cube maths +Lenient Cube Maths ****************** This section provides an overview of lenient cube maths. In particular, it explains @@ -46,7 +46,7 @@ a practical worked example, which we'll explore together next. .. _lenient example: -Lenient example +Lenient Example =============== .. testsetup:: lenient-example @@ -154,7 +154,7 @@ Now let's compare and contrast this lenient result with the strict alternative. But before we do so, let's first clarify how to control the behaviour of cube maths. -Control the behaviour +Control the Behaviour ===================== As stated earlier, lenient cube maths is the default behaviour from Iris ``3.0.0``. 
@@ -191,7 +191,7 @@ scope of the ``LENIENT`` `context manager`_, Lenient(maths=True) -Strict example +Strict Example ============== Now that we know how to control the underlying behaviour of cube maths, @@ -229,7 +229,7 @@ This is because strict cube maths, in general, will only return common metadata and common coordinates that are :ref:`strictly equivalent `. -Finer detail +Finer Detail ============ In general, if you want to preserve as much metadata and coordinate information as @@ -278,4 +278,4 @@ resultant :class:`~iris.cube.Cube`, .. _atmosphere hybrid height parametric vertical coordinate: https://cfconventions.org/Data/cf-conventions/cf-conventions-1.8/cf-conventions.html#atmosphere-hybrid-height-coordinate -.. _context manager: https://docs.python.org/3/library/contextlib.html \ No newline at end of file +.. _context manager: https://docs.python.org/3/library/contextlib.html diff --git a/docs/iris/src/further_topics/lenient_metadata.rst b/docs/src/further_topics/lenient_metadata.rst similarity index 98% rename from docs/iris/src/further_topics/lenient_metadata.rst rename to docs/src/further_topics/lenient_metadata.rst index 1b31759d9a1..b68ed501ba7 100644 --- a/docs/iris/src/further_topics/lenient_metadata.rst +++ b/docs/src/further_topics/lenient_metadata.rst @@ -1,6 +1,6 @@ .. _lenient metadata: -Lenient metadata +Lenient Metadata **************** This section discusses lenient metadata; what it is, what it means, and how you @@ -27,7 +27,7 @@ methods that provide this rich metadata behaviour, all of which are explored more fully in :ref:`metadata`. -Strict behaviour +Strict Behaviour ================ .. testsetup:: strict-behaviour @@ -137,7 +137,7 @@ practical behaviour is available. .. _lenient behaviour: -Lenient behaviour +Lenient Behaviour ================= .. testsetup:: lenient-behaviour @@ -210,7 +210,7 @@ lenient behaviour for each of the metadata classes. .. 
_lenient equality: -Lenient equality +Lenient Equality ---------------- Lenient equality is enabled using the ``lenient`` keyword argument, therefore @@ -273,7 +273,7 @@ forgiving and practical alternative to strict behaviour. .. _lenient difference: -Lenient difference +Lenient Difference ------------------ Similar to :ref:`lenient equality`, the lenient ``difference`` method @@ -330,15 +330,15 @@ highlights the change in how such dissimilar metadata is treated gracefully, .. _lenient combination: -Lenient combination +Lenient Combination ------------------- The behaviour of the lenient ``combine`` metadata class method is outlined in :numref:`lenient combine table`, and as with :ref:`lenient equality` and -:ref:`lenient difference` is enabled throught the ``lenient`` keyword argument. +:ref:`lenient difference` is enabled through the ``lenient`` keyword argument. The difference in behaviour between **lenient** and -:ref:`strict combination ` is centered around the lenient +:ref:`strict combination ` is centred around the lenient handling of combining **something** with **nothing** (``None``) to return **something**. Whereas strict combination will only return a result from combining identical objects. @@ -380,7 +380,7 @@ for more inclusive, richer metadata, .. _lenient members: -Lenient members +Lenient Members --------------- :ref:`lenient behaviour` is not applied regardlessly across all metadata members @@ -429,7 +429,7 @@ strict behaviour, regardlessly. .. 
_special lenient name: -Special lenient name behaviour +Special Lenient Name Behaviour ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ The ``standard_name``, ``long_name`` and ``var_name`` have a closer association diff --git a/docs/iris/src/further_topics/metadata.rst b/docs/src/further_topics/metadata.rst similarity index 98% rename from docs/iris/src/further_topics/metadata.rst rename to docs/src/further_topics/metadata.rst index 3536c87a2bb..e6d6ebc57a8 100644 --- a/docs/iris/src/further_topics/metadata.rst +++ b/docs/src/further_topics/metadata.rst @@ -42,7 +42,7 @@ Collectively, the aforementioned classes will be known here as the Iris `SciTools/iris`_ -Common metadata +Common Metadata =============== Each of the Iris `CF Conventions`_ classes use **metadata** to define them and @@ -69,7 +69,7 @@ actual `data attribute`_ names of the metadata members on the Iris class. :align: center =================== ======================================= ============================== ========================================== ================================= ======================== ============================== =================== - Metadata members :class:`~iris.coords.AncillaryVariable` :class:`~iris.coords.AuxCoord` :class:`~iris.aux_factory.AuxCoordFactory` :class:`~iris.coords.CellMeasure` :class:`~iris.cube.Cube` :class:`~iris.coords.DimCoord` Metadata members + Metadata Members :class:`~iris.coords.AncillaryVariable` :class:`~iris.coords.AuxCoord` :class:`~iris.aux_factory.AuxCoordFactory` :class:`~iris.coords.CellMeasure` :class:`~iris.cube.Cube` :class:`~iris.coords.DimCoord` Metadata Members =================== ======================================= ============================== ========================================== ================================= ======================== ============================== =================== ``standard_name`` ✔ ✔ ✔ ✔ ✔ ✔ ``standard_name`` ``long_name`` ✔ ✔ ✔ ✔ ✔ ✔ ``long_name`` @@ -90,7 +90,7 @@ actual `data attribute`_ names of the 
metadata members on the Iris class. terms. -Common metadata API +Common Metadata API =================== .. testsetup:: @@ -149,7 +149,7 @@ a **common** and **consistent** approach to managing your metadata, which we'll now explore a little more fully. -Metadata classes +Metadata Classes ---------------- The ``metadata`` property will return an appropriate `namedtuple`_ metadata class @@ -162,7 +162,7 @@ each container class is shown in :numref:`metadata classes table` below, :align: center ========================================== ======================================================== - Container class Metadata class + Container Class Metadata Class ========================================== ======================================================== :class:`~iris.coords.AncillaryVariable` :class:`~iris.common.metadata.AncillaryVariableMetadata` :class:`~iris.coords.AuxCoord` :class:`~iris.common.metadata.CoordMetadata` @@ -232,7 +232,7 @@ discussion on options how to **set** and **get** metadata on the instance of an Iris `CF Conventions`_ container class (:numref:`metadata classes table`). -Metadata class behaviour +Metadata Class Behaviour ------------------------ As mentioned previously, the metadata classes in :numref:`metadata classes table` @@ -301,7 +301,7 @@ which we explore next. .. _richer metadata: -Richer metadata behaviour +Richer Metadata Behaviour ------------------------- .. testsetup:: richer-metadata @@ -320,7 +320,7 @@ allows you to easily **compare**, **combine**, **convert** and understand the .. _metadata equality: -Metadata equality +Metadata Equality ^^^^^^^^^^^^^^^^^ The metadata classes support both **equality** (``__eq__``) and **inequality** @@ -357,7 +357,7 @@ a means to enable **lenient** equality, as discussed in :ref:`lenient equality`. .. 
_strict equality: -Strict equality +Strict Equality """"""""""""""" By default, metadata class equality will perform a **strict** comparison between @@ -426,7 +426,7 @@ However, metadata class equality is rich enough to handle this eventuality, .. _compare like: -Comparing like with like +Comparing Like With Like """""""""""""""""""""""" So far in our journey through metadata class equality, we have only considered @@ -446,7 +446,7 @@ metadata class contains **different** members, as shown in .. _exception rule: -Exception to the rule +Exception to the Rule ~~~~~~~~~~~~~~~~~~~~~ In general, **different** metadata classes cannot be compared, however support @@ -502,7 +502,7 @@ methods of metadata classes. .. _metadata difference: -Metadata difference +Metadata Difference ^^^^^^^^^^^^^^^^^^^ Being able to compare metadata is valuable, especially when we have the @@ -605,7 +605,7 @@ Now, let's compare the two above instances and see what ``attributes`` member di .. _diff like: -Diffing like with like +Diffing Like With Like """""""""""""""""""""" As discussed in :ref:`compare like`, it only makes sense to determine the @@ -655,7 +655,7 @@ In general, however, comparing **different** metadata classes will result in a .. _metadata combine: -Metadata combination +Metadata Combination ^^^^^^^^^^^^^^^^^^^^ .. testsetup:: metadata-combine @@ -740,7 +740,7 @@ metadata class. This is explored in a little further detail next. .. _combine like: -Combine like with like +Combine Like With Like """""""""""""""""""""" Akin to the :ref:`equal ` and @@ -788,7 +788,7 @@ However, note that commutativity in this case cannot be honoured, for obvious re .. _metadata conversion: -Metadata conversion +Metadata Conversion ^^^^^^^^^^^^^^^^^^^ .. testsetup:: metadata-convert @@ -853,7 +853,7 @@ class instance, .. _metadata assignment: -Metadata assignment +Metadata Assignment ^^^^^^^^^^^^^^^^^^^ .. 
testsetup:: metadata-assign @@ -888,7 +888,7 @@ coordinate, DimCoordMetadata(standard_name='latitude', long_name=None, var_name='latitude', units=Unit('degrees'), attributes={}, coord_system=GeogCS(6371229.0), climatological=False, circular=False) -Assign by iterable +Assign by Iterable """""""""""""""""" It is also possible to assign to the ``metadata`` property of an Iris @@ -903,7 +903,7 @@ number** of associated member values, e.g., DimCoordMetadata(standard_name='latitude', long_name=None, var_name='latitude', units=Unit('degrees'), attributes={}, coord_system=GeogCS(6371229.0), climatological=False, circular=False) -Assign by namedtuple +Assign by Namedtuple """""""""""""""""""" A `namedtuple`_ may also be used to assign to the ``metadata`` property of an @@ -933,7 +933,7 @@ of the ``longitude`` coordinate, DimCoordMetadata(standard_name='latitude', long_name=None, var_name='latitude', units=Unit('degrees'), attributes={}, coord_system=GeogCS(6371229.0), climatological=False, circular=False) -Assign by mapping +Assign by Mapping """"""""""""""""" It is also possible to assign to the ``metadata`` property using a `mapping`_, diff --git a/docs/iris/src/index.rst b/docs/src/index.rst similarity index 98% rename from docs/iris/src/index.rst rename to docs/src/index.rst index f230e36f755..80aa696ba10 100644 --- a/docs/iris/src/index.rst +++ b/docs/src/index.rst @@ -46,7 +46,7 @@ For **Iris 2.4** and earlier documentation please see the :container: container-lg pb-3 :column: col-lg-4 col-md-4 col-sm-6 col-xs-12 p-2 - Install Iris to use or for development. + Install Iris as a user or developer. +++ .. link-button:: installing_iris :type: ref @@ -91,7 +91,7 @@ For **Iris 2.4** and earlier documentation please see the .. 
toctree:: :maxdepth: 1 - :caption: Getting started + :caption: Getting Started :hidden: installing diff --git a/docs/iris/src/installing.rst b/docs/src/installing.rst similarity index 69% rename from docs/iris/src/installing.rst rename to docs/src/installing.rst index fa6fa9ee782..9f90f26a491 100644 --- a/docs/iris/src/installing.rst +++ b/docs/src/installing.rst @@ -7,7 +7,7 @@ Iris is available using conda for the following platforms: * Linux 64-bit, * Mac OSX 64-bit, and -* Windows 64-bit. +* Windows 64-bit.g Windows 10 now has support for Linux distributions via WSL_ (Windows Subsystem for Linux). This is a great option to get started with Iris @@ -17,12 +17,14 @@ any WSL_ distributions. .. _WSL: https://docs.microsoft.com/en-us/windows/wsl/install-win10 .. note:: Iris currently supports and is tested against **Python 3.6** and - **Python 3.7**. + **Python 3.7**. + +.. note:: This documentation was built using Python |python_version|. .. _installing_using_conda: -Installing using conda (users) +Installing Using Conda (Users) ------------------------------ To install Iris using conda, you must first download and install conda, @@ -41,11 +43,45 @@ need the Iris sample data. This can also be installed using conda:: Further documentation on using conda and the features it provides can be found at https://conda.io/en/latest/index.html. +.. _installing_from_source_without_conda: + +Installing from Source Without Conda on Debian-Based Linux Distros (Developers) +------------------------------------------------------------------------------- + +Iris can also be installed without a conda environment. The instructions in +this section are valid for Debian-based Linux distributions (Debian, Ubuntu, +Kubuntu, etc.). + +Iris and its dependencies need some shared libraries in order to work properly. 
+These can be installed +with apt:: + + sudo apt-get install python3-pip python3-tk libudunits2-dev libproj-dev proj-bin libgeos-dev libcunit1-dev + +Consider executing:: + + sudo apt-get update + +before and after installation of Debian packages. + +The rest can be done with pip. Begin with numpy:: + + pip3 install numpy + +Finally, Iris and its Python dependencies can be installed with the following +command:: + + pip3 install setuptools cftime==1.2.1 cf-units scitools-pyke scitools-iris + +This procedure was tested on a Ubuntu 20.04 system on the +27th of January, 2021. +Be aware that through updates of the involved Debian and/or Python packages, +dependency conflicts might arise or the procedure might have to modified. .. _installing_from_source: -Installing from source (developers) ------------------------------------ +Installing from Source with Conda (Developers) +---------------------------------------------- The latest Iris source release is available from https://github.com/SciTools/iris. @@ -81,7 +117,7 @@ to find your local Iris code:: python setup.py develop -Running the tests +Running the Tests ----------------- To ensure your setup is configured correctly you can run the test suite using @@ -92,7 +128,7 @@ the command:: For more information see :ref:`developer_running_tests`. 
-Custom site configuration +Custom Site Configuration ------------------------- The default site configuration values can be overridden by creating the file diff --git a/docs/iris/src/spelling_allow.txt b/docs/src/spelling_allow.txt similarity index 100% rename from docs/iris/src/spelling_allow.txt rename to docs/src/spelling_allow.txt diff --git a/docs/iris/src/sphinxext/custom_class_autodoc.py b/docs/src/sphinxext/custom_class_autodoc.py similarity index 100% rename from docs/iris/src/sphinxext/custom_class_autodoc.py rename to docs/src/sphinxext/custom_class_autodoc.py diff --git a/docs/iris/src/sphinxext/custom_data_autodoc.py b/docs/src/sphinxext/custom_data_autodoc.py similarity index 100% rename from docs/iris/src/sphinxext/custom_data_autodoc.py rename to docs/src/sphinxext/custom_data_autodoc.py diff --git a/docs/iris/src/sphinxext/generate_package_rst.py b/docs/src/sphinxext/generate_package_rst.py similarity index 100% rename from docs/iris/src/sphinxext/generate_package_rst.py rename to docs/src/sphinxext/generate_package_rst.py diff --git a/docs/iris/src/techpapers/change_management.rst b/docs/src/techpapers/change_management.rst similarity index 98% rename from docs/iris/src/techpapers/change_management.rst rename to docs/src/techpapers/change_management.rst index ab45fe79263..f39d64f430b 100644 --- a/docs/iris/src/techpapers/change_management.rst +++ b/docs/src/techpapers/change_management.rst @@ -4,7 +4,7 @@ .. _change_management: -Change Management in Iris from the User's perspective +Change Management in Iris From the User's Perspective ***************************************************** As Iris changes, user code will need revising from time to time to keep it @@ -16,7 +16,7 @@ Here, we define ways to make this as easy as possible. .. 
include:: ../userguide/change_management_goals.txt -Key principles you can rely on +Key Principles you can Rely on ============================== Iris code editions are published as defined version releases, with a given @@ -42,7 +42,7 @@ If your code produces :ref:`deprecation warnings `, then it -User Actions : How you should respond to changes and releases +User Actions : How you Should Respond to Changes and Releases ============================================================= Checklist : @@ -96,7 +96,7 @@ Key concepts covered here: .. _iris_backward_compatibility: -Backwards compatibility +Backwards Compatibility ----------------------- "Backwards-compatible" changes are those that leave any existing valid API @@ -135,7 +135,7 @@ See :ref:`Usage of iris.FUTURE `, below. .. _iris_api: -Terminology : API, features, usages and behaviours +Terminology : API, Features, Usages and Behaviours ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ The API is the components of the iris module and its submodules which are @@ -320,7 +320,7 @@ This is to warn users : * eventually to rewrite old code to use the newer or better alternatives -Deprecated features support through the Release cycle +Deprecated Features Support Through the Release Cycle ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ The whole point of a deprecation is that the feature continues to work, but @@ -341,7 +341,7 @@ follows: .. _iris_future_usage: -Future options, `iris.FUTURE` +Future Options, `iris.FUTURE` ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ A special approach is needed where the replacement behaviour is not controlled diff --git a/docs/iris/src/techpapers/index.rst b/docs/src/techpapers/index.rst similarity index 89% rename from docs/iris/src/techpapers/index.rst rename to docs/src/techpapers/index.rst index 3074569eae0..773c8f70598 100644 --- a/docs/iris/src/techpapers/index.rst +++ b/docs/src/techpapers/index.rst @@ -1,7 +1,7 @@ .. 
_techpapers_index: -Iris technical papers +Iris Technical Papers ===================== Extra information on specific technical issues. diff --git a/docs/iris/src/techpapers/missing_data_handling.rst b/docs/src/techpapers/missing_data_handling.rst similarity index 98% rename from docs/iris/src/techpapers/missing_data_handling.rst rename to docs/src/techpapers/missing_data_handling.rst index 46279bc5661..13b00d34245 100644 --- a/docs/iris/src/techpapers/missing_data_handling.rst +++ b/docs/src/techpapers/missing_data_handling.rst @@ -1,5 +1,5 @@ ============================= -Missing data handling in Iris +Missing Data Handling in Iris ============================= This document provides a brief overview of how Iris handles missing data values @@ -73,7 +73,7 @@ all have the same fill-value. If the components have differing fill-values, a default fill-value will be used instead. -Other operations +Other Operations ---------------- Other operations, such as :class:`~iris.cube.Cube` arithmetic operations, diff --git a/docs/iris/src/techpapers/um_files_loading.rst b/docs/src/techpapers/um_files_loading.rst similarity index 99% rename from docs/iris/src/techpapers/um_files_loading.rst rename to docs/src/techpapers/um_files_loading.rst index d8c796b31f6..72d34962ce7 100644 --- a/docs/iris/src/techpapers/um_files_loading.rst +++ b/docs/src/techpapers/um_files_loading.rst @@ -14,7 +14,7 @@ =================================== -Iris handling of PP and Fieldsfiles +Iris Handling of PP and Fieldsfiles =================================== This document provides a basic account of how PP and Fieldsfiles data is @@ -40,7 +40,7 @@ For details of Iris terms (cubes, coordinates, attributes), refer to For details of CF conventions, see http://cfconventions.org/. -Overview of loading process +Overview of Loading Process --------------------------- The basics of Iris loading are explained at :ref:`loading_iris_cubes`. 
@@ -165,7 +165,7 @@ For example: sections are written only if the actual values are unevenly spaced. -Phenomenon identification +Phenomenon Identification ------------------------- **UM Field elements** @@ -218,7 +218,7 @@ For example: LBUSER4 and LBUSER7 elements. -Vertical coordinates +Vertical Coordinates -------------------- **UM Field elements** @@ -319,7 +319,7 @@ See an example printout of a hybrid height cube, .. _um_time_metadata: -Time information +Time Information ---------------- **UM Field elements** @@ -391,7 +391,7 @@ See an example printout of a forecast data cube, 'forecast_reference_time' is a constant. -Statistical measures +Statistical Measures -------------------- **UM Field elements** @@ -438,7 +438,7 @@ For example: (CellMethod(method='mean', coord_names=('time',), intervals=('6 hour',), comments=()),) -Other metadata +Other Metadata -------------- LBRSVD4 diff --git a/docs/iris/src/userguide/change_management_goals.txt b/docs/src/userguide/change_management_goals.txt similarity index 100% rename from docs/iris/src/userguide/change_management_goals.txt rename to docs/src/userguide/change_management_goals.txt diff --git a/docs/iris/src/userguide/citation.rst b/docs/src/userguide/citation.rst similarity index 87% rename from docs/iris/src/userguide/citation.rst rename to docs/src/userguide/citation.rst index 56eab0a4eb1..0a3a85fb89c 100644 --- a/docs/iris/src/userguide/citation.rst +++ b/docs/src/userguide/citation.rst @@ -8,7 +8,7 @@ If Iris played an important part in your research then please add us to your reference list by using one of the recommendations below. ************ -BibTeX entry +BibTeX Entry ************ For example:: @@ -24,7 +24,7 @@ For example:: ******************* -Downloaded software +Downloaded Software ******************* Suggested format:: @@ -37,7 +37,7 @@ For example:: ******************** -Checked out software +Checked Out Software ******************** Suggested format:: @@ -48,7 +48,7 @@ For example:: Iris. 
Met Office. git@github.com:SciTools/iris.git 06-03-2013 -.. _How to cite and describe software: http://software.ac.uk/so-exactly-what-software-did-you-use +.. _How to cite and describe software: https://software.ac.uk/how-cite-software Reference: [Jackson]_. diff --git a/docs/iris/src/userguide/code_maintenance.rst b/docs/src/userguide/code_maintenance.rst similarity index 97% rename from docs/iris/src/userguide/code_maintenance.rst rename to docs/src/userguide/code_maintenance.rst index d03808e18f5..b2b498bc80e 100644 --- a/docs/iris/src/userguide/code_maintenance.rst +++ b/docs/src/userguide/code_maintenance.rst @@ -1,11 +1,11 @@ -Code maintenance +Code Maintenance ================ From a user point of view "code maintenance" means ensuring that your existing working code stays working, in the face of changes to Iris. -Stability and change +Stability and Change --------------------- In practice, as Iris develops, most users will want to periodically upgrade @@ -25,7 +25,7 @@ maintenance effort is probably still necessary: for some completely unconnected reason. 
-Principles of change management +Principles of Change Management ------------------------------- When you upgrade software to a new version, you often find that you need to diff --git a/docs/iris/src/userguide/concat.png b/docs/src/userguide/concat.png similarity index 100% rename from docs/iris/src/userguide/concat.png rename to docs/src/userguide/concat.png diff --git a/docs/iris/src/userguide/concat.svg b/docs/src/userguide/concat.svg similarity index 100% rename from docs/iris/src/userguide/concat.svg rename to docs/src/userguide/concat.svg diff --git a/docs/iris/src/userguide/cube_diagram.dia b/docs/src/userguide/cube_diagram.dia similarity index 100% rename from docs/iris/src/userguide/cube_diagram.dia rename to docs/src/userguide/cube_diagram.dia diff --git a/docs/iris/src/userguide/cube_diagram.png b/docs/src/userguide/cube_diagram.png similarity index 100% rename from docs/iris/src/userguide/cube_diagram.png rename to docs/src/userguide/cube_diagram.png diff --git a/docs/iris/src/userguide/cube_maths.rst b/docs/src/userguide/cube_maths.rst similarity index 97% rename from docs/iris/src/userguide/cube_maths.rst rename to docs/src/userguide/cube_maths.rst index eebff53e624..d2d4d84b681 100644 --- a/docs/iris/src/userguide/cube_maths.rst +++ b/docs/src/userguide/cube_maths.rst @@ -1,7 +1,7 @@ .. _cube maths: ========== -Cube maths +Cube Maths ========== @@ -29,7 +29,7 @@ In order to reduce the amount of metadata which becomes inconsistent, fundamental arithmetic operations such as addition, subtraction, division and multiplication can be applied directly to any cube. -Calculating the difference between two cubes +Calculating the Difference Between Two Cubes -------------------------------------------- Let's load some air temperature which runs from 1860 to 2100:: @@ -77,7 +77,7 @@ but with the data representing their difference: .. 
_cube-maths_anomaly: -Calculating a cube anomaly +Calculating a Cube Anomaly -------------------------- In section :doc:`cube_statistics` we discussed how the dimensionality of a cube @@ -165,7 +165,7 @@ broadcasting behaviour:: >>> print(result.summary(True)) unknown / (K) (time: 240; latitude: 37; longitude: 49) -Combining multiple phenomena to form a new one +Combining Multiple Phenomena to Form a New One ---------------------------------------------- Combining cubes of potential-temperature and pressure we can calculate @@ -223,7 +223,7 @@ The result could now be plotted using the guidance provided in the .. _cube_maths_combining_units: -Combining units +Combining Units --------------- It should be noted that when combining cubes by multiplication, division or @@ -243,7 +243,7 @@ unit (if ``a`` had units ``'m2'`` then ``a ** 0.5`` would result in a cube with units ``'m'``). Iris inherits units from `cf_units `_ -which in turn inherits from `UDUNITS `_. +which in turn inherits from `UDUNITS `_. As well as the units UDUNITS provides, cf units also provides the units ``'no-unit'`` and ``'unknown'``. A unit of ``'no-unit'`` means that the associated data is not suitable for describing with a unit, cf units diff --git a/docs/iris/src/userguide/cube_statistics.rst b/docs/src/userguide/cube_statistics.rst similarity index 99% rename from docs/iris/src/userguide/cube_statistics.rst rename to docs/src/userguide/cube_statistics.rst index 310551c76f1..4eb016078e6 100644 --- a/docs/iris/src/userguide/cube_statistics.rst +++ b/docs/src/userguide/cube_statistics.rst @@ -1,12 +1,12 @@ .. _cube-statistics: =============== -Cube statistics +Cube Statistics =============== .. _cube-statistics-collapsing: -Collapsing entire data dimensions +Collapsing Entire Data Dimensions --------------------------------- .. testsetup:: @@ -100,7 +100,7 @@ in the gallery takes a zonal mean of an ``XYT`` cube by using the .. 
_cube-statistics-collapsing-average: -Area averaging +Area Averaging ^^^^^^^^^^^^^^ Some operators support additional keywords to the ``cube.collapsed`` method. @@ -152,14 +152,14 @@ including an example on taking a :ref:`global area-weighted mean .. _cube-statistics-aggregated-by: -Partially reducing data dimensions +Partially Reducing Data Dimensions ---------------------------------- Instead of completely collapsing a dimension, other methods can be applied to reduce or filter the number of data points of a particular dimension. -Aggregation of grouped data +Aggregation of Grouped Data ^^^^^^^^^^^^^^^^^^^^^^^^^^^ The :meth:`Cube.aggregated_by ` operation diff --git a/docs/iris/src/userguide/index.rst b/docs/src/userguide/index.rst similarity index 100% rename from docs/iris/src/userguide/index.rst rename to docs/src/userguide/index.rst diff --git a/docs/iris/src/userguide/interpolation_and_regridding.rst b/docs/src/userguide/interpolation_and_regridding.rst similarity index 99% rename from docs/iris/src/userguide/interpolation_and_regridding.rst rename to docs/src/userguide/interpolation_and_regridding.rst index ffed21a7f52..5a5a985ccb5 100644 --- a/docs/iris/src/userguide/interpolation_and_regridding.rst +++ b/docs/src/userguide/interpolation_and_regridding.rst @@ -8,7 +8,7 @@ warnings.simplefilter('ignore') ================================= -Cube interpolation and regridding +Cube Interpolation and Regridding ================================= Iris provides powerful cube-aware interpolation and regridding functionality, @@ -123,7 +123,7 @@ will be orthogonal: air_temperature / (K) (latitude: 13; longitude: 14) -Interpolating non-horizontal coordinates +Interpolating Non-Horizontal Coordinates ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Interpolation in Iris is not limited to horizontal-spatial coordinates - any @@ -195,7 +195,7 @@ For example, to mask values that lie beyond the range of the original data: .. 
_caching_an_interpolator: -Caching an interpolator +Caching an Interpolator ^^^^^^^^^^^^^^^^^^^^^^^ If you need to interpolate a cube on multiple sets of sample points you can @@ -305,7 +305,7 @@ cells have now become rectangular in a plate carrée (equirectangular) projectio The spatial grid of the resulting cube is really global, with a large proportion of the data being masked. -Area-weighted regridding +Area-Weighted Regridding ^^^^^^^^^^^^^^^^^^^^^^^^ It is often the case that a point-based regridding scheme (such as @@ -384,7 +384,7 @@ To visualise the above regrid, let's plot the original data, along with 3 distin .. _caching_a_regridder: -Caching a regridder +Caching a Regridder ^^^^^^^^^^^^^^^^^^^ If you need to regrid multiple cubes with a common source grid onto a common @@ -415,7 +415,7 @@ In each case ``result`` will be the input cube regridded to the grid defined by the target grid cube (in this case ``rotated_psl``) that we used to define the cached regridder. -Regridding lazy data +Regridding Lazy Data ^^^^^^^^^^^^^^^^^^^^ If you are working with large cubes, especially when you are regridding to a diff --git a/docs/iris/src/userguide/iris_cubes.rst b/docs/src/userguide/iris_cubes.rst similarity index 99% rename from docs/iris/src/userguide/iris_cubes.rst rename to docs/src/userguide/iris_cubes.rst index 5929c402f2f..de206486d3f 100644 --- a/docs/iris/src/userguide/iris_cubes.rst +++ b/docs/src/userguide/iris_cubes.rst @@ -1,7 +1,7 @@ .. _iris_data_structures: ==================== -Iris data structures +Iris Data Structures ==================== The top level object in Iris is called a cube. A cube contains data and metadata about a phenomenon. 
@@ -71,11 +71,11 @@ A cube consists of: * a list of coordinate "factories" used for deriving coordinates from the values of other coordinates in the cube -Cubes in practice +Cubes in Practice ----------------- -A simple cube example +A Simple Cube Example ===================== Suppose we have some gridded data which has 24 air temperature readings (in Kelvin) which is located at @@ -137,7 +137,7 @@ For example, it is possible to attach any of the following: a collection of "ensembles" (i.e. multiple model runs). -Printing a cube +Printing a Cube =============== Every Iris cube can be printed to screen as you will see later in the user guide. It is worth familiarising yourself with the diff --git a/docs/iris/src/userguide/loading_iris_cubes.rst b/docs/src/userguide/loading_iris_cubes.rst similarity index 99% rename from docs/iris/src/userguide/loading_iris_cubes.rst rename to docs/src/userguide/loading_iris_cubes.rst index 006a9194083..659c28420a6 100644 --- a/docs/iris/src/userguide/loading_iris_cubes.rst +++ b/docs/src/userguide/loading_iris_cubes.rst @@ -1,7 +1,7 @@ .. _loading_iris_cubes: =================== -Loading Iris cubes +Loading Iris Cubes =================== To load a single file into a **list** of Iris cubes @@ -116,7 +116,7 @@ This was the output discussed at the end of the :doc:`iris_cubes` section. appropriate column for each cube data dimension that they describe. -Loading multiple files +Loading Multiple Files ----------------------- To load more than one file into a list of cubes, a list of filenames can be @@ -142,7 +142,7 @@ star wildcards can be used:: The cubes returned will not necessarily be in the same order as the order of the filenames. -Lazy loading +Lazy Loading ------------ In fact when Iris loads data from most file types, it normally only reads the @@ -155,7 +155,7 @@ For more on the benefits, handling and uses of lazy data, see :doc:`Real and Laz .. 
_constrained-loading: -Constrained loading +Constrained Loading ----------------------- Given a large dataset, it is possible to restrict or constrain the load to match specific Iris cube metadata. @@ -261,7 +261,7 @@ then specific STASH codes can be filtered:: :class:`iris.Constraint` reference documentation. -Constraining a circular coordinate across its boundary +Constraining a Circular Coordinate Across Its Boundary ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Occasionally you may need to constrain your cube with a region that crosses the @@ -403,7 +403,7 @@ Notice how the dates printed are between the range specified in the ``st_swithun and that they span multiple years. -Strict loading +Strict Loading -------------- The :py:func:`iris.load_cube` and :py:func:`iris.load_cubes` functions are diff --git a/docs/iris/src/userguide/merge.png b/docs/src/userguide/merge.png similarity index 100% rename from docs/iris/src/userguide/merge.png rename to docs/src/userguide/merge.png diff --git a/docs/iris/src/userguide/merge.svg b/docs/src/userguide/merge.svg similarity index 100% rename from docs/iris/src/userguide/merge.svg rename to docs/src/userguide/merge.svg diff --git a/docs/iris/src/userguide/merge_and_concat.png b/docs/src/userguide/merge_and_concat.png similarity index 100% rename from docs/iris/src/userguide/merge_and_concat.png rename to docs/src/userguide/merge_and_concat.png diff --git a/docs/iris/src/userguide/merge_and_concat.rst b/docs/src/userguide/merge_and_concat.rst similarity index 99% rename from docs/iris/src/userguide/merge_and_concat.rst rename to docs/src/userguide/merge_and_concat.rst index 0d844ac403b..ffa36ccdebf 100644 --- a/docs/iris/src/userguide/merge_and_concat.rst +++ b/docs/src/userguide/merge_and_concat.rst @@ -1,7 +1,7 @@ .. 
_merge_and_concat: ===================== -Merge and concatenate +Merge and Concatenate ===================== We saw in the :doc:`loading_iris_cubes` chapter that Iris tries to load as few cubes as @@ -203,7 +203,7 @@ single cube. An example of fixing an issue like this can be found in the :ref:`merge_concat_common_issues` section. -Merge in Iris load +Merge in Iris Load ================== The CubeList's :meth:`~iris.cube.CubeList.merge` method is used internally @@ -365,7 +365,7 @@ single cube. An example of fixing an issue like this can be found in the .. _merge_concat_common_issues: -Common issues with merge and concatenate +Common Issues With Merge and Concatenate ---------------------------------------- The Iris algorithms that drive :meth:`~iris.cube.CubeList.merge` and @@ -529,7 +529,7 @@ Trying to merge the input cubes with duplicate cubes not allowed raises an error highlighting the presence of the duplicate cube. -**Single value coordinates** +**Single Value Coordinates** Coordinates containing only a single value can cause confusion when combining input cubes. 
Remember: diff --git a/docs/iris/src/userguide/merge_and_concat.svg b/docs/src/userguide/merge_and_concat.svg similarity index 100% rename from docs/iris/src/userguide/merge_and_concat.svg rename to docs/src/userguide/merge_and_concat.svg diff --git a/docs/iris/src/userguide/multi_array.png b/docs/src/userguide/multi_array.png similarity index 100% rename from docs/iris/src/userguide/multi_array.png rename to docs/src/userguide/multi_array.png diff --git a/docs/iris/src/userguide/multi_array.svg b/docs/src/userguide/multi_array.svg similarity index 100% rename from docs/iris/src/userguide/multi_array.svg rename to docs/src/userguide/multi_array.svg diff --git a/docs/iris/src/userguide/multi_array_to_cube.png b/docs/src/userguide/multi_array_to_cube.png similarity index 100% rename from docs/iris/src/userguide/multi_array_to_cube.png rename to docs/src/userguide/multi_array_to_cube.png diff --git a/docs/iris/src/userguide/multi_array_to_cube.svg b/docs/src/userguide/multi_array_to_cube.svg similarity index 100% rename from docs/iris/src/userguide/multi_array_to_cube.svg rename to docs/src/userguide/multi_array_to_cube.svg diff --git a/docs/iris/src/userguide/navigating_a_cube.rst b/docs/src/userguide/navigating_a_cube.rst similarity index 98% rename from docs/iris/src/userguide/navigating_a_cube.rst rename to docs/src/userguide/navigating_a_cube.rst index a7b7717ae3c..df18c032c14 100644 --- a/docs/iris/src/userguide/navigating_a_cube.rst +++ b/docs/src/userguide/navigating_a_cube.rst @@ -1,5 +1,5 @@ ================= -Navigating a cube +Navigating a Cube ================= .. testsetup:: @@ -15,7 +15,7 @@ Navigating a cube After loading any cube, you will want to investigate precisely what it contains. This section is all about accessing and manipulating the metadata contained within a cube. 
-Cube string representations +Cube String Representations --------------------------- We have already seen a basic string representation of a cube when printing: @@ -52,7 +52,7 @@ variable. In most cases it is reasonable to ignore anything starting with a "``_ dir(cube) help(cube) -Working with cubes +Working With Cubes ------------------ Every cube has a standard name, long name and units which are accessed with @@ -111,7 +111,7 @@ cube with the :attr:`Cube.cell_methods ` attribute: print(cube.cell_methods) -Accessing coordinates on the cube +Accessing Coordinates on the Cube --------------------------------- A cube's coordinates can be retrieved via :meth:`Cube.coords `. @@ -148,7 +148,7 @@ numpy array. If the coordinate has no bounds ``None`` will be returned:: print(type(coord.bounds)) -Adding metadata to a cube +Adding Metadata to a Cube ------------------------- We can add and remove coordinates via :func:`Cube.add_dim_coord`, @@ -177,7 +177,7 @@ We can add and remove coordinates via :func:`Cube.add_dim_coord`_ package in order to generate @@ -13,7 +13,7 @@ been extended within Iris to facilitate easy visualisation of a cube's data. *************************** -Matplotlib's pyplot basics +Matplotlib's Pyplot Basics *************************** A simple line plot can be created using the @@ -35,7 +35,7 @@ There are two modes of rendering within Matplotlib; **interactive** and **non-interactive**. -Interactive plot rendering +Interactive Plot Rendering ========================== The previous example was *non-interactive* as the figure is only rendered *after* the call to :py:func:`plt.show() `. 
@@ -84,7 +84,7 @@ so ensure that interactive mode is turned off with:: plt.interactive(False) -Saving a plot +Saving a Plot ============= The :py:func:`matplotlib.pyplot.savefig` function is similar to **plt.show()** @@ -113,7 +113,7 @@ Some of the formats which are supported by **plt.savefig**: ====== ====== ====================================================================== ****************** -Iris cube plotting +Iris Cube Plotting ****************** The Iris modules :py:mod:`iris.quickplot` and :py:mod:`iris.plot` extend the @@ -149,7 +149,7 @@ where appropriate. import iris.quickplot as qplt -Plotting 1-dimensional cubes +Plotting 1-Dimensional Cubes ============================ The simplest 1D plot is achieved with the :py:func:`iris.plot.plot` function. @@ -181,7 +181,7 @@ For example, the previous plot can be improved quickly by replacing -Multi-line plot +Multi-Line Plot --------------- A multi-lined (or over-plotted) plot, with a legend, can be achieved easily by @@ -209,13 +209,13 @@ the temperature at some latitude cross-sections. ``_. In order to run this example, you will need to copy the code into a file - and run it using ``python2.7 my_file.py``. + and run it using ``python my_file.py``. -Plotting 2-dimensional cubes +Plotting 2-Dimensional Cubes ============================ -Creating maps +Creating Maps ------------- Whenever a 2D plot is created using an :class:`iris.coord_systems.CoordSystem`, a cartopy :class:`~cartopy.mpl.GeoAxes` instance is created, which can be @@ -230,7 +230,7 @@ things. :meth:`cartopy's coastlines() `. 
-Cube contour +Cube Contour ------------ A simple contour plot of a cube can be created with either the :func:`iris.plot.contour` or :func:`iris.quickplot.contour` functions: @@ -239,7 +239,7 @@ A simple contour plot of a cube can be created with either the :include-source: -Cube filled contour +Cube Filled Contour ------------------- Similarly a filled contour plot of a cube can be created with the :func:`iris.plot.contourf` or :func:`iris.quickplot.contourf` functions: @@ -248,7 +248,7 @@ Similarly a filled contour plot of a cube can be created with the :include-source: -Cube block plot +Cube Block Plot --------------- In some situations the underlying coordinates are better represented with a continuous bounded coordinate, in which case a "block" plot may be more @@ -268,7 +268,7 @@ or :func:`iris.quickplot.pcolormesh`. .. _brewer-info: *********************** -Brewer colour palettes +Brewer Colour Palettes *********************** Iris includes colour specifications and designs developed by @@ -303,7 +303,7 @@ The following subset of Brewer palettes found at .. plot:: userguide/plotting_examples/brewer.py -Plotting with Brewer +Plotting With Brewer ==================== To plot a cube using a Brewer colour palette, simply select one of the Iris @@ -316,7 +316,7 @@ become available once :mod:`iris.plot` or :mod:`iris.quickplot` are imported. .. 
_brewer-cite: -Adding a citation +Adding a Citation ================= Citations can be easily added to a plot using the diff --git a/docs/iris/src/userguide/plotting_examples/1d_quickplot_simple.py b/docs/src/userguide/plotting_examples/1d_quickplot_simple.py similarity index 100% rename from docs/iris/src/userguide/plotting_examples/1d_quickplot_simple.py rename to docs/src/userguide/plotting_examples/1d_quickplot_simple.py diff --git a/docs/iris/src/userguide/plotting_examples/1d_simple.py b/docs/src/userguide/plotting_examples/1d_simple.py similarity index 100% rename from docs/iris/src/userguide/plotting_examples/1d_simple.py rename to docs/src/userguide/plotting_examples/1d_simple.py diff --git a/docs/iris/src/userguide/plotting_examples/1d_with_legend.py b/docs/src/userguide/plotting_examples/1d_with_legend.py similarity index 100% rename from docs/iris/src/userguide/plotting_examples/1d_with_legend.py rename to docs/src/userguide/plotting_examples/1d_with_legend.py diff --git a/docs/iris/src/userguide/plotting_examples/brewer.py b/docs/src/userguide/plotting_examples/brewer.py similarity index 100% rename from docs/iris/src/userguide/plotting_examples/brewer.py rename to docs/src/userguide/plotting_examples/brewer.py diff --git a/docs/iris/src/userguide/plotting_examples/cube_blockplot.py b/docs/src/userguide/plotting_examples/cube_blockplot.py similarity index 100% rename from docs/iris/src/userguide/plotting_examples/cube_blockplot.py rename to docs/src/userguide/plotting_examples/cube_blockplot.py diff --git a/docs/iris/src/userguide/plotting_examples/cube_brewer_cite_contourf.py b/docs/src/userguide/plotting_examples/cube_brewer_cite_contourf.py similarity index 100% rename from docs/iris/src/userguide/plotting_examples/cube_brewer_cite_contourf.py rename to docs/src/userguide/plotting_examples/cube_brewer_cite_contourf.py diff --git a/docs/iris/src/userguide/plotting_examples/cube_brewer_contourf.py 
b/docs/src/userguide/plotting_examples/cube_brewer_contourf.py similarity index 100% rename from docs/iris/src/userguide/plotting_examples/cube_brewer_contourf.py rename to docs/src/userguide/plotting_examples/cube_brewer_contourf.py diff --git a/docs/iris/src/userguide/plotting_examples/cube_contour.py b/docs/src/userguide/plotting_examples/cube_contour.py similarity index 100% rename from docs/iris/src/userguide/plotting_examples/cube_contour.py rename to docs/src/userguide/plotting_examples/cube_contour.py diff --git a/docs/iris/src/userguide/plotting_examples/cube_contourf.py b/docs/src/userguide/plotting_examples/cube_contourf.py similarity index 100% rename from docs/iris/src/userguide/plotting_examples/cube_contourf.py rename to docs/src/userguide/plotting_examples/cube_contourf.py diff --git a/docs/iris/src/userguide/real_and_lazy_data.rst b/docs/src/userguide/real_and_lazy_data.rst similarity index 98% rename from docs/iris/src/userguide/real_and_lazy_data.rst rename to docs/src/userguide/real_and_lazy_data.rst index 574ca4e1a0e..0bc18464579 100644 --- a/docs/iris/src/userguide/real_and_lazy_data.rst +++ b/docs/src/userguide/real_and_lazy_data.rst @@ -10,7 +10,7 @@ ================== -Real and lazy data +Real and Lazy Data ================== We have seen in the :doc:`iris_cubes` section of the user guide that @@ -21,7 +21,7 @@ In this section of the user guide we will look specifically at the concepts of real and lazy data as they apply to the cube and other data structures in Iris. -What is real and lazy data? +What is Real and Lazy Data? --------------------------- In Iris, we use the term **real data** to describe data arrays that are loaded @@ -97,7 +97,7 @@ In such cases, a required portion can be extracted and realised without calculat .. _when_real_data: -When does my data become real? +When Does My Data Become Real? 
------------------------------ Certain operations, such as cube indexing and statistics, can be @@ -134,7 +134,7 @@ You can also realise (and so load into memory) your cube's lazy data if you 'tou To 'touch' the data means directly accessing the data by calling ``cube.data``, as in the previous example. -Core data +Core Data ^^^^^^^^^ Cubes have the concept of "core data". This returns the cube's data in its @@ -225,7 +225,7 @@ coordinates' lazy points and bounds: Printing a lazy :class:`~iris.coords.AuxCoord` will realise its points and bounds arrays! -Dask processing options +Dask Processing Options ----------------------- Iris uses dask to provide lazy data arrays for both Iris cubes and coordinates, diff --git a/docs/iris/src/userguide/regridding_plots/interpolate_column.py b/docs/src/userguide/regridding_plots/interpolate_column.py similarity index 88% rename from docs/iris/src/userguide/regridding_plots/interpolate_column.py rename to docs/src/userguide/regridding_plots/interpolate_column.py index 273ef365ccb..4378ec98be1 100644 --- a/docs/iris/src/userguide/regridding_plots/interpolate_column.py +++ b/docs/src/userguide/regridding_plots/interpolate_column.py @@ -1,4 +1,3 @@ -import iris import iris.quickplot as qplt import iris.analysis import matplotlib.pyplot as plt @@ -12,8 +11,13 @@ # Interpolate the "perfect" linear interpolation. Really this is just # a high number of interpolation points, in this case 1000 of them. -altitude_points = [("altitude", np.linspace(400, 1250, 1000))] -scheme = iris.analysis.Linear(extrapolation_mode="mask") +altitude_points = [ + ( + "altitude", + np.linspace(min(alt_coord.points), max(alt_coord.points), 1000), + ) +] +scheme = iris.analysis.Linear() linear_column = column.interpolate(altitude_points, scheme) # Now interpolate the data onto 10 evenly spaced altitude levels, @@ -27,7 +31,6 @@ # Plot the black markers for the original data. 
qplt.plot( column, - column.coord("altitude"), marker="o", color="black", linestyle="", @@ -39,7 +42,6 @@ # Plot the gray line to display the linear interpolation. qplt.plot( linear_column, - linear_column.coord("altitude"), color="gray", label="Linear interpolation", zorder=0, @@ -48,7 +50,6 @@ # Plot the red markers for the new data. qplt.plot( new_column, - new_column.coord("altitude"), marker="D", color="red", linestyle="", diff --git a/docs/iris/src/userguide/regridding_plots/regridded_to_global.py b/docs/src/userguide/regridding_plots/regridded_to_global.py similarity index 100% rename from docs/iris/src/userguide/regridding_plots/regridded_to_global.py rename to docs/src/userguide/regridding_plots/regridded_to_global.py diff --git a/docs/iris/src/userguide/regridding_plots/regridded_to_global_area_weighted.py b/docs/src/userguide/regridding_plots/regridded_to_global_area_weighted.py similarity index 100% rename from docs/iris/src/userguide/regridding_plots/regridded_to_global_area_weighted.py rename to docs/src/userguide/regridding_plots/regridded_to_global_area_weighted.py diff --git a/docs/iris/src/userguide/regridding_plots/regridded_to_rotated.py b/docs/src/userguide/regridding_plots/regridded_to_rotated.py similarity index 100% rename from docs/iris/src/userguide/regridding_plots/regridded_to_rotated.py rename to docs/src/userguide/regridding_plots/regridded_to_rotated.py diff --git a/docs/iris/src/userguide/regridding_plots/regridding_plot.py b/docs/src/userguide/regridding_plots/regridding_plot.py similarity index 100% rename from docs/iris/src/userguide/regridding_plots/regridding_plot.py rename to docs/src/userguide/regridding_plots/regridding_plot.py diff --git a/docs/iris/src/userguide/saving_iris_cubes.rst b/docs/src/userguide/saving_iris_cubes.rst similarity index 95% rename from docs/iris/src/userguide/saving_iris_cubes.rst rename to docs/src/userguide/saving_iris_cubes.rst index cca8b44bd16..237ceb18b65 100644 --- 
a/docs/iris/src/userguide/saving_iris_cubes.rst +++ b/docs/src/userguide/saving_iris_cubes.rst @@ -1,12 +1,12 @@ .. _saving_iris_cubes: ================== -Saving Iris cubes +Saving Iris Cubes ================== Iris supports the saving of cubes and cube lists to: -* CF netCDF (version 1.6) +* CF netCDF (version 1.7) * GRIB edition 2 (if `iris-grib `_ is installed) * Met Office PP @@ -39,8 +39,8 @@ and the keyword argument `saver` is not required. attempting to overwrite an existing file. -Controlling the save process ------------------------------ +Controlling the Save Process +---------------------------- The :py:func:`iris.save` function passes all other keywords through to the saver function defined, or automatically set from the file extension. This enables saver specific functionality to be called. @@ -73,8 +73,8 @@ See for more details on supported arguments for the individual savers. -Customising the save process ------------------------------ +Customising the Save Process +---------------------------- When saving to GRIB or PP, the save process may be intercepted between the translation step and the file writing. This enables customisation of the output messages, based on Cube metadata if required, over and above the translations supplied by Iris. @@ -103,14 +103,14 @@ Similarly a PP field may need to be written out with a specific value for LBEXP. iris.fileformats.pp.save_fields(tweaked_fields(cubes[0]), '/tmp/app.pp') -netCDF -^^^^^^^ +NetCDF +^^^^^^ NetCDF is a flexible container for metadata and cube metadata is closely related to the CF for netCDF semantics. This means that cube metadata is well represented in netCDF files, closely resembling the in memory metadata representation. Thus there is no provision for similar save customisation functionality for netCDF saving, all customisations should be applied to the cube prior to saving to netCDF. 
-Bespoke saver --------------- +Bespoke Saver +------------- A bespoke saver may be written to support an alternative file format. This can be provided to the :py:func:`iris.save` function, enabling Iris to write to a different file format. Such a custom saver will need be written to meet the needs of the file format and to handle the metadata translation from cube metadata effectively. diff --git a/docs/iris/src/userguide/subsetting_a_cube.rst b/docs/src/userguide/subsetting_a_cube.rst similarity index 99% rename from docs/iris/src/userguide/subsetting_a_cube.rst rename to docs/src/userguide/subsetting_a_cube.rst index 5d9a560be9d..02cf1645a11 100644 --- a/docs/iris/src/userguide/subsetting_a_cube.rst +++ b/docs/src/userguide/subsetting_a_cube.rst @@ -1,7 +1,7 @@ .. _subsetting_a_cube: ================= -Subsetting a cube +Subsetting a Cube ================= The :doc:`loading_iris_cubes` section of the user guide showed how to load data into multidimensional Iris cubes. @@ -11,7 +11,7 @@ Iris provides several ways of reducing both the amount of data and/or the number In all cases **the subset of a valid cube is itself a valid cube**. -Cube extraction +Cube Extraction ^^^^^^^^^^^^^^^^ A subset of a cube can be "extracted" from a multi-dimensional cube in order to reduce its dimensionality: @@ -101,7 +101,7 @@ same way as loading with constraints: um_version: 7.3 -Cube iteration +Cube Iteration ^^^^^^^^^^^^^^^ It is not possible to directly iterate over an Iris cube. That is, you cannot use code such as ``for x in cube:``. However, you can iterate over cube slices, as this section details. @@ -152,7 +152,7 @@ slicing the 3 dimensional cube (15, 100, 100) by longitude (i starts at 0 and 15 cube using the slices method. -Cube indexing +Cube Indexing ^^^^^^^^^^^^^ In the same way that you would expect a numeric multidimensional array to be **indexed** to take a subset of your original array, you can **index** a Cube for the same purpose. 
diff --git a/docs/iris/src/whatsnew/1.0.rst b/docs/src/whatsnew/1.0.rst similarity index 97% rename from docs/iris/src/whatsnew/1.0.rst rename to docs/src/whatsnew/1.0.rst index 11d29320b68..b226dc609b8 100644 --- a/docs/iris/src/whatsnew/1.0.rst +++ b/docs/src/whatsnew/1.0.rst @@ -10,7 +10,7 @@ work. Following this release we plan to deliver significant performance improvements and additional features. -The role of 1.x +The Role of 1.x =============== The 1.x series of releases is intended to provide a relatively stable, @@ -58,7 +58,7 @@ A summary of the main features added with version 1.0: contain bounds. -CF-netCDF coordinate systems +CF-NetCDF Coordinate Systems ---------------------------- The coordinate systems in Iris are now defined by the CF-netCDF @@ -73,7 +73,7 @@ The coordinate systems available in Iris 1.0 and their corresponding Iris classes are: ================================================================================================================= ========================================= -CF name Iris class +CF Name Iris Class ================================================================================================================= ========================================= `Latitude-longitude `_ :class:`~iris.coord_systems.GeogCS` `Rotated pole `_ :class:`~iris.coord_systems.RotatedGeogCS` @@ -88,7 +88,7 @@ coordinate system used by the British .. _whats-new-cartopy: -Using Cartopy for mapping in matplotlib +Using Cartopy for Mapping in Matplotlib --------------------------------------- The underlying map drawing package has now been updated to use @@ -135,7 +135,7 @@ For more examples of what can be done with Cartopy, see the Iris gallery and `Cartopy's documentation `_. -Hybrid-pressure +Hybrid-Pressure --------------- With the introduction of the :class:`~iris.aux_factory.HybridPressureFactory` @@ -181,7 +181,7 @@ dealing with large numbers of netCDF files, or in long running processes. 
-Brewer colour palettes +Brewer Colour Palettes ---------------------- Iris includes a selection of carefully designed colour palettes produced @@ -207,7 +207,7 @@ To include a reference in a journal article or report please refer to in the citation guidance provided by Cynthia Brewer. -Metadata attributes +Metadata Attributes ------------------- Iris now stores "source" and "history" metadata in Cube attributes. @@ -241,7 +241,7 @@ Where previously it would have appeared as:: cube.add_aux_coord(src_coord) -New loading functions +New Loading Functions --------------------- The main functions for loading cubes are now: @@ -264,7 +264,7 @@ now use the :func:`iris.load_cube()` and :func:`iris.load_cubes()` functions instead. -Cube projection +Cube Projection --------------- Iris now has the ability to project a cube into a number of map projections. @@ -302,7 +302,7 @@ preserved. This function currently assumes global data and will if necessary extrapolate beyond the geographical extent of the source cube. -Incompatible changes +Incompatible Changes ==================== * The "source" and "history" metadata are now represented as Cube diff --git a/docs/iris/src/whatsnew/1.1.rst b/docs/src/whatsnew/1.1.rst similarity index 98% rename from docs/iris/src/whatsnew/1.1.rst rename to docs/src/whatsnew/1.1.rst index f2b0995fa04..86f0bb16fa4 100644 --- a/docs/iris/src/whatsnew/1.1.rst +++ b/docs/src/whatsnew/1.1.rst @@ -44,7 +44,7 @@ some notable improvements to netCDF/PP import. with product template 4.9. -Coordinate categorisation +Coordinate Categorisation ------------------------- An :func:`~iris.coord_categorisation.add_day_of_year` categorisation @@ -52,7 +52,7 @@ function has been added to the existing suite in :mod:`iris.coord_categorisation`. 
-Custom seasons +Custom Seasons ~~~~~~~~~~~~~~ The conventional seasonal categorisation functions have been @@ -87,7 +87,7 @@ This function adds a coordinate containing True/False values determined by membership of a single custom season. -Bugs fixed +Bugs Fixed ========== * PP export no longer attempts to set/overwrite the STASH code based on diff --git a/docs/iris/src/whatsnew/1.10.rst b/docs/src/whatsnew/1.10.rst similarity index 99% rename from docs/iris/src/whatsnew/1.10.rst rename to docs/src/whatsnew/1.10.rst index 3f51287fa18..92822087dda 100644 --- a/docs/iris/src/whatsnew/1.10.rst +++ b/docs/src/whatsnew/1.10.rst @@ -1,5 +1,5 @@ v1.10 (05 Sep 2016) -********************* +******************* This document explains the changes made to Iris for this release (:doc:`View all changes `.) @@ -137,7 +137,7 @@ Features attributes is now allowed. -Bugs fixed +Bugs Fixed ========== * Altered Cell Methods to display coordinate's standard_name rather than @@ -215,7 +215,7 @@ Bugs fixed thrown while trying to subset over a non-dimensional scalar coordinate. -Incompatible changes +Incompatible Changes ==================== * The source and target for diff --git a/docs/iris/src/whatsnew/1.11.rst b/docs/src/whatsnew/1.11.rst similarity index 99% rename from docs/iris/src/whatsnew/1.11.rst rename to docs/src/whatsnew/1.11.rst index e0d46d0f09f..356e6ec85b1 100644 --- a/docs/iris/src/whatsnew/1.11.rst +++ b/docs/src/whatsnew/1.11.rst @@ -16,7 +16,7 @@ Features * The coordinate system :class:`iris.coord_systems.LambertAzimuthalEqualArea` has been added with NetCDF saving support. 
-Bugs fixed +Bugs Fixed ========== * Fixed a floating point tolerance bug in diff --git a/docs/iris/src/whatsnew/1.12.rst b/docs/src/whatsnew/1.12.rst similarity index 100% rename from docs/iris/src/whatsnew/1.12.rst rename to docs/src/whatsnew/1.12.rst diff --git a/docs/iris/src/whatsnew/1.13.rst b/docs/src/whatsnew/1.13.rst similarity index 98% rename from docs/iris/src/whatsnew/1.13.rst rename to docs/src/whatsnew/1.13.rst index 2d3b3ffce54..028c2985057 100644 --- a/docs/iris/src/whatsnew/1.13.rst +++ b/docs/src/whatsnew/1.13.rst @@ -1,5 +1,5 @@ v1.13 (17 May 2017) -************************* +******************* This document explains the changes made to Iris for this release (:doc:`View all changes `.) @@ -17,7 +17,7 @@ Features :meth:`iris.cube.share_data` flag. -Bug fixes +Bug Fixes ========= * The bounds are now set correctly on the longitude coordinate if a zonal mean diff --git a/docs/iris/src/whatsnew/1.2.rst b/docs/src/whatsnew/1.2.rst similarity index 98% rename from docs/iris/src/whatsnew/1.2.rst rename to docs/src/whatsnew/1.2.rst index d4bb863a3b4..dce0b6dc042 100644 --- a/docs/iris/src/whatsnew/1.2.rst +++ b/docs/src/whatsnew/1.2.rst @@ -44,7 +44,7 @@ Features :class:`~iris.cube.Cube`. -Bugs fixed +Bugs Fixed ========== * The GRIB hindcast interpretation of negative forecast times can be enabled @@ -54,7 +54,7 @@ Bugs fixed coordinates. -Incompatible changes +Incompatible Changes ==================== * The deprecated :attr:`iris.cube.Cube.unit` and :attr:`iris.coords.Coord.unit` diff --git a/docs/iris/src/whatsnew/1.3.rst b/docs/src/whatsnew/1.3.rst similarity index 97% rename from docs/iris/src/whatsnew/1.3.rst rename to docs/src/whatsnew/1.3.rst index 9a2ac2eba1f..beaa594ab54 100644 --- a/docs/iris/src/whatsnew/1.3.rst +++ b/docs/src/whatsnew/1.3.rst @@ -30,7 +30,7 @@ Features .. _whats-new-abf: -Loading ABF/ABL files +Loading ABF/ABL Files --------------------- Support for the ABF and ABL file formats (as @@ -51,7 +51,7 @@ For example:: .. 
_whats-new-cf-profile: -Customised CF profiles +Customised CF Profiles ---------------------- Iris now provides hooks in the CF-netCDF export process to allow @@ -74,7 +74,7 @@ For further implementation details see ``iris/fileformats/netcdf.py``. .. _whats-new-concat: -Cube concatenation +Cube Concatenation ------------------ Iris now provides initial support for concatenating Cubes along one or @@ -101,7 +101,7 @@ combine these into a single Cube as follows:: As this is an experimental feature, your feedback is especially welcome. -Bugs fixed +Bugs Fixed ========== * Printing a Cube now supports Unicode attribute values. @@ -123,7 +123,7 @@ Deprecations naming conventions. ====================================== =========================================== - Deprecated property/method New method + Deprecated Property/Method New Method ====================================== =========================================== :meth:`~iris.unit.Unit.convertible()` :meth:`~iris.unit.Unit.is_convertible()` :attr:`~iris.unit.Unit.dimensionless` :meth:`~iris.unit.Unit.is_dimensionless()` diff --git a/docs/iris/src/whatsnew/1.4.rst b/docs/src/whatsnew/1.4.rst similarity index 93% rename from docs/iris/src/whatsnew/1.4.rst rename to docs/src/whatsnew/1.4.rst index 29f2079af8c..858f985ec6e 100644 --- a/docs/iris/src/whatsnew/1.4.rst +++ b/docs/src/whatsnew/1.4.rst @@ -61,7 +61,7 @@ Features .. _OPeNDAP: http://www.opendap.org/about .. _exp-regrid: -Experimental regridding enhancements +Experimental Regridding Enhancements ------------------------------------ Bilinear, area-weighted and area-conservative regridding functions are now @@ -72,7 +72,7 @@ development. 
In the meantime: -Bilinear rectilinear regridding +Bilinear Rectilinear Regridding ------------------------------- :func:`~iris.experimental.regrid.regrid_bilinear_rectilinear_src_and_grid` @@ -85,7 +85,7 @@ For example:: regridded_cube = regrid_bilinear_rectilinear_src_and_grid(source_cube, target_grid_cube) -Area-weighted regridding +Area-Weighted Regridding ------------------------ :func:`~iris.experimental.regrid.regrid_area_weighted_rectilinear_src_and_grid` @@ -98,7 +98,7 @@ For example:: regridded_cube = regrid_area_weighted(source_cube, target_grid_cube) -Area-conservative regridding +Area-Conservative Regridding ---------------------------- :func:`~iris.experimental.regrid_conservative.regrid_conservative_via_esmpy` @@ -113,7 +113,7 @@ For example:: .. _iris-pandas: -Iris-Pandas interoperability +Iris-Pandas Interoperability ---------------------------- Conversion to and from Pandas Series_ and DataFrames_ is now available. @@ -125,7 +125,7 @@ See :mod:`iris.pandas` for more details. .. _load-opendap: -Load cubes from the internet via OPeNDAP +Load Cubes From the Internet via OPeNDAP ---------------------------------------- Cubes can now be loaded directly from the internet, via OPeNDAP_. @@ -137,7 +137,7 @@ For example:: .. _geotiff_export: -GeoTiff export +GeoTiff Export -------------- With this experimental feature, two dimensional cubes can now be exported to @@ -155,7 +155,7 @@ For example:: .. _cube-merge-update: -Cube merge update +Cube Merge Update ----------------- Cube merging now favours numerical coordinates over string coordinates @@ -167,7 +167,7 @@ dimensions"*. .. _season-year-name: -Unambiguous season year naming +Unambiguous Season Year Naming ------------------------------ The default names of categorisation coordinates are now less ambiguous. @@ -178,7 +178,7 @@ For example, :func:`~iris.coord_categorisation.add_month_number` and .. 
_grib-novert: -Cubes with no vertical coord can now be exported to GRIB +Cubes With no Vertical Coord can now be Exported to GRIB -------------------------------------------------------- Iris can now export cubes with no vertical coord to GRIB. @@ -188,7 +188,7 @@ https://github.com/SciTools/iris/issues/519. .. _simple_cfg: -Simplified resource configuration +Simplified Resource Configuration --------------------------------- A new configuration variable called :data:`iris.config.TEST_DATA_DIR` @@ -202,7 +202,7 @@ be set by adding a ``test_data_dir`` entry to the ``Resources`` section of .. _grib_params: -Extended GRIB parameter translation +Extended GRIB Parameter Translation ----------------------------------- - More GRIB2 params are recognised on input. @@ -213,7 +213,7 @@ Extended GRIB parameter translation .. _one-d-linear: -One dimensional linear interpolation fix +One dimensional Linear Interpolation Fix ---------------------------------------- :func:`~iris.analysis.interpolate.linear` can now extrapolate from a single @@ -232,7 +232,7 @@ to cause the loss of coordinate metadata when calculating the curl or the derivative of a cube has been fixed. -Incompatible changes +Incompatible Changes ==================== * As part of simplifying the mechanism for accessing test data, diff --git a/docs/iris/src/whatsnew/1.5.rst b/docs/src/whatsnew/1.5.rst similarity index 99% rename from docs/iris/src/whatsnew/1.5.rst rename to docs/src/whatsnew/1.5.rst index ea7965fe155..72bdbac480f 100644 --- a/docs/iris/src/whatsnew/1.5.rst +++ b/docs/src/whatsnew/1.5.rst @@ -125,7 +125,7 @@ Features systems and mapping 0 to 360 longitudes to the -180 to 180 range. 
-Bugs fixed +Bugs Fixed ========== * NetCDF error handling on save has been extended to capture file path and diff --git a/docs/iris/src/whatsnew/1.6.rst b/docs/src/whatsnew/1.6.rst similarity index 97% rename from docs/iris/src/whatsnew/1.6.rst rename to docs/src/whatsnew/1.6.rst index 3855d714794..8b0205b86f7 100644 --- a/docs/iris/src/whatsnew/1.6.rst +++ b/docs/src/whatsnew/1.6.rst @@ -146,7 +146,7 @@ Features .. _caching: -A new utility function to assist with caching +A New Utility Function to Assist With Caching --------------------------------------------- To assist with management of caching results to file, the new utility function :func:`iris.util.file_is_newer_than` may be used to easily determine whether @@ -173,7 +173,7 @@ consuming processing, or to reap the benefit of fast-loading a pickled cube. .. _rms: -The RMS aggregator supports weights +The RMS Aggregator Supports Weights ----------------------------------- The :data:`iris.analysis.RMS` aggregator has been extended to allow the use of @@ -189,7 +189,7 @@ For example, an RMS weighted cube collapse is performed as follows: .. _equalise: -Equalise cube attributes +Equalise Cube Attributes ------------------------ To assist with :class:`iris.cube.Cube` merging, the new experimental in-place @@ -202,7 +202,7 @@ have the same attributes. .. _tolerance: -Masking a collapsed result by missing-data tolerance +Masking a Collapsed Result by Missing-Data Tolerance ---------------------------------------------------- The result from collapsing masked cube data may now be completely @@ -216,7 +216,7 @@ less than or equal to the provided tolerance. .. _promote: -Promote a scalar coordinate +Promote a Scalar Coordinate --------------------------- The new utility function :func:`iris.util.new_axis` creates a new cube with @@ -229,7 +229,7 @@ Note that, this function will load the data payload of the cube. .. 
_peak: -A new PEAK aggregator providing spline interpolation +A New PEAK Aggregator Providing Spline Interpolation ---------------------------------------------------- The new :data:`iris.analysis.PEAK` aggregator calculates the global peak @@ -244,7 +244,7 @@ For example, to calculate the peak time: collapsed_cube = cube.collapsed('time', PEAK) -Bugs fixed +Bugs Fixed ========== * :meth:`iris.cube.Cube.rolling_window` has been extended to support masked @@ -283,7 +283,7 @@ Bugs fixed * Exception no longer raised for any ellipsoid definition in nimrod loading. -Incompatible changes +Incompatible Changes ==================== * The experimental 'concatenate' function is now a method of a @@ -312,7 +312,7 @@ Incompatible changes been removed. ====================================== =========================================== - Removed property/method New method + Removed Property/Method New Method ====================================== =========================================== :meth:`~iris.unit.Unit.convertible()` :meth:`~iris.unit.Unit.is_convertible()` :attr:`~iris.unit.Unit.dimensionless` :meth:`~iris.unit.Unit.is_dimensionless()` @@ -335,7 +335,7 @@ Incompatible changes removed. 
=============================================================== ======================================================= - Removed function New function + Removed Function New Function =============================================================== ======================================================= :func:`~iris.coord_categorisation.add_custom_season` :func:`~iris.coord_categorisation.add_season` :func:`~iris.coord_categorisation.add_custom_season_number` :func:`~iris.coord_categorisation.add_season_number` diff --git a/docs/iris/src/whatsnew/1.7.rst b/docs/src/whatsnew/1.7.rst similarity index 99% rename from docs/iris/src/whatsnew/1.7.rst rename to docs/src/whatsnew/1.7.rst index f6e818fedf6..44ebe9ec601 100644 --- a/docs/iris/src/whatsnew/1.7.rst +++ b/docs/src/whatsnew/1.7.rst @@ -1,5 +1,5 @@ v1.7 (04 Jul 2014) -******************** +****************** This document explains the changes made to Iris for this release (:doc:`View all changes `.) @@ -196,7 +196,7 @@ Features * A speed improvement when loading PP or FF data and constraining on STASH code. -Bugs fixed +Bugs Fixed ========== * Data containing more than one reference cube for constructing hybrid height @@ -282,7 +282,7 @@ v1.7.4 (15 Apr 2015) create LambertConformal coordinate systems with Cartopy >= 0.12. -Incompatible changes +Incompatible Changes ==================== * Saving a cube with a STASH attribute to NetCDF now produces a variable diff --git a/docs/iris/src/whatsnew/1.8.rst b/docs/src/whatsnew/1.8.rst similarity index 99% rename from docs/iris/src/whatsnew/1.8.rst rename to docs/src/whatsnew/1.8.rst index 579d4d20c58..0e327b4f5aa 100644 --- a/docs/iris/src/whatsnew/1.8.rst +++ b/docs/src/whatsnew/1.8.rst @@ -1,5 +1,5 @@ v1.8 (14 Apr 2015) -******************** +****************** This document explains the changes made to Iris for this release (:doc:`View all changes `.) @@ -151,7 +151,7 @@ Features "iris.experimental.regrid.regrid_bilinear_rectilinear_src_and_grid". 
-Bugs fixed +Bugs Fixed ========== * Fix in netCDF loader to correctly determine whether the longitude coordinate diff --git a/docs/iris/src/whatsnew/1.9.rst b/docs/src/whatsnew/1.9.rst similarity index 99% rename from docs/iris/src/whatsnew/1.9.rst rename to docs/src/whatsnew/1.9.rst index c9d91bf33cb..9829d8ff3b2 100644 --- a/docs/iris/src/whatsnew/1.9.rst +++ b/docs/src/whatsnew/1.9.rst @@ -1,5 +1,5 @@ v1.9 (10 Dec 2015) -******************** +****************** This document explains the changes made to Iris for this release (:doc:`View all changes `.) @@ -93,7 +93,7 @@ Features read Fieldsfile data after the original :class:`iris.experimental.um.FieldsFileVariant` has been closed. -Bugs fixed +Bugs Fixed ========== * Fixed a bug in :meth:`iris.unit.Unit.convert` @@ -170,7 +170,7 @@ v1.9.2 (28 Jan 2016) * Fixed a bug regarding unsuccessful dot import. -Incompatible changes +Incompatible Changes ==================== * GRIB message/file reading and writing may not be available for Python 3 due diff --git a/docs/iris/src/whatsnew/2.0.rst b/docs/src/whatsnew/2.0.rst similarity index 99% rename from docs/iris/src/whatsnew/2.0.rst rename to docs/src/whatsnew/2.0.rst index fbd012dd1f3..400a395e907 100644 --- a/docs/iris/src/whatsnew/2.0.rst +++ b/docs/src/whatsnew/2.0.rst @@ -60,7 +60,7 @@ Features respectively. -The :data:`iris.FUTURE` has arrived! +The :data:`iris.FUTURE` has Arrived! ------------------------------------ Throughout version 1 of Iris a set of toggles in @@ -111,7 +111,7 @@ all existing toggles in :attr:`iris.FUTURE` now default to :data:`True`. off is now deprecated. 
-Bugs fixed +Bugs Fixed ========== * Indexing or slicing an :class:`~iris.coords.AuxCoord` coordinate will return a coordinate with diff --git a/docs/iris/src/whatsnew/2.1.rst b/docs/src/whatsnew/2.1.rst similarity index 98% rename from docs/iris/src/whatsnew/2.1.rst rename to docs/src/whatsnew/2.1.rst index ef03f023b2c..18c562d3da0 100644 --- a/docs/iris/src/whatsnew/2.1.rst +++ b/docs/src/whatsnew/2.1.rst @@ -43,7 +43,7 @@ Features the ``standard_parallel`` keyword argument (:pull:`3041`). -Bugs fixed +Bugs Fixed ========== * All var names being written to NetCDF are now CF compliant. @@ -59,7 +59,7 @@ Bugs fixed ``axes`` keyword (:pull:`3010`). -Incompatible changes +Incompatible Changes ==================== * The deprecated :mod:`iris.experimental.um` was removed. @@ -94,4 +94,4 @@ Internal * Iris now requires version 2 of Matplotlib, and ``>=1.14`` of NumPy. Full requirements can be seen in the `requirements `_ - directory of the Iris' the source. \ No newline at end of file + directory of the Iris' the source. diff --git a/docs/iris/src/whatsnew/2.2.rst b/docs/src/whatsnew/2.2.rst similarity index 99% rename from docs/iris/src/whatsnew/2.2.rst rename to docs/src/whatsnew/2.2.rst index 48280895fed..a1f48f962b2 100644 --- a/docs/iris/src/whatsnew/2.2.rst +++ b/docs/src/whatsnew/2.2.rst @@ -66,7 +66,7 @@ Features a NaN-tolerant array comparison. -Bugs fixed +Bugs Fixed ========== * The bug has been fixed that prevented printing time coordinates with bounds diff --git a/docs/iris/src/whatsnew/2.3.rst b/docs/src/whatsnew/2.3.rst similarity index 99% rename from docs/iris/src/whatsnew/2.3.rst rename to docs/src/whatsnew/2.3.rst index 5997a7f4dc1..693b67efbac 100644 --- a/docs/iris/src/whatsnew/2.3.rst +++ b/docs/src/whatsnew/2.3.rst @@ -147,7 +147,7 @@ Features `metarelate/metOcean commit 448f2ef, 2019-11-29 `_ -Bugs fixed +Bugs Fixed ========== * Cube equality of boolean data is now handled correctly. 
@@ -238,7 +238,7 @@ Documentation ============= * Adopted a - `new colour logo for Iris `_ + `new colour logo for Iris `_ * Added a gallery example showing how to concatenate NEMO ocean model data, see :ref:`sphx_glr_generated_gallery_oceanography_plot_load_nemo.py`. diff --git a/docs/iris/src/whatsnew/2.4.rst b/docs/src/whatsnew/2.4.rst similarity index 99% rename from docs/iris/src/whatsnew/2.4.rst rename to docs/src/whatsnew/2.4.rst index c62e84c1296..0e271389b5b 100644 --- a/docs/iris/src/whatsnew/2.4.rst +++ b/docs/src/whatsnew/2.4.rst @@ -47,7 +47,7 @@ Features ``STASH`` from the attributes dictionary of a :class:`~iris.cube.Cube`. -Bugs fixed +Bugs Fixed ========== * Fixed a problem which was causing file loads to fetch *all* field data diff --git a/docs/src/whatsnew/3.0.1.rst b/docs/src/whatsnew/3.0.1.rst new file mode 100644 index 00000000000..163fe4ff3e6 --- /dev/null +++ b/docs/src/whatsnew/3.0.1.rst @@ -0,0 +1,522 @@ +.. include:: ../common_links.inc + +v3.0.1 (27 Jan 2021) +******************** + +This document explains the changes made to Iris for this release +(:doc:`View all changes `.) + + +.. dropdown:: :opticon:`alert` v3.0.1 Patches + :container: + shadow + :title: text-primary text-center font-weight-bold + :body: bg-light + :animate: fade-in + :open: + + The patches included in this release include: + + 💼 **Internal** + + #. `@bjlittle`_ gracefully promote formula terms within :mod:`~iris.aux_factory` that have ``units`` of ``unknown`` + to ``units`` of ``1`` (dimensionless), where the formula term **must** have dimensionless ``units``. Without this + graceful treatment of ``units`` the resulting :class:`~iris.cube.Cube` will **not** contain the expected auxiliary + factory, and the associated derived coordinate will be missing. (:pull:`3965`) + + +.. 
dropdown:: :opticon:`report` Release Highlights + :container: + shadow + :title: text-primary text-center font-weight-bold + :body: bg-light + :animate: fade-in + :open: + + The highlights for this major release of Iris include: + + * We've finally dropped support for ``Python 2``, so welcome to ``Iris 3`` + and ``Python 3``! + * We've extended our coverage of the `CF Conventions and Metadata`_ by + introducing support for `CF Ancillary Data`_ and `Quality Flags`_, + * Lazy regridding is now available for several regridding schemes, + * Managing and manipulating metadata within Iris is now easier and more + consistent thanks to the introduction of a new common metadata API, + * :ref:`Cube arithmetic ` has been significantly improved with + regards to extended broadcasting, auto-transposition and a more lenient + behaviour towards handling metadata and coordinates, + * Our :ref:`documentation ` has been refreshed, + restructured, revitalised and rehosted on `readthedocs`_, + * It's now easier than ever to :ref:`install Iris ` + as a user or a developer, and the newly revamped developers guide walks + you though how you can :ref:`get involved ` + and contribute to Iris, + * Also, this is a major release of Iris, so please be aware of the + :ref:`incompatible changes ` and + :ref:`deprecations `. + + And finally, get in touch with us on `GitHub`_ if you have any issues or + feature requests for improving Iris. Enjoy! + + +📢 Announcements +================ + +#. Congratulations to `@bouweandela`_, `@jvegasbsc`_, and `@zklaus`_ who + recently became Iris core developers. They bring a wealth of expertise to the + team, and are using Iris to underpin `ESMValTool`_ - "*A community diagnostic + and performance metrics tool for routine evaluation of Earth system models + in CMIP*". Welcome aboard! 🎉 + +#. Congratulations also goes to `@jonseddon`_ who recently became an Iris core + developer. We look forward to seeing more of your awesome contributions! 
🎉 + + +✨ Features +=========== + +#. `@MoseleyS`_ greatly enhanced the :mod:`~iris.fileformats.nimrod` + module to provide richer meta-data translation when loading ``Nimrod`` data + into cubes. This covers most known operational use-cases. (:pull:`3647`) + +#. `@stephenworsley`_ improved the handling of + :class:`iris.coords.CellMeasure`\ s in the :class:`~iris.cube.Cube` + statistical operations :meth:`~iris.cube.Cube.collapsed`, + :meth:`~iris.cube.Cube.aggregated_by` and + :meth:`~iris.cube.Cube.rolling_window`. These previously removed every + :class:`~iris.coords.CellMeasure` attached to the cube. Now, a + :class:`~iris.coords.CellMeasure` will only be removed if it is associated + with an axis over which the statistic is being run. (:pull:`3549`) + +#. `@stephenworsley`_, `@pp-mo`_ and `@abooton`_ added support for + `CF Ancillary Data`_ variables. These are created as + :class:`iris.coords.AncillaryVariable`, and appear as components of cubes + much like :class:`~iris.coords.AuxCoord`\ s, with the new + :class:`~iris.cube.Cube` methods + :meth:`~iris.cube.Cube.add_ancillary_variable`, + :meth:`~iris.cube.Cube.remove_ancillary_variable`, + :meth:`~iris.cube.Cube.ancillary_variable`, + :meth:`~iris.cube.Cube.ancillary_variables` and + :meth:`~iris.cube.Cube.ancillary_variable_dims`. + They are loaded from and saved to NetCDF-CF files. Special support for + `Quality Flags`_ is also provided, to ensure they load and save with + appropriate units. (:pull:`3800`) + +#. `@bouweandela`_ implemented lazy regridding for the + :class:`~iris.analysis.Linear`, :class:`~iris.analysis.Nearest`, and + :class:`~iris.analysis.AreaWeighted` regridding schemes. (:pull:`3701`) + +#. `@bjlittle`_ added `logging`_ support within :mod:`iris.analysis.maths`, + :mod:`iris.common.metadata`, and :mod:`iris.common.resolve`. Each module + defines a :class:`logging.Logger` instance called ``logger`` with a default + ``level`` of ``INFO``. 
To enable ``DEBUG`` logging use + ``logger.setLevel("DEBUG")``. (:pull:`3785`) + +#. `@bjlittle`_ added the :mod:`iris.common.resolve` module, which provides + infrastructure to support the analysis, identification and combination + of metadata common between two :class:`~iris.cube.Cube` operands into a + single resultant :class:`~iris.cube.Cube` that will be auto-transposed, + and with the appropriate broadcast shape. (:pull:`3785`) + +#. `@bjlittle`_ added the :ref:`common metadata API `, which provides + a unified treatment of metadata across Iris, and allows users to easily + manage and manipulate their metadata in a consistent way. (:pull:`3785`) + +#. `@bjlittle`_ added :ref:`lenient metadata ` support, to + allow users to control **strict** or **lenient** metadata equivalence, + difference and combination. (:pull:`3785`) + +#. `@bjlittle`_ added :ref:`lenient cube maths ` support and + resolved several long standing major issues with cube arithmetic regarding + a more robust treatment of cube broadcasting, cube dimension auto-transposition, + and preservation of common metadata and coordinates during cube math operations. + Resolves :issue:`1887`, :issue:`2765`, and :issue:`3478`. (:pull:`3785`) + +#. `@pp-mo`_ and `@TomekTrzeciak`_ enhanced :meth:`~iris.cube.Cube.collapse` to allow a 1-D weights array when + collapsing over a single dimension. + Previously, the weights had to be the same shape as the whole cube, which could cost a lot of memory in some cases. + The 1-D form is supported by most weighted array statistics (such as :meth:`np.average`), so this now works + with the corresponding Iris schemes (in that case, :const:`~iris.analysis.MEAN`). (:pull:`3943`) + + +🐛 Bugs Fixed +============= + +#. `@stephenworsley`_ fixed :meth:`~iris.cube.Cube.remove_coord` to now also + remove derived coordinates by removing aux_factories. (:pull:`3641`) + +#. 
`@jonseddon`_ fixed ``isinstance(cube, collections.Iterable)`` to now behave + as expected if a :class:`~iris.cube.Cube` is iterated over, while also + ensuring that ``TypeError`` is still raised. (Fixed by setting the + ``__iter__()`` method in :class:`~iris.cube.Cube` to ``None``). + (:pull:`3656`) + +#. `@stephenworsley`_ enabled cube concatenation along an axis shared by cell + measures; these cell measures are now concatenated together in the resulting + cube. Such a scenario would previously cause concatenation to inappropriately + fail. (:pull:`3566`) + +#. `@stephenworsley`_ newly included :class:`~iris.coords.CellMeasure`\ s in + :class:`~iris.cube.Cube` copy operations. Previously copying a + :class:`~iris.cube.Cube` would ignore any attached + :class:`~iris.coords.CellMeasure`. (:pull:`3546`) + +#. `@bjlittle`_ set a :class:`~iris.coords.CellMeasure`'s + ``measure`` attribute to have a default value of ``area``. + Previously, the ``measure`` was provided as a keyword argument to + :class:`~iris.coords.CellMeasure` with a default value of ``None``, which + caused a ``TypeError`` when no ``measure`` was provided, since ``area`` or + ``volume`` are the only accepted values. (:pull:`3533`) + +#. `@trexfeathers`_ set **all** plot types in :mod:`iris.plot` to now use + `matplotlib.dates.date2num`_ to format date/time coordinates for use on a plot + axis (previously :meth:`~iris.plot.pcolor` and :meth:`~iris.plot.pcolormesh` + did not include this behaviour). (:pull:`3762`) + +#. `@trexfeathers`_ changed date/time axis labels in :mod:`iris.quickplot` to + now **always** be based on the ``epoch`` used in `matplotlib.dates.date2num`_ + (previously would take the unit from a time coordinate, if present, even + though the coordinate's value had been changed via ``date2num``). + (:pull:`3762`) + +#. `@pp-mo`_ newly included attributes of cell measures in NETCDF-CF + file loading; they were previously being discarded. 
They are now available on + the :class:`~iris.coords.CellMeasure` in the loaded :class:`~iris.cube.Cube`. + (:pull:`3800`) + +#. `@pp-mo`_ fixed the netcdf loader to now handle any grid-mapping + variables with missing ``false_easting`` and ``false_northing`` properties, + which was previously failing for some coordinate systems. See :issue:`3629`. + (:pull:`3804`) + +#. `@stephenworsley`_ changed the way tick labels are assigned from string coords. + Previously, the first tick label would occasionally be duplicated. This also + removes the use of Matplotlib's deprecated ``IndexFormatter``. (:pull:`3857`) + +#. `@znicholls`_ fixed :meth:`~iris.quickplot._title` to only check + ``units.is_time_reference`` if the ``units`` symbol is not used. (:pull:`3902`) + +#. `@rcomer`_ fixed a bug whereby numpy array type attributes on a cube's + coordinates could prevent printing it. See :issue:`3921`. (:pull:`3922`) + +.. _whatsnew 3.0.1 changes: + +💣 Incompatible Changes +======================= + +#. `@pp-mo`_ rationalised :class:`~iris.cube.CubeList` extraction + methods: + + The former method ``iris.cube.CubeList.extract_strict``, and the ``strict`` + keyword of the :meth:`~iris.cube.CubeList.extract` method have been removed, + and are replaced by the new routines :meth:`~iris.cube.CubeList.extract_cube` + and :meth:`~iris.cube.CubeList.extract_cubes`. + The new routines perform the same operation, but in a style more like other + ``Iris`` functions such as :meth:`~iris.load_cube` and :meth:`~iris.load_cubes`. + Unlike ``strict`` extraction, the type of return value is now completely + consistent : :meth:`~iris.cube.CubeList.extract_cube` always returns a + :class:`~iris.cube.Cube`, and :meth:`~iris.cube.CubeList.extract_cubes` + always returns an :class:`iris.cube.CubeList` of a length equal to the + number of constraints. (:pull:`3715`) + +#. `@pp-mo`_ removed the former function + ``iris.analysis.coord_comparison``. (:pull:`3562`) + +#. 
`@bjlittle`_ moved the + :func:`iris.experimental.equalise_cubes.equalise_attributes` function from + the :mod:`iris.experimental` module into the :mod:`iris.util` module. Please + use the :func:`iris.util.equalise_attributes` function instead. + (:pull:`3527`) + +#. `@bjlittle`_ removed the module ``iris.experimental.concatenate``. In + ``v1.6.0`` the experimental ``concatenate`` functionality was moved to the + :meth:`iris.cube.CubeList.concatenate` method. Since then, calling the + :func:`iris.experimental.concatenate.concatenate` function raised an + exception. (:pull:`3523`) + +#. `@stephenworsley`_ changed the default units of :class:`~iris.coords.DimCoord` + and :class:`~iris.coords.AuxCoord` from `"1"` to `"unknown"`. (:pull:`3795`) + +#. `@stephenworsley`_ changed Iris objects loaded from NetCDF-CF files to have + ``units='unknown'`` where the corresponding NetCDF variable has no ``units`` + property. Previously these cases defaulted to ``units='1'``. + This affects loading of coordinates whose file variable has no "units" + attribute (not valid, under `CF units rules`_): These will now have units + of `"unknown"`, rather than `"1"`, which **may prevent the creation of + a hybrid vertical coordinate**. While these cases used to "work", this was + never really correct behaviour. (:pull:`3795`) + +#. `@SimonPeatman`_ added attribute ``var_name`` to coordinates created by the + :func:`iris.analysis.trajectory.interpolate` function. This prevents + duplicate coordinate errors in certain circumstances. (:pull:`3718`) + +#. `@bjlittle`_ aligned the :func:`iris.analysis.maths.apply_ufunc` with the + rest of the :mod:`iris.analysis.maths` API by changing its keyword argument + from ``other_cube`` to ``other``. (:pull:`3785`) + +#. `@bjlittle`_ changed the :meth:`iris.analysis.maths.IFunc.__call__` to ignore + any surplus ``other`` keyword argument for a ``data_func`` that requires + **only one** argument. 
This aligns the behaviour of + :meth:`iris.analysis.maths.IFunc.__call__` with + :func:`~iris.analysis.maths.apply_ufunc`. Previously a ``ValueError`` + exception was raised. (:pull:`3785`) + + +.. _whatsnew 3.0.1 deprecations: + +🔥 Deprecations +=============== + +#. `@stephenworsley`_ removed the deprecated :class:`iris.Future` flags + ``cell_date_time_objects``, ``netcdf_promote``, ``netcdf_no_unlimited`` and + ``clip_latitudes``. (:pull:`3459`) + +#. `@stephenworsley`_ changed :attr:`iris.fileformats.pp.PPField.lbproc` to be an + ``int``. The deprecated attributes ``flag1``, ``flag2`` etc. have been + removed from it. (:pull:`3461`) + +#. `@bjlittle`_ deprecated :func:`~iris.util.as_compatible_shape` in preference + for :class:`~iris.common.resolve.Resolve` e.g., ``Resolve(src, tgt)(tgt.core_data())``. + The :func:`~iris.util.as_compatible_shape` function will be removed in a future + release of Iris. (:pull:`3892`) + + +🔗 Dependencies +=============== + +#. `@stephenworsley`_, `@trexfeathers`_ and `@bjlittle`_ removed ``Python2`` + support, modernising the codebase by switching to exclusive ``Python3`` + support. (:pull:`3513`) + +#. `@bjlittle`_ improved the developer set up process. Configuring Iris and + :ref:`installing_from_source` as a developer with all the required package + dependencies is now easier with our curated conda environment YAML files. + (:pull:`3812`) + +#. `@stephenworsley`_ pinned Iris to require `Dask`_ ``>=2.0``. (:pull:`3460`) + +#. `@stephenworsley`_ and `@trexfeathers`_ pinned Iris to require + `Cartopy`_ ``>=0.18``, in order to remain compatible with the latest version + of `Matplotlib`_. (:pull:`3762`) + +#. `@bjlittle`_ unpinned Iris to use the latest version of `Matplotlib`_. + Supporting ``Iris`` for both ``Python2`` and ``Python3`` had resulted in + pinning our dependency on `Matplotlib`_ at ``v2.x``. But this is no longer + necessary now that ``Python2`` support has been dropped. (:pull:`3468`) + +#. 
`@stephenworsley`_ and `@trexfeathers`_ unpinned Iris to use the latest version + of `Proj`_. (:pull:`3762`) + +#. `@stephenworsley`_ and `@trexfeathers`_ removed GDAL from the extensions + dependency group. We no longer consider it to be an extension. (:pull:`3762`) + + +.. _whatsnew 3.0.1 docs: + +📚 Documentation +================ + +#. `@tkknight`_ moved the + :ref:`sphx_glr_generated_gallery_oceanography_plot_orca_projection.py` + from the general part of the gallery to oceanography. (:pull:`3761`) + +#. `@tkknight`_ updated documentation to use a modern sphinx theme and be + served from https://scitools-iris.readthedocs.io/en/latest/. (:pull:`3752`) + +#. `@bjlittle`_ added support for the `black`_ code formatter. This is + now automatically checked on GitHub PRs, replacing the older, unittest-based + ``iris.tests.test_coding_standards.TestCodeFormat``. Black provides automatic + code format correction for most IDEs. See the new developer guide section on + :ref:`code_formatting`. (:pull:`3518`) + +#. `@tkknight`_ and `@trexfeathers`_ refreshed the :ref:`whats_new_contributions` + for the :ref:`iris_whatsnew`. This includes always creating the ``latest`` + what's new page so it appears on the latest documentation at + https://scitools-iris.readthedocs.io/en/latest/whatsnew. This resolves + :issue:`2104`, :issue:`3451`, :issue:`3818`, :issue:`3837`. Also updated the + :ref:`iris_development_releases_steps` to follow when making a release. + (:pull:`3769`, :pull:`3838`, :pull:`3843`) + +#. `@tkknight`_ enabled the PDF creation of the documentation on the + `Read the Docs`_ service. The PDF may be accessed by clicking on the version + at the bottom of the side bar, then selecting ``PDF`` from the ``Downloads`` + section. (:pull:`3765`) + +#. `@stephenworsley`_ added a warning to the + :func:`iris.analysis.cartography.project` function regarding its behaviour on + projections with non-rectangular boundaries. (:pull:`3762`) + +#. 
`@stephenworsley`_ added the :ref:`cube_maths_combining_units` section to the + user guide to clarify how ``Units`` are handled during cube arithmetic. + (:pull:`3803`) + +#. `@tkknight`_ overhauled the :ref:`developers_guide` including information on + getting involved in becoming a contributor and general structure of the + guide. This resolves :issue:`2170`, :issue:`2331`, :issue:`3453`, + :issue:`314`, :issue:`2902`. (:pull:`3852`) + +#. `@rcomer`_ added argument descriptions to the :class:`~iris.coords.DimCoord` + docstring. (:pull:`3681`) + +#. `@tkknight`_ added two url's to be ignored for the ``make linkcheck``. This + will ensure the Iris github project is not repeatedly hit during the + linkcheck for issues and pull requests as it can result in connection + refused and thus travis-ci_ job failures. For more information on linkcheck, + see :ref:`contributing.documentation.testing`. (:pull:`3873`) + +#. `@tkknight`_ enabled the napolean_ package that is used by sphinx_ to cater + for the existing google style docstrings and to also allow for `numpy`_ + docstrings. This resolves :issue:`3841`. (:pull:`3871`) + +#. `@tkknight`_ configured ``sphinx-build`` to promote warnings to errors when + building the documentation via ``make html``. This will minimise technical + debt accruing for the documentation. (:pull:`3877`) + +#. `@tkknight`_ updated :ref:`installing_iris` to include a reference to + Windows Subsystem for Linux. (:pull:`3885`) + +#. `@tkknight`_ updated the :ref:`iris_docs` homepage to include panels so the + links are more visible to users. This uses the sphinx-panels_ extension. + (:pull:`3884`) + +#. `@bjlittle`_ created the :ref:`Further topics ` section and + included documentation for :ref:`metadata`, :ref:`lenient metadata`, and + :ref:`lenient maths`. (:pull:`3890`) + +#. `@jonseddon`_ updated the CF version of the netCDF saver in the + :ref:`saving_iris_cubes` section and in the equivalent function docstring. + (:pull:`3925`) + +#. 
`@bjlittle`_ applied `Title Case Capitalization`_ to the documentation. + (:pull:`3940`) + + +💼 Internal +=========== + +#. `@pp-mo`_ and `@lbdreyer`_ removed all Iris test dependencies on `iris-grib`_ + by transferring all relevant content to the `iris-grib`_ repository. (:pull:`3662`, + :pull:`3663`, :pull:`3664`, :pull:`3665`, :pull:`3666`, :pull:`3669`, + :pull:`3670`, :pull:`3671`, :pull:`3672`, :pull:`3742`, :pull:`3746`) + +#. `@lbdreyer`_ and `@pp-mo`_ overhauled the handling of dimensional + metadata to remove duplication. (:pull:`3422`, :pull:`3551`) + +#. `@trexfeathers`_ simplified the standard license header for all files, which + removes the need to repeatedly update year numbers in the header. + (:pull:`3489`) + +#. `@stephenworsley`_ changed the numerical values in tests involving the + Robinson projection due to improvements made in + `Proj`_. (:pull:`3762`) (see also `Proj#1292`_ and `Proj#2151`_) + +#. `@stephenworsley`_ changed tests to account for more detailed descriptions of + projections in `GDAL`_. (:pull:`3762`) (see also `GDAL#1185`_) + +#. `@stephenworsley`_ changed tests to account for `GDAL`_ now saving fill values + for data without masked points. (:pull:`3762`) + +#. `@trexfeathers`_ changed every graphics test that includes `Cartopy's coastlines`_ + to account for new adaptive coastline scaling. (:pull:`3762`) + (see also `Cartopy#1105`_) + +#. `@trexfeathers`_ changed graphics tests to account for some new default + grid-line spacing in `Cartopy`_. (:pull:`3762`) (see also `Cartopy#1117`_) + +#. `@trexfeathers`_ added additional acceptable graphics test targets to account + for very minor changes in `Matplotlib`_ version ``3.3`` (colormaps, fonts and + axes borders). (:pull:`3762`) + +#. `@rcomer`_ corrected the Matplotlib backend in Iris tests to ignore + `matplotlib.rcdefaults`_, instead the tests will **always** use ``agg``. + (:pull:`3846`) + +#. `@bjlittle`_ migrated the `black`_ support from ``19.10b0`` to ``20.8b1``. 
+ (:pull:`3866`) + +#. `@lbdreyer`_ updated the CF standard name table to the latest version: `v75`_. + (:pull:`3867`) + +#. `@bjlittle`_ added :pep:`517` and :pep:`518` support for building and + installing Iris, in particular to handle the `PyKE`_ package dependency. + (:pull:`3812`) + +#. `@bjlittle`_ added metadata support for comparing :attr:`~iris.cube.Cube.attributes` + dictionaries that contain `numpy`_ arrays using `xxHash`_, an extremely fast + non-cryptographic hash algorithm, running at RAM speed limits. + +#. `@bjlittle`_ added the ``iris.tests.assertDictEqual`` method to override + :meth:`unittest.TestCase.assertDictEqual` in order to cope with testing + metadata :attr:`~iris.cube.Cube.attributes` dictionary comparison where + the value of a key may be a `numpy`_ array. (:pull:`3785`) + +#. `@bjlittle`_ added the :func:`~iris.config.get_logger` function for creating + a generic :class:`logging.Logger` with a :class:`logging.StreamHandler` and + custom :class:`logging.Formatter`. (:pull:`3785`) + +#. `@owena11`_ identified and optimised a bottleneck in ``FieldsFile`` header + loading due to the use of :func:`numpy.fromfile`. (:pull:`3791`) + +#. `@znicholls`_ added a test for plotting with the label being taken from the unit's symbol, + see :meth:`~iris.tests.test_quickplot.TestLabels.test_pcolormesh_str_symbol` (:pull:`3902`). + +#. `@znicholls`_ made :func:`~iris.tests.idiff.step_over_diffs` robust to hyphens (``-``) in + the input path (i.e. the ``result_dir`` argument) (:pull:`3902`). + +#. `@bjlittle`_ migrated the CIaaS from `travis-ci`_ to `cirrus-ci`_, and removed `stickler-ci`_ + support. (:pull:`3928`) + +#. `@bjlittle`_ introduced `nox`_ as a common and easy entry-point for test automation. + It can be used both from `cirrus-ci`_ in the cloud, and locally by the developer to + run the Iris tests, the doc-tests, the gallery doc-tests, and lint Iris + with `flake8`_ and `black`_. (:pull:`3928`) + +.. 
_Read the Docs: https://scitools-iris.readthedocs.io/en/latest/ +.. _Matplotlib: https://matplotlib.org/ +.. _CF units rules: https://cfconventions.org/Data/cf-conventions/cf-conventions-1.8/cf-conventions.html#units +.. _CF Ancillary Data: https://cfconventions.org/Data/cf-conventions/cf-conventions-1.8/cf-conventions.html#ancillary-data +.. _Quality Flags: https://cfconventions.org/Data/cf-conventions/cf-conventions-1.8/cf-conventions.html#flags +.. _iris-grib: https://github.com/SciTools/iris-grib +.. _Cartopy: https://github.com/SciTools/cartopy +.. _Cartopy's coastlines: https://scitools.org.uk/cartopy/docs/latest/matplotlib/geoaxes.html?highlight=coastlines#cartopy.mpl.geoaxes.GeoAxes.coastlines +.. _Cartopy#1105: https://github.com/SciTools/cartopy/pull/1105 +.. _Cartopy#1117: https://github.com/SciTools/cartopy/pull/1117 +.. _Dask: https://github.com/dask/dask +.. _matplotlib.dates.date2num: https://matplotlib.org/api/dates_api.html#matplotlib.dates.date2num +.. _Proj: https://github.com/OSGeo/PROJ +.. _black: https://black.readthedocs.io/en/stable/ +.. _Proj#1292: https://github.com/OSGeo/PROJ/pull/1292 +.. _Proj#2151: https://github.com/OSGeo/PROJ/pull/2151 +.. _GDAL: https://github.com/OSGeo/gdal +.. _GDAL#1185: https://github.com/OSGeo/gdal/pull/1185 +.. _@MoseleyS: https://github.com/MoseleyS +.. _@stephenworsley: https://github.com/stephenworsley +.. _@pp-mo: https://github.com/pp-mo +.. _@abooton: https://github.com/abooton +.. _@bouweandela: https://github.com/bouweandela +.. _@bjlittle: https://github.com/bjlittle +.. _@trexfeathers: https://github.com/trexfeathers +.. _@jonseddon: https://github.com/jonseddon +.. _@tkknight: https://github.com/tkknight +.. _@lbdreyer: https://github.com/lbdreyer +.. _@SimonPeatman: https://github.com/SimonPeatman +.. _@TomekTrzeciak: https://github.com/TomekTrzeciak +.. _@rcomer: https://github.com/rcomer +.. _@jvegasbsc: https://github.com/jvegasbsc +.. _@zklaus: https://github.com/zklaus +.. 
_@znicholls: https://github.com/znicholls +.. _ESMValTool: https://github.com/ESMValGroup/ESMValTool +.. _v75: https://cfconventions.org/Data/cf-standard-names/75/build/cf-standard-name-table.html +.. _sphinx-panels: https://sphinx-panels.readthedocs.io/en/latest/ +.. _logging: https://docs.python.org/3/library/logging.html +.. _numpy: https://github.com/numpy/numpy +.. _xxHash: https://github.com/Cyan4973/xxHash +.. _PyKE: https://pypi.org/project/scitools-pyke/ +.. _matplotlib.rcdefaults: https://matplotlib.org/3.1.1/api/matplotlib_configuration_api.html?highlight=rcdefaults#matplotlib.rcdefaults +.. _@owena11: https://github.com/owena11 +.. _GitHub: https://github.com/SciTools/iris/issues/new/choose +.. _readthedocs: https://readthedocs.org/ +.. _CF Conventions and Metadata: https://cfconventions.org/ +.. _flake8: https://flake8.pycqa.org/en/stable/ +.. _nox: https://nox.thea.codes/en/stable/ +.. _Title Case Capitalization: https://apastyle.apa.org/style-grammar-guidelines/capitalization/title-case +.. _travis-ci: https://travis-ci.org/github/SciTools/iris +.. _stickler-ci: https://stickler-ci.com/ diff --git a/docs/src/whatsnew/3.0.rst b/docs/src/whatsnew/3.0.rst new file mode 100644 index 00000000000..0f61d620331 --- /dev/null +++ b/docs/src/whatsnew/3.0.rst @@ -0,0 +1,505 @@ +.. include:: ../common_links.inc + +v3.0 (25 Jan 2021) +****************** + +This document explains the changes made to Iris for this release +(:doc:`View all changes `.) + + +.. dropdown:: :opticon:`report` Release Highlights + :container: + shadow + :title: text-primary text-center font-weight-bold + :body: bg-light + :animate: fade-in + :open: + + The highlights for this major release of Iris include: + + * We've finally dropped support for ``Python 2``, so welcome to ``Iris 3`` + and ``Python 3``! 
+ * We've extended our coverage of the `CF Conventions and Metadata`_ by + introducing support for `CF Ancillary Data`_ and `Quality Flags`_, + * Lazy regridding is now available for several regridding schemes, + * Managing and manipulating metadata within Iris is now easier and more + consistent thanks to the introduction of a new common metadata API, + * :ref:`Cube arithmetic ` has been significantly improved with + regards to extended broadcasting, auto-transposition and a more lenient + behaviour towards handling metadata and coordinates, + * Our :ref:`documentation ` has been refreshed, + restructured, revitalised and rehosted on `readthedocs`_, + * It's now easier than ever to :ref:`install Iris ` + as a user or a developer, and the newly revamped developers guide walks + you through how you can :ref:`get involved ` + and contribute to Iris, + * Also, this is a major release of Iris, so please be aware of the + :ref:`incompatible changes ` and + :ref:`deprecations `. + + And finally, get in touch with us on `GitHub`_ if you have any issues or + feature requests for improving Iris. Enjoy! + + +📢 Announcements +================ + +#. Congratulations to `@bouweandela`_, `@jvegasbsc`_, and `@zklaus`_ who + recently became Iris core developers. They bring a wealth of expertise to the + team, and are using Iris to underpin `ESMValTool`_ - "*A community diagnostic + and performance metrics tool for routine evaluation of Earth system models + in CMIP*". Welcome aboard! 🎉 + +#. Congratulations also go to `@jonseddon`_ who recently became an Iris core + developer. We look forward to seeing more of your awesome contributions! 🎉 + + +✨ Features +=========== + +#. `@MoseleyS`_ greatly enhanced the :mod:`~iris.fileformats.nimrod` + module to provide richer meta-data translation when loading ``Nimrod`` data + into cubes. This covers most known operational use-cases. (:pull:`3647`) + +#. 
`@stephenworsley`_ improved the handling of + :class:`iris.coords.CellMeasure`\ s in the :class:`~iris.cube.Cube` + statistical operations :meth:`~iris.cube.Cube.collapsed`, + :meth:`~iris.cube.Cube.aggregated_by` and + :meth:`~iris.cube.Cube.rolling_window`. These previously removed every + :class:`~iris.coords.CellMeasure` attached to the cube. Now, a + :class:`~iris.coords.CellMeasure` will only be removed if it is associated + with an axis over which the statistic is being run. (:pull:`3549`) + +#. `@stephenworsley`_, `@pp-mo`_ and `@abooton`_ added support for + `CF Ancillary Data`_ variables. These are created as + :class:`iris.coords.AncillaryVariable`, and appear as components of cubes + much like :class:`~iris.coords.AuxCoord`\ s, with the new + :class:`~iris.cube.Cube` methods + :meth:`~iris.cube.Cube.add_ancillary_variable`, + :meth:`~iris.cube.Cube.remove_ancillary_variable`, + :meth:`~iris.cube.Cube.ancillary_variable`, + :meth:`~iris.cube.Cube.ancillary_variables` and + :meth:`~iris.cube.Cube.ancillary_variable_dims`. + They are loaded from and saved to NetCDF-CF files. Special support for + `Quality Flags`_ is also provided, to ensure they load and save with + appropriate units. (:pull:`3800`) + +#. `@bouweandela`_ implemented lazy regridding for the + :class:`~iris.analysis.Linear`, :class:`~iris.analysis.Nearest`, and + :class:`~iris.analysis.AreaWeighted` regridding schemes. (:pull:`3701`) + +#. `@bjlittle`_ added `logging`_ support within :mod:`iris.analysis.maths`, + :mod:`iris.common.metadata`, and :mod:`iris.common.resolve`. Each module + defines a :class:`logging.Logger` instance called ``logger`` with a default + ``level`` of ``INFO``. To enable ``DEBUG`` logging use + ``logger.setLevel("DEBUG")``. (:pull:`3785`) + +#. 
`@bjlittle`_ added the :mod:`iris.common.resolve` module, which provides + infrastructure to support the analysis, identification and combination + of metadata common between two :class:`~iris.cube.Cube` operands into a + single resultant :class:`~iris.cube.Cube` that will be auto-transposed, + and with the appropriate broadcast shape. (:pull:`3785`) + +#. `@bjlittle`_ added the :ref:`common metadata API `, which provides + a unified treatment of metadata across Iris, and allows users to easily + manage and manipulate their metadata in a consistent way. (:pull:`3785`) + +#. `@bjlittle`_ added :ref:`lenient metadata ` support, to + allow users to control **strict** or **lenient** metadata equivalence, + difference and combination. (:pull:`3785`) + +#. `@bjlittle`_ added :ref:`lenient cube maths ` support and + resolved several long standing major issues with cube arithmetic regarding + a more robust treatment of cube broadcasting, cube dimension auto-transposition, + and preservation of common metadata and coordinates during cube math operations. + Resolves :issue:`1887`, :issue:`2765`, and :issue:`3478`. (:pull:`3785`) + +#. `@pp-mo`_ and `@TomekTrzeciak`_ enhanced :meth:`~iris.cube.Cube.collapsed` to allow a 1-D weights array when + collapsing over a single dimension. + Previously, the weights had to be the same shape as the whole cube, which could cost a lot of memory in some cases. + The 1-D form is supported by most weighted array statistics (such as :meth:`np.average`), so this now works + with the corresponding Iris schemes (in that case, :const:`~iris.analysis.MEAN`). (:pull:`3943`) + + +🐛 Bugs Fixed +============= + +#. `@stephenworsley`_ fixed :meth:`~iris.cube.Cube.remove_coord` to now also + remove derived coordinates by removing aux_factories. (:pull:`3641`) + +#. 
`@jonseddon`_ fixed ``isinstance(cube, collections.Iterable)`` to now behave + as expected if a :class:`~iris.cube.Cube` is iterated over, while also + ensuring that ``TypeError`` is still raised. (Fixed by setting the + ``__iter__()`` method in :class:`~iris.cube.Cube` to ``None``). + (:pull:`3656`) + +#. `@stephenworsley`_ enabled cube concatenation along an axis shared by cell + measures; these cell measures are now concatenated together in the resulting + cube. Such a scenario would previously cause concatenation to inappropriately + fail. (:pull:`3566`) + +#. `@stephenworsley`_ newly included :class:`~iris.coords.CellMeasure`\ s in + :class:`~iris.cube.Cube` copy operations. Previously copying a + :class:`~iris.cube.Cube` would ignore any attached + :class:`~iris.coords.CellMeasure`. (:pull:`3546`) + +#. `@bjlittle`_ set a :class:`~iris.coords.CellMeasure`'s + ``measure`` attribute to have a default value of ``area``. + Previously, the ``measure`` was provided as a keyword argument to + :class:`~iris.coords.CellMeasure` with a default value of ``None``, which + caused a ``TypeError`` when no ``measure`` was provided, since ``area`` or + ``volume`` are the only accepted values. (:pull:`3533`) + +#. `@trexfeathers`_ set **all** plot types in :mod:`iris.plot` to now use + `matplotlib.dates.date2num`_ to format date/time coordinates for use on a plot + axis (previously :meth:`~iris.plot.pcolor` and :meth:`~iris.plot.pcolormesh` + did not include this behaviour). (:pull:`3762`) + +#. `@trexfeathers`_ changed date/time axis labels in :mod:`iris.quickplot` to + now **always** be based on the ``epoch`` used in `matplotlib.dates.date2num`_ + (previously would take the unit from a time coordinate, if present, even + though the coordinate's value had been changed via ``date2num``). + (:pull:`3762`) + +#. `@pp-mo`_ newly included attributes of cell measures in NETCDF-CF + file loading; they were previously being discarded. 
They are now available on + the :class:`~iris.coords.CellMeasure` in the loaded :class:`~iris.cube.Cube`. + (:pull:`3800`) + +#. `@pp-mo`_ fixed the netcdf loader to now handle any grid-mapping + variables with missing ``false_easting`` and ``false_northing`` properties, + which was previously failing for some coordinate systems. See :issue:`3629`. + (:pull:`3804`) + +#. `@stephenworsley`_ changed the way tick labels are assigned from string coords. + Previously, the first tick label would occasionally be duplicated. This also + removes the use of Matplotlib's deprecated ``IndexFormatter``. (:pull:`3857`) + +#. `@znicholls`_ fixed :meth:`~iris.quickplot._title` to only check + ``units.is_time_reference`` if the ``units`` symbol is not used. (:pull:`3902`) + +#. `@rcomer`_ fixed a bug whereby numpy array type attributes on a cube's + coordinates could prevent printing it. See :issue:`3921`. (:pull:`3922`) + +.. _whatsnew 3.0 changes: + +💣 Incompatible Changes +======================= + +#. `@pp-mo`_ rationalised :class:`~iris.cube.CubeList` extraction + methods: + + The former method ``iris.cube.CubeList.extract_strict``, and the ``strict`` + keyword of the :meth:`~iris.cube.CubeList.extract` method have been removed, + and are replaced by the new routines :meth:`~iris.cube.CubeList.extract_cube` + and :meth:`~iris.cube.CubeList.extract_cubes`. + The new routines perform the same operation, but in a style more like other + ``Iris`` functions such as :meth:`~iris.load_cube` and :meth:`~iris.load_cubes`. + Unlike ``strict`` extraction, the type of return value is now completely + consistent : :meth:`~iris.cube.CubeList.extract_cube` always returns a + :class:`~iris.cube.Cube`, and :meth:`~iris.cube.CubeList.extract_cubes` + always returns an :class:`iris.cube.CubeList` of a length equal to the + number of constraints. (:pull:`3715`) + +#. `@pp-mo`_ removed the former function + ``iris.analysis.coord_comparison``. (:pull:`3562`) + +#. 
`@bjlittle`_ moved the + :func:`iris.experimental.equalise_cubes.equalise_attributes` function from + the :mod:`iris.experimental` module into the :mod:`iris.util` module. Please + use the :func:`iris.util.equalise_attributes` function instead. + (:pull:`3527`) + +#. `@bjlittle`_ removed the module ``iris.experimental.concatenate``. In + ``v1.6.0`` the experimental ``concatenate`` functionality was moved to the + :meth:`iris.cube.CubeList.concatenate` method. Since then, calling the + :func:`iris.experimental.concatenate.concatenate` function raised an + exception. (:pull:`3523`) + +#. `@stephenworsley`_ changed the default units of :class:`~iris.coords.DimCoord` + and :class:`~iris.coords.AuxCoord` from `"1"` to `"unknown"`. (:pull:`3795`) + +#. `@stephenworsley`_ changed Iris objects loaded from NetCDF-CF files to have + ``units='unknown'`` where the corresponding NetCDF variable has no ``units`` + property. Previously these cases defaulted to ``units='1'``. + This affects loading of coordinates whose file variable has no "units" + attribute (not valid, under `CF units rules`_): These will now have units + of `"unknown"`, rather than `"1"`, which **may prevent the creation of + a hybrid vertical coordinate**. While these cases used to "work", this was + never really correct behaviour. (:pull:`3795`) + +#. `@SimonPeatman`_ added attribute ``var_name`` to coordinates created by the + :func:`iris.analysis.trajectory.interpolate` function. This prevents + duplicate coordinate errors in certain circumstances. (:pull:`3718`) + +#. `@bjlittle`_ aligned the :func:`iris.analysis.maths.apply_ufunc` with the + rest of the :mod:`iris.analysis.maths` API by changing its keyword argument + from ``other_cube`` to ``other``. (:pull:`3785`) + +#. `@bjlittle`_ changed the :meth:`iris.analysis.maths.IFunc.__call__` to ignore + any surplus ``other`` keyword argument for a ``data_func`` that requires + **only one** argument. 
This aligns the behaviour of + :meth:`iris.analysis.maths.IFunc.__call__` with + :func:`~iris.analysis.maths.apply_ufunc`. Previously a ``ValueError`` + exception was raised. (:pull:`3785`) + + +.. _whatsnew 3.0 deprecations: + +🔥 Deprecations +=============== + +#. `@stephenworsley`_ removed the deprecated :class:`iris.Future` flags + ``cell_date_time_objects``, ``netcdf_promote``, ``netcdf_no_unlimited`` and + ``clip_latitudes``. (:pull:`3459`) + +#. `@stephenworsley`_ changed :attr:`iris.fileformats.pp.PPField.lbproc` to be an + ``int``. The deprecated attributes ``flag1``, ``flag2`` etc. have been + removed from it. (:pull:`3461`) + +#. `@bjlittle`_ deprecated :func:`~iris.util.as_compatible_shape` in preference + for :class:`~iris.common.resolve.Resolve` e.g., ``Resolve(src, tgt)(tgt.core_data())``. + The :func:`~iris.util.as_compatible_shape` function will be removed in a future + release of Iris. (:pull:`3892`) + + +🔗 Dependencies +=============== + +#. `@stephenworsley`_, `@trexfeathers`_ and `@bjlittle`_ removed ``Python2`` + support, modernising the codebase by switching to exclusive ``Python3`` + support. (:pull:`3513`) + +#. `@bjlittle`_ improved the developer set up process. Configuring Iris and + :ref:`installing_from_source` as a developer with all the required package + dependencies is now easier with our curated conda environment YAML files. + (:pull:`3812`) + +#. `@stephenworsley`_ pinned Iris to require `Dask`_ ``>=2.0``. (:pull:`3460`) + +#. `@stephenworsley`_ and `@trexfeathers`_ pinned Iris to require + `Cartopy`_ ``>=0.18``, in order to remain compatible with the latest version + of `Matplotlib`_. (:pull:`3762`) + +#. `@bjlittle`_ unpinned Iris to use the latest version of `Matplotlib`_. + Supporting ``Iris`` for both ``Python2`` and ``Python3`` had resulted in + pinning our dependency on `Matplotlib`_ at ``v2.x``. But this is no longer + necessary now that ``Python2`` support has been dropped. (:pull:`3468`) + +#. 
`@stephenworsley`_ and `@trexfeathers`_ unpinned Iris to use the latest version + of `Proj`_. (:pull:`3762`) + +#. `@stephenworsley`_ and `@trexfeathers`_ removed GDAL from the extensions + dependency group. We no longer consider it to be an extension. (:pull:`3762`) + + +.. _whatsnew 3.0 docs: + +📚 Documentation +================ + +#. `@tkknight`_ moved the + :ref:`sphx_glr_generated_gallery_oceanography_plot_orca_projection.py` + from the general part of the gallery to oceanography. (:pull:`3761`) + +#. `@tkknight`_ updated documentation to use a modern sphinx theme and be + served from https://scitools-iris.readthedocs.io/en/latest/. (:pull:`3752`) + +#. `@bjlittle`_ added support for the `black`_ code formatter. This is + now automatically checked on GitHub PRs, replacing the older, unittest-based + ``iris.tests.test_coding_standards.TestCodeFormat``. Black provides automatic + code format correction for most IDEs. See the new developer guide section on + :ref:`code_formatting`. (:pull:`3518`) + +#. `@tkknight`_ and `@trexfeathers`_ refreshed the :ref:`whats_new_contributions` + for the :ref:`iris_whatsnew`. This includes always creating the ``latest`` + what's new page so it appears on the latest documentation at + https://scitools-iris.readthedocs.io/en/latest/whatsnew. This resolves + :issue:`2104`, :issue:`3451`, :issue:`3818`, :issue:`3837`. Also updated the + :ref:`iris_development_releases_steps` to follow when making a release. + (:pull:`3769`, :pull:`3838`, :pull:`3843`) + +#. `@tkknight`_ enabled the PDF creation of the documentation on the + `Read the Docs`_ service. The PDF may be accessed by clicking on the version + at the bottom of the side bar, then selecting ``PDF`` from the ``Downloads`` + section. (:pull:`3765`) + +#. `@stephenworsley`_ added a warning to the + :func:`iris.analysis.cartography.project` function regarding its behaviour on + projections with non-rectangular boundaries. (:pull:`3762`) + +#. 
`@stephenworsley`_ added the :ref:`cube_maths_combining_units` section to the + user guide to clarify how ``Units`` are handled during cube arithmetic. + (:pull:`3803`) + +#. `@tkknight`_ overhauled the :ref:`developers_guide` including information on + getting involved in becoming a contributor and general structure of the + guide. This resolves :issue:`2170`, :issue:`2331`, :issue:`3453`, + :issue:`314`, :issue:`2902`. (:pull:`3852`) + +#. `@rcomer`_ added argument descriptions to the :class:`~iris.coords.DimCoord` + docstring. (:pull:`3681`) + +#. `@tkknight`_ added two url's to be ignored for the ``make linkcheck``. This + will ensure the Iris github project is not repeatedly hit during the + linkcheck for issues and pull requests as it can result in connection + refused and thus travis-ci_ job failures. For more information on linkcheck, + see :ref:`contributing.documentation.testing`. (:pull:`3873`) + +#. `@tkknight`_ enabled the napolean_ package that is used by sphinx_ to cater + for the existing google style docstrings and to also allow for `numpy`_ + docstrings. This resolves :issue:`3841`. (:pull:`3871`) + +#. `@tkknight`_ configured ``sphinx-build`` to promote warnings to errors when + building the documentation via ``make html``. This will minimise technical + debt accruing for the documentation. (:pull:`3877`) + +#. `@tkknight`_ updated :ref:`installing_iris` to include a reference to + Windows Subsystem for Linux. (:pull:`3885`) + +#. `@tkknight`_ updated the :ref:`iris_docs` homepage to include panels so the + links are more visible to users. This uses the sphinx-panels_ extension. + (:pull:`3884`) + +#. `@bjlittle`_ created the :ref:`Further topics ` section and + included documentation for :ref:`metadata`, :ref:`lenient metadata`, and + :ref:`lenient maths`. (:pull:`3890`) + +#. `@jonseddon`_ updated the CF version of the netCDF saver in the + :ref:`saving_iris_cubes` section and in the equivalent function docstring. + (:pull:`3925`) + +#. 
`@bjlittle`_ applied `Title Case Capitalization`_ to the documentation. + (:pull:`3940`) + + +💼 Internal +=========== + +#. `@pp-mo`_ and `@lbdreyer`_ removed all Iris test dependencies on `iris-grib`_ + by transferring all relevant content to the `iris-grib`_ repository. (:pull:`3662`, + :pull:`3663`, :pull:`3664`, :pull:`3665`, :pull:`3666`, :pull:`3669`, + :pull:`3670`, :pull:`3671`, :pull:`3672`, :pull:`3742`, :pull:`3746`) + +#. `@lbdreyer`_ and `@pp-mo`_ overhauled the handling of dimensional + metadata to remove duplication. (:pull:`3422`, :pull:`3551`) + +#. `@trexfeathers`_ simplified the standard license header for all files, which + removes the need to repeatedly update year numbers in the header. + (:pull:`3489`) + +#. `@stephenworsley`_ changed the numerical values in tests involving the + Robinson projection due to improvements made in + `Proj`_. (:pull:`3762`) (see also `Proj#1292`_ and `Proj#2151`_) + +#. `@stephenworsley`_ changed tests to account for more detailed descriptions of + projections in `GDAL`_. (:pull:`3762`) (see also `GDAL#1185`_) + +#. `@stephenworsley`_ changed tests to account for `GDAL`_ now saving fill values + for data without masked points. (:pull:`3762`) + +#. `@trexfeathers`_ changed every graphics test that includes `Cartopy's coastlines`_ + to account for new adaptive coastline scaling. (:pull:`3762`) + (see also `Cartopy#1105`_) + +#. `@trexfeathers`_ changed graphics tests to account for some new default + grid-line spacing in `Cartopy`_. (:pull:`3762`) (see also `Cartopy#1117`_) + +#. `@trexfeathers`_ added additional acceptable graphics test targets to account + for very minor changes in `Matplotlib`_ version ``3.3`` (colormaps, fonts and + axes borders). (:pull:`3762`) + +#. `@rcomer`_ corrected the Matplotlib backend in Iris tests to ignore + `matplotlib.rcdefaults`_, instead the tests will **always** use ``agg``. + (:pull:`3846`) + +#. `@bjlittle`_ migrated the `black`_ support from ``19.10b0`` to ``20.8b1``. 
+ (:pull:`3866`) + +#. `@lbdreyer`_ updated the CF standard name table to the latest version: `v75`_. + (:pull:`3867`) + +#. `@bjlittle`_ added :pep:`517` and :pep:`518` support for building and + installing Iris, in particular to handle the `PyKE`_ package dependency. + (:pull:`3812`) + +#. `@bjlittle`_ added metadata support for comparing :attr:`~iris.cube.Cube.attributes` + dictionaries that contain `numpy`_ arrays using `xxHash`_, an extremely fast + non-cryptographic hash algorithm, running at RAM speed limits. + +#. `@bjlittle`_ added the ``iris.tests.assertDictEqual`` method to override + :meth:`unittest.TestCase.assertDictEqual` in order to cope with testing + metadata :attr:`~iris.cube.Cube.attributes` dictionary comparison where + the value of a key may be a `numpy`_ array. (:pull:`3785`) + +#. `@bjlittle`_ added the :func:`~iris.config.get_logger` function for creating + a generic :class:`logging.Logger` with a :class:`logging.StreamHandler` and + custom :class:`logging.Formatter`. (:pull:`3785`) + +#. `@owena11`_ identified and optimised a bottleneck in ``FieldsFile`` header + loading due to the use of :func:`numpy.fromfile`. (:pull:`3791`) + +#. `@znicholls`_ added a test for plotting with the label being taken from the unit's symbol, + see :meth:`~iris.tests.test_quickplot.TestLabels.test_pcolormesh_str_symbol` (:pull:`3902`). + +#. `@znicholls`_ made :func:`~iris.tests.idiff.step_over_diffs` robust to hyphens (``-``) in + the input path (i.e. the ``result_dir`` argument) (:pull:`3902`). + +#. `@bjlittle`_ migrated the CIaaS from `travis-ci`_ to `cirrus-ci`_, and removed `stickler-ci`_ + support. (:pull:`3928`) + +#. `@bjlittle`_ introduced `nox`_ as a common and easy entry-point for test automation. + It can be used both from `cirrus-ci`_ in the cloud, and locally by the developer to + run the Iris tests, the doc-tests, the gallery doc-tests, and lint Iris + with `flake8`_ and `black`_. (:pull:`3928`) + +.. 
_Read the Docs: https://scitools-iris.readthedocs.io/en/latest/ +.. _Matplotlib: https://matplotlib.org/ +.. _CF units rules: https://cfconventions.org/Data/cf-conventions/cf-conventions-1.8/cf-conventions.html#units +.. _CF Ancillary Data: https://cfconventions.org/Data/cf-conventions/cf-conventions-1.8/cf-conventions.html#ancillary-data +.. _Quality Flags: https://cfconventions.org/Data/cf-conventions/cf-conventions-1.8/cf-conventions.html#flags +.. _iris-grib: https://github.com/SciTools/iris-grib +.. _Cartopy: https://github.com/SciTools/cartopy +.. _Cartopy's coastlines: https://scitools.org.uk/cartopy/docs/latest/matplotlib/geoaxes.html?highlight=coastlines#cartopy.mpl.geoaxes.GeoAxes.coastlines +.. _Cartopy#1105: https://github.com/SciTools/cartopy/pull/1105 +.. _Cartopy#1117: https://github.com/SciTools/cartopy/pull/1117 +.. _Dask: https://github.com/dask/dask +.. _matplotlib.dates.date2num: https://matplotlib.org/api/dates_api.html#matplotlib.dates.date2num +.. _Proj: https://github.com/OSGeo/PROJ +.. _black: https://black.readthedocs.io/en/stable/ +.. _Proj#1292: https://github.com/OSGeo/PROJ/pull/1292 +.. _Proj#2151: https://github.com/OSGeo/PROJ/pull/2151 +.. _GDAL: https://github.com/OSGeo/gdal +.. _GDAL#1185: https://github.com/OSGeo/gdal/pull/1185 +.. _@MoseleyS: https://github.com/MoseleyS +.. _@stephenworsley: https://github.com/stephenworsley +.. _@pp-mo: https://github.com/pp-mo +.. _@abooton: https://github.com/abooton +.. _@bouweandela: https://github.com/bouweandela +.. _@bjlittle: https://github.com/bjlittle +.. _@trexfeathers: https://github.com/trexfeathers +.. _@jonseddon: https://github.com/jonseddon +.. _@tkknight: https://github.com/tkknight +.. _@lbdreyer: https://github.com/lbdreyer +.. _@SimonPeatman: https://github.com/SimonPeatman +.. _@TomekTrzeciak: https://github.com/TomekTrzeciak +.. _@rcomer: https://github.com/rcomer +.. _@jvegasbsc: https://github.com/jvegasbsc +.. _@zklaus: https://github.com/zklaus +.. 
_@znicholls: https://github.com/znicholls +.. _ESMValTool: https://github.com/ESMValGroup/ESMValTool +.. _v75: https://cfconventions.org/Data/cf-standard-names/75/build/cf-standard-name-table.html +.. _sphinx-panels: https://sphinx-panels.readthedocs.io/en/latest/ +.. _logging: https://docs.python.org/3/library/logging.html +.. _numpy: https://github.com/numpy/numpy +.. _xxHash: https://github.com/Cyan4973/xxHash +.. _PyKE: https://pypi.org/project/scitools-pyke/ +.. _matplotlib.rcdefaults: https://matplotlib.org/3.1.1/api/matplotlib_configuration_api.html?highlight=rcdefaults#matplotlib.rcdefaults +.. _@owena11: https://github.com/owena11 +.. _GitHub: https://github.com/SciTools/iris/issues/new/choose +.. _readthedocs: https://readthedocs.org/ +.. _CF Conventions and Metadata: https://cfconventions.org/ +.. _flake8: https://flake8.pycqa.org/en/stable/ +.. _nox: https://nox.thea.codes/en/stable/ +.. _Title Case Capitalization: https://apastyle.apa.org/style-grammar-guidelines/capitalization/title-case +.. _travis-ci: https://travis-ci.org/github/SciTools/iris +.. _stickler-ci: https://stickler-ci.com/ diff --git a/docs/iris/src/whatsnew/images/notebook_repr.png b/docs/src/whatsnew/images/notebook_repr.png similarity index 100% rename from docs/iris/src/whatsnew/images/notebook_repr.png rename to docs/src/whatsnew/images/notebook_repr.png diff --git a/docs/iris/src/whatsnew/images/transverse_merc.png b/docs/src/whatsnew/images/transverse_merc.png similarity index 100% rename from docs/iris/src/whatsnew/images/transverse_merc.png rename to docs/src/whatsnew/images/transverse_merc.png diff --git a/docs/iris/src/whatsnew/index.rst b/docs/src/whatsnew/index.rst similarity index 92% rename from docs/iris/src/whatsnew/index.rst rename to docs/src/whatsnew/index.rst index 3fd5fe60700..257674718a5 100644 --- a/docs/iris/src/whatsnew/index.rst +++ b/docs/src/whatsnew/index.rst @@ -1,6 +1,6 @@ .. 
_iris_whatsnew: -What's new in Iris +What's New in Iris ****************** These "What's new" pages describe the important changes between major @@ -11,6 +11,7 @@ Iris versions. :maxdepth: 1 latest.rst + 3.0.1.rst 3.0.rst 2.4.rst 2.3.rst diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst new file mode 100644 index 00000000000..1efa08874a0 --- /dev/null +++ b/docs/src/whatsnew/latest.rst @@ -0,0 +1,119 @@ +.. include:: ../common_links.inc + +|iris_version| |build_date| [unreleased] +**************************************** + +This document explains the changes made to Iris for this release +(:doc:`View all changes `.) + + +.. dropdown:: :opticon:`report` Release Highlights + :container: + shadow + :title: text-primary text-center font-weight-bold + :body: bg-light + :animate: fade-in + :open: + + The highlights for this major/minor release of Iris include: + + * N/A + + And finally, get in touch with us on `GitHub`_ if you have any issues or + feature requests for improving Iris. Enjoy! + + +📢 Announcements +================ + +#. Congratulations to `@jamesp`_ who recently became an Iris core developer + after joining the Iris development team at the `Met Office`_. 🎉 + + +✨ Features +=========== + +#. `@pelson`_ and `@trexfeathers`_ enhanced :meth:`iris.plot.plot` and + :meth:`iris.quickplot.plot` to automatically place the cube on the x axis if + the primary coordinate being plotted against is a vertical coordinate. E.g. + ``iris.plot.plot(z_cube)`` will produce a z-vs-phenomenon plot, where before + it would have produced a phenomenon-vs-z plot. (:pull:`3906`) + + +🐛 Bugs Fixed +============= + +#. `@gcaria`_ fixed :meth:`~iris.cube.Cube.cell_measure_dims` to also accept the + string name of a :class:`~iris.coords.CellMeasure`. (:pull:`3931`) + +#. `@gcaria`_ fixed :meth:`~iris.cube.Cube.ancillary_variable_dims` to also accept + the string name of a :class:`~iris.coords.AncillaryVariable`. 
(:pull:`3931`) + + +💣 Incompatible Changes +======================= + +#. N/A + + +🔥 Deprecations +=============== + +#. N/A + + +🔗 Dependencies +=============== + +#. N/A + + +📚 Documentation +================ + +#. `@rcomer`_ updated the "Seasonal ensemble model plots" Gallery example. + (:pull:`3933`) + +#. `@MHBalsmeier`_ described non-conda installation on Debian-based distros. + (:pull:`3958`) + +#. `@bjlittle`_ clarified in the doc-string that :class:`~iris.coords.Coord` + is now an `abstract base class`_ since Iris ``3.0.0``, and it is **not** + possible to create an instance of it. (:pull:`3971`) + +#. `@bjlittle`_ added automated Iris version discovery for the ``latest.rst`` + in the ``whatsnew`` documentation. (:pull:`3981`) + +#. `@tkknight`_ stated the Python version used to build the documentation + on :ref:`installing_iris` and to the footer of all pages. Also added the + copyright years to the footer. (:pull:`3989`) + + +💼 Internal +=========== + +#. `@rcomer`_ removed an old unused test file. (:pull:`3913`) + +#. `@tkknight`_ moved the ``docs/iris`` directory to be in the parent + directory ``docs``. (:pull:`3975`) + +#. `@jamesp`_ updated a test to the latest numpy version (:pull:`3977`) + +#. `@bjlittle`_ rationalised the ``noxfile.py``, and added the ability for + each ``nox`` session to list its ``conda`` environment packages and + environment info. (:pull:`3990`) + + +.. comment + Whatsnew author names (@github name) in alphabetical order. Note that, + core dev names are automatically included by the common_links.inc: + +.. _@gcaria: https://github.com/gcaria +.. _@MHBalsmeier: https://github.com/MHBalsmeier + + +.. comment + Whatsnew resources in alphabetical order: + +.. _abstract base class: https://docs.python.org/3/library/abc.html +.. _GitHub: https://github.com/SciTools/iris/issues/new/choose +.. 
_Met Office: https://www.metoffice.gov.uk/ diff --git a/docs/src/whatsnew/latest.rst.template b/docs/src/whatsnew/latest.rst.template new file mode 100644 index 00000000000..de02207474b --- /dev/null +++ b/docs/src/whatsnew/latest.rst.template @@ -0,0 +1,95 @@ +.. include:: ../common_links.inc + +|iris_version| |build_date| [unreleased] +**************************************** + +This document explains the changes made to Iris for this release +(:doc:`View all changes `.) + + +.. dropdown:: :opticon:`alert` v3.X.X Patches + :container: + shadow + :title: text-primary text-center font-weight-bold + :body: bg-light + :animate: fade-in + :open: + + The patches in this release of Iris include: + + #. N/A + + +.. dropdown:: :opticon:`report` Release Highlights + :container: + shadow + :title: text-primary text-center font-weight-bold + :body: bg-light + :animate: fade-in + :open: + + The highlights for this major/minor release of Iris include: + + * N/A + + And finally, get in touch with us on `GitHub`_ if you have any issues or + feature requests for improving Iris. Enjoy! + + +📢 Announcements +================ + +#. N/A + + +✨ Features +=========== + +#. N/A + + +🐛 Bugs Fixed +============= + +#. N/A + + +💣 Incompatible Changes +======================= + +#. N/A + + +🔥 Deprecations +=============== + +#. N/A + + +🔗 Dependencies +=============== + +#. N/A + + +📚 Documentation +================ + +#. N/A + + +💼 Internal +=========== + +#. N/A + + +.. comment + Whatsnew author names (@github name) in alphabetical order. Note that, + core dev names are automatically included by the common_links.inc: + + + + +.. comment + Whatsnew resources in alphabetical order: + +.. _GitHub: https://github.com/SciTools/iris/issues/new/choose diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py index e31c7b58d7a..a78d0a76820 100644 --- a/lib/iris/__init__.py +++ b/lib/iris/__init__.py @@ -106,7 +106,7 @@ def callback(cube, field, filename): # Iris revision. 
-__version__ = "3.1.dev0" +__version__ = "3.1.0dev0" # Restrict the names imported when using "from iris import *" __all__ = [ diff --git a/lib/iris/_representation.py b/lib/iris/_representation.py new file mode 100644 index 00000000000..301f4a9a22c --- /dev/null +++ b/lib/iris/_representation.py @@ -0,0 +1,273 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Provides objects describing cube summaries. +""" + +import iris.util + + +class DimensionHeader: + def __init__(self, cube): + if cube.shape == (): + self.scalar = True + self.dim_names = [] + self.shape = [] + self.contents = ["scalar cube"] + else: + self.scalar = False + self.dim_names = [] + for dim in range(len(cube.shape)): + dim_coords = cube.coords( + contains_dimension=dim, dim_coords=True + ) + if dim_coords: + self.dim_names.append(dim_coords[0].name()) + else: + self.dim_names.append("-- ") + self.shape = list(cube.shape) + self.contents = [ + name + ": %d" % dim_len + for name, dim_len in zip(self.dim_names, self.shape) + ] + + +class FullHeader: + def __init__(self, cube, name_padding=35): + self.name = cube.name() + self.unit = cube.units + self.nameunit = "{name} / ({units})".format( + name=self.name, units=self.unit + ) + self.name_padding = name_padding + self.dimension_header = DimensionHeader(cube) + + +class CoordSummary: + def _summary_coord_extra(self, cube, coord): + # Returns the text needed to ensure this coordinate can be + # distinguished from all others with the same name. 
+ extra = "" + similar_coords = cube.coords(coord.name()) + if len(similar_coords) > 1: + # Find all the attribute keys + keys = set() + for similar_coord in similar_coords: + keys.update(similar_coord.attributes.keys()) + # Look for any attributes that vary + vary = set() + attributes = {} + for key in keys: + for similar_coord in similar_coords: + if key not in similar_coord.attributes: + vary.add(key) + break + value = similar_coord.attributes[key] + if attributes.setdefault(key, value) != value: + vary.add(key) + break + keys = sorted(vary & set(coord.attributes.keys())) + bits = [ + "{}={!r}".format(key, coord.attributes[key]) for key in keys + ] + if bits: + extra = ", ".join(bits) + return extra + + +class VectorSummary(CoordSummary): + def __init__(self, cube, vector, iscoord): + self.name = iris.util.clip_string(vector.name()) + dims = vector.cube_dims(cube) + self.dim_chars = [ + "x" if dim in dims else "-" for dim in range(len(cube.shape)) + ] + if iscoord: + extra = self._summary_coord_extra(cube, vector) + self.extra = iris.util.clip_string(extra) + else: + self.extra = "" + + +class ScalarSummary(CoordSummary): + def __init__(self, cube, coord): + self.name = coord.name() + if ( + coord.units in ["1", "no_unit", "unknown"] + or coord.units.is_time_reference() + ): + self.unit = "" + else: + self.unit = " {!s}".format(coord.units) + coord_cell = coord.cell(0) + if isinstance(coord_cell.point, str): + self.string_type = True + self.lines = [ + iris.util.clip_string(str(item)) + for item in coord_cell.point.split("\n") + ] + self.point = None + self.bound = None + self.content = "\n".join(self.lines) + else: + self.string_type = False + self.lines = None + self.point = "{!s}".format(coord_cell.point) + coord_cell_cbound = coord_cell.bound + if coord_cell_cbound is not None: + self.bound = "({})".format( + ", ".join(str(val) for val in coord_cell_cbound) + ) + self.content = "{}{}, bound={}{}".format( + self.point, self.unit, self.bound, self.unit + ) + 
else: + self.bound = None + self.content = "{}{}".format(self.point, self.unit) + extra = self._summary_coord_extra(cube, coord) + self.extra = iris.util.clip_string(extra) + + +class Section: + def __init__(self): + self.contents = [] + + def is_empty(self): + return self.contents == [] + + +class VectorSection(Section): + def __init__(self, title, cube, vectors, iscoord): + self.title = title + self.contents = [ + VectorSummary(cube, vector, iscoord) for vector in vectors + ] + + +class ScalarSection(Section): + def __init__(self, title, cube, scalars): + self.title = title + self.contents = [ScalarSummary(cube, scalar) for scalar in scalars] + + +class ScalarCellMeasureSection(Section): + def __init__(self, title, cell_measures): + self.title = title + self.contents = [cm.name() for cm in cell_measures] + + +class AttributeSection(Section): + def __init__(self, title, attributes): + self.title = title + self.names = [] + self.values = [] + self.contents = [] + for name, value in sorted(attributes.items()): + value = iris.util.clip_string(str(value)) + self.names.append(name) + self.values.append(value) + content = "{}: {}".format(name, value) + self.contents.append(content) + + +class CellMethodSection(Section): + def __init__(self, title, cell_methods): + self.title = title + self.contents = [str(cm) for cm in cell_methods] + + +class CubeSummary: + def __init__(self, cube, shorten=False, name_padding=35): + self.section_indent = 5 + self.item_indent = 10 + self.extra_indent = 13 + self.shorten = shorten + self.header = FullHeader(cube, name_padding) + + # Cache the derived coords so we can rely on consistent + # object IDs. + derived_coords = cube.derived_coords + # Determine the cube coordinates that are scalar (single-valued) + # AND non-dimensioned. 
+ dim_coords = cube.dim_coords + aux_coords = cube.aux_coords + all_coords = dim_coords + aux_coords + derived_coords + scalar_coords = [ + coord + for coord in all_coords + if not cube.coord_dims(coord) and coord.shape == (1,) + ] + # Determine the cube coordinates that are not scalar BUT + # dimensioned. + scalar_coord_ids = set(map(id, scalar_coords)) + vector_dim_coords = [ + coord for coord in dim_coords if id(coord) not in scalar_coord_ids + ] + vector_aux_coords = [ + coord for coord in aux_coords if id(coord) not in scalar_coord_ids + ] + vector_derived_coords = [ + coord + for coord in derived_coords + if id(coord) not in scalar_coord_ids + ] + + # cell measures + vector_cell_measures = [ + cm for cm in cube.cell_measures() if cm.shape != (1,) + ] + + # Ancillary Variables + vector_ancillary_variables = [av for av in cube.ancillary_variables()] + + # Sort scalar coordinates by name. + scalar_coords.sort(key=lambda coord: coord.name()) + # Sort vector coordinates by data dimension and name. 
+ vector_dim_coords.sort( + key=lambda coord: (cube.coord_dims(coord), coord.name()) + ) + vector_aux_coords.sort( + key=lambda coord: (cube.coord_dims(coord), coord.name()) + ) + vector_derived_coords.sort( + key=lambda coord: (cube.coord_dims(coord), coord.name()) + ) + scalar_cell_measures = [ + cm for cm in cube.cell_measures() if cm.shape == (1,) + ] + + self.vector_sections = {} + + def add_vector_section(title, contents, iscoord=True): + self.vector_sections[title] = VectorSection( + title, cube, contents, iscoord + ) + + add_vector_section("Dimension coordinates:", vector_dim_coords) + add_vector_section("Auxiliary coordinates:", vector_aux_coords) + add_vector_section("Derived coordinates:", vector_derived_coords) + add_vector_section("Cell Measures:", vector_cell_measures, False) + add_vector_section( + "Ancillary Variables:", vector_ancillary_variables, False + ) + + self.scalar_sections = {} + + def add_scalar_section(section_class, title, *args): + self.scalar_sections[title] = section_class(title, *args) + + add_scalar_section( + ScalarSection, "Scalar Coordinates:", cube, scalar_coords + ) + add_scalar_section( + ScalarCellMeasureSection, + "Scalar cell measures:", + scalar_cell_measures, + ) + add_scalar_section(AttributeSection, "Attributes:", cube.attributes) + add_scalar_section( + CellMethodSection, "Cell methods:", cube.cell_methods + ) diff --git a/lib/iris/aux_factory.py b/lib/iris/aux_factory.py index 5b63ff53ed2..962b46e9e2f 100644 --- a/lib/iris/aux_factory.py +++ b/lib/iris/aux_factory.py @@ -11,6 +11,7 @@ from abc import ABCMeta, abstractmethod import warnings +import cf_units import dask.array as da import numpy as np @@ -619,6 +620,10 @@ def _check_dependencies(delta, sigma, surface_air_pressure): warnings.warn(msg, UserWarning, stacklevel=2) # Check units. + if sigma is not None and sigma.units.is_unknown(): + # Be graceful, and promote unknown to dimensionless units. 
+ sigma.units = cf_units.Unit("1") + if sigma is not None and not sigma.units.is_dimensionless(): raise ValueError("Invalid units: sigma must be dimensionless.") if ( @@ -863,6 +868,10 @@ def _check_dependencies(sigma, eta, depth, depth_c, nsigma, zlev): ) raise ValueError(msg) + if sigma is not None and sigma.units.is_unknown(): + # Be graceful, and promote unknown to dimensionless units. + sigma.units = cf_units.Unit("1") + if sigma is not None and not sigma.units.is_dimensionless(): msg = ( "Invalid units: sigma coordinate {!r} " @@ -1127,6 +1136,10 @@ def _check_dependencies(sigma, eta, depth): warnings.warn(msg, UserWarning, stacklevel=2) # Check units. + if sigma is not None and sigma.units.is_unknown(): + # Be graceful, and promote unknown to dimensionless units. + sigma.units = cf_units.Unit("1") + if sigma is not None and not sigma.units.is_dimensionless(): msg = ( "Invalid units: sigma coordinate {!r} " @@ -1335,6 +1348,10 @@ def _check_dependencies(s, c, eta, depth, depth_c): # Check units. coords = ((s, "s"), (c, "c")) for coord, term in coords: + if coord is not None and coord.units.is_unknown(): + # Be graceful, and promote unknown to dimensionless units. + coord.units = cf_units.Unit("1") + if coord is not None and not coord.units.is_dimensionless(): msg = ( "Invalid units: {} coordinate {!r} " @@ -1551,6 +1568,10 @@ def _check_dependencies(s, eta, depth, a, b, depth_c): raise ValueError(msg) # Check units. + if s is not None and s.units.is_unknown(): + # Be graceful, and promote unknown to dimensionless units. + s.units = cf_units.Unit("1") + if s is not None and not s.units.is_dimensionless(): msg = ( "Invalid units: s coordinate {!r} " @@ -1776,6 +1797,10 @@ def _check_dependencies(s, c, eta, depth, depth_c): # Check units. coords = ((s, "s"), (c, "c")) for coord, term in coords: + if coord is not None and coord.units.is_unknown(): + # Be graceful, and promote unknown to dimensionless units. 
+ coord.units = cf_units.Unit("1") + if coord is not None and not coord.units.is_dimensionless(): msg = ( "Invalid units: {} coordinate {!r} " diff --git a/lib/iris/common/resolve.py b/lib/iris/common/resolve.py index ad372478097..e772eeefce0 100644 --- a/lib/iris/common/resolve.py +++ b/lib/iris/common/resolve.py @@ -230,7 +230,7 @@ def __init__(self, lhs=None, rhs=None): """ #: The ``lhs`` operand to be resolved into the resultant :class:`~iris.cube.Cube`. - self.lhs_cube = None # set in _call__ + self.lhs_cube = None # set in __call__ #: The ``rhs`` operand to be resolved into the resultant :class:`~iris.cube.Cube`. self.rhs_cube = None # set in __call__ @@ -294,6 +294,25 @@ def __init__(self, lhs=None, rhs=None): self(lhs, rhs) def __call__(self, lhs, rhs): + """ + Resolve the ``lhs`` :class:`~iris.cube.Cube` operand and ``rhs`` + :class:`~iris.cube.Cube` operand metadata. + + Involves determining all the common coordinate metadata shared between + the operands, and the metadata that is local to each operand. Given + the common metadata, the broadcast shape of the resultant resolved + :class:`~iris.cube.Cube`, which may be auto-transposed, can be + determined. + + Args: + + * lhs: + The left-hand-side :class:`~iris.cube.Cube` operand. + + * rhs: + The right-hand-side :class:`~iris.cube.Cube` operand. + + """ from iris.cube import Cube emsg = ( @@ -338,11 +357,31 @@ def __call__(self, lhs, rhs): return self def _as_compatible_cubes(self): + """ + Determine whether the ``src`` and ``tgt`` :class:`~iris.cube.Cube` can + be transposed and/or broadcast successfully together. + + If compatible, the ``_broadcast_shape`` of the resultant resolved cube is + calculated, and the ``_src_cube_resolved`` (transposed/broadcast ``src`` + cube) and ``_tgt_cube_resolved`` (same as the ``tgt`` cube) are + calculated. + + An exception will be raised if the ``src`` and ``tgt`` cannot be + broadcast, even after a suitable transpose has been performed. + + .. 
note:: + + Requires that **all** ``src`` cube dimensions have been mapped + successfully to an appropriate ``tgt`` cube dimension. + + """ from iris.cube import Cube src_cube = self._src_cube tgt_cube = self._tgt_cube + assert src_cube.ndim == len(self.mapping) + # Use the mapping to calculate the new src cube shape. new_src_shape = [1] * tgt_cube.ndim for src_dim, tgt_dim in self.mapping.items(): @@ -430,6 +469,40 @@ def _aux_coverage( common_aux_metadata, common_scalar_metadata, ): + """ + Determine the dimensions covered by each of the local and common + auxiliary coordinates of the provided :class:`~iris.cube.Cube`. + + The cube dimensions not covered by any of the auxiliary coordinates is + also determined; these are known as `free` dimensions. + + The scalar coordinates local to the cube are also determined. + + Args: + + * cube: + The :class:`~iris.cube.Cube` to be analysed for coverage. + + * cube_items_aux: + The list of associated :class:`~iris.common.resolve._Item` metadata + for each auxiliary coordinate owned by the cube. + + * cube_items_scalar: + The list of associated :class:`~iris.common.resolve._Item` metadata + for each scalar coordinate owned by the cube. + + * common_aux_metadata: + The list of common auxiliary coordinate metadata shared by both + the LHS and RHS cube operands being resolved. + + * common_scalar_metadata: + The list of common scalar coordinate metadata shared by both + the LHS and RHS cube operands being resolved. + + Returns: + :class:`~iris.common.resolve._AuxCoverage` + + """ common_items_aux = [] common_items_scalar = [] local_items_aux = [] @@ -465,7 +538,33 @@ def _aux_coverage( dims_free=sorted(dims_free), ) - def _aux_mapping(self, src_coverage, tgt_coverage): + @staticmethod + def _aux_mapping(src_coverage, tgt_coverage): + """ + Establish the mapping of dimensions from the ``src`` to ``tgt`` + :class:`~iris.cube.Cube` using the auxiliary coordinate metadata + common between each of the operands. 
+ + The ``src`` to ``tgt`` common auxiliary coordinate mapping is held by + the :attr:`~iris.common.resolve.Resolve.mapping`. + + Args: + + * src_coverage: + The :class:`~iris.common.resolve._DimCoverage` of the ``src`` + :class:`~iris.cube.Cube` i.e., map from the common ``src`` + dimensions. + + * tgt_coverage: + The :class:`~iris.common.resolve._DimCoverage` of the ``tgt`` + :class:`~iris.cube.Cube` i.e., map to the common ``tgt`` + dimensions. + + Returns: + Dictionary of ``src`` to ``tgt`` dimension mapping. + + """ + mapping = {} for tgt_item in tgt_coverage.common_items_aux: # Search for a src aux metadata match. tgt_metadata = tgt_item.metadata @@ -484,7 +583,7 @@ def _aux_mapping(self, src_coverage, tgt_coverage): tgt_dims = tgt_item.dims if len(src_dims) == len(tgt_dims): for src_dim, tgt_dim in zip(src_dims, tgt_dims): - self.mapping[src_dim] = tgt_dim + mapping[src_dim] = tgt_dim logger.debug(f"{src_dim}->{tgt_dim}") else: # This situation can only occur due to a systemic internal @@ -504,9 +603,26 @@ def _aux_mapping(self, src_coverage, tgt_coverage): tgt_item.dims, ) ) + return mapping @staticmethod def _categorise_items(cube): + """ + Inspect the provided :class:`~iris.cube.Cube` and group its + coordinates and associated metadata into dimension, auxiliary and + scalar categories. + + Args: + + * cube: + The :class:`~iris.cube.Cube` that will have its coordinates and + metadata grouped into their associated dimension, auxiliary and + scalar categories. + + Returns: + :class:`~iris.common.resolve._CategoryItems` + + """ category = _CategoryItems(items_dim=[], items_aux=[], items_scalar=[]) # Categorise the dim coordinates of the cube. 
@@ -530,15 +646,40 @@ def _categorise_items(cube): return category @staticmethod - def _create_prepared_item(coord, dims, src=None, tgt=None): - if src is not None and tgt is not None: - combined = src.combine(tgt) + def _create_prepared_item( + coord, dims, src_metadata=None, tgt_metadata=None + ): + """ + Convenience method that creates a :class:`~iris.common.resolve._PreparedItem` + containing the data and metadata required to construct and attach a coordinate + to the resultant resolved cube. + + Args: + + * coord: + The coordinate with the ``points`` and ``bounds`` to be extracted. + + * dims: + The dimensions that the ``coord`` spans on the resulting resolved :class:`~iris.cube.Cube`. + + * src_metadata: + The coordinate metadata from the ``src`` :class:`~iris.cube.Cube`. + + * tgt_metadata: + The coordinate metadata from the ``tgt`` :class:`~iris.cube.Cube`. + + Returns: + The :class:`~iris.common.resolve._PreparedItem`. + + """ + if src_metadata is not None and tgt_metadata is not None: + combined = src_metadata.combine(tgt_metadata) else: - combined = src or tgt + combined = src_metadata or tgt_metadata if not isinstance(dims, Iterable): dims = (dims,) prepared_metadata = _PreparedMetadata( - combined=combined, src=src, tgt=tgt + combined=combined, src=src_metadata, tgt=tgt_metadata ) bounds = coord.bounds result = _PreparedItem( @@ -573,6 +714,30 @@ def _show(items, heading): @staticmethod def _dim_coverage(cube, cube_items_dim, common_dim_metadata): + """ + Determine the dimensions covered by each of the local and common + dimension coordinates of the provided :class:`~iris.cube.Cube`. + + The cube dimensions not covered by any of the dimension coordinates is + also determined; these are known as `free` dimensions. + + Args: + + * cube: + The :class:`~iris.cube.Cube` to be analysed for coverage. + + * cube_items_dim: + The list of associated :class:`~iris.common.resolve._Item` metadata + for each dimension coordinate owned by the cube. 
+ + * common_dim_metadata: + The list of common dimension coordinate metadata shared by both + the LHS and RHS cube operands being resolved. + + Returns: + :class:`~iris.common.resolve._DimCoverage` + + """ ndim = cube.ndim metadata = [None] * ndim coords = [None] * ndim @@ -599,13 +764,39 @@ def _dim_coverage(cube, cube_items_dim, common_dim_metadata): dims_free=sorted(dims_free), ) - def _dim_mapping(self, src_coverage, tgt_coverage): + @staticmethod + def _dim_mapping(src_coverage, tgt_coverage): + """ + Establish the mapping of dimensions from the ``src`` to ``tgt`` + :class:`~iris.cube.Cube` using the dimension coordinate metadata + common between each of the operands. + + The ``src`` to ``tgt`` common dimension coordinate mapping is held by + the :attr:`~iris.common.resolve.Resolve.mapping`. + + Args: + + * src_coverage: + The :class:`~iris.common.resolve._DimCoverage` of the ``src`` + :class:`~iris.cube.Cube` i.e., map from the common ``src`` + dimensions. + + * tgt_coverage: + The :class:`~iris.common.resolve._DimCoverage` of the ``tgt`` + :class:`~iris.cube.Cube` i.e., map to the common ``tgt`` + dimensions. + + Returns: + Dictionary of ``src`` to ``tgt`` dimension mapping. + + """ + mapping = {} for tgt_dim in tgt_coverage.dims_common: # Search for a src dim metadata match. 
tgt_metadata = tgt_coverage.metadata[tgt_dim] try: src_dim = src_coverage.metadata.index(tgt_metadata) - self.mapping[src_dim] = tgt_dim + mapping[src_dim] = tgt_dim logger.debug(f"{src_dim}->{tgt_dim}") except ValueError: # This exception can only occur due to a systemic internal @@ -621,9 +812,10 @@ def _dim_mapping(self, src_coverage, tgt_coverage): src_coverage.cube.name(), tgt_coverage.cube.name(), tgt_metadata, - tuple([tgt_dim]), + (tgt_dim,), ) ) + return mapping def _free_mapping( self, @@ -632,6 +824,57 @@ def _free_mapping( src_aux_coverage, tgt_aux_coverage, ): + """ + Attempt to update the :attr:`~iris.common.resolve.Resolve.mapping` with + ``src`` to ``tgt`` :class:`~iris.cube.Cube` mappings from unmapped ``src`` + dimensions that are free from coordinate metadata coverage to ``tgt`` + dimensions that have local metadata coverage (i.e., is not common between + the ``src`` and ``tgt``) or dimensions that are free from coordinate + metadata coverage. + + If the ``src`` :class:`~iris.cube.Cube` does not have any free dimensions, + the attempt to map unmapped ``tgt`` dimensions that have local metadata + coverage to ``src`` dimensions that are free from coordinate metadata + coverage. + + An exception will be raised if there are any ``src`` :class:`~iris.cube.Cube` + dimensions not mapped to an associated ``tgt`` dimension. + + Args: + + * src_dim_coverage: + The :class:`~iris.common.resolve.._DimCoverage` of the ``src`` + :class:`~iris.cube.Cube`. + + * tgt_dim_coverage: + The :class:`~iris.common.resolve.._DimCoverage` of the ``tgt`` + :class:`~iris.cube.Cube`. + + * src_aux_coverage: + The :class:`~iris.common.resolve._AuxCoverage` of the ``src`` + :class:`~iris.cube.Cube`. + + * tgt_aux_coverage: + The :class:`~iris.common.resolve._AuxCoverage` of the ``tgt`` + :class:`~iris.cube.Cube`. + + .. note:: + + All unmapped dimensions with an extend >1 are mapped before those + with an extent of 1, as such dimensions cannot be broadcast. 
It + is important to map specific non-broadcastable dimensions before + generic broadcastable dimensions otherwise we are open to failing to + map all the src dimensions as a generic src broadcast dimension has + been mapped to the only tgt dimension that a specific non-broadcastable + dimension can be mapped to. + + .. note:: + + A local dimension cannot be mapped to another local dimension, + by definition, otherwise this dimension would be classed as a + common dimension. + + """ src_cube = src_dim_coverage.cube tgt_cube = tgt_dim_coverage.cube src_ndim = src_cube.ndim @@ -663,11 +906,16 @@ def _free_mapping( tgt_shape = tgt_cube.shape src_max, tgt_max = max(src_shape), max(tgt_shape) - def assign_mapping(extent, unmapped_local_items, free_items=None): + def _assign_mapping(extent, unmapped_local_items, free_items=None): result = None if free_items is None: free_items = [] if extent == 1: + # Map to the first available unmapped local dimension or + # the first available free dimension. + # Dimension shape doesn't matter here as the extent is 1, + # therefore broadcasting will take care of any discrepency + # between src and tgt dimension extent. if unmapped_local_items: result, _ = unmapped_local_items.pop(0) elif free_items: @@ -680,10 +928,10 @@ def _filter(items): ) def _pop(item, items): - result, _ = item + dim, _ = item index = items.index(item) items.pop(index) - return result + return dim items = _filter(unmapped_local_items) if items: @@ -700,11 +948,12 @@ def _pop(item, items): (dim, tgt_shape[dim]) for dim in tgt_unmapped_local ] tgt_free_items = [(dim, tgt_shape[dim]) for dim in tgt_free] + # Sort by decreasing src dimension extent and increasing src dimension + # as we want broadcast src dimensions to be mapped last. 
+ src_key_func = lambda dim: (src_max - src_shape[dim], dim) - for src_dim in sorted( - src_free, key=lambda dim: (src_max - src_shape[dim], dim) - ): - tgt_dim = assign_mapping( + for src_dim in sorted(src_free, key=src_key_func): + tgt_dim = _assign_mapping( src_shape[src_dim], tgt_unmapped_local_items, tgt_free_items, @@ -725,11 +974,12 @@ def _pop(item, items): src_unmapped_local_items = [ (dim, src_shape[dim]) for dim in src_unmapped_local ] + # Sort by decreasing tgt dimension extent and increasing tgt dimension + # as we want broadcast tgt dimensions to be mapped last. + tgt_key_func = lambda dim: (tgt_max - tgt_shape[dim], dim) - for tgt_dim in sorted( - tgt_free, key=lambda dim: (tgt_max - tgt_shape[dim], dim) - ): - src_dim = assign_mapping( + for tgt_dim in sorted(tgt_free, key=tgt_key_func): + src_dim = _assign_mapping( tgt_shape[tgt_dim], src_unmapped_local_items ) if src_dim is not None: @@ -758,6 +1008,17 @@ def _pop(item, items): logger.debug(f"mapping free dimensions gives, mapping={self.mapping}") def _metadata_coverage(self): + """ + Using the pre-categorised metadata of the cubes, determine the dimensions + covered by their associated dimension and auxiliary coordinates, and which + dimensions are free of metadata coverage. + + This coverage analysis clarifies how the dimensions covered by common + metadata are related, thus establishing a dimensional mapping between + the cubes. It also identifies the dimensions covered by metadata that + is local to each cube, and indeed which dimensions are free of metadata. + + """ # Determine the common dim coordinate metadata coverage. common_dim_metadata = [ item.metadata for item in self.category_common.items_dim @@ -798,6 +1059,37 @@ def _metadata_coverage(self): ) def _metadata_mapping(self): + """ + Ensure that each ``src`` :class:`~iris.cube.Cube` dimension is mapped to an associated + ``tgt`` :class:`~iris.cube.Cube` dimension using the common dim and aux coordinate metadata. 
+ + If the common metadata does not result in a full mapping of ``src`` to ``tgt`` dimensions + then free dimensions are analysed to determine whether the mapping can be completed. + + Once the ``src`` has been mapped to the ``tgt``, the cubes are checked to ensure that they + will successfully broadcast, and the ``src`` :class:`~iris.cube.Cube` is transposed appropriately, + if necessary. + + The :attr:`~iris.common.resolve.Resolve._broadcast_shape` is set, along with the + :attr:`~iris.common.resolve.Resolve._src_cube_resolved` and :attr:`~iris.common.resolve.Resolve._tgt_cube_resolved`, + which are the broadcast/transposed ``src`` and ``tgt``. + + .. note:: + + An exception will be raised if a ``src`` dimension cannot be mapped to a ``tgt`` dimension. + + .. note:: + + An exception will be raised if the full mapped ``src`` :class:`~iris.cube.Cube` cannot be + broadcast or transposed with the ``tgt`` :class:`~iris.cube.Cube`. + + .. note:: + + The ``src`` and ``tgt`` may be swapped in the case where they both have equal dimensionality and + the ``tgt`` does have the same shape as the resolved broadcast shape (and the ``src`` does) or + the ``tgt`` has more free dimensions than the ``src``. + + """ # Initialise the state. self.mapping = {} @@ -819,7 +1111,9 @@ def _metadata_mapping(self): # Use the dim coordinates to fully map the # src cube dimensions to the tgt cube dimensions. - self._dim_mapping(src_dim_coverage, tgt_dim_coverage) + self.mapping.update( + self._dim_mapping(src_dim_coverage, tgt_dim_coverage) + ) logger.debug( f"mapping common dim coordinates gives, mapping={self.mapping}" ) @@ -827,7 +1121,9 @@ def _metadata_mapping(self): # If necessary, use the aux coordinates to fully map the # src cube dimensions to the tgt cube dimensions. 
if not self.mapped: - self._aux_mapping(src_aux_coverage, tgt_aux_coverage) + self.mapping.update( + self._aux_mapping(src_aux_coverage, tgt_aux_coverage) + ) logger.debug( f"mapping common aux coordinates, mapping={self.mapping}" ) @@ -886,6 +1182,12 @@ def _metadata_mapping(self): self._as_compatible_cubes() def _metadata_prepare(self): + """ + Populate the :attr:`~iris.common.resolve.Resolve.prepared_category` and + :attr:`~iris.common.resolve.Resolve.prepared_factories` with the necessary metadata to be constructed + and attached to the resulting resolved :class:`~iris.cube.Cube`. + + """ # Initialise the state. self.prepared_category = _CategoryItems( items_dim=[], items_aux=[], items_scalar=[] @@ -1053,6 +1355,41 @@ def _prepare_common_aux_payload( prepared_items, ignore_mismatch=None, ): + """ + Populate the ``prepared_items`` with a :class:`~iris.common.resolve._PreparedItem` containing + the necessary metadata for each auxiliary coordinate to be constructed and attached to the + resulting resolved :class:`~iris.cube.Cube`. + + .. note:: + + For mixed ``src`` and ``tgt`` coordinate types with matching metadata, an + :class:`~iris.coords.AuxCoord` will be nominated for construction. + + Args: + + * src_common_items: + The list of :attr:`~iris.common.resolve._AuxCoverage.common_items_aux` metadata + for the ``src`` :class:`~iris.cube.Cube`. + + * tgt_common_items: + The list of :attr:`~iris.common.resolve._AuxCoverage.common_items_aux` metadata + for the ``tgt`` :class:`~iris.cube.Cube`. + + * prepared_items: + The list of :class:`~iris.common.resolve._PreparedItem` metadata that will be used + to construct the auxiliary coordinates that will be attached to the resulting + resolved :class:`~iris.cube.Cube`. + + Kwargs: + + * ignore_mismatch: + When ``False``, an exception will be raised if a difference is detected between corresponding + ``src`` and ``tgt`` coordinate ``points`` and/or ``bounds``. 
+ When ``True``, the coverage metadata is ignored i.e., a coordinate will not be constructed and + added to the resulting resolved :class:`~iris.cube.Cube`. + Defaults to ``False``. + + """ from iris.coords import AuxCoord if ignore_mismatch is None: @@ -1115,6 +1452,30 @@ def _prepare_common_aux_payload( def _prepare_common_dim_payload( self, src_coverage, tgt_coverage, ignore_mismatch=None ): + """ + Populate the ``items_dim`` member of :attr:`~iris.common.resolve.Resolve.prepared_category_items` + with a :class:`~iris.common.resolve._PreparedItem` containing the necessary metadata for + each :class:`~iris.coords.DimCoord` to be constructed and attached to the resulting resolved + :class:`~iris.cube.Cube`. + + Args: + + * src_coverage: + The :class:`~iris.common.resolve._DimCoverage` metadata for the ``src`` :class:`~iris.cube.Cube`. + + * tgt_coverage: + The :class:`~iris.common.resolve._DimCoverage` metadata for the ``tgt`` :class:`~iris.cube.Cube`. + + Kwargs: + + * ignore_mismatch: + When ``False``, an exception will be raised if a difference is detected between corresponding + ``src`` and ``tgt`` :class:`~iris.coords.DimCoord` ``points`` and/or ``bounds``. + When ``True``, the coverage metadata is ignored i.e., a :class:`~iris.coords.DimCoord` will not + be constructed and added to the resulting resolved :class:`~iris.cube.Cube`. + Defaults to ``False``. 
+ + """ from iris.coords import DimCoord if ignore_mismatch is None: @@ -1153,55 +1514,123 @@ def _prepare_common_dim_payload( ) self.prepared_category.items_dim.append(prepared_item) - def _prepare_factory_payload(self, cube, category_local, from_src=True): - def _get_prepared_item(metadata, from_src=True, from_local=False): - result = None - if from_local: - category = category_local - match = lambda item: item.metadata == metadata + def _get_prepared_item( + self, metadata, category_local, from_src=True, from_local=False + ): + """ + Find the :attr:`~iris.common.resolve._PreparedItem` from the + :attr:`~iris.common.resolve.Resolve.prepared_category` that matches the provided ``metadata``. + + Alternatively, the ``category_local`` is searched to find a :class:`~iris.common.resolve._Item` + with matching ``metadata`` from either the local ``src`` or ``tgt`` :class:`~iris.cube.Cube`. + If a match is found, then a new `~iris.common.resolve._PreparedItem` is created and added to + :attr:`~iris.common.resolve.Resolve.prepared_category` and returned. See ``from_local``. + + Args: + + * metadata: + The target metadata of the prepared (or local) item to retrieve. + + * category_local: + The :class:`~iris.common.resolve._CategoryItems` containing the + local metadata of either the ``src`` or ``tgt`` :class:`~iris.cube.Cube`. + See ``from_local``. + + Kwargs: + + * from_src: + Boolean stating whether the ``metadata`` is from the ``src`` (``True``) + or ``tgt`` :class:`~iris.cube.Cube`. + Defaults to ``True``. + + * from_local: + Boolean controlling whether the ``metadata`` is used to search the + ``category_local`` (``True``) or the :attr:`~iris.common.resolve.Resolve.prepared_category`. + Defaults to ``False``. + + Returns: + The :class:`~iris.common.resolve._PreparedItem` matching the provided ``metadata``. 
+ + """ + result = None + + if from_local: + category = category_local + match = lambda item: item.metadata == metadata + else: + category = self.prepared_category + if from_src: + match = lambda item: item.metadata.src == metadata else: - category = self.prepared_category - if from_src: - match = lambda item: item.metadata.src == metadata + match = lambda item: item.metadata.tgt == metadata + + for member in category._fields: + category_items = getattr(category, member) + matched_items = tuple(filter(match, category_items)) + if matched_items: + if len(matched_items) > 1: + dmsg = ( + f"ignoring factory dependency {metadata}, multiple {'src' if from_src else 'tgt'} " + f"{'local' if from_local else 'prepared'} metadata matches" + ) + logger.debug(dmsg) else: - match = lambda item: item.metadata.tgt == metadata - for member in category._fields: - category_items = getattr(category, member) - matched_items = tuple(filter(match, category_items)) - if matched_items: - if len(matched_items) > 1: - dmsg = ( - f"ignoring factory dependency {metadata}, multiple {'src' if from_src else 'tgt'} " - f"{'local' if from_local else 'prepared'} metadata matches" - ) - logger.debug(dmsg) - else: - (item,) = matched_items - if from_local: - src = tgt = None - if from_src: - src = item.metadata - dims = tuple( - [self.mapping[dim] for dim in item.dims] - ) - else: - tgt = item.metadata - dims = item.dims - result = self._create_prepared_item( - item.coord, dims, src=src, tgt=tgt - ) - getattr(self.prepared_category, member).append( - result + (item,) = matched_items + if from_local: + src = tgt = None + if from_src: + src = item.metadata + dims = tuple( + [self.mapping[dim] for dim in item.dims] ) else: - result = item - break - return result + tgt = item.metadata + dims = item.dims + result = self._create_prepared_item( + item.coord, + dims, + src_metadata=src, + tgt_metadata=tgt, + ) + getattr(self.prepared_category, member).append(result) + else: + result = item + break + return 
result + + def _prepare_factory_payload(self, cube, category_local, from_src=True): + """ + Populate the :attr:`~iris.common.resolve.Resolve.prepared_factories` with a :class:`~iris.common.resolve._PreparedFactory` + containing the necessary metadata for each ``src`` and/or ``tgt`` auxiliary factory to be constructed and + attached to the resulting resolved :class:`~iris.cube.Cube`. + + .. note:: + + The required dependencies of an auxiliary factory may not all be available in the + :attr:`~iris.common.resolve.Resolve.prepared_category` and therefore this is a legitimate + reason to add the associated metadata of the local dependency to the ``prepared_category``. + + Args: + + * cube: + The :class:`~iris.cube.Cube` that may contain an auxiliary factory to be prepared. + + * category_local: + The :class:`~iris.common.resolve._CategoryItems` of all metadata local to the provided ``cube``. + Kwargs: + + * from_src: + Boolean stating whether the provided ``cube`` is either a ``src`` or ``tgt`` + :class:`~iris.cube.Cube` - used to retrieve the appropriate metadata from a + :class:`~iris.common.resolve._PreparedMetadata`. 
+ + """ for factory in cube.aux_factories: container = type(factory) dependencies = {} prepared_item = None + found = True if tuple( filter( @@ -1222,18 +1651,24 @@ def _get_prepared_item(metadata, from_src=True, from_local=False): dependency_coord, ) in factory.dependencies.items(): metadata = dependency_coord.metadata - prepared_item = _get_prepared_item(metadata, from_src=from_src) + prepared_item = self._get_prepared_item( + metadata, category_local, from_src=from_src + ) if prepared_item is None: - prepared_item = _get_prepared_item( - metadata, from_src=from_src, from_local=True + prepared_item = self._get_prepared_item( + metadata, + category_local, + from_src=from_src, + from_local=True, ) if prepared_item is None: dmsg = f"cannot find matching {metadata} for {container} dependency {dependency_name}" logger.debug(dmsg) + found = False break dependencies[dependency_name] = prepared_item.metadata - if prepared_item is not None: + if found and prepared_item is not None: prepared_factory = _PreparedFactory( container=container, dependencies=dependencies ) @@ -1243,6 +1678,29 @@ def _get_prepared_item(metadata, from_src=True, from_local=False): logger.debug(dmsg) def _prepare_local_payload_aux(self, src_aux_coverage, tgt_aux_coverage): + """ + Populate the ``items_aux`` member of :attr:`~iris.common.resolve.Resolve.prepared_category_items` + with a :class:`~iris.common.resolve._PreparedItem` containing the necessary metadata for each + ``src`` or ``tgt`` local auxiliary coordinate to be constructed and attached to the resulting + resolved :class:`~iris.cube.Cube`. + + .. note:: + + In general, lenient behaviour subscribes to the philosophy that it is easier to remove + metadata than it is to find then add metadata. To those ends, lenient behaviour supports + metadata richness by adding both local ``src`` and ``tgt`` auxiliary coordinates. 
+ Alternatively, strict behaviour will only add a ``tgt`` local auxiliary coordinate that + spans dimensions not mapped to by the ``src`` e.g., extra ``tgt`` dimensions. + + Args: + + * src_aux_coverage: + The :class:`~iris.common.resolve.Resolve._AuxCoverage` for the ``src`` :class:`~iris.cube.Cube`. + + * tgt_aux_coverage: + The :class:~iris.common.resolve.Resolve._AuxCoverage` for the ``tgt`` :class:`~iris.cube.Cube`. + + """ # Determine whether there are tgt dimensions not mapped to by an # associated src dimension, and thus may be covered by any local # tgt aux coordinates. @@ -1259,7 +1717,7 @@ def _prepare_local_payload_aux(self, src_aux_coverage, tgt_aux_coverage): if all([dim in mapped_src_dims for dim in item.dims]): tgt_dims = tuple([self.mapping[dim] for dim in item.dims]) prepared_item = self._create_prepared_item( - item.coord, tgt_dims, src=item.metadata + item.coord, tgt_dims, src_metadata=item.metadata ) self.prepared_category.items_aux.append(prepared_item) else: @@ -1281,7 +1739,7 @@ def _prepare_local_payload_aux(self, src_aux_coverage, tgt_aux_coverage): [dim in extra_tgt_dims for dim in tgt_dims] ): prepared_item = self._create_prepared_item( - item.coord, tgt_dims, tgt=item.metadata + item.coord, tgt_dims, tgt_metadata=item.metadata ) self.prepared_category.items_aux.append(prepared_item) else: @@ -1293,6 +1751,28 @@ def _prepare_local_payload_aux(self, src_aux_coverage, tgt_aux_coverage): logger.debug(dmsg) def _prepare_local_payload_dim(self, src_dim_coverage, tgt_dim_coverage): + """ + Populate the ``items_dim`` member of :attr:`~iris.common.resolve.Resolve.prepared_category_items` + with a :class:`~iris.common.resolve._PreparedItem` containing the necessary metadata for each + ``src`` or ``tgt`` local :class:`~iris.coords.DimCoord` to be constructed and attached to the + resulting resolved :class:`~iris.cube.Cube`. + + .. 
note:: + + In general, a local coordinate will only be added if there is no other metadata competing + to describe the same dimension/s on the ``tgt`` :class:`~iris.cube.Cube`. Lenient behaviour + is more liberal, whereas strict behaviour will only add a local ``tgt`` coordinate covering + an unmapped "extra" ``tgt`` dimension/s. + + Args: + + * src_dim_coverage: + The :class:`~iris.common.resolve.Resolve._DimCoverage` for the ``src`` :class:`~iris.cube.Cube`. + + * tgt_dim_coverage: + The :class:`~iris.common.resolve.Resolve._DimCoverage` for the ``tgt`` :class:`~iris.cube.Cube`. + + """ mapped_tgt_dims = self.mapping.values() # Determine whether there are tgt dimensions not mapped to by an @@ -1314,7 +1794,7 @@ def _prepare_local_payload_dim(self, src_dim_coverage, tgt_dim_coverage): metadata = src_dim_coverage.metadata[src_dim] coord = src_dim_coverage.coords[src_dim] prepared_item = self._create_prepared_item( - coord, tgt_dim, src=metadata + coord, tgt_dim, src_metadata=metadata ) self.prepared_category.items_dim.append(prepared_item) else: @@ -1347,13 +1827,36 @@ def _prepare_local_payload_dim(self, src_dim_coverage, tgt_dim_coverage): if metadata is not None: coord = tgt_dim_coverage.coords[tgt_dim] prepared_item = self._create_prepared_item( - coord, tgt_dim, tgt=metadata + coord, tgt_dim, tgt_metadata=metadata ) self.prepared_category.items_dim.append(prepared_item) def _prepare_local_payload_scalar( self, src_aux_coverage, tgt_aux_coverage ): + """ + Populate the ``items_scalar`` member of :attr:`~iris.common.resolve.Resolve.prepared_category_items` + with a :class:`~iris.common.resolve._PreparedItem` containing the necessary metadata for each + ``src`` or ``tgt`` local scalar coordinate to be constructed and attached to the resulting + resolved :class:`~iris.cube.Cube`. + + .. note:: + + In general, lenient behaviour subscribes to the philosophy that it is easier to remove + metadata than it is to find then add metadata. 
To those ends, lenient behaviour supports + metadata richness by adding both local ``src`` and ``tgt`` scalar coordinates. + Alternatively, strict behaviour will only add a ``tgt`` local scalar coordinate when the + ``src`` is a scalar :class:`~iris.cube.Cube` with no local scalar coordinates. + + Args: + + * src_aux_coverage: + The :class:`~iris.common.resolve.Resolve._AuxCoverage` for the ``src`` :class:`~iris.cube.Cube`. + + * tgt_aux_coverage: + The :class:~iris.common.resolve.Resolve._AuxCoverage` for the ``tgt`` :class:`~iris.cube.Cube`. + + """ # Add all local tgt scalar coordinates iff the src cube is a # scalar cube with no local src scalar coordinates. # Only for strict maths. @@ -1367,14 +1870,14 @@ def _prepare_local_payload_scalar( # Add any local src scalar coordinates, if available. for item in src_aux_coverage.local_items_scalar: prepared_item = self._create_prepared_item( - item.coord, item.dims, src=item.metadata + item.coord, item.dims, src_metadata=item.metadata ) self.prepared_category.items_scalar.append(prepared_item) # Add any local tgt scalar coordinates, if available. for item in tgt_aux_coverage.local_items_scalar: prepared_item = self._create_prepared_item( - item.coord, item.dims, tgt=item.metadata + item.coord, item.dims, tgt_metadata=item.metadata ) self.prepared_category.items_scalar.append(prepared_item) @@ -1385,6 +1888,27 @@ def _prepare_local_payload( tgt_dim_coverage, tgt_aux_coverage, ): + """ + Populate the :attr:`~iris.common.resolve.Resolve.prepared_category_items` with a + :class:`~iris.common.resolve._PreparedItem` containing the necessary metadata from the ``src`` + and/or ``tgt`` :class:`~iris.cube.Cube` for each coordinate to be constructed and attached + to the resulting resolved :class:`~iris.cube.Cube`. + + Args: + + * src_dim_coverage: + The :class:`~iris.common.resolve.Resolve._DimCoverage` for the ``src`` :class:`~iris.cube.Cube`. 
+ + * src_aux_coverage: + The :class:`~iris.common.resolve.Resolve._AuxCoverage` for the ``src`` :class:`~iris.cube.Cube`. + + * tgt_dim_coverage: + The :class:`~iris.common.resolve.Resolve._DimCoverage` for the ``tgt`` :class:`~iris.cube.Cube`. + + * tgt_aux_coverage: + The :class:~iris.common.resolve.Resolve._AuxCoverage` for the ``tgt`` :class:`~iris.cube.Cube`. + + """ # Add local src/tgt dim coordinates. self._prepare_local_payload_dim(src_dim_coverage, tgt_dim_coverage) @@ -1397,6 +1921,47 @@ def _prepare_local_payload( def _prepare_points_and_bounds( self, src_coord, tgt_coord, src_dims, tgt_dims, ignore_mismatch=None ): + """ + Compare the points and bounds of the ``src`` and ``tgt`` coordinates to ensure + that they are equivalent, taking into account broadcasting when appropriate. + + .. note:: + + An exception will be raised if the ``src`` and ``tgt`` coordinates cannot + be broadcast. + + .. note:: + + An exception will be raised if either the points or bounds are different, + however appropriate lenient behaviour concessions are applied. + + Args: + + * src_coord: + The ``src`` :class:`~iris.cube.Cube` coordinate with metadata matching + the ``tgt_coord``. + + * tgt_coord: + The ``tgt`` :class`~iris.cube.Cube` coordinate with metadata matching + the ``src_coord``. + + * src_dims: + The dimension/s of the ``src_coord`` attached to the ``src`` :class:`~iris.cube.Cube`. + + * tgt_dims: + The dimension/s of the ``tgt_coord`` attached to the ``tgt`` :class:`~iris.cube.Cube`. + + Kwargs: + + * ignore_mismatch: + For lenient behaviour only, don't raise an exception if there is a difference between + the ``src`` and ``tgt`` coordinate points or bounds. + Defaults to ``False``. + + Returns: + Tuple of equivalent ``points`` and ``bounds``, otherwise ``None``. 
+ + """ from iris.util import array_equal if ignore_mismatch is None: @@ -1443,6 +2008,7 @@ def _prepare_points_and_bounds( tgt_broadcasting = tgt_shape != tgt_shape_broadcast if src_broadcasting and tgt_broadcasting: + # TBD: Extend capability to support attempting to broadcast two-way multi-dimensional coordinates. emsg = ( f"Cannot broadcast the coordinate {src_coord.name()!r} on " f"{self._src_cube_position} cube {self._src_cube.name()!r} and " diff --git a/lib/iris/coords.py b/lib/iris/coords.py index 7ec94f1d2c7..086ea30f58c 100644 --- a/lib/iris/coords.py +++ b/lib/iris/coords.py @@ -12,7 +12,6 @@ from collections import namedtuple from collections.abc import Iterator import copy -from functools import wraps from itertools import chain, zip_longest import operator import warnings @@ -1272,7 +1271,7 @@ def contains_point(self, point): class Coord(_DimensionalMetadata): """ - Superclass for coordinates. + Abstract base class for coordinates. """ @@ -1291,7 +1290,7 @@ def __init__( ): """ - Constructs a single coordinate. + Coordinate abstract base class. As of ``v3.0.0`` you **cannot** create an instance of :class:`Coord`. Args: @@ -1313,17 +1312,17 @@ def __init__( * bounds An array of values describing the bounds of each cell. Given n bounds for each cell, the shape of the bounds array should be - points.shape + (n,). For example, a 1d coordinate with 100 points + points.shape + (n,). For example, a 1D coordinate with 100 points and two bounds per cell would have a bounds array of shape (100, 2) Note if the data is a climatology, `climatological` should be set. * attributes - A dictionary containing other cf and user-defined attributes. + A dictionary containing other CF and user-defined attributes. * coord_system A :class:`~iris.coord_systems.CoordSystem` representing the coordinate system of the coordinate, - e.g. a :class:`~iris.coord_systems.GeogCS` for a longitude Coord. + e.g., a :class:`~iris.coord_systems.GeogCS` for a longitude coordinate. 
* climatological (bool): When True: the coordinate is a NetCDF climatological time axis. When True: saving in NetCDF will give the coordinate variable a @@ -2250,7 +2249,8 @@ def _xml_id_extra(self, unique_value): class DimCoord(Coord): """ - A coordinate that is 1D, numeric, and strictly monotonic. + A coordinate that is 1D, and numeric, with values that have a strict monotonic ordering. Missing values are not + permitted in a :class:`DimCoord`. """ @@ -2275,7 +2275,7 @@ def from_regular( optionally bounds. The majority of the arguments are defined as for - :meth:`Coord.__init__`, but those which differ are defined below. + :class:`Coord`, but those which differ are defined below. Args: @@ -2336,8 +2336,9 @@ def __init__( climatological=False, ): """ - Create a 1D, numeric, and strictly monotonic :class:`Coord` with - read-only points and bounds. + Create a 1D, numeric, and strictly monotonic coordinate with **immutable** points and bounds. + + Missing values are not permitted. Args: @@ -2369,11 +2370,11 @@ def __init__( Note if the data is a climatology, `climatological` should be set. * attributes: - A dictionary containing other cf and user-defined attributes. + A dictionary containing other CF and user-defined attributes. * coord_system: A :class:`~iris.coord_systems.CoordSystem` representing the coordinate system of the coordinate, - e.g. a :class:`~iris.coord_systems.GeogCS` for a longitude Coord. + e.g., a :class:`~iris.coord_systems.GeogCS` for a longitude coordinate. * circular (bool): Whether the coordinate wraps by the :attr:`~iris.coords.DimCoord.units.modulus` i.e., the longitude coordinate wraps around the full great circle. @@ -2624,15 +2625,54 @@ class AuxCoord(Coord): """ A CF auxiliary coordinate. - .. note:: - - There are currently no specific properties of :class:`AuxCoord`, - everything is inherited from :class:`Coord`. 
- """ - @wraps(Coord.__init__, assigned=("__doc__",), updated=()) def __init__(self, *args, **kwargs): + """ + Create a coordinate with **mutable** points and bounds. + + Args: + + * points: + The values (or value in the case of a scalar coordinate) for each + cell of the coordinate. + + Kwargs: + + * standard_name: + CF standard name of the coordinate. + * long_name: + Descriptive name of the coordinate. + * var_name: + The netCDF variable name for the coordinate. + * units + The :class:`~cf_units.Unit` of the coordinate's values. + Can be a string, which will be converted to a Unit object. + * bounds + An array of values describing the bounds of each cell. Given n + bounds for each cell, the shape of the bounds array should be + points.shape + (n,). For example, a 1D coordinate with 100 points + and two bounds per cell would have a bounds array of shape + (100, 2) + Note if the data is a climatology, `climatological` + should be set. + * attributes + A dictionary containing other CF and user-defined attributes. + * coord_system + A :class:`~iris.coord_systems.CoordSystem` representing the + coordinate system of the coordinate, + e.g., a :class:`~iris.coord_systems.GeogCS` for a longitude coordinate. + * climatological (bool): + When True: the coordinate is a NetCDF climatological time axis. + When True: saving in NetCDF will give the coordinate variable a + 'climatology' attribute and will create a boundary variable called + '_climatology' in place of a standard bounds + attribute and bounds variable. + Will set to True when a climatological time axis is loaded + from NetCDF. + Always False if no bounds exist. 
+ + """ super().__init__(*args, **kwargs) # Logically, :class:`Coord` is an abstract class and all actual coords must diff --git a/lib/iris/cube.py b/lib/iris/cube.py index 3d0854355c0..7c7d6c58e9a 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -981,9 +981,7 @@ def convert_units(self, unit): celsius and subtract 273.15 from each value in :attr:`~iris.cube.Cube.data`. - .. warning:: - Calling this method will trigger any deferred loading, causing - the cube's data array to be loaded into memory. + This operation preserves lazy data. """ # If the cube has units convert the data. @@ -1400,10 +1398,12 @@ def cell_measure_dims(self, cell_measure): Returns a tuple of the data dimensions relevant to the given CellMeasure. - * cell_measure - The CellMeasure to look for. + * cell_measure (string or CellMeasure) + The (name of the) cell measure to look for. """ + cell_measure = self.cell_measure(cell_measure) + # Search for existing cell measure (object) on the cube, faster lookup # than equality - makes no functional difference. matches = [ @@ -1422,10 +1422,12 @@ def ancillary_variable_dims(self, ancillary_variable): Returns a tuple of the data dimensions relevant to the given AncillaryVariable. - * ancillary_variable - The AncillaryVariable to look for. + * ancillary_variable (string or AncillaryVariable) + The (name of the) AncillaryVariable to look for. """ + ancillary_variable = self.ancillary_variable(ancillary_variable) + # Search for existing ancillary variable (object) on the cube, faster # lookup than equality - makes no functional difference. matches = [ @@ -2182,23 +2184,20 @@ def _summary_coord_extra(self, coord, indent): extra = "" similar_coords = self.coords(coord.name()) if len(similar_coords) > 1: - # Find all the attribute keys - keys = set() - for similar_coord in similar_coords: - keys.update(similar_coord.attributes.keys()) - # Look for any attributes that vary + similar_coords.remove(coord) + # Look for any attributes that vary. 
vary = set() - attributes = {} - for key in keys: + for key, value in coord.attributes.items(): for similar_coord in similar_coords: if key not in similar_coord.attributes: vary.add(key) break - value = similar_coord.attributes[key] - if attributes.setdefault(key, value) != value: + if not np.array_equal( + similar_coord.attributes[key], value + ): vary.add(key) break - keys = sorted(vary & set(coord.attributes.keys())) + keys = sorted(vary) bits = [ "{}={!r}".format(key, coord.attributes[key]) for key in keys ] @@ -3919,10 +3918,15 @@ def collapsed(self, coords, aggregator, **kwargs): # on the cube lazy array. # NOTE: do not reform the data in this case, as 'lazy_aggregate' # accepts multiple axes (unlike 'aggregate'). - collapse_axis = list(dims_to_collapse) + collapse_axes = list(dims_to_collapse) + if len(collapse_axes) == 1: + # Replace a "list of 1 axes" with just a number : This single-axis form is *required* by functions + # like da.average (and np.average), if a 1d weights array is specified. + collapse_axes = collapse_axes[0] + try: data_result = aggregator.lazy_aggregate( - self.lazy_data(), axis=collapse_axis, **kwargs + self.lazy_data(), axis=collapse_axes, **kwargs ) except TypeError: # TypeError - when unexpected keywords passed through (such as @@ -3946,8 +3950,10 @@ def collapsed(self, coords, aggregator, **kwargs): unrolled_data = np.transpose(self.data, dims).reshape(new_shape) # Perform the same operation on the weights if applicable - if kwargs.get("weights") is not None: - weights = kwargs["weights"].view() + weights = kwargs.get("weights") + if weights is not None and weights.ndim > 1: + # Note: *don't* adjust 1d weights arrays, these have a special meaning for statistics functions. 
+ weights = weights.view() kwargs["weights"] = np.transpose(weights, dims).reshape( new_shape ) diff --git a/lib/iris/fileformats/cf.py b/lib/iris/fileformats/cf.py index 5c6e11f3acb..47ff6291b0a 100644 --- a/lib/iris/fileformats/cf.py +++ b/lib/iris/fileformats/cf.py @@ -9,7 +9,7 @@ References: -[CF] NetCDF Climate and Forecast (CF) Metadata conventions, Version 1.5, October, 2010. +[CF] NetCDF Climate and Forecast (CF) Metadata conventions. [NUG] NetCDF User's Guide, https://www.unidata.ucar.edu/software/netcdf/documentation/NUG/ """ diff --git a/lib/iris/fileformats/netcdf.py b/lib/iris/fileformats/netcdf.py index d0c3a3c5346..bb7a870d58c 100644 --- a/lib/iris/fileformats/netcdf.py +++ b/lib/iris/fileformats/netcdf.py @@ -8,8 +8,7 @@ See also: `netCDF4 python `_. -Also refer to document 'NetCDF Climate and Forecast (CF) Metadata Conventions', -Version 1.4, 27 February 2009. +Also refer to document 'NetCDF Climate and Forecast (CF) Metadata Conventions'. """ @@ -720,6 +719,9 @@ def coord_from_term(term): warnings.warn(msg) coord_a = coord_from_term("a") if coord_a is not None: + if coord_a.units.is_unknown(): + # Be graceful, and promote unknown to dimensionless units. + coord_a.units = "1" delta = coord_a * coord_p0.points[0] delta.units = coord_a.units * coord_p0.units delta.rename("vertical pressure") @@ -2490,7 +2492,7 @@ def save( """ Save cube(s) to a netCDF file, given the cube and the filename. - * Iris will write CF 1.5 compliant NetCDF files. + * Iris will write CF 1.7 compliant NetCDF files. * The attributes dictionaries on each cube in the saved cube list will be compared and common attributes saved as NetCDF global attributes where appropriate. 
diff --git a/lib/iris/plot.py b/lib/iris/plot.py index 349f1fea104..bda5274ccac 100644 --- a/lib/iris/plot.py +++ b/lib/iris/plot.py @@ -665,7 +665,20 @@ def _get_plot_objects(args): # single argument v_object = args[0] u_object = _u_object_from_v_object(v_object) + u, v = _uv_from_u_object_v_object(u_object, args[0]) + + # If a single cube argument, and the associated dimension coordinate + # is vertical-like, put the coordinate on the y axis, and the data o + # the x. + if ( + isinstance(v_object, iris.cube.Cube) + and isinstance(u_object, iris.coords.Coord) + and iris.util.guess_coord_axis(u_object) in ["Y", "Z"] + ): + u_object, v_object = v_object, u_object + u, v = v, u + args = args[1:] return u_object, v_object, u, v, args diff --git a/lib/iris/quickplot.py b/lib/iris/quickplot.py index 42c0dba46ab..2eec514e9c6 100644 --- a/lib/iris/quickplot.py +++ b/lib/iris/quickplot.py @@ -49,7 +49,7 @@ def _title(cube_or_coord, with_units): if _use_symbol(units): units = units.symbol - if units.is_time_reference(): + elif units.is_time_reference(): # iris.plot uses matplotlib.dates.date2num, which is fixed to the below unit. 
if version.parse(_mpl_version) >= version.parse("3.3"): days_since = "1970-01-01" @@ -138,12 +138,8 @@ def _get_titles(u_object, v_object): def _label_1d_plot(*args, **kwargs): - if len(args) > 1 and isinstance( - args[1], (iris.cube.Cube, iris.coords.Coord) - ): - xlabel, ylabel, title = _get_titles(*args[:2]) - else: - xlabel, ylabel, title = _get_titles(None, args[0]) + u_obj, v_obj, _, _, _ = iplt._get_plot_objects(args) + xlabel, ylabel, title = _get_titles(u_obj, v_obj) axes = kwargs.pop("axes", None) diff --git a/lib/iris/tests/idiff.py b/lib/iris/tests/idiff.py index e45d8a709ed..84a966624ff 100755 --- a/lib/iris/tests/idiff.py +++ b/lib/iris/tests/idiff.py @@ -220,7 +220,9 @@ def step_over_diffs(result_dir, action, display=True): count = len(results) for count_index, result_fname in enumerate(results): - key = os.path.splitext("-".join(result_fname.split("-")[1:]))[0] + key = os.path.splitext( + "-".join(result_fname.split("result-")[1:]) + )[0] try: # Calculate the test result perceptual image hash. 
phash = imagehash.phash( diff --git a/lib/iris/tests/results/analysis/sqrt.cml b/lib/iris/tests/results/analysis/sqrt.cml index 0dd0fe20b3a..c6b9b88e9a0 100644 --- a/lib/iris/tests/results/analysis/sqrt.cml +++ b/lib/iris/tests/results/analysis/sqrt.cml @@ -1,6 +1,6 @@ - + @@ -39,6 +39,6 @@ - + diff --git a/lib/iris/tests/results/imagerepo.json b/lib/iris/tests/results/imagerepo.json index f9430ae9f58..6c2bf66ba6a 100644 --- a/lib/iris/tests/results/imagerepo.json +++ b/lib/iris/tests/results/imagerepo.json @@ -288,9 +288,7 @@ "https://scitools.github.io/test-iris-imagehash/images/v4/8ff897066a01f0f2f818ee1eb007ca41853e3b81c57e36a991fe2ca9725e29ed.png" ], "iris.tests.test_plot.Test1dPlotMultiArgs.test_cube.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/8ffac1547a0792546c179db7f1254f6d945b7392841678e895017e3e91c17a0f.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/8ff8c1fa7a05b4ea6c059d2ff1494e4b90f26304846d78d1872a6cfc938b2e3e.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/8ff8c1fa7a05b4fa6c059d2ef1494e4b90f26304847d78c1872a6cfc938b2e3e.png" + "https://scitools.github.io/test-iris-imagehash/images/v4/8fffc1dc7e019c70f001b70ee4386de1814e7938837b6a7f84d07c9f15b02f21.png" ], "iris.tests.test_plot.Test1dPlotMultiArgs.test_cube_coord.0": [ "https://scitools.github.io/test-iris-imagehash/images/v4/83fec1ff7e0098757103a71ce4506dc3d11e7b20d2477ec094857db895217f6a.png", @@ -323,8 +321,7 @@ "https://scitools.github.io/test-iris-imagehash/images/v4/87ffb79e7f0060d8303fcd1eb007d801c52699e18d769e2199e60ce1da5629ed.png" ], "iris.tests.test_plot.Test1dQuickplotPlotMultiArgs.test_cube.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/83ffc1dc7e00b0dc66179d95f127cfc9d44959ba846658e891075a3e99415a2f.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a3ffc1d87e00b49964179d28f16bce4b98724b268c6d58e1972e4874998b2e7e.png" + 
"https://scitools.github.io/test-iris-imagehash/images/v4/a3ffc1de7e009c7030019786f438cde3810fd93c9b734a778ce47c9799b02731.png" ], "iris.tests.test_plot.Test1dQuickplotPlotMultiArgs.test_cube_coord.0": [ "https://scitools.github.io/test-iris-imagehash/images/v4/83fec1ff7f90987720029f1ef458cd43811cdb60d647de609485ddb899215f62.png", @@ -649,15 +646,10 @@ "https://scitools.github.io/test-iris-imagehash/images/v4/8bfe956b7c01c2f26300929dfc1e3c6690736f91817e3b0c84be6be5d1603ed1.png" ], "iris.tests.test_plot.TestPlot.test_y.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/8fe896266f068d873b83cb71e435725cd07c607ad07e70fcd0007a7881fe7ab8.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/8fe896066f068d873b83cb71e435725cd07c607ad07c70fcd0007af881fe7bb8.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/8fe896366f0f8d93398bcb71e435f24ed074646ed07670acf010726d81f2798c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/aff8946c7a14c99fb193d263e42432d8d00c2d27944a3f8dc5223ef703ff6b90.png" + "https://scitools.github.io/test-iris-imagehash/images/v4/8ff99c067e01e7166101c9c6b04396b5cd4e2f0993163de9c4fe7b79207e36a1.png" ], "iris.tests.test_plot.TestPlot.test_z.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/8ffac1547a0792546c179db7f1254f6d945b7392841678e895017e3e91c17a0f.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/8ff8c1fa7a05b4ea6c059d2ff1494e4b90f26304846d78d1872a6cfc938b2e3e.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/8ff8c1fa7a05b4fa6c059d2ef1494e4b90f26304847d78c1872a6cfc938b2e3e.png" + "https://scitools.github.io/test-iris-imagehash/images/v4/8fffc1dc7e019c70f001b70ee4386de1814e7938837b6a7f84d07c9f15b02f21.png" ], "iris.tests.test_plot.TestPlotCitation.test.0": [ "https://scitools.github.io/test-iris-imagehash/images/v4/abf895067a1d9506f811783585437abd85426ab995067af9f00687f96afe87c8.png", @@ -836,14 +828,10 @@ 
"https://scitools.github.io/test-iris-imagehash/images/v4/82ff950b7f81c0d6620199bcfc5e986695734da1816e1b2c85be2b65d96276d1.png" ], "iris.tests.test_plot.TestQuickplotPlot.test_y.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/a7ffb6067f008d87339bc973e435d86ef034c87ad07c586cd001da69897e5838.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a7ffb6067f008d87339bc973e435d86ef034c87ad07cd86cd001da68897e58a8.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a7efb6367f008d97338fc973e435d86ef030c86ed070d86cd030d86d89f0d82c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a2fbb46e7f10c99f2013d863e46498dcd06c0d2798421fa5dd221e7789ff6f10.png" + "https://scitools.github.io/test-iris-imagehash/images/v4/a3f9bc067e01c6166009c9c6b5439ee5cd4e0d2993361de9ccf65b79887636a9.png" ], "iris.tests.test_plot.TestQuickplotPlot.test_z.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/83ffc1dc7e00b0dc66179d95f127cfc9d44959ba846658e891075a3e99415a2f.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a3ffc1d87e00b49964179d28f16bce4b98724b268c6d58e1972e4874998b2e7e.png" + "https://scitools.github.io/test-iris-imagehash/images/v4/a3ffc1de7e009c7030019786f438cde3810fd93c9b734a778ce47c9799b02731.png" ], "iris.tests.test_plot.TestSimple.test_bounds.0": [ "https://scitools.github.io/test-iris-imagehash/images/v4/ea856a85954a957ac17e954ac17a9c3e956ac07e3e80c07f3e857aa5c27d3f80.png" @@ -908,6 +896,9 @@ "https://scitools.github.io/test-iris-imagehash/images/v4/bb433d4e94a4c6b9c15adaadc1fb6a469c8de43a3e07904e5f016b57984e1ea1.png", "https://scitools.github.io/test-iris-imagehash/images/v4/eea16affc05ab500956e974ac53f3d80925ac03f3f81c07e3fa12da1c27e3f80.png" ], + "iris.tests.test_quickplot.TestLabels.test_pcolormesh_str_symbol.0": [ + "https://scitools.github.io/test-iris-imagehash/images/v4/eea16affc05ab500956e974ac53f3d80925ac03f3f80c07e3fa12da1c27f3f80.png" + ], 
"iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_non_cube_coordinate.0": [ "https://scitools.github.io/test-iris-imagehash/images/v4/fa816a85857a955ae17e957ec57e7a81855fc17e3a81c57e1a813a85c57a1a05.png", "https://scitools.github.io/test-iris-imagehash/images/v4/fe816a85857a957ac07f957ac07f3e80956ac07f3e80c07f3e813e85c07e3f80.png" diff --git a/lib/iris/tests/stock_mdi_arrays.npz b/lib/iris/tests/stock_mdi_arrays.npz deleted file mode 100644 index 668c6d8473b..00000000000 Binary files a/lib/iris/tests/stock_mdi_arrays.npz and /dev/null differ diff --git a/lib/iris/tests/test_basic_maths.py b/lib/iris/tests/test_basic_maths.py index a559ee0e8a3..c4d7b51a065 100644 --- a/lib/iris/tests/test_basic_maths.py +++ b/lib/iris/tests/test_basic_maths.py @@ -537,11 +537,12 @@ def test_multiplication_not_in_place(self): class TestExponentiate(tests.IrisTest): def setUp(self): self.cube = iris.tests.stock.global_pp() - self.cube.data = self.cube.data - 260 + # Increase dtype from float32 to float64 in order + # to avoid dtype quantization errors during maths. + self.cube.data = self.cube.data.astype(np.float64) - 260.0 def test_exponentiate(self): a = self.cube - a.data = a.data.astype(np.float64) e = pow(a, 4) self.assertCMLApproxData(e, ("analysis", "exponentiate.cml")) @@ -553,8 +554,8 @@ def test_square_root(self): e = a ** 0.5 - self.assertCML(e, ("analysis", "sqrt.cml")) self.assertArrayEqual(e.data, a.data ** 0.5) + self.assertCML(e, ("analysis", "sqrt.cml")) self.assertRaises(ValueError, iris.analysis.maths.exponentiate, a, 0.3) def test_type_error(self): @@ -852,7 +853,7 @@ def setUp(self): def test_incompatible_dimensions(self): data3 = ma.MaskedArray( - [[3, 3, 3, 4], [2, 2, 2]], mask=[[0, 1, 0, 0], [0, 1, 1]] + [[3, 3, 3, 4], [2, 2, 2, 2]], mask=[[0, 1, 0, 0], [0, 1, 1, 1]] ) with self.assertRaises(ValueError): # Incompatible dimensions. 
diff --git a/lib/iris/tests/test_coding_standards.py b/lib/iris/tests/test_coding_standards.py index 00ce7b7d44a..1ab39330ef7 100644 --- a/lib/iris/tests/test_coding_standards.py +++ b/lib/iris/tests/test_coding_standards.py @@ -102,14 +102,14 @@ def last_change_by_fname(): def test_license_headers(self): exclude_patterns = ( "setup.py", + "noxfile.py", "build/*", "dist/*", - "docs/iris/gallery_code/*/*.py", - "docs/iris/src/developers_guide/documenting/*.py", - "docs/iris/src/userguide/plotting_examples/*.py", - "docs/iris/src/userguide/regridding_plots/*.py", - "docs/iris/src/developers_guide/gitwash_dumper.py", - "docs/iris/src/_build/*", + "docs/gallery_code/*/*.py", + "docs/src/developers_guide/documenting/*.py", + "docs/src/userguide/plotting_examples/*.py", + "docs/src/userguide/regridding_plots/*.py", + "docs/src/_build/*", "lib/iris/analysis/_scipy_interpolate.py", "lib/iris/fileformats/_pyke_rules/*", ) diff --git a/lib/iris/tests/test_netcdf.py b/lib/iris/tests/test_netcdf.py index 75266ff3fe6..2d1b4a53d58 100644 --- a/lib/iris/tests/test_netcdf.py +++ b/lib/iris/tests/test_netcdf.py @@ -543,17 +543,20 @@ def test_noexist_directory(self): pass def test_bad_permissions(self): - # Non-exhaustive check that wrong permissions results in a suitable - # exception being raised. - dir_name = tempfile.mkdtemp() - fnme = os.path.join(dir_name, "tmp.nc") - try: - os.chmod(dir_name, stat.S_IREAD) - with self.assertRaises(IOError): - iris.fileformats.netcdf.Saver(fnme, "NETCDF4") - self.assertFalse(os.path.exists(fnme)) - finally: - os.rmdir(dir_name) + # Skip this test for the root user. This is applicable to + # running within a Docker container and/or CIaaS hosted testing. + if os.getuid(): + # Non-exhaustive check that wrong permissions results in a suitable + # exception being raised. 
+ dir_name = tempfile.mkdtemp() + fname = os.path.join(dir_name, "tmp.nc") + try: + os.chmod(dir_name, stat.S_IREAD) + with self.assertRaises(PermissionError): + iris.fileformats.netcdf.Saver(fname, "NETCDF4") + self.assertFalse(os.path.exists(fname)) + finally: + shutil.rmtree(dir_name) @tests.skip_data diff --git a/lib/iris/tests/test_quickplot.py b/lib/iris/tests/test_quickplot.py index cf25324ea77..8abbf48a941 100644 --- a/lib/iris/tests/test_quickplot.py +++ b/lib/iris/tests/test_quickplot.py @@ -201,6 +201,13 @@ def test_pcolormesh(self): self.check_graphic() + def test_pcolormesh_str_symbol(self): + pcube = self._small().copy() + pcube.coords("level_height")[0].units = "centimeters" + qplt.pcolormesh(pcube) + + self.check_graphic() + def test_map(self): cube = self._slice(["grid_latitude", "grid_longitude"]) qplt.contour(cube) diff --git a/lib/iris/tests/unit/aux_factory/test_HybridPressureFactory.py b/lib/iris/tests/unit/aux_factory/test_HybridPressureFactory.py index 14944891f29..32091c7d639 100644 --- a/lib/iris/tests/unit/aux_factory/test_HybridPressureFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_HybridPressureFactory.py @@ -113,6 +113,15 @@ def test_factory_metadata(self): self.assertIsNone(factory.coord_system) self.assertEqual(factory.attributes, {}) + def test_promote_sigma_units_unknown_to_dimensionless(self): + sigma = mock.Mock(units=cf_units.Unit("unknown"), nbounds=0) + factory = HybridPressureFactory( + delta=self.delta, + sigma=sigma, + surface_air_pressure=self.surface_air_pressure, + ) + self.assertEqual("1", factory.dependencies["sigma"].units) + class Test_dependencies(tests.IrisTest): def setUp(self): diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSFactory.py b/lib/iris/tests/unit/aux_factory/test_OceanSFactory.py index caf9d303c6c..6e8e40cd1bc 100644 --- a/lib/iris/tests/unit/aux_factory/test_OceanSFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_OceanSFactory.py @@ -137,6 +137,12 @@ def 
test_depth_incompatible_units(self): with self.assertRaises(ValueError): OceanSFactory(**self.kwargs) + def test_promote_s_units_unknown_to_dimensionless(self): + s = mock.Mock(units=Unit("unknown"), nbounds=0) + self.kwargs["s"] = s + factory = OceanSFactory(**self.kwargs) + self.assertEqual("1", factory.dependencies["s"].units) + class Test_dependencies(tests.IrisTest): def setUp(self): diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSg1Factory.py b/lib/iris/tests/unit/aux_factory/test_OceanSg1Factory.py index 99a4fe17327..238df2f0737 100644 --- a/lib/iris/tests/unit/aux_factory/test_OceanSg1Factory.py +++ b/lib/iris/tests/unit/aux_factory/test_OceanSg1Factory.py @@ -121,6 +121,15 @@ def test_depth_incompatible_units(self): with self.assertRaises(ValueError): OceanSg1Factory(**self.kwargs) + def test_promote_c_and_s_units_unknown_to_dimensionless(self): + c = mock.Mock(units=Unit("unknown"), nbounds=0) + s = mock.Mock(units=Unit("unknown"), nbounds=0) + self.kwargs["c"] = c + self.kwargs["s"] = s + factory = OceanSg1Factory(**self.kwargs) + self.assertEqual("1", factory.dependencies["c"].units) + self.assertEqual("1", factory.dependencies["s"].units) + class Test_dependencies(tests.IrisTest): def setUp(self): diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSg2Factory.py b/lib/iris/tests/unit/aux_factory/test_OceanSg2Factory.py index 387f0e48d13..fb3ada382e7 100644 --- a/lib/iris/tests/unit/aux_factory/test_OceanSg2Factory.py +++ b/lib/iris/tests/unit/aux_factory/test_OceanSg2Factory.py @@ -121,6 +121,15 @@ def test_depth_incompatible_units(self): with self.assertRaises(ValueError): OceanSg2Factory(**self.kwargs) + def test_promote_c_and_s_units_unknown_to_dimensionless(self): + c = mock.Mock(units=Unit("unknown"), nbounds=0) + s = mock.Mock(units=Unit("unknown"), nbounds=0) + self.kwargs["c"] = c + self.kwargs["s"] = s + factory = OceanSg2Factory(**self.kwargs) + self.assertEqual("1", factory.dependencies["c"].units) + self.assertEqual("1", 
factory.dependencies["s"].units) + class Test_dependencies(tests.IrisTest): def setUp(self): diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSigmaFactory.py b/lib/iris/tests/unit/aux_factory/test_OceanSigmaFactory.py index 07c970ad7ed..69a8a32c6e3 100644 --- a/lib/iris/tests/unit/aux_factory/test_OceanSigmaFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_OceanSigmaFactory.py @@ -59,6 +59,12 @@ def test_depth_incompatible_units(self): with self.assertRaises(ValueError): OceanSigmaFactory(**self.kwargs) + def test_promote_sigma_units_unknown_to_dimensionless(self): + sigma = mock.Mock(units=Unit("unknown"), nbounds=0) + self.kwargs["sigma"] = sigma + factory = OceanSigmaFactory(**self.kwargs) + self.assertEqual("1", factory.dependencies["sigma"].units) + class Test_dependencies(tests.IrisTest): def setUp(self): diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSigmaZFactory.py b/lib/iris/tests/unit/aux_factory/test_OceanSigmaZFactory.py index 6f1e8cd57a1..4a4e30b9ca8 100644 --- a/lib/iris/tests/unit/aux_factory/test_OceanSigmaZFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_OceanSigmaZFactory.py @@ -138,6 +138,12 @@ def test_depth_incompatible_units(self): with self.assertRaises(ValueError): OceanSigmaZFactory(**self.kwargs) + def test_promote_sigma_units_unknown_to_dimensionless(self): + sigma = mock.Mock(units=Unit("unknown"), nbounds=0) + self.kwargs["sigma"] = sigma + factory = OceanSigmaZFactory(**self.kwargs) + self.assertEqual("1", factory.dependencies["sigma"].units) + class Test_dependencies(tests.IrisTest): def setUp(self): diff --git a/lib/iris/tests/unit/common/resolve/__init__.py b/lib/iris/tests/unit/common/resolve/__init__.py new file mode 100644 index 00000000000..d0b189e59d3 --- /dev/null +++ b/lib/iris/tests/unit/common/resolve/__init__.py @@ -0,0 +1,6 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. 
+# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +"""Unit tests for the :mod:`iris.common.resolve` package.""" diff --git a/lib/iris/tests/unit/common/resolve/test_Resolve.py b/lib/iris/tests/unit/common/resolve/test_Resolve.py new file mode 100644 index 00000000000..94ec48de884 --- /dev/null +++ b/lib/iris/tests/unit/common/resolve/test_Resolve.py @@ -0,0 +1,4795 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the :class:`iris.common.resolve.Resolve`. + +""" + +# Import iris.tests first so that some things can be initialised before +# importing anything else. +import iris.tests as tests + +from collections import namedtuple +from copy import deepcopy + +from cf_units import Unit +import numpy as np +import unittest.mock as mock +from unittest.mock import sentinel + +from iris.common.lenient import LENIENT +from iris.common.metadata import CubeMetadata +from iris.common.resolve import ( + Resolve, + _AuxCoverage, + _CategoryItems, + _DimCoverage, + _Item, + _PreparedItem, + _PreparedFactory, + _PreparedMetadata, +) +from iris.coords import DimCoord +from iris.cube import Cube + + +class Test___init__(tests.IrisTest): + def setUp(self): + target = "iris.common.resolve.Resolve.__call__" + self.m_call = mock.MagicMock(return_value=sentinel.return_value) + _ = self.patch(target, new=self.m_call) + + def _assert_members_none(self, resolve): + self.assertIsNone(resolve.lhs_cube_resolved) + self.assertIsNone(resolve.rhs_cube_resolved) + self.assertIsNone(resolve.lhs_cube_category) + self.assertIsNone(resolve.rhs_cube_category) + self.assertIsNone(resolve.lhs_cube_category_local) + self.assertIsNone(resolve.rhs_cube_category_local) + self.assertIsNone(resolve.category_common) + self.assertIsNone(resolve.lhs_cube_dim_coverage) + 
self.assertIsNone(resolve.lhs_cube_aux_coverage) + self.assertIsNone(resolve.rhs_cube_dim_coverage) + self.assertIsNone(resolve.rhs_cube_aux_coverage) + self.assertIsNone(resolve.map_rhs_to_lhs) + self.assertIsNone(resolve.mapping) + self.assertIsNone(resolve.prepared_category) + self.assertIsNone(resolve.prepared_factories) + self.assertIsNone(resolve._broadcast_shape) + + def test_lhs_rhs_default(self): + resolve = Resolve() + self.assertIsNone(resolve.lhs_cube) + self.assertIsNone(resolve.rhs_cube) + self._assert_members_none(resolve) + self.assertEqual(0, self.m_call.call_count) + + def test_lhs_rhs_provided(self): + m_lhs = sentinel.lhs + m_rhs = sentinel.rhs + resolve = Resolve(lhs=m_lhs, rhs=m_rhs) + # The lhs_cube and rhs_cube are only None due + # to __call__ being mocked. See Test___call__ + # for appropriate test coverage. + self.assertIsNone(resolve.lhs_cube) + self.assertIsNone(resolve.rhs_cube) + self._assert_members_none(resolve) + self.assertEqual(1, self.m_call.call_count) + call_args = mock.call(m_lhs, m_rhs) + self.assertEqual(call_args, self.m_call.call_args) + + +class Test___call__(tests.IrisTest): + def setUp(self): + self.m_lhs = mock.MagicMock(spec=Cube) + self.m_rhs = mock.MagicMock(spec=Cube) + target = "iris.common.resolve.Resolve.{method}" + method = target.format(method="_metadata_resolve") + self.m_metadata_resolve = self.patch(method) + method = target.format(method="_metadata_coverage") + self.m_metadata_coverage = self.patch(method) + method = target.format(method="_metadata_mapping") + self.m_metadata_mapping = self.patch(method) + method = target.format(method="_metadata_prepare") + self.m_metadata_prepare = self.patch(method) + + def test_lhs_not_cube(self): + emsg = "'LHS' argument to be a 'Cube'" + with self.assertRaisesRegex(TypeError, emsg): + _ = Resolve(rhs=self.m_rhs) + + def test_rhs_not_cube(self): + emsg = "'RHS' argument to be a 'Cube'" + with self.assertRaisesRegex(TypeError, emsg): + _ = Resolve(lhs=self.m_lhs) + + 
def _assert_called_metadata_methods(self): + call_args = mock.call() + self.assertEqual(1, self.m_metadata_resolve.call_count) + self.assertEqual(call_args, self.m_metadata_resolve.call_args) + self.assertEqual(1, self.m_metadata_coverage.call_count) + self.assertEqual(call_args, self.m_metadata_coverage.call_args) + self.assertEqual(1, self.m_metadata_mapping.call_count) + self.assertEqual(call_args, self.m_metadata_mapping.call_args) + self.assertEqual(1, self.m_metadata_prepare.call_count) + self.assertEqual(call_args, self.m_metadata_prepare.call_args) + + def test_map_rhs_to_lhs__less_than(self): + self.m_lhs.ndim = 2 + self.m_rhs.ndim = 1 + resolve = Resolve(lhs=self.m_lhs, rhs=self.m_rhs) + self.assertEqual(self.m_lhs, resolve.lhs_cube) + self.assertEqual(self.m_rhs, resolve.rhs_cube) + self.assertTrue(resolve.map_rhs_to_lhs) + self._assert_called_metadata_methods() + + def test_map_rhs_to_lhs__equal(self): + self.m_lhs.ndim = 2 + self.m_rhs.ndim = 2 + resolve = Resolve(lhs=self.m_lhs, rhs=self.m_rhs) + self.assertEqual(self.m_lhs, resolve.lhs_cube) + self.assertEqual(self.m_rhs, resolve.rhs_cube) + self.assertTrue(resolve.map_rhs_to_lhs) + self._assert_called_metadata_methods() + + def test_map_lhs_to_rhs(self): + self.m_lhs.ndim = 2 + self.m_rhs.ndim = 3 + resolve = Resolve(lhs=self.m_lhs, rhs=self.m_rhs) + self.assertEqual(self.m_lhs, resolve.lhs_cube) + self.assertEqual(self.m_rhs, resolve.rhs_cube) + self.assertFalse(resolve.map_rhs_to_lhs) + self._assert_called_metadata_methods() + + +class Test__categorise_items(tests.IrisTest): + def setUp(self): + self.coord_dims = {} + # configure dim coords + coord = mock.Mock(metadata=sentinel.dim_metadata1) + self.dim_coords = [coord] + self.coord_dims[coord] = sentinel.dims1 + # configure aux and scalar coords + self.aux_coords = [] + pairs = [ + (sentinel.aux_metadata2, sentinel.dims2), + (sentinel.aux_metadata3, sentinel.dims3), + (sentinel.scalar_metadata4, None), + (sentinel.scalar_metadata5, None), + 
(sentinel.scalar_metadata6, None), + ] + for metadata, dims in pairs: + coord = mock.Mock(metadata=metadata) + self.aux_coords.append(coord) + self.coord_dims[coord] = dims + func = lambda coord: self.coord_dims[coord] + self.cube = mock.Mock( + aux_coords=self.aux_coords, + dim_coords=self.dim_coords, + coord_dims=func, + ) + + def test(self): + result = Resolve._categorise_items(self.cube) + self.assertIsInstance(result, _CategoryItems) + self.assertEqual(1, len(result.items_dim)) + # check dim coords + for item in result.items_dim: + self.assertIsInstance(item, _Item) + (coord,) = self.dim_coords + dims = self.coord_dims[coord] + expected = [_Item(metadata=coord.metadata, coord=coord, dims=dims)] + self.assertEqual(expected, result.items_dim) + # check aux coords + self.assertEqual(2, len(result.items_aux)) + for item in result.items_aux: + self.assertIsInstance(item, _Item) + expected_aux, expected_scalar = [], [] + for coord in self.aux_coords: + dims = self.coord_dims[coord] + item = _Item(metadata=coord.metadata, coord=coord, dims=dims) + if dims: + expected_aux.append(item) + else: + expected_scalar.append(item) + self.assertEqual(expected_aux, result.items_aux) + # check scalar coords + self.assertEqual(3, len(result.items_scalar)) + for item in result.items_scalar: + self.assertIsInstance(item, _Item) + self.assertEqual(expected_scalar, result.items_scalar) + + +class Test__metadata_resolve(tests.IrisTest): + def setUp(self): + self.target = "iris.common.resolve.Resolve._categorise_items" + self.m_lhs_cube = sentinel.lhs_cube + self.m_rhs_cube = sentinel.rhs_cube + + @staticmethod + def _create_items(pairs): + # this wrapper (hack) is necessary in order to support mocking + # the "name" method (callable) of the metadata, as "name" is already + # part of the mock API - this is always troublesome in mock-world. 
+ Wrapper = namedtuple("Wrapper", ("name", "value")) + result = [] + for name, dims in pairs: + metadata = Wrapper(name=lambda: str(name), value=name) + coord = mock.Mock(metadata=metadata) + item = _Item(metadata=metadata, coord=coord, dims=dims) + result.append(item) + return result + + def test_metadata_same(self): + category = _CategoryItems(items_dim=[], items_aux=[], items_scalar=[]) + # configure dim coords + pairs = [(sentinel.dim_metadata1, sentinel.dims1)] + category.items_dim.extend(self._create_items(pairs)) + # configure aux coords + pairs = [ + (sentinel.aux_metadata1, sentinel.dims2), + (sentinel.aux_metadata2, sentinel.dims3), + ] + category.items_aux.extend(self._create_items(pairs)) + # configure scalar coords + pairs = [ + (sentinel.scalar_metadata1, None), + (sentinel.scalar_metadata2, None), + (sentinel.scalar_metadata3, None), + ] + category.items_scalar.extend(self._create_items(pairs)) + + side_effect = (category, category) + mocker = self.patch(self.target, side_effect=side_effect) + + resolve = Resolve() + self.assertIsNone(resolve.lhs_cube) + self.assertIsNone(resolve.rhs_cube) + self.assertIsNone(resolve.lhs_cube_category) + self.assertIsNone(resolve.rhs_cube_category) + self.assertIsNone(resolve.lhs_cube_category_local) + self.assertIsNone(resolve.rhs_cube_category_local) + self.assertIsNone(resolve.category_common) + + # require to explicitly configure cubes + resolve.lhs_cube = self.m_lhs_cube + resolve.rhs_cube = self.m_rhs_cube + resolve._metadata_resolve() + + self.assertEqual(mocker.call_count, 2) + calls = [mock.call(self.m_lhs_cube), mock.call(self.m_rhs_cube)] + self.assertEqual(calls, mocker.call_args_list) + + self.assertEqual(category, resolve.lhs_cube_category) + self.assertEqual(category, resolve.rhs_cube_category) + expected = _CategoryItems(items_dim=[], items_aux=[], items_scalar=[]) + self.assertEqual(expected, resolve.lhs_cube_category_local) + self.assertEqual(expected, resolve.rhs_cube_category_local) + 
self.assertEqual(category, resolve.category_common) + + def test_metadata_overlap(self): + # configure the lhs cube category + category_lhs = _CategoryItems( + items_dim=[], items_aux=[], items_scalar=[] + ) + # configure dim coords + pairs = [ + (sentinel.dim_metadata1, sentinel.dims1), + (sentinel.dim_metadata2, sentinel.dims2), + ] + category_lhs.items_dim.extend(self._create_items(pairs)) + # configure aux coords + pairs = [ + (sentinel.aux_metadata1, sentinel.dims3), + (sentinel.aux_metadata2, sentinel.dims4), + ] + category_lhs.items_aux.extend(self._create_items(pairs)) + # configure scalar coords + pairs = [ + (sentinel.scalar_metadata1, None), + (sentinel.scalar_metadata2, None), + ] + category_lhs.items_scalar.extend(self._create_items(pairs)) + + # configure the rhs cube category + category_rhs = _CategoryItems( + items_dim=[], items_aux=[], items_scalar=[] + ) + # configure dim coords + category_rhs.items_dim.append(category_lhs.items_dim[0]) + pairs = [(sentinel.dim_metadata200, sentinel.dims2)] + category_rhs.items_dim.extend(self._create_items(pairs)) + # configure aux coords + category_rhs.items_aux.append(category_lhs.items_aux[0]) + pairs = [(sentinel.aux_metadata200, sentinel.dims4)] + category_rhs.items_aux.extend(self._create_items(pairs)) + # configure scalar coords + category_rhs.items_scalar.append(category_lhs.items_scalar[0]) + pairs = [(sentinel.scalar_metadata200, None)] + category_rhs.items_scalar.extend(self._create_items(pairs)) + + side_effect = (category_lhs, category_rhs) + mocker = self.patch(self.target, side_effect=side_effect) + + resolve = Resolve() + self.assertIsNone(resolve.lhs_cube) + self.assertIsNone(resolve.rhs_cube) + self.assertIsNone(resolve.lhs_cube_category) + self.assertIsNone(resolve.rhs_cube_category) + self.assertIsNone(resolve.lhs_cube_category_local) + self.assertIsNone(resolve.rhs_cube_category_local) + self.assertIsNone(resolve.category_common) + + # require to explicitly configure cubes + resolve.lhs_cube 
= self.m_lhs_cube + resolve.rhs_cube = self.m_rhs_cube + resolve._metadata_resolve() + + self.assertEqual(2, mocker.call_count) + calls = [mock.call(self.m_lhs_cube), mock.call(self.m_rhs_cube)] + self.assertEqual(calls, mocker.call_args_list) + + self.assertEqual(category_lhs, resolve.lhs_cube_category) + self.assertEqual(category_rhs, resolve.rhs_cube_category) + + items_dim = [category_lhs.items_dim[1]] + items_aux = [category_lhs.items_aux[1]] + items_scalar = [category_lhs.items_scalar[1]] + expected = _CategoryItems( + items_dim=items_dim, items_aux=items_aux, items_scalar=items_scalar + ) + self.assertEqual(expected, resolve.lhs_cube_category_local) + + items_dim = [category_rhs.items_dim[1]] + items_aux = [category_rhs.items_aux[1]] + items_scalar = [category_rhs.items_scalar[1]] + expected = _CategoryItems( + items_dim=items_dim, items_aux=items_aux, items_scalar=items_scalar + ) + self.assertEqual(expected, resolve.rhs_cube_category_local) + + items_dim = [category_lhs.items_dim[0]] + items_aux = [category_lhs.items_aux[0]] + items_scalar = [category_lhs.items_scalar[0]] + expected = _CategoryItems( + items_dim=items_dim, items_aux=items_aux, items_scalar=items_scalar + ) + self.assertEqual(expected, resolve.category_common) + + def test_metadata_different(self): + # configure the lhs cube category + category_lhs = _CategoryItems( + items_dim=[], items_aux=[], items_scalar=[] + ) + # configure dim coords + pairs = [ + (sentinel.dim_metadata1, sentinel.dims1), + (sentinel.dim_metadata2, sentinel.dims2), + ] + category_lhs.items_dim.extend(self._create_items(pairs)) + # configure aux coords + pairs = [ + (sentinel.aux_metadata1, sentinel.dims3), + (sentinel.aux_metadata2, sentinel.dims4), + ] + category_lhs.items_aux.extend(self._create_items(pairs)) + # configure scalar coords + pairs = [ + (sentinel.scalar_metadata1, None), + (sentinel.scalar_metadata2, None), + ] + category_lhs.items_scalar.extend(self._create_items(pairs)) + + # configure the rhs cube 
category + category_rhs = _CategoryItems( + items_dim=[], items_aux=[], items_scalar=[] + ) + # configure dim coords + pairs = [ + (sentinel.dim_metadata100, sentinel.dims1), + (sentinel.dim_metadata200, sentinel.dims2), + ] + category_rhs.items_dim.extend(self._create_items(pairs)) + # configure aux coords + pairs = [ + (sentinel.aux_metadata100, sentinel.dims3), + (sentinel.aux_metadata200, sentinel.dims4), + ] + category_rhs.items_aux.extend(self._create_items(pairs)) + # configure scalar coords + pairs = [ + (sentinel.scalar_metadata100, None), + (sentinel.scalar_metadata200, None), + ] + category_rhs.items_scalar.extend(self._create_items(pairs)) + + side_effect = (category_lhs, category_rhs) + mocker = self.patch(self.target, side_effect=side_effect) + + resolve = Resolve() + self.assertIsNone(resolve.lhs_cube) + self.assertIsNone(resolve.rhs_cube) + self.assertIsNone(resolve.lhs_cube_category) + self.assertIsNone(resolve.rhs_cube_category) + self.assertIsNone(resolve.lhs_cube_category_local) + self.assertIsNone(resolve.rhs_cube_category_local) + self.assertIsNone(resolve.category_common) + + # first require to explicitly lhs/rhs configure cubes + resolve.lhs_cube = self.m_lhs_cube + resolve.rhs_cube = self.m_rhs_cube + resolve._metadata_resolve() + + self.assertEqual(2, mocker.call_count) + calls = [mock.call(self.m_lhs_cube), mock.call(self.m_rhs_cube)] + self.assertEqual(calls, mocker.call_args_list) + + self.assertEqual(category_lhs, resolve.lhs_cube_category) + self.assertEqual(category_rhs, resolve.rhs_cube_category) + self.assertEqual(category_lhs, resolve.lhs_cube_category_local) + self.assertEqual(category_rhs, resolve.rhs_cube_category_local) + expected = _CategoryItems(items_dim=[], items_aux=[], items_scalar=[]) + self.assertEqual(expected, resolve.category_common) + + +class Test__dim_coverage(tests.IrisTest): + def setUp(self): + self.ndim = 4 + self.cube = mock.Mock(ndim=self.ndim) + self.items = [] + parts = [ + (sentinel.metadata0, 
sentinel.coord0, (0,)), + (sentinel.metadata1, sentinel.coord1, (1,)), + (sentinel.metadata2, sentinel.coord2, (2,)), + (sentinel.metadata3, sentinel.coord3, (3,)), + ] + column_parts = [x for x in zip(*parts)] + self.metadata, self.coords, self.dims = [list(x) for x in column_parts] + self.dims = [dim for dim, in self.dims] + for metadata, coord, dims in parts: + item = _Item(metadata=metadata, coord=coord, dims=dims) + self.items.append(item) + + def test_coverage_no_local_no_common_all_free(self): + items = [] + common = [] + result = Resolve._dim_coverage(self.cube, items, common) + self.assertIsInstance(result, _DimCoverage) + self.assertEqual(self.cube, result.cube) + expected = [None] * self.ndim + self.assertEqual(expected, result.metadata) + self.assertEqual(expected, result.coords) + self.assertEqual([], result.dims_common) + self.assertEqual([], result.dims_local) + expected = list(range(self.ndim)) + self.assertEqual(expected, result.dims_free) + + def test_coverage_all_local_no_common_no_free(self): + common = [] + result = Resolve._dim_coverage(self.cube, self.items, common) + self.assertIsInstance(result, _DimCoverage) + self.assertEqual(self.cube, result.cube) + self.assertEqual(self.metadata, result.metadata) + self.assertEqual(self.coords, result.coords) + self.assertEqual([], result.dims_common) + self.assertEqual(self.dims, result.dims_local) + self.assertEqual([], result.dims_free) + + def test_coverage_no_local_all_common_no_free(self): + result = Resolve._dim_coverage(self.cube, self.items, self.metadata) + self.assertIsInstance(result, _DimCoverage) + self.assertEqual(self.cube, result.cube) + self.assertEqual(self.metadata, result.metadata) + self.assertEqual(self.coords, result.coords) + self.assertEqual(self.dims, result.dims_common) + self.assertEqual([], result.dims_local) + self.assertEqual([], result.dims_free) + + def test_coverage_mixed(self): + common = [self.items[1].metadata, self.items[2].metadata] + self.items.pop(0) + 
self.items.pop(-1) + metadata, coord, dims = sentinel.metadata100, sentinel.coord100, (0,) + self.items.append(_Item(metadata=metadata, coord=coord, dims=dims)) + result = Resolve._dim_coverage(self.cube, self.items, common) + self.assertIsInstance(result, _DimCoverage) + self.assertEqual(self.cube, result.cube) + expected = [ + metadata, + self.items[0].metadata, + self.items[1].metadata, + None, + ] + self.assertEqual(expected, result.metadata) + expected = [coord, self.items[0].coord, self.items[1].coord, None] + self.assertEqual(expected, result.coords) + self.assertEqual([1, 2], result.dims_common) + self.assertEqual([0], result.dims_local) + self.assertEqual([3], result.dims_free) + + +class Test__aux_coverage(tests.IrisTest): + def setUp(self): + self.ndim = 4 + self.cube = mock.Mock(ndim=self.ndim) + # configure aux coords + self.items_aux = [] + aux_parts = [ + (sentinel.aux_metadata0, sentinel.aux_coord0, (0,)), + (sentinel.aux_metadata1, sentinel.aux_coord1, (1,)), + (sentinel.aux_metadata23, sentinel.aux_coord23, (2, 3)), + ] + column_aux_parts = [x for x in zip(*aux_parts)] + self.aux_metadata, self.aux_coords, self.aux_dims = [ + list(x) for x in column_aux_parts + ] + for metadata, coord, dims in aux_parts: + item = _Item(metadata=metadata, coord=coord, dims=dims) + self.items_aux.append(item) + # configure scalar coords + self.items_scalar = [] + scalar_parts = [ + (sentinel.scalar_metadata0, sentinel.scalar_coord0, ()), + (sentinel.scalar_metadata1, sentinel.scalar_coord1, ()), + (sentinel.scalar_metadata2, sentinel.scalar_coord2, ()), + ] + column_scalar_parts = [x for x in zip(*scalar_parts)] + self.scalar_metadata, self.scalar_coords, self.scalar_dims = [ + list(x) for x in column_scalar_parts + ] + for metadata, coord, dims in scalar_parts: + item = _Item(metadata=metadata, coord=coord, dims=dims) + self.items_scalar.append(item) + + def test_coverage_no_local_no_common_all_free(self): + items_aux, items_scalar = [], [] + common_aux, 
common_scalar = [], [] + result = Resolve._aux_coverage( + self.cube, items_aux, items_scalar, common_aux, common_scalar + ) + self.assertIsInstance(result, _AuxCoverage) + self.assertEqual(self.cube, result.cube) + self.assertEqual([], result.common_items_aux) + self.assertEqual([], result.common_items_scalar) + self.assertEqual([], result.local_items_aux) + self.assertEqual([], result.local_items_scalar) + self.assertEqual([], result.dims_common) + self.assertEqual([], result.dims_local) + expected = list(range(self.ndim)) + self.assertEqual(expected, result.dims_free) + + def test_coverage_all_local_no_common_no_free(self): + common_aux, common_scalar = [], [] + result = Resolve._aux_coverage( + self.cube, + self.items_aux, + self.items_scalar, + common_aux, + common_scalar, + ) + self.assertIsInstance(result, _AuxCoverage) + self.assertEqual(self.cube, result.cube) + expected = [] + self.assertEqual(expected, result.common_items_aux) + self.assertEqual(expected, result.common_items_scalar) + self.assertEqual(self.items_aux, result.local_items_aux) + self.assertEqual(self.items_scalar, result.local_items_scalar) + self.assertEqual([], result.dims_common) + expected = list(range(self.ndim)) + self.assertEqual(expected, result.dims_local) + self.assertEqual([], result.dims_free) + + def test_coverage_no_local_all_common_no_free(self): + result = Resolve._aux_coverage( + self.cube, + self.items_aux, + self.items_scalar, + self.aux_metadata, + self.scalar_metadata, + ) + self.assertIsInstance(result, _AuxCoverage) + self.assertEqual(self.cube, result.cube) + self.assertEqual(self.items_aux, result.common_items_aux) + self.assertEqual(self.items_scalar, result.common_items_scalar) + self.assertEqual([], result.local_items_aux) + self.assertEqual([], result.local_items_scalar) + expected = list(range(self.ndim)) + self.assertEqual(expected, result.dims_common) + self.assertEqual([], result.dims_local) + self.assertEqual([], result.dims_free) + + def 
test_coverage_mixed(self): + common_aux = [self.items_aux[-1].metadata] + common_scalar = [self.items_scalar[1].metadata] + self.items_aux.pop(1) + result = Resolve._aux_coverage( + self.cube, + self.items_aux, + self.items_scalar, + common_aux, + common_scalar, + ) + self.assertIsInstance(result, _AuxCoverage) + self.assertEqual(self.cube, result.cube) + expected = [self.items_aux[-1]] + self.assertEqual(expected, result.common_items_aux) + expected = [self.items_scalar[1]] + self.assertEqual(expected, result.common_items_scalar) + expected = [self.items_aux[0]] + self.assertEqual(expected, result.local_items_aux) + expected = [self.items_scalar[0], self.items_scalar[2]] + self.assertEqual(expected, result.local_items_scalar) + self.assertEqual([2, 3], result.dims_common) + self.assertEqual([0], result.dims_local) + self.assertEqual([1], result.dims_free) + + +class Test__metadata_coverage(tests.IrisTest): + def setUp(self): + self.resolve = Resolve() + self.m_lhs_cube = sentinel.lhs_cube + self.resolve.lhs_cube = self.m_lhs_cube + self.m_rhs_cube = sentinel.rhs_cube + self.resolve.rhs_cube = self.m_rhs_cube + self.m_items_dim_metadata = sentinel.items_dim_metadata + self.m_items_aux_metadata = sentinel.items_aux_metadata + self.m_items_scalar_metadata = sentinel.items_scalar_metadata + items_dim = [mock.Mock(metadata=self.m_items_dim_metadata)] + items_aux = [mock.Mock(metadata=self.m_items_aux_metadata)] + items_scalar = [mock.Mock(metadata=self.m_items_scalar_metadata)] + category = _CategoryItems( + items_dim=items_dim, items_aux=items_aux, items_scalar=items_scalar + ) + self.resolve.category_common = category + self.m_items_dim = sentinel.items_dim + self.m_items_aux = sentinel.items_aux + self.m_items_scalar = sentinel.items_scalar + category = _CategoryItems( + items_dim=self.m_items_dim, + items_aux=self.m_items_aux, + items_scalar=self.m_items_scalar, + ) + self.resolve.lhs_cube_category = category + self.resolve.rhs_cube_category = category + target = 
"iris.common.resolve.Resolve._dim_coverage" + self.m_lhs_cube_dim_coverage = sentinel.lhs_cube_dim_coverage + self.m_rhs_cube_dim_coverage = sentinel.rhs_cube_dim_coverage + side_effect = ( + self.m_lhs_cube_dim_coverage, + self.m_rhs_cube_dim_coverage, + ) + self.mocker_dim_coverage = self.patch(target, side_effect=side_effect) + target = "iris.common.resolve.Resolve._aux_coverage" + self.m_lhs_cube_aux_coverage = sentinel.lhs_cube_aux_coverage + self.m_rhs_cube_aux_coverage = sentinel.rhs_cube_aux_coverage + side_effect = ( + self.m_lhs_cube_aux_coverage, + self.m_rhs_cube_aux_coverage, + ) + self.mocker_aux_coverage = self.patch(target, side_effect=side_effect) + + def test(self): + self.resolve._metadata_coverage() + self.assertEqual(2, self.mocker_dim_coverage.call_count) + calls = [ + mock.call( + self.m_lhs_cube, self.m_items_dim, [self.m_items_dim_metadata] + ), + mock.call( + self.m_rhs_cube, self.m_items_dim, [self.m_items_dim_metadata] + ), + ] + self.assertEqual(calls, self.mocker_dim_coverage.call_args_list) + self.assertEqual(2, self.mocker_aux_coverage.call_count) + calls = [ + mock.call( + self.m_lhs_cube, + self.m_items_aux, + self.m_items_scalar, + [self.m_items_aux_metadata], + [self.m_items_scalar_metadata], + ), + mock.call( + self.m_rhs_cube, + self.m_items_aux, + self.m_items_scalar, + [self.m_items_aux_metadata], + [self.m_items_scalar_metadata], + ), + ] + self.assertEqual(calls, self.mocker_aux_coverage.call_args_list) + self.assertEqual( + self.m_lhs_cube_dim_coverage, self.resolve.lhs_cube_dim_coverage + ) + self.assertEqual( + self.m_rhs_cube_dim_coverage, self.resolve.rhs_cube_dim_coverage + ) + self.assertEqual( + self.m_lhs_cube_aux_coverage, self.resolve.lhs_cube_aux_coverage + ) + self.assertEqual( + self.m_rhs_cube_aux_coverage, self.resolve.rhs_cube_aux_coverage + ) + + +class Test__dim_mapping(tests.IrisTest): + def setUp(self): + self.ndim = 3 + Wrapper = namedtuple("Wrapper", ("name",)) + cube = Wrapper(name=lambda: 
sentinel.name) + self.src_coverage = _DimCoverage( + cube=cube, + metadata=[], + coords=None, + dims_common=None, + dims_local=None, + dims_free=None, + ) + self.tgt_coverage = _DimCoverage( + cube=cube, + metadata=[], + coords=None, + dims_common=[], + dims_local=None, + dims_free=None, + ) + self.metadata = [ + sentinel.metadata_0, + sentinel.metadata_1, + sentinel.metadata_2, + ] + self.dummy = [sentinel.dummy_0, sentinel.dummy_1, sentinel.dummy_2] + + def test_no_mapping(self): + self.src_coverage.metadata.extend(self.metadata) + self.tgt_coverage.metadata.extend(self.dummy) + result = Resolve._dim_mapping(self.src_coverage, self.tgt_coverage) + self.assertEqual(dict(), result) + + def test_full_mapping(self): + self.src_coverage.metadata.extend(self.metadata) + self.tgt_coverage.metadata.extend(self.metadata) + dims_common = list(range(self.ndim)) + self.tgt_coverage.dims_common.extend(dims_common) + result = Resolve._dim_mapping(self.src_coverage, self.tgt_coverage) + expected = {0: 0, 1: 1, 2: 2} + self.assertEqual(expected, result) + + def test_transpose_mapping(self): + self.src_coverage.metadata.extend(self.metadata[::-1]) + self.tgt_coverage.metadata.extend(self.metadata) + dims_common = list(range(self.ndim)) + self.tgt_coverage.dims_common.extend(dims_common) + result = Resolve._dim_mapping(self.src_coverage, self.tgt_coverage) + expected = {0: 2, 1: 1, 2: 0} + self.assertEqual(expected, result) + + def test_partial_mapping__transposed(self): + self.src_coverage.metadata.extend(self.metadata) + self.metadata[1] = sentinel.nope + self.tgt_coverage.metadata.extend(self.metadata[::-1]) + dims_common = [0, 2] + self.tgt_coverage.dims_common.extend(dims_common) + result = Resolve._dim_mapping(self.src_coverage, self.tgt_coverage) + expected = {0: 2, 2: 0} + self.assertEqual(expected, result) + + def test_bad_metadata_mapping(self): + self.src_coverage.metadata.extend(self.metadata) + self.metadata[0] = sentinel.bad + 
self.tgt_coverage.metadata.extend(self.metadata) + dims_common = [0] + self.tgt_coverage.dims_common.extend(dims_common) + emsg = "Failed to map common dim coordinate metadata" + with self.assertRaisesRegex(ValueError, emsg): + _ = Resolve._dim_mapping(self.src_coverage, self.tgt_coverage) + + +class Test__aux_mapping(tests.IrisTest): + def setUp(self): + self.ndim = 3 + Wrapper = namedtuple("Wrapper", ("name",)) + cube = Wrapper(name=lambda: sentinel.name) + self.src_coverage = _AuxCoverage( + cube=cube, + common_items_aux=[], + common_items_scalar=None, + local_items_aux=None, + local_items_scalar=None, + dims_common=None, + dims_local=None, + dims_free=None, + ) + self.tgt_coverage = _AuxCoverage( + cube=cube, + common_items_aux=[], + common_items_scalar=None, + local_items_aux=None, + local_items_scalar=None, + dims_common=None, + dims_local=None, + dims_free=None, + ) + self.items = [ + _Item( + metadata=sentinel.metadata0, coord=sentinel.coord0, dims=[0] + ), + _Item( + metadata=sentinel.metadata1, coord=sentinel.coord1, dims=[1] + ), + _Item( + metadata=sentinel.metadata2, coord=sentinel.coord2, dims=[2] + ), + ] + + def _copy(self, items): + # Due to a bug in python 3.6.x, performing a deepcopy of a mock.sentinel + # will yield an object that is not equivalent to its parent, so this + # is a work-around until we drop support for python 3.6.x. 
+ import sys + + version = sys.version_info + major, minor = version.major, version.minor + result = deepcopy(items) + if major == 3 and minor <= 6: + for i, item in enumerate(items): + result[i] = result[i]._replace(metadata=item.metadata) + return result + + def test_no_mapping(self): + result = Resolve._aux_mapping(self.src_coverage, self.tgt_coverage) + self.assertEqual(dict(), result) + + def test_full_mapping(self): + self.src_coverage.common_items_aux.extend(self.items) + self.tgt_coverage.common_items_aux.extend(self.items) + result = Resolve._aux_mapping(self.src_coverage, self.tgt_coverage) + expected = {0: 0, 1: 1, 2: 2} + self.assertEqual(expected, result) + + def test_transpose_mapping(self): + self.src_coverage.common_items_aux.extend(self.items) + items = self._copy(self.items) + items[0].dims[0] = 2 + items[2].dims[0] = 0 + self.tgt_coverage.common_items_aux.extend(items) + result = Resolve._aux_mapping(self.src_coverage, self.tgt_coverage) + expected = {0: 2, 1: 1, 2: 0} + self.assertEqual(expected, result) + + def test_partial_mapping__transposed(self): + _ = self.items.pop(1) + self.src_coverage.common_items_aux.extend(self.items) + items = self._copy(self.items) + items[0].dims[0] = 2 + items[1].dims[0] = 0 + self.tgt_coverage.common_items_aux.extend(items) + result = Resolve._aux_mapping(self.src_coverage, self.tgt_coverage) + expected = {0: 2, 2: 0} + self.assertEqual(expected, result) + + def test_mapping__match_multiple_src_metadata(self): + items = self._copy(self.items) + _ = self.items.pop(1) + self.src_coverage.common_items_aux.extend(self.items) + items[1] = items[0] + self.tgt_coverage.common_items_aux.extend(items) + result = Resolve._aux_mapping(self.src_coverage, self.tgt_coverage) + expected = {0: 0, 2: 2} + self.assertEqual(expected, result) + + def test_mapping__skip_match_multiple_src_metadata(self): + items = self._copy(self.items) + _ = self.items.pop(1) + self.tgt_coverage.common_items_aux.extend(self.items) + items[1] = 
items[0]._replace(dims=[1]) + self.src_coverage.common_items_aux.extend(items) + result = Resolve._aux_mapping(self.src_coverage, self.tgt_coverage) + expected = {2: 2} + self.assertEqual(expected, result) + + def test_mapping__skip_different_rank(self): + items = self._copy(self.items) + self.src_coverage.common_items_aux.extend(self.items) + items[2] = items[2]._replace(dims=[1, 2]) + self.tgt_coverage.common_items_aux.extend(items) + result = Resolve._aux_mapping(self.src_coverage, self.tgt_coverage) + expected = {0: 0, 1: 1} + self.assertEqual(expected, result) + + def test_bad_metadata_mapping(self): + self.src_coverage.common_items_aux.extend(self.items) + items = self._copy(self.items) + items[0] = items[0]._replace(metadata=sentinel.bad) + self.tgt_coverage.common_items_aux.extend(items) + emsg = "Failed to map common aux coordinate metadata" + with self.assertRaisesRegex(ValueError, emsg): + _ = Resolve._aux_mapping(self.src_coverage, self.tgt_coverage) + + +class Test_mapped(tests.IrisTest): + def test_mapping_none(self): + resolve = Resolve() + self.assertIsNone(resolve.mapping) + self.assertIsNone(resolve.mapped) + + def test_mapped__src_cube_lhs(self): + resolve = Resolve() + lhs = mock.Mock(ndim=2) + rhs = mock.Mock(ndim=3) + resolve.lhs_cube = lhs + resolve.rhs_cube = rhs + resolve.map_rhs_to_lhs = False + resolve.mapping = {0: 0, 1: 1} + self.assertTrue(resolve.mapped) + + def test_mapped__src_cube_rhs(self): + resolve = Resolve() + lhs = mock.Mock(ndim=3) + rhs = mock.Mock(ndim=2) + resolve.lhs_cube = lhs + resolve.rhs_cube = rhs + resolve.map_rhs_to_lhs = True + resolve.mapping = {0: 0, 1: 1} + self.assertTrue(resolve.mapped) + + def test_partial_mapping(self): + resolve = Resolve() + lhs = mock.Mock(ndim=3) + rhs = mock.Mock(ndim=2) + resolve.lhs_cube = lhs + resolve.rhs_cube = rhs + resolve.map_rhs_to_lhs = True + resolve.mapping = {0: 0} + self.assertFalse(resolve.mapped) + + +class Test__free_mapping(tests.IrisTest): + def setUp(self): + 
self.Cube = namedtuple("Wrapper", ("name", "ndim", "shape")) + self.src_dim_coverage = dict( + cube=None, + metadata=None, + coords=None, + dims_common=None, + dims_local=None, + dims_free=[], + ) + self.tgt_dim_coverage = deepcopy(self.src_dim_coverage) + self.src_aux_coverage = dict( + cube=None, + common_items_aux=None, + common_items_scalar=None, + local_items_aux=None, + local_items_scalar=None, + dims_common=None, + dims_local=None, + dims_free=[], + ) + self.tgt_aux_coverage = deepcopy(self.src_aux_coverage) + self.resolve = Resolve() + self.resolve.map_rhs_to_lhs = True + self.resolve.mapping = {} + + def _make_args(self): + args = dict( + src_dim_coverage=_DimCoverage(**self.src_dim_coverage), + tgt_dim_coverage=_DimCoverage(**self.tgt_dim_coverage), + src_aux_coverage=_AuxCoverage(**self.src_aux_coverage), + tgt_aux_coverage=_AuxCoverage(**self.tgt_aux_coverage), + ) + return args + + def test_mapping_no_dims_free(self): + ndim = 4 + shape = tuple(range(ndim)) + cube = self.Cube(name=lambda: "name", ndim=ndim, shape=shape) + self.src_dim_coverage["cube"] = cube + self.tgt_dim_coverage["cube"] = cube + args = self._make_args() + emsg = "Insufficient matching coordinate metadata" + with self.assertRaisesRegex(ValueError, emsg): + self.resolve._free_mapping(**args) + + def _make_coverage(self, name, shape, dims_free): + if name == "src": + dim_coverage = self.src_dim_coverage + aux_coverage = self.src_aux_coverage + else: + dim_coverage = self.tgt_dim_coverage + aux_coverage = self.tgt_aux_coverage + ndim = len(shape) + cube = self.Cube(name=lambda: name, ndim=ndim, shape=shape) + dim_coverage["cube"] = cube + dim_coverage["dims_free"].extend(dims_free) + aux_coverage["cube"] = cube + aux_coverage["dims_free"].extend(dims_free) + + def test_mapping_src_free_to_tgt_local(self): + # key: (state) c=common, f=free, l=local + # (coord) a=aux, d=dim + # + # tgt: <- src: + # dims 0 1 2 3 dims 0 1 2 + # shape 2 4 3 2 shape 2 3 4 + # state f l c l state f c f + # 
coord d d d a coord a d d + # + # src-to-tgt mapping: + # before 1->2 + # after 0->3 1->2 2->1 + src_shape = (2, 3, 4) + src_free = [0, 2] + self._make_coverage("src", src_shape, src_free) + tgt_shape = (2, 4, 3, 2) + tgt_free = [0] + self._make_coverage("tgt", tgt_shape, tgt_free) + self.resolve.mapping = {1: 2} + args = self._make_args() + self.resolve._free_mapping(**args) + expected = {0: 3, 1: 2, 2: 1} + self.assertEqual(expected, self.resolve.mapping) + + def test_mapping_src_free_to_tgt_local__broadcast_src_first(self): + # key: (state) c=common, f=free, l=local + # (coord) a=aux, d=dim + # + # tgt: <- src: + # dims 0 1 2 3 dims 0 1 2 + # shape 2 4 3 2 shape 1 3 4 + # state f l c l state f c f + # coord d d d a coord a d d + # bcast ^ + # + # src-to-tgt mapping: + # before 1->2 + # after 0->3 1->2 2->1 + src_shape = (1, 3, 4) + src_free = [0, 2] + self._make_coverage("src", src_shape, src_free) + tgt_shape = (2, 4, 3, 2) + tgt_free = [0] + self._make_coverage("tgt", tgt_shape, tgt_free) + self.resolve.mapping = {1: 2} + args = self._make_args() + self.resolve._free_mapping(**args) + expected = {0: 3, 1: 2, 2: 1} + self.assertEqual(expected, self.resolve.mapping) + + def test_mapping_src_free_to_tgt_local__broadcast_src_last(self): + # key: (state) c=common, f=free, l=local + # (coord) a=aux, d=dim + # + # tgt: <- src: + # dims 0 1 2 3 dims 0 1 2 + # shape 2 4 3 2 shape 2 3 1 + # state f l c l state f c f + # coord d d d a coord a d d + # bcast ^ + # + # src-to-tgt mapping: + # before 1->2 + # after 0->3 1->2 2->1 + src_shape = (2, 3, 1) + src_free = [0, 2] + self._make_coverage("src", src_shape, src_free) + tgt_shape = (2, 4, 3, 2) + tgt_free = [0] + self._make_coverage("tgt", tgt_shape, tgt_free) + self.resolve.mapping = {1: 2} + args = self._make_args() + self.resolve._free_mapping(**args) + expected = {0: 3, 1: 2, 2: 1} + self.assertEqual(expected, self.resolve.mapping) + + def test_mapping_src_free_to_tgt_local__broadcast_src_both(self): + # key: (state) 
c=common, f=free, l=local + # (coord) a=aux, d=dim + # + # tgt: <- src: + # dims 0 1 2 3 dims 0 1 2 + # shape 2 4 3 2 shape 1 3 1 + # state f l c l state f c f + # coord d d d a coord a d d + # bcast ^ ^ + # + # src-to-tgt mapping: + # before 1->2 + # after 0->1 1->2 2->3 + src_shape = (1, 3, 1) + src_free = [0, 2] + self._make_coverage("src", src_shape, src_free) + tgt_shape = (2, 4, 3, 2) + tgt_free = [0] + self._make_coverage("tgt", tgt_shape, tgt_free) + self.resolve.mapping = {1: 2} + args = self._make_args() + self.resolve._free_mapping(**args) + expected = {0: 1, 1: 2, 2: 3} + self.assertEqual(expected, self.resolve.mapping) + + def test_mapping_src_free_to_tgt_free(self): + # key: (state) c=common, f=free, l=local + # (coord) a=aux, d=dim + # + # tgt: <- src: + # dims 0 1 2 3 dims 0 1 2 + # shape 2 4 3 2 shape 2 3 4 + # state f f c f state f c f + # coord d d d a coord a d d + # + # src-to-tgt mapping: + # before 1->2 + # after 0->0 1->2 2->1 + src_shape = (2, 3, 4) + src_free = [0, 2] + self._make_coverage("src", src_shape, src_free) + tgt_shape = (2, 4, 3, 2) + tgt_free = [0, 1, 3] + self._make_coverage("tgt", tgt_shape, tgt_free) + self.resolve.mapping = {1: 2} + args = self._make_args() + self.resolve._free_mapping(**args) + expected = {0: 0, 1: 2, 2: 1} + self.assertEqual(expected, self.resolve.mapping) + + def test_mapping_src_free_to_tgt_free__broadcast_src_first(self): + # key: (state) c=common, f=free, l=local + # (coord) a=aux, d=dim + # + # tgt: <- src: + # dims 0 1 2 3 dims 0 1 2 + # shape 2 4 3 2 shape 1 3 4 + # state f f c f state f c f + # coord d d d a coord a d d + # bcast ^ + # + # src-to-tgt mapping: + # before 1->2 + # after 0->0 1->2 2->1 + src_shape = (1, 3, 4) + src_free = [0, 2] + self._make_coverage("src", src_shape, src_free) + tgt_shape = (2, 4, 3, 2) + tgt_free = [0, 1, 3] + self._make_coverage("tgt", tgt_shape, tgt_free) + self.resolve.mapping = {1: 2} + args = self._make_args() + self.resolve._free_mapping(**args) + expected = 
{0: 0, 1: 2, 2: 1} + self.assertEqual(expected, self.resolve.mapping) + + def test_mapping_src_free_to_tgt_free__broadcast_src_last(self): + # key: (state) c=common, f=free, l=local + # (coord) a=aux, d=dim + # + # tgt: <- src: + # dims 0 1 2 3 dims 0 1 2 + # shape 2 4 3 2 shape 2 3 1 + # state f f c f state f c f + # coord d d d a coord a d d + # bcast ^ + # + # src-to-tgt mapping: + # before 1->2 + # after 0->0 1->2 2->1 + src_shape = (2, 3, 1) + src_free = [0, 2] + self._make_coverage("src", src_shape, src_free) + tgt_shape = (2, 4, 3, 2) + tgt_free = [0, 1, 3] + self._make_coverage("tgt", tgt_shape, tgt_free) + self.resolve.mapping = {1: 2} + args = self._make_args() + self.resolve._free_mapping(**args) + expected = {0: 0, 1: 2, 2: 1} + self.assertEqual(expected, self.resolve.mapping) + + def test_mapping_src_free_to_tgt_free__broadcast_src_both(self): + # key: (state) c=common, f=free, l=local + # (coord) a=aux, d=dim + # + # tgt: <- src: + # dims 0 1 2 3 dims 0 1 2 + # shape 2 4 3 2 shape 1 3 1 + # state f f c f state f c f + # coord d d d a coord a d d + # bcast ^ ^ + # + # src-to-tgt mapping: + # before 1->2 + # after 0->0 1->2 2->1 + src_shape = (1, 3, 1) + src_free = [0, 2] + self._make_coverage("src", src_shape, src_free) + tgt_shape = (2, 4, 3, 2) + tgt_free = [0, 1, 3] + self._make_coverage("tgt", tgt_shape, tgt_free) + self.resolve.mapping = {1: 2} + args = self._make_args() + self.resolve._free_mapping(**args) + expected = {0: 0, 1: 2, 2: 1} + self.assertEqual(expected, self.resolve.mapping) + + def test_mapping_src_free_to_tgt__fail(self): + # key: (state) c=common, f=free, l=local + # (coord) a=aux, d=dim + # + # tgt: <- src: + # dims 0 1 2 3 dims 0 1 2 + # shape 2 4 3 2 shape 2 3 5 + # state f f c f state f c f + # coord d d d a coord a d d + # fail ^ + # + # src-to-tgt mapping: + # before 1->2 + # after 0->0 1->2 2->? 
+ src_shape = (2, 3, 5) + src_free = [0, 2] + self._make_coverage("src", src_shape, src_free) + tgt_shape = (2, 4, 3, 2) + tgt_free = [0, 1, 3] + self._make_coverage("tgt", tgt_shape, tgt_free) + self.resolve.mapping = {1: 2} + args = self._make_args() + emsg = "Insufficient matching coordinate metadata to resolve cubes" + with self.assertRaisesRegex(ValueError, emsg): + self.resolve._free_mapping(**args) + + def test_mapping_tgt_free_to_src_local(self): + # key: (state) c=common, f=free, l=local + # (coord) a=aux, d=dim + # + # tgt: -> src: + # dims 0 1 2 3 dims 0 1 2 + # shape 2 4 3 2 shape 2 3 4 + # state l f c f state l c l + # coord d d d a coord a d d + # + # src-to-tgt mapping: + # before 1->2 + # after 0->3 1->2 2->1 + src_shape = (2, 3, 4) + src_free = [] + self._make_coverage("src", src_shape, src_free) + tgt_shape = (2, 4, 3, 2) + tgt_free = [1, 3] + self._make_coverage("tgt", tgt_shape, tgt_free) + self.resolve.mapping = {1: 2} + args = self._make_args() + self.resolve._free_mapping(**args) + expected = {0: 3, 1: 2, 2: 1} + self.assertEqual(expected, self.resolve.mapping) + + def test_mapping_tgt_free_to_src_local__broadcast_tgt_first(self): + # key: (state) c=common, f=free, l=local + # (coord) a=aux, d=dim + # + # tgt: -> src: + # dims 0 1 2 3 dims 0 1 2 + # shape 2 1 3 2 shape 2 3 4 + # state l f c f state l c l + # coord d d d a coord a d d + # bcast ^ + # + # src-to-tgt mapping: + # before 1->2 + # after 0->3 1->2 2->1 + src_shape = (2, 3, 4) + src_free = [] + self._make_coverage("src", src_shape, src_free) + tgt_shape = (2, 1, 3, 2) + tgt_free = [1, 3] + self._make_coverage("tgt", tgt_shape, tgt_free) + self.resolve.mapping = {1: 2} + args = self._make_args() + self.resolve._free_mapping(**args) + expected = {0: 3, 1: 2, 2: 1} + self.assertEqual(expected, self.resolve.mapping) + + def test_mapping_tgt_free_to_src_local__broadcast_tgt_last(self): + # key: (state) c=common, f=free, l=local + # (coord) a=aux, d=dim + # + # tgt: -> src: + # dims 0 1 2 
3 dims 0 1 2 + # shape 2 4 3 1 shape 2 3 4 + # state l f c f state l c l + # coord d d d a coord a d d + # bcast ^ + # + # src-to-tgt mapping: + # before 1->2 + # after 0->3 1->2 2->1 + src_shape = (2, 3, 4) + src_free = [] + self._make_coverage("src", src_shape, src_free) + tgt_shape = (2, 4, 3, 1) + tgt_free = [1, 3] + self._make_coverage("tgt", tgt_shape, tgt_free) + self.resolve.mapping = {1: 2} + args = self._make_args() + self.resolve._free_mapping(**args) + expected = {0: 3, 1: 2, 2: 1} + self.assertEqual(expected, self.resolve.mapping) + + def test_mapping_tgt_free_to_src_local__broadcast_tgt_both(self): + # key: (state) c=common, f=free, l=local + # (coord) a=aux, d=dim + # + # tgt: -> src: + # dims 0 1 2 3 dims 0 1 2 + # shape 2 1 3 1 shape 2 3 4 + # state l f c f state l c l + # coord d d d a coord a d d + # bcast ^ ^ + # + # src-to-tgt mapping: + # before 1->2 + # after 0->1 1->2 2->3 + src_shape = (2, 3, 4) + src_free = [] + self._make_coverage("src", src_shape, src_free) + tgt_shape = (2, 1, 3, 1) + tgt_free = [1, 3] + self._make_coverage("tgt", tgt_shape, tgt_free) + self.resolve.mapping = {1: 2} + args = self._make_args() + self.resolve._free_mapping(**args) + expected = {0: 1, 1: 2, 2: 3} + self.assertEqual(expected, self.resolve.mapping) + + def test_mapping_tgt_free_to_src_no_free__fail(self): + # key: (state) c=common, f=free, l=local + # (coord) a=aux, d=dim + # + # tgt: -> src: + # dims 0 1 2 3 dims 0 1 2 + # shape 2 4 3 5 shape 2 3 4 + # state l f c f state l c l + # coord d d d a coord a d d + # fail ^ + # + # src-to-tgt mapping: + # before 1->2 + # after 0->0 1->2 2->? 
+ src_shape = (2, 3, 4) + src_free = [] + self._make_coverage("src", src_shape, src_free) + tgt_shape = (2, 4, 3, 5) + tgt_free = [1, 3] + self._make_coverage("tgt", tgt_shape, tgt_free) + self.resolve.mapping = {1: 2} + args = self._make_args() + emsg = "Insufficient matching coordinate metadata to resolve cubes" + with self.assertRaisesRegex(ValueError, emsg): + self.resolve._free_mapping(**args) + + +class Test__src_cube(tests.IrisTest): + def setUp(self): + self.resolve = Resolve() + self.expected = sentinel.cube + + def test_rhs_cube(self): + self.resolve.map_rhs_to_lhs = True + self.resolve.rhs_cube = self.expected + self.assertEqual(self.expected, self.resolve._src_cube) + + def test_lhs_cube(self): + self.resolve.map_rhs_to_lhs = False + self.resolve.lhs_cube = self.expected + self.assertEqual(self.expected, self.resolve._src_cube) + + def test_fail__no_map_rhs_to_lhs(self): + with self.assertRaises(AssertionError): + self.resolve._src_cube + + +class Test__src_cube_position(tests.IrisTest): + def setUp(self): + self.resolve = Resolve() + + def test_rhs_cube(self): + self.resolve.map_rhs_to_lhs = True + self.assertEqual("RHS", self.resolve._src_cube_position) + + def test_lhs_cube(self): + self.resolve.map_rhs_to_lhs = False + self.assertEqual("LHS", self.resolve._src_cube_position) + + def test_fail__no_map_rhs_to_lhs(self): + with self.assertRaises(AssertionError): + self.resolve._src_cube_position + + +class Test__src_cube_resolved__getter(tests.IrisTest): + def setUp(self): + self.resolve = Resolve() + self.expected = sentinel.cube + + def test_rhs_cube(self): + self.resolve.map_rhs_to_lhs = True + self.resolve.rhs_cube_resolved = self.expected + self.assertEqual(self.expected, self.resolve._src_cube_resolved) + + def test_lhs_cube(self): + self.resolve.map_rhs_to_lhs = False + self.resolve.lhs_cube_resolved = self.expected + self.assertEqual(self.expected, self.resolve._src_cube_resolved) + + def test_fail__no_map_rhs_to_lhs(self): + with 
self.assertRaises(AssertionError): + self.resolve._src_cube_resolved + + +class Test__src_cube_resolved__setter(tests.IrisTest): + def setUp(self): + self.resolve = Resolve() + self.expected = sentinel.cube + + def test_rhs_cube(self): + self.resolve.map_rhs_to_lhs = True + self.resolve._src_cube_resolved = self.expected + self.assertEqual(self.expected, self.resolve.rhs_cube_resolved) + + def test_lhs_cube(self): + self.resolve.map_rhs_to_lhs = False + self.resolve._src_cube_resolved = self.expected + self.assertEqual(self.expected, self.resolve.lhs_cube_resolved) + + def test_fail__no_map_rhs_to_lhs(self): + with self.assertRaises(AssertionError): + self.resolve._src_cube_resolved = self.expected + + +class Test__tgt_cube(tests.IrisTest): + def setUp(self): + self.resolve = Resolve() + self.expected = sentinel.cube + + def test_rhs_cube(self): + self.resolve.map_rhs_to_lhs = False + self.resolve.rhs_cube = self.expected + self.assertEqual(self.expected, self.resolve._tgt_cube) + + def test_lhs_cube(self): + self.resolve.map_rhs_to_lhs = True + self.resolve.lhs_cube = self.expected + self.assertEqual(self.expected, self.resolve._tgt_cube) + + def test_fail__no_map_rhs_to_lhs(self): + with self.assertRaises(AssertionError): + self.resolve._tgt_cube + + +class Test__tgt_cube_position(tests.IrisTest): + def setUp(self): + self.resolve = Resolve() + + def test_rhs_cube(self): + self.resolve.map_rhs_to_lhs = False + self.assertEqual("RHS", self.resolve._tgt_cube_position) + + def test_lhs_cube(self): + self.resolve.map_rhs_to_lhs = True + self.assertEqual("LHS", self.resolve._tgt_cube_position) + + def test_fail__no_map_rhs_to_lhs(self): + with self.assertRaises(AssertionError): + self.resolve._tgt_cube_position + + +class Test__tgt_cube_resolved__getter(tests.IrisTest): + def setUp(self): + self.resolve = Resolve() + self.expected = sentinel.cube + + def test_rhs_cube(self): + self.resolve.map_rhs_to_lhs = False + self.resolve.rhs_cube_resolved = self.expected + 
self.assertEqual(self.expected, self.resolve._tgt_cube_resolved) + + def test_lhs_cube(self): + self.resolve.map_rhs_to_lhs = True + self.resolve.lhs_cube_resolved = self.expected + self.assertEqual(self.expected, self.resolve._tgt_cube_resolved) + + def test_fail__no_map_rhs_to_lhs(self): + with self.assertRaises(AssertionError): + self.resolve._tgt_cube_resolved + + +class Test__tgt_cube_resolved__setter(tests.IrisTest): + def setUp(self): + self.resolve = Resolve() + self.expected = sentinel.cube + + def test_rhs_cube(self): + self.resolve.map_rhs_to_lhs = False + self.resolve._tgt_cube_resolved = self.expected + self.assertEqual(self.expected, self.resolve.rhs_cube_resolved) + + def test_lhs_cube(self): + self.resolve.map_rhs_to_lhs = True + self.resolve._tgt_cube_resolved = self.expected + self.assertEqual(self.expected, self.resolve.lhs_cube_resolved) + + def test_fail__no_map_rhs_to_lhs(self): + with self.assertRaises(AssertionError): + self.resolve._tgt_cube_resolved = self.expected + + +class Test_shape(tests.IrisTest): + def setUp(self): + self.resolve = Resolve() + + def test_no_shape(self): + self.assertIsNone(self.resolve.shape) + + def test_shape(self): + expected = sentinel.shape + self.resolve._broadcast_shape = expected + self.assertEqual(expected, self.resolve.shape) + + +class Test__as_compatible_cubes(tests.IrisTest): + def setUp(self): + self.Cube = namedtuple( + "Wrapper", + ( + "name", + "ndim", + "shape", + "metadata", + "core_data", + "coord_dims", + "dim_coords", + "aux_coords", + "aux_factories", + ), + ) + self.resolve = Resolve() + self.resolve.map_rhs_to_lhs = True + self.resolve.mapping = {} + self.mocker = self.patch("iris.cube.Cube") + self.args = dict( + name=None, + ndim=None, + shape=None, + metadata=None, + core_data=None, + coord_dims=None, + dim_coords=None, + aux_coords=None, + aux_factories=None, + ) + + def _make_cube(self, name, shape, transpose_shape=None): + self.args["name"] = lambda: name + ndim = len(shape) + 
self.args["ndim"] = ndim + self.args["shape"] = shape + if name == "src": + self.args["metadata"] = sentinel.metadata + self.reshape = sentinel.reshape + m_reshape = mock.Mock(return_value=self.reshape) + self.transpose = mock.Mock( + shape=transpose_shape, reshape=m_reshape + ) + m_transpose = mock.Mock(return_value=self.transpose) + self.data = mock.Mock( + shape=shape, transpose=m_transpose, reshape=m_reshape + ) + m_copy = mock.Mock(return_value=self.data) + m_core_data = mock.Mock(copy=m_copy) + self.args["core_data"] = mock.Mock(return_value=m_core_data) + self.args["coord_dims"] = mock.Mock(side_effect=([0], [ndim - 1])) + self.dim_coord = sentinel.dim_coord + self.aux_coord = sentinel.aux_coord + self.aux_factory = sentinel.aux_factory + self.args["dim_coords"] = [self.dim_coord] + self.args["aux_coords"] = [self.aux_coord] + self.args["aux_factories"] = [self.aux_factory] + cube = self.Cube(**self.args) + self.resolve.rhs_cube = cube + self.cube = mock.Mock() + self.mocker.return_value = self.cube + else: + cube = self.Cube(**self.args) + self.resolve.lhs_cube = cube + + def test_incomplete_src_to_tgt_mapping__fail(self): + src_shape = (1, 2) + self._make_cube("src", src_shape) + tgt_shape = (3, 4) + self._make_cube("tgt", tgt_shape) + with self.assertRaises(AssertionError): + self.resolve._as_compatible_cubes() + + def test_incompatible_shapes__fail(self): + # key: (state) c=common, f=free + # + # tgt: <- src: + # dims 0 1 2 3 dims 0 1 2 + # shape 2 2 3 4 shape 2 3 5 + # state f c c c state c c c + # fail ^ fail ^ + # + # src-to-tgt mapping: + # 0->1, 1->2, 2->3 + src_shape = (2, 3, 5) + self._make_cube("src", src_shape) + tgt_shape = (2, 2, 3, 4) + self._make_cube("tgt", tgt_shape) + self.resolve.mapping = {0: 1, 1: 2, 2: 3} + emsg = "Cannot resolve cubes" + with self.assertRaisesRegex(ValueError, emsg): + self.resolve._as_compatible_cubes() + + def test_incompatible_shapes__fail_broadcast(self): + # key: (state) c=common, f=free + # + # tgt: <- src: + # 
dims 0 1 2 3 dims 0 1 2 + # shape 2 4 3 2 shape 2 3 5 + # state f c c c state c c c + # fail ^ fail ^ + # + # src-to-tgt mapping: + # 0->3, 1->2, 2->1 + src_shape = (2, 3, 5) + self._make_cube("src", src_shape) + tgt_shape = (2, 4, 3, 2) + self._make_cube("tgt", tgt_shape) + self.resolve.mapping = {0: 3, 1: 2, 2: 1} + emsg = "Cannot resolve cubes" + with self.assertRaisesRegex(ValueError, emsg): + self.resolve._as_compatible_cubes() + + def _check_compatible(self, broadcast_shape): + self.assertEqual( + self.resolve.lhs_cube, self.resolve._tgt_cube_resolved + ) + self.assertEqual(self.cube, self.resolve._src_cube_resolved) + self.assertEqual(broadcast_shape, self.resolve._broadcast_shape) + self.assertEqual(1, self.mocker.call_count) + self.assertEqual(self.args["metadata"], self.cube.metadata) + self.assertEqual(2, self.resolve.rhs_cube.coord_dims.call_count) + self.assertEqual( + [mock.call(self.dim_coord), mock.call(self.aux_coord)], + self.resolve.rhs_cube.coord_dims.call_args_list, + ) + self.assertEqual(1, self.cube.add_dim_coord.call_count) + self.assertEqual( + [mock.call(self.dim_coord, [self.resolve.mapping[0]])], + self.cube.add_dim_coord.call_args_list, + ) + self.assertEqual(1, self.cube.add_aux_coord.call_count) + self.assertEqual( + [mock.call(self.aux_coord, [self.resolve.mapping[2]])], + self.cube.add_aux_coord.call_args_list, + ) + self.assertEqual(1, self.cube.add_aux_factory.call_count) + self.assertEqual( + [mock.call(self.aux_factory)], + self.cube.add_aux_factory.call_args_list, + ) + + def test_compatible(self): + # key: (state) c=common, f=free + # (coord) a=aux, d=dim + # + # tgt: <- src: + # dims 0 1 2 dims 0 1 2 + # shape 4 3 2 shape 4 3 2 + # state c c c state c c c + # coord d a + # + # src-to-tgt mapping: + # 0->0, 1->1, 2->2 + src_shape = (4, 3, 2) + self._make_cube("src", src_shape) + tgt_shape = (4, 3, 2) + self._make_cube("tgt", tgt_shape) + mapping = {0: 0, 1: 1, 2: 2} + self.resolve.mapping = mapping + 
self.resolve._as_compatible_cubes() + self._check_compatible(broadcast_shape=tgt_shape) + self.assertEqual([mock.call(self.data)], self.mocker.call_args_list) + + def test_compatible__transpose(self): + # key: (state) c=common, f=free + # (coord) a=aux, d=dim + # + # tgt: <- src: + # dims 0 1 2 dims 0 1 2 + # shape 4 3 2 shape 2 3 4 + # state c c c state c c c + # coord d a + # + # src-to-tgt mapping: + # 0->2, 1->1, 2->0 + src_shape = (2, 3, 4) + self._make_cube("src", src_shape, transpose_shape=(4, 3, 2)) + tgt_shape = (4, 3, 2) + self._make_cube("tgt", tgt_shape) + mapping = {0: 2, 1: 1, 2: 0} + self.resolve.mapping = mapping + self.resolve._as_compatible_cubes() + self._check_compatible(broadcast_shape=tgt_shape) + self.assertEqual(1, self.data.transpose.call_count) + self.assertEqual( + [mock.call([2, 1, 0])], self.data.transpose.call_args_list + ) + self.assertEqual( + [mock.call(self.transpose)], self.mocker.call_args_list + ) + + def test_compatible__reshape(self): + # key: (state) c=common, f=free + # (coord) a=aux, d=dim + # + # tgt: <- src: + # dims 0 1 2 3 dims 0 1 2 + # shape 5 4 3 2 shape 4 3 2 + # state f c c c state c c c + # coord d a + # + # src-to-tgt mapping: + # 0->1, 1->2, 2->3 + src_shape = (4, 3, 2) + self._make_cube("src", src_shape) + tgt_shape = (5, 4, 3, 2) + self._make_cube("tgt", tgt_shape) + mapping = {0: 1, 1: 2, 2: 3} + self.resolve.mapping = mapping + self.resolve._as_compatible_cubes() + self._check_compatible(broadcast_shape=tgt_shape) + self.assertEqual(1, self.data.reshape.call_count) + self.assertEqual( + [mock.call((1,) + src_shape)], self.data.reshape.call_args_list + ) + self.assertEqual([mock.call(self.reshape)], self.mocker.call_args_list) + + def test_compatible__transpose_reshape(self): + # key: (state) c=common, f=free + # (coord) a=aux, d=dim + # + # tgt: <- src: + # dims 0 1 2 3 dims 0 1 2 + # shape 5 4 3 2 shape 2 3 4 + # state f c c c state c c c + # coord d a + # + # src-to-tgt mapping: + # 0->3, 1->2, 2->1 + 
src_shape = (2, 3, 4) + transpose_shape = (4, 3, 2) + self._make_cube("src", src_shape, transpose_shape=transpose_shape) + tgt_shape = (5, 4, 3, 2) + self._make_cube("tgt", tgt_shape) + mapping = {0: 3, 1: 2, 2: 1} + self.resolve.mapping = mapping + self.resolve._as_compatible_cubes() + self._check_compatible(broadcast_shape=tgt_shape) + self.assertEqual(1, self.data.transpose.call_count) + self.assertEqual( + [mock.call([2, 1, 0])], self.data.transpose.call_args_list + ) + self.assertEqual(1, self.data.reshape.call_count) + self.assertEqual( + [mock.call((1,) + transpose_shape)], + self.data.reshape.call_args_list, + ) + self.assertEqual([mock.call(self.reshape)], self.mocker.call_args_list) + + def test_compatible__broadcast(self): + # key: (state) c=common, f=free + # (coord) a=aux, d=dim + # + # tgt: <- src: + # dims 0 1 2 dims 0 1 2 + # shape 1 3 2 shape 4 1 2 + # state c c c state c c c + # coord d a + # bcast ^ bcast ^ + # + # src-to-tgt mapping: + # 0->0, 1->1, 2->2 + src_shape = (4, 1, 2) + self._make_cube("src", src_shape) + tgt_shape = (1, 3, 2) + self._make_cube("tgt", tgt_shape) + mapping = {0: 0, 1: 1, 2: 2} + self.resolve.mapping = mapping + self.resolve._as_compatible_cubes() + self._check_compatible(broadcast_shape=(4, 3, 2)) + self.assertEqual([mock.call(self.data)], self.mocker.call_args_list) + + def test_compatible__broadcast_transpose_reshape(self): + # key: (state) c=common, f=free + # (coord) a=aux, d=dim + # + # tgt: <- src: + # dims 0 1 2 3 dims 0 1 2 + # shape 5 1 3 2 shape 2 1 4 + # state f c c c state c c c + # coord d a + # bcast ^ bcast ^ + # + # src-to-tgt mapping: + # 0->3, 1->2, 2->1 + src_shape = (2, 1, 4) + transpose_shape = (4, 1, 2) + self._make_cube("src", src_shape) + tgt_shape = (5, 1, 3, 2) + self._make_cube("tgt", tgt_shape) + mapping = {0: 3, 1: 2, 2: 1} + self.resolve.mapping = mapping + self.resolve._as_compatible_cubes() + self._check_compatible(broadcast_shape=(5, 4, 3, 2)) + self.assertEqual(1, 
self.data.transpose.call_count) + self.assertEqual( + [mock.call([2, 1, 0])], self.data.transpose.call_args_list + ) + self.assertEqual(1, self.data.reshape.call_count) + self.assertEqual( + [mock.call((1,) + transpose_shape)], + self.data.reshape.call_args_list, + ) + self.assertEqual([mock.call(self.reshape)], self.mocker.call_args_list) + + +class Test__metadata_mapping(tests.IrisTest): + def setUp(self): + self.ndim = sentinel.ndim + self.src_cube = mock.Mock(ndim=self.ndim) + self.src_dim_coverage = mock.Mock(dims_free=[]) + self.src_aux_coverage = mock.Mock(dims_free=[]) + self.tgt_cube = mock.Mock(ndim=self.ndim) + self.tgt_dim_coverage = mock.Mock(dims_free=[]) + self.tgt_aux_coverage = mock.Mock(dims_free=[]) + self.resolve = Resolve() + self.map_rhs_to_lhs = True + self.resolve.map_rhs_to_lhs = self.map_rhs_to_lhs + self.resolve.rhs_cube = self.src_cube + self.resolve.rhs_cube_dim_coverage = self.src_dim_coverage + self.resolve.rhs_cube_aux_coverage = self.src_aux_coverage + self.resolve.lhs_cube = self.tgt_cube + self.resolve.lhs_cube_dim_coverage = self.tgt_dim_coverage + self.resolve.lhs_cube_aux_coverage = self.tgt_aux_coverage + self.resolve.mapping = {} + self.shape = sentinel.shape + self.resolve._broadcast_shape = self.shape + self.resolve._src_cube_resolved = mock.Mock(shape=self.shape) + self.resolve._tgt_cube_resolved = mock.Mock(shape=self.shape) + self.m_dim_mapping = self.patch( + "iris.common.resolve.Resolve._dim_mapping", return_value={} + ) + self.m_aux_mapping = self.patch( + "iris.common.resolve.Resolve._aux_mapping", return_value={} + ) + self.m_free_mapping = self.patch( + "iris.common.resolve.Resolve._free_mapping" + ) + self.m_as_compatible_cubes = self.patch( + "iris.common.resolve.Resolve._as_compatible_cubes" + ) + self.mapping = {0: 1, 1: 2, 2: 3} + + def test_mapped__dim_coords(self): + # key: (state) c=common, f=free + # (coord) a=aux, d=dim + # + # tgt: <- src: + # dims 0 1 2 3 dims 0 1 2 + # shape 5 4 3 2 shape 4 3 2 + # 
state f c c c state c c c + # coord d d d coord d d d + # + # src-to-tgt mapping: + # 0->1, 1->2, 2->3 + self.src_cube.ndim = 3 + self.m_dim_mapping.return_value = self.mapping + self.resolve._metadata_mapping() + self.assertEqual(self.mapping, self.resolve.mapping) + self.assertEqual(1, self.m_dim_mapping.call_count) + expected = [mock.call(self.src_dim_coverage, self.tgt_dim_coverage)] + self.assertEqual(expected, self.m_dim_mapping.call_args_list) + self.assertEqual(0, self.m_aux_mapping.call_count) + self.assertEqual(0, self.m_free_mapping.call_count) + self.assertEqual(1, self.m_as_compatible_cubes.call_count) + + def test_mapped__aux_coords(self): + # key: (state) c=common, f=free + # (coord) a=aux, d=dim + # + # tgt: <- src: + # dims 0 1 2 3 dims 0 1 2 + # shape 5 4 3 2 shape 4 3 2 + # state f c c c state c c c + # coord a a a coord a a a + # + # src-to-tgt mapping: + # 0->1, 1->2, 2->3 + self.src_cube.ndim = 3 + self.m_aux_mapping.return_value = self.mapping + self.resolve._metadata_mapping() + self.assertEqual(self.mapping, self.resolve.mapping) + self.assertEqual(1, self.m_dim_mapping.call_count) + expected = [mock.call(self.src_dim_coverage, self.tgt_dim_coverage)] + self.assertEqual(expected, self.m_dim_mapping.call_args_list) + self.assertEqual(1, self.m_aux_mapping.call_count) + expected = [mock.call(self.src_aux_coverage, self.tgt_aux_coverage)] + self.assertEqual(expected, self.m_aux_mapping.call_args_list) + self.assertEqual(0, self.m_free_mapping.call_count) + self.assertEqual(1, self.m_as_compatible_cubes.call_count) + + def test_mapped__dim_and_aux_coords(self): + # key: (state) c=common, f=free + # (coord) a=aux, d=dim + # + # tgt: <- src: + # dims 0 1 2 3 dims 0 1 2 + # shape 5 4 3 2 shape 4 3 2 + # state f c c c state c c c + # coord d a d coord d a d + # + # src-to-tgt mapping: + # 0->1, 1->2, 2->3 + dim_mapping = {0: 1, 2: 3} + aux_mapping = {1: 2} + self.src_cube.ndim = 3 + self.m_dim_mapping.return_value = dim_mapping + 
self.m_aux_mapping.return_value = aux_mapping + self.resolve._metadata_mapping() + self.assertEqual(self.mapping, self.resolve.mapping) + self.assertEqual(1, self.m_dim_mapping.call_count) + expected = [mock.call(self.src_dim_coverage, self.tgt_dim_coverage)] + self.assertEqual(expected, self.m_dim_mapping.call_args_list) + self.assertEqual(1, self.m_aux_mapping.call_count) + expected = [mock.call(self.src_aux_coverage, self.tgt_aux_coverage)] + self.assertEqual(expected, self.m_aux_mapping.call_args_list) + self.assertEqual(0, self.m_free_mapping.call_count) + self.assertEqual(1, self.m_as_compatible_cubes.call_count) + + def test_mapped__dim_coords_and_free_dims(self): + # key: (state) c=common, f=free, l=local + # (coord) a=aux, d=dim + # + # tgt: <- src: + # dims 0 1 2 3 dims 0 1 2 + # shape 5 4 3 2 shape 4 3 2 + # state l f c c state f c c + # coord d d d coord d d + # + # src-to-tgt mapping: + # 0->1, 1->2, 2->3 + dim_mapping = {1: 2, 2: 3} + free_mapping = {0: 1} + self.src_cube.ndim = 3 + self.m_dim_mapping.return_value = dim_mapping + side_effect = lambda a, b, c, d: self.resolve.mapping.update( + free_mapping + ) + self.m_free_mapping.side_effect = side_effect + self.resolve._metadata_mapping() + self.assertEqual(self.mapping, self.resolve.mapping) + self.assertEqual(1, self.m_dim_mapping.call_count) + expected = [mock.call(self.src_dim_coverage, self.tgt_dim_coverage)] + self.assertEqual(expected, self.m_dim_mapping.call_args_list) + self.assertEqual(1, self.m_aux_mapping.call_count) + expected = [mock.call(self.src_aux_coverage, self.tgt_aux_coverage)] + self.assertEqual(expected, self.m_aux_mapping.call_args_list) + self.assertEqual(1, self.m_free_mapping.call_count) + expected = [ + mock.call( + self.src_dim_coverage, + self.tgt_dim_coverage, + self.src_aux_coverage, + self.tgt_aux_coverage, + ) + ] + self.assertEqual(expected, self.m_free_mapping.call_args_list) + self.assertEqual(1, self.m_as_compatible_cubes.call_count) + + def 
test_mapped__dim_coords_with_broadcast_flip(self): + # key: (state) c=common, f=free + # (coord) a=aux, d=dim + # + # tgt: <- src: + # dims 0 1 2 4 dims 0 1 2 4 + # shape 1 4 3 2 shape 5 4 3 2 + # state c c c c state c c c c + # coord d d d d coord d d d d + # + # src-to-tgt mapping: + # 0->0, 1->1, 2->2, 3->3 + mapping = {0: 0, 1: 1, 2: 2, 3: 3} + self.src_cube.ndim = 4 + self.tgt_cube.ndim = 4 + self.m_dim_mapping.return_value = mapping + broadcast_shape = (5, 4, 3, 2) + self.resolve._broadcast_shape = broadcast_shape + self.resolve._src_cube_resolved.shape = broadcast_shape + self.resolve._tgt_cube_resolved.shape = (1, 4, 3, 2) + self.resolve._metadata_mapping() + self.assertEqual(mapping, self.resolve.mapping) + self.assertEqual(1, self.m_dim_mapping.call_count) + expected = [mock.call(self.src_dim_coverage, self.tgt_dim_coverage)] + self.assertEqual(expected, self.m_dim_mapping.call_args_list) + self.assertEqual(0, self.m_aux_mapping.call_count) + self.assertEqual(0, self.m_free_mapping.call_count) + self.assertEqual(2, self.m_as_compatible_cubes.call_count) + self.assertEqual(not self.map_rhs_to_lhs, self.resolve.map_rhs_to_lhs) + + def test_mapped__dim_coords_free_flip_with_free_flip(self): + # key: (state) c=common, f=free, l=local + # (coord) a=aux, d=dim + # + # tgt: <- src: + # dims 0 1 2 dims 0 1 2 + # shape 4 3 2 shape 4 3 2 + # state f f c state l l c + # coord d coord d d d + # + # src-to-tgt mapping: + # 0->0, 1->1, 2->2 + dim_mapping = {2: 2} + free_mapping = {0: 0, 1: 1} + mapping = {0: 0, 1: 1, 2: 2} + self.src_cube.ndim = 3 + self.tgt_cube.ndim = 3 + self.m_dim_mapping.return_value = dim_mapping + side_effect = lambda a, b, c, d: self.resolve.mapping.update( + free_mapping + ) + self.m_free_mapping.side_effect = side_effect + self.tgt_dim_coverage.dims_free = [0, 1] + self.tgt_aux_coverage.dims_free = [0, 1] + self.resolve._metadata_mapping() + self.assertEqual(mapping, self.resolve.mapping) + self.assertEqual(1, self.m_dim_mapping.call_count) + 
expected = [mock.call(self.src_dim_coverage, self.tgt_dim_coverage)] + self.assertEqual(expected, self.m_dim_mapping.call_args_list) + self.assertEqual(1, self.m_aux_mapping.call_count) + expected = [mock.call(self.src_aux_coverage, self.tgt_aux_coverage)] + self.assertEqual(expected, self.m_aux_mapping.call_args_list) + self.assertEqual(1, self.m_free_mapping.call_count) + expected = [ + mock.call( + self.src_dim_coverage, + self.tgt_dim_coverage, + self.src_aux_coverage, + self.tgt_aux_coverage, + ) + ] + self.assertEqual(expected, self.m_free_mapping.call_args_list) + self.assertEqual(2, self.m_as_compatible_cubes.call_count) + + +class Test__prepare_common_dim_payload(tests.IrisTest): + def setUp(self): + # key: (state) c=common, f=free + # (coord) a=aux, d=dim + # + # tgt: <- src: + # dims 0 1 2 3 dims 0 1 2 + # shape 5 4 3 2 shape 4 3 2 + # state l c c c state c c c + # coord d d d coord d d d + # + # src-to-tgt mapping: + # 0->1, 1->2, 2->3 + self.points = (sentinel.points_0, sentinel.points_1, sentinel.points_2) + self.bounds = (sentinel.bounds_0, sentinel.bounds_1, sentinel.bounds_2) + self.pb_0 = ( + mock.Mock(copy=mock.Mock(return_value=self.points[0])), + mock.Mock(copy=mock.Mock(return_value=self.bounds[0])), + ) + self.pb_1 = ( + mock.Mock(copy=mock.Mock(return_value=self.points[1])), + None, + ) + self.pb_2 = ( + mock.Mock(copy=mock.Mock(return_value=self.points[2])), + mock.Mock(copy=mock.Mock(return_value=self.bounds[2])), + ) + side_effect = (self.pb_0, self.pb_1, self.pb_2) + self.m_prepare_points_and_bounds = self.patch( + "iris.common.resolve.Resolve._prepare_points_and_bounds", + side_effect=side_effect, + ) + self.resolve = Resolve() + self.resolve.prepared_category = _CategoryItems( + items_dim=[], items_aux=[], items_scalar=[] + ) + self.mapping = {0: 1, 1: 2, 2: 3} + self.resolve.mapping = self.mapping + self.metadata_combined = ( + sentinel.combined_0, + sentinel.combined_1, + sentinel.combined_2, + ) + self.src_metadata = mock.Mock( + 
combine=mock.Mock(side_effect=self.metadata_combined) + ) + metadata = [self.src_metadata] * len(self.mapping) + self.src_coords = [ + sentinel.src_coord_0, + sentinel.src_coord_1, + sentinel.src_coord_2, + ] + self.src_dims_common = [0, 1, 2] + self.container = DimCoord + self.src_dim_coverage = _DimCoverage( + cube=None, + metadata=metadata, + coords=self.src_coords, + dims_common=self.src_dims_common, + dims_local=[], + dims_free=[], + ) + self.tgt_metadata = [ + sentinel.tgt_metadata_0, + sentinel.tgt_metadata_1, + sentinel.tgt_metadata_2, + sentinel.tgt_metadata_3, + ] + self.tgt_coords = [ + sentinel.tgt_coord_0, + sentinel.tgt_coord_1, + sentinel.tgt_coord_2, + sentinel.tgt_coord_3, + ] + self.tgt_dims_common = [1, 2, 3] + self.tgt_dim_coverage = _DimCoverage( + cube=None, + metadata=self.tgt_metadata, + coords=self.tgt_coords, + dims_common=self.tgt_dims_common, + dims_local=[], + dims_free=[], + ) + + def _check(self, ignore_mismatch=None, bad_points=None): + if bad_points is None: + bad_points = False + self.resolve._prepare_common_dim_payload( + self.src_dim_coverage, + self.tgt_dim_coverage, + ignore_mismatch=ignore_mismatch, + ) + self.assertEqual(0, len(self.resolve.prepared_category.items_aux)) + self.assertEqual(0, len(self.resolve.prepared_category.items_scalar)) + if not bad_points: + self.assertEqual(3, len(self.resolve.prepared_category.items_dim)) + expected = [ + _PreparedItem( + metadata=_PreparedMetadata( + combined=self.metadata_combined[0], + src=self.src_metadata, + tgt=self.tgt_metadata[self.mapping[0]], + ), + points=self.points[0], + bounds=self.bounds[0], + dims=(self.mapping[0],), + container=self.container, + ), + _PreparedItem( + metadata=_PreparedMetadata( + combined=self.metadata_combined[1], + src=self.src_metadata, + tgt=self.tgt_metadata[self.mapping[1]], + ), + points=self.points[1], + bounds=None, + dims=(self.mapping[1],), + container=self.container, + ), + _PreparedItem( + metadata=_PreparedMetadata( + 
combined=self.metadata_combined[2], + src=self.src_metadata, + tgt=self.tgt_metadata[self.mapping[2]], + ), + points=self.points[2], + bounds=self.bounds[2], + dims=(self.mapping[2],), + container=self.container, + ), + ] + self.assertEqual( + expected, self.resolve.prepared_category.items_dim + ) + else: + self.assertEqual(0, len(self.resolve.prepared_category.items_dim)) + self.assertEqual(3, self.m_prepare_points_and_bounds.call_count) + if ignore_mismatch is None: + ignore_mismatch = False + expected = [ + mock.call( + self.src_coords[0], + self.tgt_coords[self.mapping[0]], + 0, + 1, + ignore_mismatch=ignore_mismatch, + ), + mock.call( + self.src_coords[1], + self.tgt_coords[self.mapping[1]], + 1, + 2, + ignore_mismatch=ignore_mismatch, + ), + mock.call( + self.src_coords[2], + self.tgt_coords[self.mapping[2]], + 2, + 3, + ignore_mismatch=ignore_mismatch, + ), + ] + self.assertEqual( + expected, self.m_prepare_points_and_bounds.call_args_list + ) + if not bad_points: + self.assertEqual(3, self.src_metadata.combine.call_count) + expected = [ + mock.call(metadata) for metadata in self.tgt_metadata[1:] + ] + self.assertEqual( + expected, self.src_metadata.combine.call_args_list + ) + + def test__default_ignore_mismatch(self): + self._check() + + def test__not_ignore_mismatch(self): + self._check(ignore_mismatch=False) + + def test__ignore_mismatch(self): + self._check(ignore_mismatch=True) + + def test__bad_points(self): + side_effect = [(None, None)] * len(self.mapping) + self.m_prepare_points_and_bounds.side_effect = side_effect + self._check(bad_points=True) + + +class Test__prepare_common_aux_payload(tests.IrisTest): + def setUp(self): + # key: (state) c=common, f=free + # (coord) a=aux, d=dim + # + # tgt: <- src: + # dims 0 1 2 3 dims 0 1 2 + # shape 5 4 3 2 shape 4 3 2 + # state l c c c state c c c + # coord a a a coord a a a + # + # src-to-tgt mapping: + # 0->1, 1->2, 2->3 + self.points = (sentinel.points_0, sentinel.points_1, sentinel.points_2) + 
self.bounds = (sentinel.bounds_0, sentinel.bounds_1, sentinel.bounds_2) + self.pb_0 = ( + mock.Mock(copy=mock.Mock(return_value=self.points[0])), + mock.Mock(copy=mock.Mock(return_value=self.bounds[0])), + ) + self.pb_1 = ( + mock.Mock(copy=mock.Mock(return_value=self.points[1])), + None, + ) + self.pb_2 = ( + mock.Mock(copy=mock.Mock(return_value=self.points[2])), + mock.Mock(copy=mock.Mock(return_value=self.bounds[2])), + ) + side_effect = (self.pb_0, self.pb_1, self.pb_2) + self.m_prepare_points_and_bounds = self.patch( + "iris.common.resolve.Resolve._prepare_points_and_bounds", + side_effect=side_effect, + ) + self.resolve = Resolve() + self.resolve.prepared_category = _CategoryItems( + items_dim=[], items_aux=[], items_scalar=[] + ) + self.mapping = {0: 1, 1: 2, 2: 3} + self.resolve.mapping = self.mapping + self.resolve.map_rhs_to_lhs = True + self.metadata_combined = ( + sentinel.combined_0, + sentinel.combined_1, + sentinel.combined_2, + ) + self.src_metadata = [ + mock.Mock( + combine=mock.Mock(return_value=self.metadata_combined[0]) + ), + mock.Mock( + combine=mock.Mock(return_value=self.metadata_combined[1]) + ), + mock.Mock( + combine=mock.Mock(return_value=self.metadata_combined[2]) + ), + ] + self.src_coords = [ + sentinel.src_coord_0, + sentinel.src_coord_1, + sentinel.src_coord_2, + ] + self.src_dims = [(dim,) for dim in self.mapping.keys()] + self.src_common_items = [ + _Item(*item) + for item in zip(self.src_metadata, self.src_coords, self.src_dims) + ] + self.tgt_metadata = [sentinel.tgt_metadata_0] + self.src_metadata + self.tgt_coords = [ + sentinel.tgt_coord_0, + sentinel.tgt_coord_1, + sentinel.tgt_coord_2, + sentinel.tgt_coord_3, + ] + self.tgt_dims = [None] + [(dim,) for dim in self.mapping.values()] + self.tgt_common_items = [ + _Item(*item) + for item in zip(self.tgt_metadata, self.tgt_coords, self.tgt_dims) + ] + self.container = type(self.src_coords[0]) + + def _check(self, ignore_mismatch=None, bad_points=None): + if bad_points is None: 
+ bad_points = False + prepared_items = [] + self.resolve._prepare_common_aux_payload( + self.src_common_items, + self.tgt_common_items, + prepared_items, + ignore_mismatch=ignore_mismatch, + ) + if not bad_points: + self.assertEqual(3, len(prepared_items)) + expected = [ + _PreparedItem( + metadata=_PreparedMetadata( + combined=self.metadata_combined[0], + src=self.src_metadata[0], + tgt=self.tgt_metadata[self.mapping[0]], + ), + points=self.points[0], + bounds=self.bounds[0], + dims=self.tgt_dims[self.mapping[0]], + container=self.container, + ), + _PreparedItem( + metadata=_PreparedMetadata( + combined=self.metadata_combined[1], + src=self.src_metadata[1], + tgt=self.tgt_metadata[self.mapping[1]], + ), + points=self.points[1], + bounds=None, + dims=self.tgt_dims[self.mapping[1]], + container=self.container, + ), + _PreparedItem( + metadata=_PreparedMetadata( + combined=self.metadata_combined[2], + src=self.src_metadata[2], + tgt=self.tgt_metadata[self.mapping[2]], + ), + points=self.points[2], + bounds=self.bounds[2], + dims=self.tgt_dims[self.mapping[2]], + container=self.container, + ), + ] + self.assertEqual(expected, prepared_items) + else: + self.assertEqual(0, len(prepared_items)) + self.assertEqual(3, self.m_prepare_points_and_bounds.call_count) + if ignore_mismatch is None: + ignore_mismatch = False + expected = [ + mock.call( + self.src_coords[0], + self.tgt_coords[self.mapping[0]], + self.src_dims[0], + self.tgt_dims[self.mapping[0]], + ignore_mismatch=ignore_mismatch, + ), + mock.call( + self.src_coords[1], + self.tgt_coords[self.mapping[1]], + self.src_dims[1], + self.tgt_dims[self.mapping[1]], + ignore_mismatch=ignore_mismatch, + ), + mock.call( + self.src_coords[2], + self.tgt_coords[self.mapping[2]], + self.src_dims[2], + self.tgt_dims[self.mapping[2]], + ignore_mismatch=ignore_mismatch, + ), + ] + self.assertEqual( + expected, self.m_prepare_points_and_bounds.call_args_list + ) + if not bad_points: + for src_metadata, tgt_metadata in zip( + 
self.src_metadata, self.tgt_metadata[1:] + ): + self.assertEqual(1, src_metadata.combine.call_count) + expected = [mock.call(tgt_metadata)] + self.assertEqual(expected, src_metadata.combine.call_args_list) + + def test__default_ignore_mismatch(self): + self._check() + + def test__not_ignore_mismatch(self): + self._check(ignore_mismatch=False) + + def test__ignore_mismatch(self): + self._check(ignore_mismatch=True) + + def test__bad_points(self): + side_effect = [(None, None)] * len(self.mapping) + self.m_prepare_points_and_bounds.side_effect = side_effect + self._check(bad_points=True) + + def test__no_tgt_metadata_match(self): + item = self.tgt_common_items[0] + tgt_common_items = [item] * len(self.tgt_common_items) + prepared_items = [] + self.resolve._prepare_common_aux_payload( + self.src_common_items, tgt_common_items, prepared_items + ) + self.assertEqual(0, len(prepared_items)) + + def test__multi_tgt_metadata_match(self): + item = self.tgt_common_items[1] + tgt_common_items = [item] * len(self.tgt_common_items) + prepared_items = [] + self.resolve._prepare_common_aux_payload( + self.src_common_items, tgt_common_items, prepared_items + ) + self.assertEqual(0, len(prepared_items)) + + +class Test__prepare_points_and_bounds(tests.IrisTest): + def setUp(self): + self.Coord = namedtuple( + "Coord", + [ + "name", + "points", + "bounds", + "metadata", + "ndim", + "shape", + "has_bounds", + ], + ) + self.Cube = namedtuple("Cube", ["name", "shape"]) + self.resolve = Resolve() + self.resolve.map_rhs_to_lhs = True + self.src_name = sentinel.src_name + self.src_points = sentinel.src_points + self.src_bounds = sentinel.src_bounds + self.src_metadata = sentinel.src_metadata + self.src_items = dict( + name=lambda: self.src_name, + points=self.src_points, + bounds=self.src_bounds, + metadata=self.src_metadata, + ndim=None, + shape=None, + has_bounds=None, + ) + self.tgt_name = sentinel.tgt_name + self.tgt_points = sentinel.tgt_points + self.tgt_bounds = sentinel.tgt_bounds 
+ self.tgt_metadata = sentinel.tgt_metadata + self.tgt_items = dict( + name=lambda: self.tgt_name, + points=self.tgt_points, + bounds=self.tgt_bounds, + metadata=self.tgt_metadata, + ndim=None, + shape=None, + has_bounds=None, + ) + self.m_array_equal = self.patch( + "iris.util.array_equal", side_effect=(True, True) + ) + + def test_coord_ndim_unequal__tgt_ndim_greater(self): + self.src_items["ndim"] = 1 + src_coord = self.Coord(**self.src_items) + self.tgt_items["ndim"] = 10 + tgt_coord = self.Coord(**self.tgt_items) + points, bounds = self.resolve._prepare_points_and_bounds( + src_coord, tgt_coord, src_dims=None, tgt_dims=None + ) + self.assertEqual(self.tgt_points, points) + self.assertEqual(self.tgt_bounds, bounds) + + def test_coord_ndim_unequal__src_ndim_greater(self): + self.src_items["ndim"] = 10 + src_coord = self.Coord(**self.src_items) + self.tgt_items["ndim"] = 1 + tgt_coord = self.Coord(**self.tgt_items) + points, bounds = self.resolve._prepare_points_and_bounds( + src_coord, tgt_coord, src_dims=None, tgt_dims=None + ) + self.assertEqual(self.src_points, points) + self.assertEqual(self.src_bounds, bounds) + + def test_coord_ndim_equal__shape_unequal_with_src_broadcasting(self): + # key: (state) c=common, f=free + # (coord) x=coord + # + # tgt: <- src: + # dims 0 1 dims 0 1 + # shape 9 9 shape 1 9 + # state c c state c c + # coord x-x coord x-x + # bcast ^ + # + # src-to-tgt mapping: + # 0->0, 1->1 + mapping = {0: 0, 1: 1} + broadcast_shape = (9, 9) + ndim = len(broadcast_shape) + self.resolve.mapping = mapping + self.resolve._broadcast_shape = broadcast_shape + src_shape = (1, 9) + src_dims = tuple(mapping.keys()) + self.resolve.rhs_cube = self.Cube(name=None, shape=src_shape) + self.src_items["ndim"] = ndim + self.src_items["shape"] = src_shape + src_coord = self.Coord(**self.src_items) + tgt_shape = broadcast_shape + tgt_dims = tuple(mapping.values()) + self.resolve.lhs_cube = self.Cube(name=None, shape=tgt_shape) + self.tgt_items["ndim"] = ndim + 
self.tgt_items["shape"] = tgt_shape + tgt_coord = self.Coord(**self.tgt_items) + points, bounds = self.resolve._prepare_points_and_bounds( + src_coord, tgt_coord, src_dims, tgt_dims + ) + self.assertEqual(self.tgt_points, points) + self.assertEqual(self.tgt_bounds, bounds) + + def test_coord_ndim_equal__shape_unequal_with_tgt_broadcasting(self): + # key: (state) c=common, f=free + # (coord) x=coord + # + # tgt: <- src: + # dims 0 1 dims 0 1 + # shape 1 9 shape 9 9 + # state c c state c c + # coord x-x coord x-x + # bcast ^ + # + # src-to-tgt mapping: + # 0->0, 1->1 + mapping = {0: 0, 1: 1} + broadcast_shape = (9, 9) + ndim = len(broadcast_shape) + self.resolve.mapping = mapping + self.resolve._broadcast_shape = broadcast_shape + src_shape = broadcast_shape + src_dims = tuple(mapping.keys()) + self.resolve.rhs_cube = self.Cube(name=None, shape=src_shape) + self.src_items["ndim"] = ndim + self.src_items["shape"] = src_shape + src_coord = self.Coord(**self.src_items) + tgt_shape = (1, 9) + tgt_dims = tuple(mapping.values()) + self.resolve.lhs_cube = self.Cube(name=None, shape=tgt_shape) + self.tgt_items["ndim"] = ndim + self.tgt_items["shape"] = tgt_shape + tgt_coord = self.Coord(**self.tgt_items) + points, bounds = self.resolve._prepare_points_and_bounds( + src_coord, tgt_coord, src_dims, tgt_dims + ) + self.assertEqual(self.src_points, points) + self.assertEqual(self.src_bounds, bounds) + + def test_coord_ndim_equal__shape_unequal_with_unsupported_broadcasting( + self, + ): + # key: (state) c=common, f=free + # (coord) x=coord + # + # tgt: <- src: + # dims 0 1 dims 0 1 + # shape 1 9 shape 9 1 + # state c c state c c + # coord x-x coord x-x + # bcast ^ bcast ^ + # + # src-to-tgt mapping: + # 0->0, 1->1 + mapping = {0: 0, 1: 1} + broadcast_shape = (9, 9) + ndim = len(broadcast_shape) + self.resolve.mapping = mapping + self.resolve._broadcast_shape = broadcast_shape + src_shape = (9, 1) + src_dims = tuple(mapping.keys()) + self.resolve.rhs_cube = self.Cube( + 
name=lambda: sentinel.src_cube, shape=src_shape + ) + self.src_items["ndim"] = ndim + self.src_items["shape"] = src_shape + src_coord = self.Coord(**self.src_items) + tgt_shape = (1, 9) + tgt_dims = tuple(mapping.values()) + self.resolve.lhs_cube = self.Cube( + name=lambda: sentinel.tgt_cube, shape=tgt_shape + ) + self.tgt_items["ndim"] = ndim + self.tgt_items["shape"] = tgt_shape + tgt_coord = self.Coord(**self.tgt_items) + emsg = "Cannot broadcast" + with self.assertRaisesRegex(ValueError, emsg): + _ = self.resolve._prepare_points_and_bounds( + src_coord, tgt_coord, src_dims, tgt_dims + ) + + def _populate( + self, src_points, tgt_points, src_bounds=None, tgt_bounds=None + ): + # key: (state) c=common, f=free + # (coord) x=coord + # + # tgt: <- src: + # dims 0 1 dims 0 1 + # shape 2 3 shape 2 3 + # state f c state f c + # coord x coord x + # + # src-to-tgt mapping: + # 0->0, 1->1 + shape = (2, 3) + mapping = {0: 0, 1: 1} + self.resolve.mapping = mapping + self.resolve.map_rhs_to_lhs = True + self.resolve.rhs_cube = self.Cube( + name=lambda: sentinel.src_cube, shape=None + ) + self.resolve.lhs_cube = self.Cube( + name=lambda: sentinel.tgt_cube, shape=None + ) + ndim = 1 + src_dims = 1 + self.src_items["ndim"] = ndim + self.src_items["shape"] = (shape[src_dims],) + self.src_items["points"] = src_points + self.src_items["bounds"] = src_bounds + self.src_items["has_bounds"] = lambda: src_bounds is not None + src_coord = self.Coord(**self.src_items) + tgt_dims = 1 + self.tgt_items["ndim"] = ndim + self.tgt_items["shape"] = (shape[mapping[tgt_dims]],) + self.tgt_items["points"] = tgt_points + self.tgt_items["bounds"] = tgt_bounds + self.tgt_items["has_bounds"] = lambda: tgt_bounds is not None + tgt_coord = self.Coord(**self.tgt_items) + args = dict( + src_coord=src_coord, + tgt_coord=tgt_coord, + src_dims=src_dims, + tgt_dims=tgt_dims, + ) + return args + + def test_coord_ndim_and_shape_equal__points_equal_with_no_bounds(self): + args = self._populate(self.src_points, 
self.src_points) + points, bounds = self.resolve._prepare_points_and_bounds(**args) + self.assertEqual(self.src_points, points) + self.assertIsNone(bounds) + self.assertEqual(1, self.m_array_equal.call_count) + expected = [mock.call(self.src_points, self.src_points, withnans=True)] + self.assertEqual(expected, self.m_array_equal.call_args_list) + + def test_coord_ndim_and_shape_equal__points_equal_with_src_bounds_only( + self, + ): + args = self._populate( + self.src_points, self.src_points, src_bounds=self.src_bounds + ) + points, bounds = self.resolve._prepare_points_and_bounds(**args) + self.assertEqual(self.src_points, points) + self.assertEqual(self.src_bounds, bounds) + self.assertEqual(1, self.m_array_equal.call_count) + expected = [mock.call(self.src_points, self.src_points, withnans=True)] + self.assertEqual(expected, self.m_array_equal.call_args_list) + + def test_coord_ndim_and_shape_equal__points_equal_with_tgt_bounds_only( + self, + ): + args = self._populate( + self.src_points, self.src_points, tgt_bounds=self.tgt_bounds + ) + points, bounds = self.resolve._prepare_points_and_bounds(**args) + self.assertEqual(self.src_points, points) + self.assertEqual(self.tgt_bounds, bounds) + self.assertEqual(1, self.m_array_equal.call_count) + expected = [mock.call(self.src_points, self.src_points, withnans=True)] + self.assertEqual(expected, self.m_array_equal.call_args_list) + + def test_coord_ndim_and_shape_equal__points_equal_with_src_bounds_only_strict( + self, + ): + args = self._populate( + self.src_points, self.src_points, src_bounds=self.src_bounds + ) + with LENIENT.context(maths=False): + emsg = f"Coordinate {self.src_name} has bounds" + with self.assertRaisesRegex(ValueError, emsg): + _ = self.resolve._prepare_points_and_bounds(**args) + + def test_coord_ndim_and_shape_equal__points_equal_with_tgt_bounds_only_strict( + self, + ): + args = self._populate( + self.src_points, self.src_points, tgt_bounds=self.tgt_bounds + ) + with 
LENIENT.context(maths=False): + emsg = f"Coordinate {self.tgt_name} has bounds" + with self.assertRaisesRegex(ValueError, emsg): + _ = self.resolve._prepare_points_and_bounds(**args) + + def test_coord_ndim_and_shape_equal__points_equal_with_bounds_equal(self): + args = self._populate( + self.src_points, + self.src_points, + src_bounds=self.src_bounds, + tgt_bounds=self.src_bounds, + ) + points, bounds = self.resolve._prepare_points_and_bounds(**args) + self.assertEqual(self.src_points, points) + self.assertEqual(self.src_bounds, bounds) + self.assertEqual(2, self.m_array_equal.call_count) + expected = [ + mock.call(self.src_points, self.src_points, withnans=True), + mock.call(self.src_bounds, self.src_bounds, withnans=True), + ] + self.assertEqual(expected, self.m_array_equal.call_args_list) + + def test_coord_ndim_and_shape_equal__points_equal_with_bounds_different( + self, + ): + self.m_array_equal.side_effect = (True, False) + args = self._populate( + self.src_points, + self.src_points, + src_bounds=self.src_bounds, + tgt_bounds=self.tgt_bounds, + ) + emsg = f"Coordinate {self.src_name} has different bounds" + with self.assertRaisesRegex(ValueError, emsg): + _ = self.resolve._prepare_points_and_bounds(**args) + + def test_coord_ndim_and_shape_equal__points_equal_with_bounds_different_ignore_mismatch( + self, + ): + self.m_array_equal.side_effect = (True, False) + args = self._populate( + self.src_points, + self.src_points, + src_bounds=self.src_bounds, + tgt_bounds=self.tgt_bounds, + ) + points, bounds = self.resolve._prepare_points_and_bounds( + **args, ignore_mismatch=True + ) + self.assertEqual(self.src_points, points) + self.assertIsNone(bounds) + self.assertEqual(2, self.m_array_equal.call_count) + expected = [ + mock.call(self.src_points, self.src_points, withnans=True), + mock.call(self.src_bounds, self.tgt_bounds, withnans=True), + ] + self.assertEqual(expected, self.m_array_equal.call_args_list) + + def 
test_coord_ndim_and_shape_equal__points_equal_with_bounds_different_strict( + self, + ): + self.m_array_equal.side_effect = (True, False) + args = self._populate( + self.src_points, + self.src_points, + src_bounds=self.src_bounds, + tgt_bounds=self.tgt_bounds, + ) + with LENIENT.context(maths=False): + emsg = f"Coordinate {self.src_name} has different bounds" + with self.assertRaisesRegex(ValueError, emsg): + _ = self.resolve._prepare_points_and_bounds(**args) + + def test_coord_ndim_and_shape_equal__points_different(self): + self.m_array_equal.side_effect = (False,) + args = self._populate(self.src_points, self.tgt_points) + emsg = f"Coordinate {self.src_name} has different points" + with self.assertRaisesRegex(ValueError, emsg): + _ = self.resolve._prepare_points_and_bounds(**args) + + def test_coord_ndim_and_shape_equal__points_different_ignore_mismatch( + self, + ): + self.m_array_equal.side_effect = (False,) + args = self._populate(self.src_points, self.tgt_points) + points, bounds = self.resolve._prepare_points_and_bounds( + **args, ignore_mismatch=True + ) + self.assertIsNone(points) + self.assertIsNone(bounds) + + def test_coord_ndim_and_shape_equal__points_different_strict(self): + self.m_array_equal.side_effect = (False,) + args = self._populate(self.src_points, self.tgt_points) + with LENIENT.context(maths=False): + emsg = f"Coordinate {self.src_name} has different points" + with self.assertRaisesRegex(ValueError, emsg): + _ = self.resolve._prepare_points_and_bounds(**args) + + +class Test__create_prepared_item(tests.IrisTest): + def setUp(self): + Coord = namedtuple("Coord", ["points", "bounds"]) + self.points_value = sentinel.points + self.points = mock.Mock(copy=mock.Mock(return_value=self.points_value)) + self.bounds_value = sentinel.bounds + self.bounds = mock.Mock(copy=mock.Mock(return_value=self.bounds_value)) + self.coord = Coord(points=self.points, bounds=self.bounds) + self.container = type(self.coord) + self.combined = sentinel.combined + 
self.src = mock.Mock(combine=mock.Mock(return_value=self.combined)) + self.tgt = sentinel.tgt + + def _check(self, src=None, tgt=None): + dims = 0 + if src is not None and tgt is not None: + combined = self.combined + else: + combined = src or tgt + result = Resolve._create_prepared_item( + self.coord, dims, src_metadata=src, tgt_metadata=tgt + ) + self.assertIsInstance(result, _PreparedItem) + self.assertIsInstance(result.metadata, _PreparedMetadata) + expected = _PreparedMetadata(combined=combined, src=src, tgt=tgt) + self.assertEqual(expected, result.metadata) + self.assertEqual(self.points_value, result.points) + self.assertEqual(1, self.points.copy.call_count) + self.assertEqual([mock.call()], self.points.copy.call_args_list) + self.assertEqual(self.bounds_value, result.bounds) + self.assertEqual(1, self.bounds.copy.call_count) + self.assertEqual([mock.call()], self.bounds.copy.call_args_list) + self.assertEqual((dims,), result.dims) + self.assertEqual(self.container, result.container) + + def test__no_metadata(self): + self._check() + + def test__src_metadata_only(self): + self._check(src=self.src) + + def test__tgt_metadata_only(self): + self._check(tgt=self.tgt) + + def test__combine_metadata(self): + self._check(src=self.src, tgt=self.tgt) + + +class Test__prepare_local_payload_dim(tests.IrisTest): + def setUp(self): + self.Cube = namedtuple("Cube", ["ndim"]) + self.resolve = Resolve() + self.resolve.prepared_category = _CategoryItems( + items_dim=[], items_aux=[], items_scalar=[] + ) + self.resolve.map_rhs_to_lhs = True + self.src_coverage = dict( + cube=None, + metadata=[], + coords=[], + dims_common=None, + dims_local=[], + dims_free=None, + ) + self.tgt_coverage = deepcopy(self.src_coverage) + self.prepared_item = sentinel.prepared_item + self.m_create_prepared_item = self.patch( + "iris.common.resolve.Resolve._create_prepared_item", + return_value=self.prepared_item, + ) + + def test_src_no_local_with_tgt_no_local(self): + # key: (state) c=common, 
f=free, l=local + # (coord) d=dim + # + # tgt: <- src: + # dims 0 1 dims 0 1 + # shape 2 3 shape 2 3 + # state c c state c c + # coord d d coord d d + # + # src-to-tgt mapping: + # 0->0, 1->1 + mapping = {0: 0, 1: 1} + self.resolve.mapping = mapping + src_coverage = _DimCoverage(**self.src_coverage) + self.tgt_coverage["cube"] = self.Cube(ndim=2) + tgt_coverage = _DimCoverage(**self.tgt_coverage) + self.resolve._prepare_local_payload_dim(src_coverage, tgt_coverage) + self.assertEqual(0, len(self.resolve.prepared_category.items_dim)) + + def test_src_no_local_with_tgt_no_local__strict(self): + # key: (state) c=common, f=free, l=local + # (coord) d=dim + # + # tgt: <- src: + # dims 0 1 dims 0 1 + # shape 2 3 shape 2 3 + # state c c state c c + # coord d d coord d d + # + # src-to-tgt mapping: + # 0->0, 1->1 + mapping = {0: 0, 1: 1} + self.resolve.mapping = mapping + src_coverage = _DimCoverage(**self.src_coverage) + self.tgt_coverage["cube"] = self.Cube(ndim=2) + tgt_coverage = _DimCoverage(**self.tgt_coverage) + with LENIENT.context(maths=False): + self.resolve._prepare_local_payload_dim(src_coverage, tgt_coverage) + self.assertEqual(0, len(self.resolve.prepared_category.items_dim)) + + def test_src_local_with_tgt_local(self): + # key: (state) c=common, f=free, l=local + # (coord) d=dim + # + # tgt: <- src: + # dims 0 1 dims 0 1 + # shape 2 3 shape 2 3 + # state c l state c l + # coord d d coord d d + # + # src-to-tgt mapping: + # 0->0, 1->1 + mapping = {0: 0, 1: 1} + self.resolve.mapping = mapping + self.src_coverage["dims_local"] = (1,) + src_coverage = _DimCoverage(**self.src_coverage) + self.tgt_coverage["dims_local"] = (1,) + self.tgt_coverage["cube"] = self.Cube(ndim=2) + tgt_coverage = _DimCoverage(**self.tgt_coverage) + self.resolve._prepare_local_payload_dim(src_coverage, tgt_coverage) + self.assertEqual(0, len(self.resolve.prepared_category.items_dim)) + + def test_src_local_with_tgt_local__strict(self): + # key: (state) c=common, f=free, l=local + # 
(coord) d=dim + # + # tgt: <- src: + # dims 0 1 dims 0 1 + # shape 2 3 shape 2 3 + # state c l state c l + # coord d d coord d d + # + # src-to-tgt mapping: + # 0->0, 1->1 + mapping = {0: 0, 1: 1} + self.resolve.mapping = mapping + self.src_coverage["dims_local"] = (1,) + src_coverage = _DimCoverage(**self.src_coverage) + self.tgt_coverage["dims_local"] = (1,) + self.tgt_coverage["cube"] = self.Cube(ndim=2) + tgt_coverage = _DimCoverage(**self.tgt_coverage) + with LENIENT.context(maths=False): + self.resolve._prepare_local_payload_dim(src_coverage, tgt_coverage) + self.assertEqual(0, len(self.resolve.prepared_category.items_dim)) + + def test_src_local_with_tgt_free(self): + # key: (state) c=common, f=free, l=local + # (coord) d=dim + # + # tgt: <- src: + # dims 0 1 dims 0 1 + # shape 2 3 shape 2 3 + # state c f state c l + # coord d coord d d + # + # src-to-tgt mapping: + # 0->0, 1->1 + mapping = {0: 0, 1: 1} + self.resolve.mapping = mapping + src_dim = 1 + self.src_coverage["dims_local"] = (src_dim,) + src_metadata = sentinel.src_metadata + self.src_coverage["metadata"] = [None, src_metadata] + src_coord = sentinel.src_coord + self.src_coverage["coords"] = [None, src_coord] + src_coverage = _DimCoverage(**self.src_coverage) + self.tgt_coverage["cube"] = self.Cube(ndim=2) + tgt_coverage = _DimCoverage(**self.tgt_coverage) + self.resolve._prepare_local_payload_dim(src_coverage, tgt_coverage) + self.assertEqual(1, len(self.resolve.prepared_category.items_dim)) + self.assertEqual( + self.prepared_item, self.resolve.prepared_category.items_dim[0] + ) + self.assertEqual(1, self.m_create_prepared_item.call_count) + expected = [ + mock.call(src_coord, mapping[src_dim], src_metadata=src_metadata) + ] + self.assertEqual(expected, self.m_create_prepared_item.call_args_list) + + def test_src_local_with_tgt_free__strict(self): + # key: (state) c=common, f=free, l=local + # (coord) d=dim + # + # tgt: <- src: + # dims 0 1 dims 0 1 + # shape 2 3 shape 2 3 + # state c f state c l 
+ # coord d coord d d + # + # src-to-tgt mapping: + # 0->0, 1->1 + mapping = {0: 0, 1: 1} + self.resolve.mapping = mapping + src_dim = 1 + self.src_coverage["dims_local"] = (src_dim,) + src_metadata = sentinel.src_metadata + self.src_coverage["metadata"] = [None, src_metadata] + src_coord = sentinel.src_coord + self.src_coverage["coords"] = [None, src_coord] + src_coverage = _DimCoverage(**self.src_coverage) + self.tgt_coverage["cube"] = self.Cube(ndim=2) + tgt_coverage = _DimCoverage(**self.tgt_coverage) + with LENIENT.context(maths=False): + self.resolve._prepare_local_payload_dim(src_coverage, tgt_coverage) + self.assertEqual(0, len(self.resolve.prepared_category.items_dim)) + + def test_src_free_with_tgt_local(self): + # key: (state) c=common, f=free, l=local + # (coord) d=dim + # + # tgt: <- src: + # dims 0 1 dims 0 1 + # shape 2 3 shape 2 3 + # state c l state c f + # coord d d coord d + # + # src-to-tgt mapping: + # 0->0, 1->1 + mapping = {0: 0, 1: 1} + self.resolve.mapping = mapping + src_coverage = _DimCoverage(**self.src_coverage) + self.tgt_coverage["cube"] = self.Cube(ndim=2) + tgt_dim = 1 + self.tgt_coverage["dims_local"] = (tgt_dim,) + tgt_metadata = sentinel.tgt_metadata + self.tgt_coverage["metadata"] = [None, tgt_metadata] + tgt_coord = sentinel.tgt_coord + self.tgt_coverage["coords"] = [None, tgt_coord] + tgt_coverage = _DimCoverage(**self.tgt_coverage) + self.resolve._prepare_local_payload_dim(src_coverage, tgt_coverage) + self.assertEqual(1, len(self.resolve.prepared_category.items_dim)) + self.assertEqual( + self.prepared_item, self.resolve.prepared_category.items_dim[0] + ) + self.assertEqual(1, self.m_create_prepared_item.call_count) + expected = [mock.call(tgt_coord, tgt_dim, tgt_metadata=tgt_metadata)] + self.assertEqual(expected, self.m_create_prepared_item.call_args_list) + + def test_src_free_with_tgt_local__strict(self): + # key: (state) c=common, f=free, l=local + # (coord) d=dim + # + # tgt: <- src: + # dims 0 1 dims 0 1 + # shape 2 3 
shape 2 3 + # state c l state c f + # coord d d coord d + # + # src-to-tgt mapping: + # 0->0, 1->1 + mapping = {0: 0, 1: 1} + self.resolve.mapping = mapping + src_coverage = _DimCoverage(**self.src_coverage) + self.tgt_coverage["cube"] = self.Cube(ndim=2) + tgt_dim = 1 + self.tgt_coverage["dims_local"] = (tgt_dim,) + tgt_metadata = sentinel.tgt_metadata + self.tgt_coverage["metadata"] = [None, tgt_metadata] + tgt_coord = sentinel.tgt_coord + self.tgt_coverage["coords"] = [None, tgt_coord] + tgt_coverage = _DimCoverage(**self.tgt_coverage) + with LENIENT.context(maths=False): + self.resolve._prepare_local_payload_dim(src_coverage, tgt_coverage) + self.assertEqual(0, len(self.resolve.prepared_category.items_dim)) + + def test_src_no_local_with_tgt_local__extra_dims(self): + # key: (state) c=common, f=free, l=local + # (coord) d=dim + # + # tgt: <- src: + # dims 0 1 2 dims 0 1 + # shape 4 2 3 shape 2 3 + # state l c c state c c + # coord d d d coord d d + # + # src-to-tgt mapping: + # 0->1, 1->2 + mapping = {0: 1, 1: 2} + self.resolve.mapping = mapping + src_coverage = _DimCoverage(**self.src_coverage) + self.tgt_coverage["cube"] = self.Cube(ndim=3) + tgt_dim = 0 + self.tgt_coverage["dims_local"] = (tgt_dim,) + tgt_metadata = sentinel.tgt_metadata + self.tgt_coverage["metadata"] = [tgt_metadata, None, None] + tgt_coord = sentinel.tgt_coord + self.tgt_coverage["coords"] = [tgt_coord, None, None] + tgt_coverage = _DimCoverage(**self.tgt_coverage) + self.resolve._prepare_local_payload_dim(src_coverage, tgt_coverage) + self.assertEqual(1, len(self.resolve.prepared_category.items_dim)) + self.assertEqual( + self.prepared_item, self.resolve.prepared_category.items_dim[0] + ) + self.assertEqual(1, self.m_create_prepared_item.call_count) + expected = [mock.call(tgt_coord, tgt_dim, tgt_metadata=tgt_metadata)] + self.assertEqual(expected, self.m_create_prepared_item.call_args_list) + + def test_src_no_local_with_tgt_local__extra_dims_strict(self): + # key: (state) c=common, 
f=free, l=local + # (coord) d=dim + # + # tgt: <- src: + # dims 0 1 2 dims 0 1 + # shape 4 2 3 shape 2 3 + # state l c c state c c + # coord d d d coord d d + # + # src-to-tgt mapping: + # 0->1, 1->2 + mapping = {0: 1, 1: 2} + self.resolve.mapping = mapping + src_coverage = _DimCoverage(**self.src_coverage) + self.tgt_coverage["cube"] = self.Cube(ndim=3) + tgt_dim = 0 + self.tgt_coverage["dims_local"] = (tgt_dim,) + tgt_metadata = sentinel.tgt_metadata + self.tgt_coverage["metadata"] = [tgt_metadata, None, None] + tgt_coord = sentinel.tgt_coord + self.tgt_coverage["coords"] = [tgt_coord, None, None] + tgt_coverage = _DimCoverage(**self.tgt_coverage) + with LENIENT.context(maths=False): + self.resolve._prepare_local_payload_dim(src_coverage, tgt_coverage) + self.assertEqual(1, len(self.resolve.prepared_category.items_dim)) + self.assertEqual( + self.prepared_item, self.resolve.prepared_category.items_dim[0] + ) + self.assertEqual(1, self.m_create_prepared_item.call_count) + expected = [mock.call(tgt_coord, tgt_dim, tgt_metadata=tgt_metadata)] + self.assertEqual(expected, self.m_create_prepared_item.call_args_list) + + +class Test__prepare_local_payload_aux(tests.IrisTest): + def setUp(self): + self.Cube = namedtuple("Cube", ["ndim"]) + self.resolve = Resolve() + self.resolve.prepared_category = _CategoryItems( + items_dim=[], items_aux=[], items_scalar=[] + ) + self.resolve.map_rhs_to_lhs = True + self.src_coverage = dict( + cube=None, + common_items_aux=None, + common_items_scalar=None, + local_items_aux=[], + local_items_scalar=None, + dims_common=None, + dims_local=[], + dims_free=None, + ) + self.tgt_coverage = deepcopy(self.src_coverage) + self.src_prepared_item = sentinel.src_prepared_item + self.tgt_prepared_item = sentinel.tgt_prepared_item + self.m_create_prepared_item = self.patch( + "iris.common.resolve.Resolve._create_prepared_item", + side_effect=(self.src_prepared_item, self.tgt_prepared_item), + ) + + def test_src_no_local_with_tgt_no_local(self): + # 
key: (state) c=common, f=free, l=local + # (coord) d=dim + # + # tgt: <- src: + # dims 0 1 dims 0 1 + # shape 2 3 shape 2 3 + # state c c state c c + # coord a a coord a a + # + # src-to-tgt mapping: + # 0->0, 1->1 + mapping = {0: 0, 1: 1} + self.resolve.mapping = mapping + src_coverage = _AuxCoverage(**self.src_coverage) + self.tgt_coverage["cube"] = self.Cube(ndim=2) + tgt_coverage = _AuxCoverage(**self.tgt_coverage) + self.resolve._prepare_local_payload_aux(src_coverage, tgt_coverage) + self.assertEqual(0, len(self.resolve.prepared_category.items_aux)) + + def test_src_no_local_with_tgt_no_local__strict(self): + # key: (state) c=common, f=free, l=local + # (coord) d=dim + # + # tgt: <- src: + # dims 0 1 dims 0 1 + # shape 2 3 shape 2 3 + # state c c state c c + # coord a a coord a a + # + # src-to-tgt mapping: + # 0->0, 1->1 + mapping = {0: 0, 1: 1} + self.resolve.mapping = mapping + src_coverage = _AuxCoverage(**self.src_coverage) + self.tgt_coverage["cube"] = self.Cube(ndim=2) + tgt_coverage = _AuxCoverage(**self.tgt_coverage) + with LENIENT.context(maths=False): + self.resolve._prepare_local_payload_aux(src_coverage, tgt_coverage) + self.assertEqual(0, len(self.resolve.prepared_category.items_aux)) + + def test_src_local_with_tgt_local(self): + # key: (state) c=common, f=free, l=local + # (coord) d=dim + # + # tgt: <- src: + # dims 0 1 dims 0 1 + # shape 2 3 shape 2 3 + # state c l state c l + # coord a a coord a a + # + # src-to-tgt mapping: + # 0->0, 1->1 + mapping = {0: 0, 1: 1} + self.resolve.mapping = mapping + src_metadata = sentinel.src_metadata + src_coord = sentinel.src_coord + src_dims = (1,) + src_item = _Item(metadata=src_metadata, coord=src_coord, dims=src_dims) + self.src_coverage["local_items_aux"].append(src_item) + src_coverage = _AuxCoverage(**self.src_coverage) + self.tgt_coverage["cube"] = self.Cube(ndim=2) + tgt_metadata = sentinel.tgt_metadata + tgt_coord = sentinel.tgt_coord + tgt_dims = (1,) + tgt_item = _Item(metadata=tgt_metadata, 
coord=tgt_coord, dims=tgt_dims) + self.tgt_coverage["local_items_aux"].append(tgt_item) + tgt_coverage = _AuxCoverage(**self.tgt_coverage) + self.resolve._prepare_local_payload_aux(src_coverage, tgt_coverage) + self.assertEqual(2, len(self.resolve.prepared_category.items_aux)) + expected = [self.src_prepared_item, self.tgt_prepared_item] + self.assertEqual(expected, self.resolve.prepared_category.items_aux) + expected = [ + mock.call(src_coord, tgt_dims, src_metadata=src_metadata), + mock.call(tgt_coord, tgt_dims, tgt_metadata=tgt_metadata), + ] + self.assertEqual(expected, self.m_create_prepared_item.call_args_list) + + def test_src_local_with_tgt_local__strict(self): + # key: (state) c=common, f=free, l=local + # (coord) d=dim + # + # tgt: <- src: + # dims 0 1 dims 0 1 + # shape 2 3 shape 2 3 + # state c l state c l + # coord a a coord a a + # + # src-to-tgt mapping: + # 0->0, 1->1 + mapping = {0: 0, 1: 1} + self.resolve.mapping = mapping + src_metadata = sentinel.src_metadata + src_coord = sentinel.src_coord + src_dims = (1,) + src_item = _Item(metadata=src_metadata, coord=src_coord, dims=src_dims) + self.src_coverage["local_items_aux"].append(src_item) + src_coverage = _AuxCoverage(**self.src_coverage) + self.tgt_coverage["cube"] = self.Cube(ndim=2) + tgt_metadata = sentinel.tgt_metadata + tgt_coord = sentinel.tgt_coord + tgt_dims = (1,) + tgt_item = _Item(metadata=tgt_metadata, coord=tgt_coord, dims=tgt_dims) + self.tgt_coverage["local_items_aux"].append(tgt_item) + tgt_coverage = _AuxCoverage(**self.tgt_coverage) + with LENIENT.context(maths=False): + self.resolve._prepare_local_payload_aux(src_coverage, tgt_coverage) + self.assertEqual(0, len(self.resolve.prepared_category.items_aux)) + + def test_src_local_with_tgt_free(self): + # key: (state) c=common, f=free, l=local + # (coord) d=dim + # + # tgt: <- src: + # dims 0 1 dims 0 1 + # shape 2 3 shape 2 3 + # state c f state c l + # coord a coord a a + # + # src-to-tgt mapping: + # 0->0, 1->1 + mapping = {0: 
0, 1: 1} + self.resolve.mapping = mapping + src_metadata = sentinel.src_metadata + src_coord = sentinel.src_coord + src_dims = (1,) + src_item = _Item(metadata=src_metadata, coord=src_coord, dims=src_dims) + self.src_coverage["local_items_aux"].append(src_item) + self.src_coverage["dims_local"].extend(src_dims) + src_coverage = _AuxCoverage(**self.src_coverage) + self.tgt_coverage["cube"] = self.Cube(ndim=2) + tgt_coverage = _AuxCoverage(**self.tgt_coverage) + self.resolve._prepare_local_payload_aux(src_coverage, tgt_coverage) + self.assertEqual(1, len(self.resolve.prepared_category.items_aux)) + expected = [self.src_prepared_item] + self.assertEqual(expected, self.resolve.prepared_category.items_aux) + expected = [mock.call(src_coord, src_dims, src_metadata=src_metadata)] + self.assertEqual(expected, self.m_create_prepared_item.call_args_list) + + def test_src_local_with_tgt_free__strict(self): + # key: (state) c=common, f=free, l=local + # (coord) d=dim + # + # tgt: <- src: + # dims 0 1 dims 0 1 + # shape 2 3 shape 2 3 + # state c f state c l + # coord a coord a a + # + # src-to-tgt mapping: + # 0->0, 1->1 + mapping = {0: 0, 1: 1} + self.resolve.mapping = mapping + src_metadata = sentinel.src_metadata + src_coord = sentinel.src_coord + src_dims = (1,) + src_item = _Item(metadata=src_metadata, coord=src_coord, dims=src_dims) + self.src_coverage["local_items_aux"].append(src_item) + self.src_coverage["dims_local"].extend(src_dims) + src_coverage = _AuxCoverage(**self.src_coverage) + self.tgt_coverage["cube"] = self.Cube(ndim=2) + tgt_coverage = _AuxCoverage(**self.tgt_coverage) + with LENIENT.context(maths=False): + self.resolve._prepare_local_payload_aux(src_coverage, tgt_coverage) + self.assertEqual(0, len(self.resolve.prepared_category.items_aux)) + + def test_src_free_with_tgt_local(self): + # key: (state) c=common, f=free, l=local + # (coord) d=dim + # + # tgt: <- src: + # dims 0 1 dims 0 1 + # shape 2 3 shape 2 3 + # state c l state c f + # coord a a coord a 
+ # + # src-to-tgt mapping: + # 0->0, 1->1 + self.m_create_prepared_item.side_effect = (self.tgt_prepared_item,) + mapping = {0: 0, 1: 1} + self.resolve.mapping = mapping + src_coverage = _AuxCoverage(**self.src_coverage) + self.tgt_coverage["cube"] = self.Cube(ndim=2) + tgt_metadata = sentinel.tgt_metadata + tgt_coord = sentinel.tgt_coord + tgt_dims = (1,) + tgt_item = _Item(metadata=tgt_metadata, coord=tgt_coord, dims=tgt_dims) + self.tgt_coverage["local_items_aux"].append(tgt_item) + self.tgt_coverage["dims_local"].extend(tgt_dims) + tgt_coverage = _AuxCoverage(**self.tgt_coverage) + self.resolve._prepare_local_payload_aux(src_coverage, tgt_coverage) + self.assertEqual(1, len(self.resolve.prepared_category.items_aux)) + expected = [self.tgt_prepared_item] + self.assertEqual(expected, self.resolve.prepared_category.items_aux) + expected = [mock.call(tgt_coord, tgt_dims, tgt_metadata=tgt_metadata)] + self.assertEqual(expected, self.m_create_prepared_item.call_args_list) + + def test_src_free_with_tgt_local__strict(self): + # key: (state) c=common, f=free, l=local + # (coord) d=dim + # + # tgt: <- src: + # dims 0 1 dims 0 1 + # shape 2 3 shape 2 3 + # state c l state c f + # coord a a coord a + # + # src-to-tgt mapping: + # 0->0, 1->1 + self.m_create_prepared_item.side_effect = (self.tgt_prepared_item,) + mapping = {0: 0, 1: 1} + self.resolve.mapping = mapping + src_coverage = _AuxCoverage(**self.src_coverage) + self.tgt_coverage["cube"] = self.Cube(ndim=2) + tgt_metadata = sentinel.tgt_metadata + tgt_coord = sentinel.tgt_coord + tgt_dims = (1,) + tgt_item = _Item(metadata=tgt_metadata, coord=tgt_coord, dims=tgt_dims) + self.tgt_coverage["local_items_aux"].append(tgt_item) + self.tgt_coverage["dims_local"].extend(tgt_dims) + tgt_coverage = _AuxCoverage(**self.tgt_coverage) + with LENIENT.context(maths=False): + self.resolve._prepare_local_payload_aux(src_coverage, tgt_coverage) + self.assertEqual(0, len(self.resolve.prepared_category.items_aux)) + + def 
test_src_no_local_with_tgt_local__extra_dims(self): + # key: (state) c=common, f=free, l=local + # (coord) d=dim + # + # tgt: <- src: + # dims 0 1 2 dims 0 1 + # shape 4 2 3 shape 2 3 + # state l c c state c c + # coord a a a coord a a + # + # src-to-tgt mapping: + # 0->1, 1->2 + self.m_create_prepared_item.side_effect = (self.tgt_prepared_item,) + mapping = {0: 1, 1: 2} + self.resolve.mapping = mapping + src_coverage = _AuxCoverage(**self.src_coverage) + self.tgt_coverage["cube"] = self.Cube(ndim=3) + tgt_metadata = sentinel.tgt_metadata + tgt_coord = sentinel.tgt_coord + tgt_dims = (0,) + tgt_item = _Item(metadata=tgt_metadata, coord=tgt_coord, dims=tgt_dims) + self.tgt_coverage["local_items_aux"].append(tgt_item) + self.tgt_coverage["dims_local"].extend(tgt_dims) + tgt_coverage = _AuxCoverage(**self.tgt_coverage) + self.resolve._prepare_local_payload_aux(src_coverage, tgt_coverage) + self.assertEqual(1, len(self.resolve.prepared_category.items_aux)) + expected = [self.tgt_prepared_item] + self.assertEqual(expected, self.resolve.prepared_category.items_aux) + expected = [mock.call(tgt_coord, tgt_dims, tgt_metadata=tgt_metadata)] + self.assertEqual(expected, self.m_create_prepared_item.call_args_list) + + def test_src_no_local_with_tgt_local__extra_dims_strict(self): + # key: (state) c=common, f=free, l=local + # (coord) d=dim + # + # tgt: <- src: + # dims 0 1 2 dims 0 1 + # shape 4 2 3 shape 2 3 + # state l c c state c c + # coord a a a coord a a + # + # src-to-tgt mapping: + # 0->1, 1->2 + self.m_create_prepared_item.side_effect = (self.tgt_prepared_item,) + mapping = {0: 1, 1: 2} + self.resolve.mapping = mapping + src_coverage = _AuxCoverage(**self.src_coverage) + self.tgt_coverage["cube"] = self.Cube(ndim=3) + tgt_metadata = sentinel.tgt_metadata + tgt_coord = sentinel.tgt_coord + tgt_dims = (0,) + tgt_item = _Item(metadata=tgt_metadata, coord=tgt_coord, dims=tgt_dims) + self.tgt_coverage["local_items_aux"].append(tgt_item) + 
self.tgt_coverage["dims_local"].extend(tgt_dims) + tgt_coverage = _AuxCoverage(**self.tgt_coverage) + with LENIENT.context(maths=True): + self.resolve._prepare_local_payload_aux(src_coverage, tgt_coverage) + self.assertEqual(1, len(self.resolve.prepared_category.items_aux)) + expected = [self.tgt_prepared_item] + self.assertEqual(expected, self.resolve.prepared_category.items_aux) + expected = [mock.call(tgt_coord, tgt_dims, tgt_metadata=tgt_metadata)] + self.assertEqual(expected, self.m_create_prepared_item.call_args_list) + + +class Test__prepare_local_payload_scalar(tests.IrisTest): + def setUp(self): + self.Cube = namedtuple("Cube", ["ndim"]) + self.resolve = Resolve() + self.resolve.prepared_category = _CategoryItems( + items_dim=[], items_aux=[], items_scalar=[] + ) + self.src_coverage = dict( + cube=None, + common_items_aux=None, + common_items_scalar=None, + local_items_aux=None, + local_items_scalar=[], + dims_common=None, + dims_local=[], + dims_free=None, + ) + self.tgt_coverage = deepcopy(self.src_coverage) + self.src_prepared_item = sentinel.src_prepared_item + self.tgt_prepared_item = sentinel.tgt_prepared_item + self.m_create_prepared_item = self.patch( + "iris.common.resolve.Resolve._create_prepared_item", + side_effect=(self.src_prepared_item, self.tgt_prepared_item), + ) + self.src_dims = () + self.tgt_dims = () + + def test_src_no_local_with_tgt_no_local(self): + ndim = 2 + self.src_coverage["cube"] = self.Cube(ndim=ndim) + src_coverage = _AuxCoverage(**self.src_coverage) + tgt_coverage = _AuxCoverage(**self.tgt_coverage) + self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) + self.assertEqual(0, len(self.resolve.prepared_category.items_scalar)) + + def test_src_no_local_with_tgt_no_local__strict(self): + ndim = 2 + self.src_coverage["cube"] = self.Cube(ndim=ndim) + src_coverage = _AuxCoverage(**self.src_coverage) + tgt_coverage = _AuxCoverage(**self.tgt_coverage) + with LENIENT.context(maths=False): + 
self.resolve._prepare_local_payload_scalar( + src_coverage, tgt_coverage + ) + self.assertEqual(0, len(self.resolve.prepared_category.items_scalar)) + + def test_src_no_local_with_tgt_no_local__src_scalar_cube(self): + ndim = 0 + self.src_coverage["cube"] = self.Cube(ndim=ndim) + src_coverage = _AuxCoverage(**self.src_coverage) + tgt_coverage = _AuxCoverage(**self.tgt_coverage) + self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) + self.assertEqual(0, len(self.resolve.prepared_category.items_scalar)) + + def test_src_no_local_with_tgt_no_local__src_scalar_cube_strict(self): + ndim = 0 + self.src_coverage["cube"] = self.Cube(ndim=ndim) + src_coverage = _AuxCoverage(**self.src_coverage) + tgt_coverage = _AuxCoverage(**self.tgt_coverage) + with LENIENT.context(maths=False): + self.resolve._prepare_local_payload_scalar( + src_coverage, tgt_coverage + ) + self.assertEqual(0, len(self.resolve.prepared_category.items_scalar)) + + def test_src_local_with_tgt_no_local(self): + ndim = 2 + self.src_coverage["cube"] = self.Cube(ndim=ndim) + src_metadata = sentinel.src_metadata + src_coord = sentinel.src_coord + src_item = _Item( + metadata=src_metadata, coord=src_coord, dims=self.src_dims + ) + self.src_coverage["local_items_scalar"].append(src_item) + src_coverage = _AuxCoverage(**self.src_coverage) + tgt_coverage = _AuxCoverage(**self.tgt_coverage) + self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) + self.assertEqual(1, len(self.resolve.prepared_category.items_scalar)) + expected = [self.src_prepared_item] + self.assertEqual(expected, self.resolve.prepared_category.items_scalar) + expected = [ + mock.call(src_coord, self.src_dims, src_metadata=src_metadata) + ] + self.assertEqual(expected, self.m_create_prepared_item.call_args_list) + + def test_src_local_with_tgt_no_local__strict(self): + ndim = 2 + self.src_coverage["cube"] = self.Cube(ndim=ndim) + src_metadata = sentinel.src_metadata + src_coord = sentinel.src_coord + src_item = 
_Item( + metadata=src_metadata, coord=src_coord, dims=self.src_dims + ) + self.src_coverage["local_items_scalar"].append(src_item) + src_coverage = _AuxCoverage(**self.src_coverage) + tgt_coverage = _AuxCoverage(**self.tgt_coverage) + with LENIENT.context(maths=False): + self.resolve._prepare_local_payload_scalar( + src_coverage, tgt_coverage + ) + self.assertEqual(0, len(self.resolve.prepared_category.items_scalar)) + + def test_src_local_with_tgt_no_local__src_scalar_cube(self): + ndim = 0 + self.src_coverage["cube"] = self.Cube(ndim=ndim) + src_metadata = sentinel.src_metadata + src_coord = sentinel.src_coord + src_item = _Item( + metadata=src_metadata, coord=src_coord, dims=self.src_dims + ) + self.src_coverage["local_items_scalar"].append(src_item) + src_coverage = _AuxCoverage(**self.src_coverage) + tgt_coverage = _AuxCoverage(**self.tgt_coverage) + self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) + self.assertEqual(1, len(self.resolve.prepared_category.items_scalar)) + expected = [self.src_prepared_item] + self.assertEqual(expected, self.resolve.prepared_category.items_scalar) + expected = [ + mock.call(src_coord, self.src_dims, src_metadata=src_metadata) + ] + self.assertEqual(expected, self.m_create_prepared_item.call_args_list) + + def test_src_local_with_tgt_no_local__src_scalar_cube_strict(self): + ndim = 0 + self.src_coverage["cube"] = self.Cube(ndim=ndim) + src_metadata = sentinel.src_metadata + src_coord = sentinel.src_coord + src_item = _Item( + metadata=src_metadata, coord=src_coord, dims=self.src_dims + ) + self.src_coverage["local_items_scalar"].append(src_item) + src_coverage = _AuxCoverage(**self.src_coverage) + tgt_coverage = _AuxCoverage(**self.tgt_coverage) + with LENIENT.context(maths=False): + self.resolve._prepare_local_payload_scalar( + src_coverage, tgt_coverage + ) + self.assertEqual(0, len(self.resolve.prepared_category.items_scalar)) + + def test_src_no_local_with_tgt_local(self): + 
self.m_create_prepared_item.side_effect = (self.tgt_prepared_item,) + ndim = 2 + self.src_coverage["cube"] = self.Cube(ndim=ndim) + src_coverage = _AuxCoverage(**self.src_coverage) + tgt_metadata = sentinel.tgt_metadata + tgt_coord = sentinel.tgt_coord + tgt_item = _Item( + metadata=tgt_metadata, coord=tgt_coord, dims=self.tgt_dims + ) + self.tgt_coverage["local_items_scalar"].append(tgt_item) + tgt_coverage = _AuxCoverage(**self.tgt_coverage) + self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) + self.assertEqual(1, len(self.resolve.prepared_category.items_scalar)) + expected = [self.tgt_prepared_item] + self.assertEqual(expected, self.resolve.prepared_category.items_scalar) + expected = [ + mock.call(tgt_coord, self.tgt_dims, tgt_metadata=tgt_metadata) + ] + self.assertEqual(expected, self.m_create_prepared_item.call_args_list) + + def test_src_no_local_with_tgt_local__strict(self): + self.m_create_prepared_item.side_effect = (self.tgt_prepared_item,) + ndim = 2 + self.src_coverage["cube"] = self.Cube(ndim=ndim) + src_coverage = _AuxCoverage(**self.src_coverage) + tgt_metadata = sentinel.tgt_metadata + tgt_coord = sentinel.tgt_coord + tgt_item = _Item( + metadata=tgt_metadata, coord=tgt_coord, dims=self.tgt_dims + ) + self.tgt_coverage["local_items_scalar"].append(tgt_item) + tgt_coverage = _AuxCoverage(**self.tgt_coverage) + with LENIENT.context(maths=False): + self.resolve._prepare_local_payload_scalar( + src_coverage, tgt_coverage + ) + self.assertEqual(0, len(self.resolve.prepared_category.items_scalar)) + + def test_src_no_local_with_tgt_local__src_scalar_cube(self): + self.m_create_prepared_item.side_effect = (self.tgt_prepared_item,) + ndim = 0 + self.src_coverage["cube"] = self.Cube(ndim=ndim) + src_coverage = _AuxCoverage(**self.src_coverage) + tgt_metadata = sentinel.tgt_metadata + tgt_coord = sentinel.tgt_coord + tgt_item = _Item( + metadata=tgt_metadata, coord=tgt_coord, dims=self.tgt_dims + ) + 
self.tgt_coverage["local_items_scalar"].append(tgt_item) + tgt_coverage = _AuxCoverage(**self.tgt_coverage) + self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) + self.assertEqual(1, len(self.resolve.prepared_category.items_scalar)) + expected = [self.tgt_prepared_item] + self.assertEqual(expected, self.resolve.prepared_category.items_scalar) + expected = [ + mock.call(tgt_coord, self.tgt_dims, tgt_metadata=tgt_metadata) + ] + self.assertEqual(expected, self.m_create_prepared_item.call_args_list) + + def test_src_no_local_with_tgt_local__src_scalar_cube_strict(self): + self.m_create_prepared_item.side_effect = (self.tgt_prepared_item,) + ndim = 0 + self.src_coverage["cube"] = self.Cube(ndim=ndim) + src_coverage = _AuxCoverage(**self.src_coverage) + tgt_metadata = sentinel.tgt_metadata + tgt_coord = sentinel.tgt_coord + tgt_item = _Item( + metadata=tgt_metadata, coord=tgt_coord, dims=self.tgt_dims + ) + self.tgt_coverage["local_items_scalar"].append(tgt_item) + tgt_coverage = _AuxCoverage(**self.tgt_coverage) + with LENIENT.context(maths=False): + self.resolve._prepare_local_payload_scalar( + src_coverage, tgt_coverage + ) + self.assertEqual(1, len(self.resolve.prepared_category.items_scalar)) + expected = [self.tgt_prepared_item] + self.assertEqual(expected, self.resolve.prepared_category.items_scalar) + expected = [ + mock.call(tgt_coord, self.tgt_dims, tgt_metadata=tgt_metadata) + ] + self.assertEqual(expected, self.m_create_prepared_item.call_args_list) + + def test_src_local_with_tgt_local(self): + ndim = 2 + self.src_coverage["cube"] = self.Cube(ndim=ndim) + src_metadata = sentinel.src_metadata + src_coord = sentinel.src_coord + src_item = _Item( + metadata=src_metadata, coord=src_coord, dims=self.src_dims + ) + self.src_coverage["local_items_scalar"].append(src_item) + src_coverage = _AuxCoverage(**self.src_coverage) + tgt_metadata = sentinel.tgt_metadata + tgt_coord = sentinel.tgt_coord + tgt_item = _Item( + metadata=tgt_metadata, 
coord=tgt_coord, dims=self.tgt_dims + ) + self.tgt_coverage["local_items_scalar"].append(tgt_item) + tgt_coverage = _AuxCoverage(**self.tgt_coverage) + self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) + self.assertEqual(2, len(self.resolve.prepared_category.items_scalar)) + expected = [self.src_prepared_item, self.tgt_prepared_item] + self.assertEqual(expected, self.resolve.prepared_category.items_scalar) + expected = [ + mock.call(src_coord, self.src_dims, src_metadata=src_metadata), + mock.call(tgt_coord, self.tgt_dims, tgt_metadata=tgt_metadata), + ] + self.assertEqual(expected, self.m_create_prepared_item.call_args_list) + + def test_src_local_with_tgt_local__strict(self): + ndim = 2 + self.src_coverage["cube"] = self.Cube(ndim=ndim) + src_metadata = sentinel.src_metadata + src_coord = sentinel.src_coord + src_item = _Item( + metadata=src_metadata, coord=src_coord, dims=self.src_dims + ) + self.src_coverage["local_items_scalar"].append(src_item) + src_coverage = _AuxCoverage(**self.src_coverage) + tgt_metadata = sentinel.tgt_metadata + tgt_coord = sentinel.tgt_coord + tgt_item = _Item( + metadata=tgt_metadata, coord=tgt_coord, dims=self.tgt_dims + ) + self.tgt_coverage["local_items_scalar"].append(tgt_item) + tgt_coverage = _AuxCoverage(**self.tgt_coverage) + with LENIENT.context(maths=False): + self.resolve._prepare_local_payload_scalar( + src_coverage, tgt_coverage + ) + self.assertEqual(0, len(self.resolve.prepared_category.items_scalar)) + + def test_src_local_with_tgt_local__src_scalar_cube(self): + ndim = 0 + self.src_coverage["cube"] = self.Cube(ndim=ndim) + src_metadata = sentinel.src_metadata + src_coord = sentinel.src_coord + src_item = _Item( + metadata=src_metadata, coord=src_coord, dims=self.src_dims + ) + self.src_coverage["local_items_scalar"].append(src_item) + src_coverage = _AuxCoverage(**self.src_coverage) + tgt_metadata = sentinel.tgt_metadata + tgt_coord = sentinel.tgt_coord + tgt_item = _Item( + metadata=tgt_metadata, 
coord=tgt_coord, dims=self.tgt_dims + ) + self.tgt_coverage["local_items_scalar"].append(tgt_item) + tgt_coverage = _AuxCoverage(**self.tgt_coverage) + self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) + self.assertEqual(2, len(self.resolve.prepared_category.items_scalar)) + expected = [self.src_prepared_item, self.tgt_prepared_item] + self.assertEqual(expected, self.resolve.prepared_category.items_scalar) + expected = [ + mock.call(src_coord, self.src_dims, src_metadata=src_metadata), + mock.call(tgt_coord, self.tgt_dims, tgt_metadata=tgt_metadata), + ] + self.assertEqual(expected, self.m_create_prepared_item.call_args_list) + + def test_src_local_with_tgt_local__src_scalar_cube_strict(self): + ndim = 0 + self.src_coverage["cube"] = self.Cube(ndim=ndim) + src_metadata = sentinel.src_metadata + src_coord = sentinel.src_coord + src_item = _Item( + metadata=src_metadata, coord=src_coord, dims=self.src_dims + ) + self.src_coverage["local_items_scalar"].append(src_item) + src_coverage = _AuxCoverage(**self.src_coverage) + tgt_metadata = sentinel.tgt_metadata + tgt_coord = sentinel.tgt_coord + tgt_item = _Item( + metadata=tgt_metadata, coord=tgt_coord, dims=self.tgt_dims + ) + self.tgt_coverage["local_items_scalar"].append(tgt_item) + tgt_coverage = _AuxCoverage(**self.tgt_coverage) + with LENIENT.context(maths=False): + self.resolve._prepare_local_payload_scalar( + src_coverage, tgt_coverage + ) + self.assertEqual(0, len(self.resolve.prepared_category.items_scalar)) + + +class Test__prepare_local_payload(tests.IrisTest): + def test(self): + src_dim_coverage = sentinel.src_dim_coverage + src_aux_coverage = sentinel.src_aux_coverage + tgt_dim_coverage = sentinel.tgt_dim_coverage + tgt_aux_coverage = sentinel.tgt_aux_coverage + root = "iris.common.resolve.Resolve" + m_prepare_dim = self.patch(f"{root}._prepare_local_payload_dim") + m_prepare_aux = self.patch(f"{root}._prepare_local_payload_aux") + m_prepare_scalar = 
self.patch(f"{root}._prepare_local_payload_scalar") + resolve = Resolve() + resolve._prepare_local_payload( + src_dim_coverage, + src_aux_coverage, + tgt_dim_coverage, + tgt_aux_coverage, + ) + self.assertEqual(1, m_prepare_dim.call_count) + expected = [mock.call(src_dim_coverage, tgt_dim_coverage)] + self.assertEqual(expected, m_prepare_dim.call_args_list) + self.assertEqual(1, m_prepare_aux.call_count) + expected = [mock.call(src_aux_coverage, tgt_aux_coverage)] + self.assertEqual(expected, m_prepare_aux.call_args_list) + self.assertEqual(1, m_prepare_scalar.call_count) + expected = [mock.call(src_aux_coverage, tgt_aux_coverage)] + self.assertEqual(expected, m_prepare_scalar.call_args_list) + + +class Test__metadata_prepare(tests.IrisTest): + def setUp(self): + self.src_cube = sentinel.src_cube + self.src_category_local = sentinel.src_category_local + self.src_dim_coverage = sentinel.src_dim_coverage + self.src_aux_coverage = mock.Mock( + common_items_aux=sentinel.src_aux_coverage_common_items_aux, + common_items_scalar=sentinel.src_aux_coverage_common_items_scalar, + ) + self.tgt_cube = sentinel.tgt_cube + self.tgt_category_local = sentinel.tgt_category_local + self.tgt_dim_coverage = sentinel.tgt_dim_coverage + self.tgt_aux_coverage = mock.Mock( + common_items_aux=sentinel.tgt_aux_coverage_common_items_aux, + common_items_scalar=sentinel.tgt_aux_coverage_common_items_scalar, + ) + self.resolve = Resolve() + root = "iris.common.resolve.Resolve" + self.m_prepare_common_dim_payload = self.patch( + f"{root}._prepare_common_dim_payload" + ) + self.m_prepare_common_aux_payload = self.patch( + f"{root}._prepare_common_aux_payload" + ) + self.m_prepare_local_payload = self.patch( + f"{root}._prepare_local_payload" + ) + self.m_prepare_factory_payload = self.patch( + f"{root}._prepare_factory_payload" + ) + + def _check(self): + self.assertIsNone(self.resolve.prepared_category) + self.assertIsNone(self.resolve.prepared_factories) + self.resolve._metadata_prepare() + 
expected = _CategoryItems(items_dim=[], items_aux=[], items_scalar=[]) + self.assertEqual(expected, self.resolve.prepared_category) + self.assertEqual([], self.resolve.prepared_factories) + self.assertEqual(1, self.m_prepare_common_dim_payload.call_count) + expected = [mock.call(self.src_dim_coverage, self.tgt_dim_coverage)] + self.assertEqual( + expected, self.m_prepare_common_dim_payload.call_args_list + ) + self.assertEqual(2, self.m_prepare_common_aux_payload.call_count) + expected = [ + mock.call( + self.src_aux_coverage.common_items_aux, + self.tgt_aux_coverage.common_items_aux, + [], + ), + mock.call( + self.src_aux_coverage.common_items_scalar, + self.tgt_aux_coverage.common_items_scalar, + [], + ignore_mismatch=True, + ), + ] + self.assertEqual( + expected, self.m_prepare_common_aux_payload.call_args_list + ) + self.assertEqual(1, self.m_prepare_local_payload.call_count) + expected = [ + mock.call( + self.src_dim_coverage, + self.src_aux_coverage, + self.tgt_dim_coverage, + self.tgt_aux_coverage, + ) + ] + self.assertEqual(expected, self.m_prepare_local_payload.call_args_list) + self.assertEqual(2, self.m_prepare_factory_payload.call_count) + expected = [ + mock.call(self.tgt_cube, self.tgt_category_local, from_src=False), + mock.call(self.src_cube, self.src_category_local), + ] + self.assertEqual( + expected, self.m_prepare_factory_payload.call_args_list + ) + + def test_map_rhs_to_lhs__true(self): + self.resolve.map_rhs_to_lhs = True + self.resolve.rhs_cube = self.src_cube + self.resolve.rhs_cube_category_local = self.src_category_local + self.resolve.rhs_cube_dim_coverage = self.src_dim_coverage + self.resolve.rhs_cube_aux_coverage = self.src_aux_coverage + self.resolve.lhs_cube = self.tgt_cube + self.resolve.lhs_cube_category_local = self.tgt_category_local + self.resolve.lhs_cube_dim_coverage = self.tgt_dim_coverage + self.resolve.lhs_cube_aux_coverage = self.tgt_aux_coverage + self._check() + + def test_map_rhs_to_lhs__false(self): + 
self.resolve.map_rhs_to_lhs = False + self.resolve.lhs_cube = self.src_cube + self.resolve.lhs_cube_category_local = self.src_category_local + self.resolve.lhs_cube_dim_coverage = self.src_dim_coverage + self.resolve.lhs_cube_aux_coverage = self.src_aux_coverage + self.resolve.rhs_cube = self.tgt_cube + self.resolve.rhs_cube_category_local = self.tgt_category_local + self.resolve.rhs_cube_dim_coverage = self.tgt_dim_coverage + self.resolve.rhs_cube_aux_coverage = self.tgt_aux_coverage + self._check() + + +class Test__prepare_factory_payload(tests.IrisTest): + def setUp(self): + self.Cube = namedtuple("Cube", ["aux_factories"]) + self.Coord = namedtuple("Coord", ["metadata"]) + self.Factory_T1 = namedtuple( + "Factory_T1", ["dependencies"] + ) # dummy factory type + self.container_T1 = type(self.Factory_T1(None)) + self.Factory_T2 = namedtuple( + "Factory_T2", ["dependencies"] + ) # dummy factory type + self.container_T2 = type(self.Factory_T2(None)) + self.resolve = Resolve() + self.resolve.map_rhs_to_lhs = True + self.resolve.prepared_factories = [] + self.m_get_prepared_item = self.patch( + "iris.common.resolve.Resolve._get_prepared_item" + ) + self.category_local = sentinel.category_local + self.from_src = sentinel.from_src + + def test_no_factory(self): + cube = self.Cube(aux_factories=[]) + self.resolve._prepare_factory_payload(cube, self.category_local) + self.assertEqual(0, len(self.resolve.prepared_factories)) + + def test_skip_factory__already_prepared(self): + aux_factory = self.Factory_T1(dependencies=None) + aux_factories = [aux_factory] + cube = self.Cube(aux_factories=aux_factories) + prepared_factories = [ + _PreparedFactory(container=self.container_T1, dependencies=None), + _PreparedFactory(container=self.container_T2, dependencies=None), + ] + self.resolve.prepared_factories.extend(prepared_factories) + self.resolve._prepare_factory_payload(cube, self.category_local) + self.assertEqual(prepared_factories, self.resolve.prepared_factories) + + def 
test_factory__dependency_already_prepared(self): + coord_a = self.Coord(metadata=sentinel.coord_a_metadata) + coord_b = self.Coord(metadata=sentinel.coord_b_metadata) + coord_c = self.Coord(metadata=sentinel.coord_c_metadata) + side_effect = (coord_a, coord_b, coord_c) + self.m_get_prepared_item.side_effect = side_effect + dependencies = dict(name_a=coord_a, name_b=coord_b, name_c=coord_c) + aux_factory = self.Factory_T1(dependencies=dependencies) + aux_factories = [aux_factory] + cube = self.Cube(aux_factories=aux_factories) + self.resolve._prepare_factory_payload( + cube, self.category_local, from_src=self.from_src + ) + self.assertEqual(1, len(self.resolve.prepared_factories)) + prepared_dependencies = { + name: coord.metadata for name, coord in dependencies.items() + } + expected = [ + _PreparedFactory( + container=self.container_T1, dependencies=prepared_dependencies + ) + ] + self.assertEqual(expected, self.resolve.prepared_factories) + self.assertEqual(len(side_effect), self.m_get_prepared_item.call_count) + expected = [ + mock.call( + coord_a.metadata, self.category_local, from_src=self.from_src + ), + mock.call( + coord_b.metadata, self.category_local, from_src=self.from_src + ), + mock.call( + coord_c.metadata, self.category_local, from_src=self.from_src + ), + ] + actual = self.m_get_prepared_item.call_args_list + for call in expected: + self.assertIn(call, actual) + + def test_factory__dependency_local_not_prepared(self): + coord_a = self.Coord(metadata=sentinel.coord_a_metadata) + coord_b = self.Coord(metadata=sentinel.coord_b_metadata) + coord_c = self.Coord(metadata=sentinel.coord_c_metadata) + side_effect = (None, coord_a, None, coord_b, None, coord_c) + self.m_get_prepared_item.side_effect = side_effect + dependencies = dict(name_a=coord_a, name_b=coord_b, name_c=coord_c) + aux_factory = self.Factory_T1(dependencies=dependencies) + aux_factories = [aux_factory] + cube = self.Cube(aux_factories=aux_factories) + self.resolve._prepare_factory_payload( 
+ cube, self.category_local, from_src=self.from_src + ) + self.assertEqual(1, len(self.resolve.prepared_factories)) + prepared_dependencies = { + name: coord.metadata for name, coord in dependencies.items() + } + expected = [ + _PreparedFactory( + container=self.container_T1, dependencies=prepared_dependencies + ) + ] + self.assertEqual(expected, self.resolve.prepared_factories) + self.assertEqual(len(side_effect), self.m_get_prepared_item.call_count) + expected = [ + mock.call( + coord_a.metadata, self.category_local, from_src=self.from_src + ), + mock.call( + coord_b.metadata, self.category_local, from_src=self.from_src + ), + mock.call( + coord_c.metadata, self.category_local, from_src=self.from_src + ), + mock.call( + coord_a.metadata, + self.category_local, + from_src=self.from_src, + from_local=True, + ), + mock.call( + coord_b.metadata, + self.category_local, + from_src=self.from_src, + from_local=True, + ), + mock.call( + coord_c.metadata, + self.category_local, + from_src=self.from_src, + from_local=True, + ), + ] + actual = self.m_get_prepared_item.call_args_list + for call in expected: + self.assertIn(call, actual) + + def test_factory__dependency_not_found(self): + coord_a = self.Coord(metadata=sentinel.coord_a_metadata) + coord_b = self.Coord(metadata=sentinel.coord_b_metadata) + coord_c = self.Coord(metadata=sentinel.coord_c_metadata) + side_effect = (None, None) + self.m_get_prepared_item.side_effect = side_effect + dependencies = dict(name_a=coord_a, name_b=coord_b, name_c=coord_c) + aux_factory = self.Factory_T1(dependencies=dependencies) + aux_factories = [aux_factory] + cube = self.Cube(aux_factories=aux_factories) + self.resolve._prepare_factory_payload( + cube, self.category_local, from_src=self.from_src + ) + self.assertEqual(0, len(self.resolve.prepared_factories)) + self.assertEqual(len(side_effect), self.m_get_prepared_item.call_count) + expected = [ + mock.call( + coord_a.metadata, self.category_local, from_src=self.from_src + ), + 
mock.call( + coord_b.metadata, self.category_local, from_src=self.from_src + ), + mock.call( + coord_c.metadata, self.category_local, from_src=self.from_src + ), + mock.call( + coord_a.metadata, + self.category_local, + from_src=self.from_src, + from_local=True, + ), + mock.call( + coord_b.metadata, + self.category_local, + from_src=self.from_src, + from_local=True, + ), + mock.call( + coord_c.metadata, + self.category_local, + from_src=self.from_src, + from_local=True, + ), + ] + actual = self.m_get_prepared_item.call_args_list + for call in actual: + self.assertIn(call, expected) + + +class Test__get_prepared_item(tests.IrisTest): + def setUp(self): + PreparedItem = namedtuple("PreparedItem", ["metadata"]) + self.resolve = Resolve() + self.prepared_dim_metadata_src = sentinel.prepared_dim_metadata_src + self.prepared_dim_metadata_tgt = sentinel.prepared_dim_metadata_tgt + self.prepared_items_dim = PreparedItem( + metadata=_PreparedMetadata( + combined=None, + src=self.prepared_dim_metadata_src, + tgt=self.prepared_dim_metadata_tgt, + ) + ) + self.prepared_aux_metadata_src = sentinel.prepared_aux_metadata_src + self.prepared_aux_metadata_tgt = sentinel.prepared_aux_metadata_tgt + self.prepared_items_aux = PreparedItem( + metadata=_PreparedMetadata( + combined=None, + src=self.prepared_aux_metadata_src, + tgt=self.prepared_aux_metadata_tgt, + ) + ) + self.prepared_scalar_metadata_src = ( + sentinel.prepared_scalar_metadata_src + ) + self.prepared_scalar_metadata_tgt = ( + sentinel.prepared_scalar_metadata_tgt + ) + self.prepared_items_scalar = PreparedItem( + metadata=_PreparedMetadata( + combined=None, + src=self.prepared_scalar_metadata_src, + tgt=self.prepared_scalar_metadata_tgt, + ) + ) + self.resolve.prepared_category = _CategoryItems( + items_dim=[self.prepared_items_dim], + items_aux=[self.prepared_items_aux], + items_scalar=[self.prepared_items_scalar], + ) + self.resolve.mapping = {0: 10} + self.m_create_prepared_item = self.patch( + 
"iris.common.resolve.Resolve._create_prepared_item" + ) + self.local_dim_metadata = sentinel.local_dim_metadata + self.local_aux_metadata = sentinel.local_aux_metadata + self.local_scalar_metadata = sentinel.local_scalar_metadata + self.local_coord = sentinel.local_coord + self.local_coord_dims = (0,) + self.local_items_dim = _Item( + metadata=self.local_dim_metadata, + coord=self.local_coord, + dims=self.local_coord_dims, + ) + self.local_items_aux = _Item( + metadata=self.local_aux_metadata, + coord=self.local_coord, + dims=self.local_coord_dims, + ) + self.local_items_scalar = _Item( + metadata=self.local_scalar_metadata, + coord=self.local_coord, + dims=self.local_coord_dims, + ) + self.category_local = _CategoryItems( + items_dim=[self.local_items_dim], + items_aux=[self.local_items_aux], + items_scalar=[self.local_items_scalar], + ) + + def test_missing_prepared_coord__from_src(self): + metadata = sentinel.missing + category_local = None + result = self.resolve._get_prepared_item(metadata, category_local) + self.assertIsNone(result) + + def test_missing_prepared_coord__from_tgt(self): + metadata = sentinel.missing + category_local = None + result = self.resolve._get_prepared_item( + metadata, category_local, from_src=False + ) + self.assertIsNone(result) + + def test_get_prepared_dim_coord__from_src(self): + metadata = self.prepared_dim_metadata_src + category_local = None + result = self.resolve._get_prepared_item(metadata, category_local) + self.assertEqual(self.prepared_items_dim, result) + + def test_get_prepared_dim_coord__from_tgt(self): + metadata = self.prepared_dim_metadata_tgt + category_local = None + result = self.resolve._get_prepared_item( + metadata, category_local, from_src=False + ) + self.assertEqual(self.prepared_items_dim, result) + + def test_get_prepared_aux_coord__from_src(self): + metadata = self.prepared_aux_metadata_src + category_local = None + result = self.resolve._get_prepared_item(metadata, category_local) + 
self.assertEqual(self.prepared_items_aux, result) + + def test_get_prepared_aux_coord__from_tgt(self): + metadata = self.prepared_aux_metadata_tgt + category_local = None + result = self.resolve._get_prepared_item( + metadata, category_local, from_src=False + ) + self.assertEqual(self.prepared_items_aux, result) + + def test_get_prepared_scalar_coord__from_src(self): + metadata = self.prepared_scalar_metadata_src + category_local = None + result = self.resolve._get_prepared_item(metadata, category_local) + self.assertEqual(self.prepared_items_scalar, result) + + def test_get_prepared_scalar_coord__from_tgt(self): + metadata = self.prepared_scalar_metadata_tgt + category_local = None + result = self.resolve._get_prepared_item( + metadata, category_local, from_src=False + ) + self.assertEqual(self.prepared_items_scalar, result) + + def test_missing_local_coord__from_src(self): + metadata = sentinel.missing + result = self.resolve._get_prepared_item( + metadata, self.category_local, from_local=True + ) + self.assertIsNone(result) + + def test_missing_local_coord__from_tgt(self): + metadata = sentinel.missing + result = self.resolve._get_prepared_item( + metadata, self.category_local, from_src=False, from_local=True + ) + self.assertIsNone(result) + + def test_get_local_dim_coord__from_src(self): + created_local_item = sentinel.created_local_item + self.m_create_prepared_item.return_value = created_local_item + metadata = self.local_dim_metadata + result = self.resolve._get_prepared_item( + metadata, self.category_local, from_local=True + ) + expected = created_local_item + self.assertEqual(expected, result) + self.assertEqual(2, len(self.resolve.prepared_category.items_dim)) + self.assertEqual(expected, self.resolve.prepared_category.items_dim[1]) + self.assertEqual(1, self.m_create_prepared_item.call_count) + dims = (self.resolve.mapping[self.local_coord_dims[0]],) + expected = [ + mock.call( + self.local_coord, + dims, + src_metadata=metadata, + tgt_metadata=None, + 
) + ] + self.assertEqual(expected, self.m_create_prepared_item.call_args_list) + + def test_get_local_dim_coord__from_tgt(self): + created_local_item = sentinel.created_local_item + self.m_create_prepared_item.return_value = created_local_item + metadata = self.local_dim_metadata + result = self.resolve._get_prepared_item( + metadata, self.category_local, from_src=False, from_local=True + ) + expected = created_local_item + self.assertEqual(expected, result) + self.assertEqual(2, len(self.resolve.prepared_category.items_dim)) + self.assertEqual(expected, self.resolve.prepared_category.items_dim[1]) + self.assertEqual(1, self.m_create_prepared_item.call_count) + dims = self.local_coord_dims + expected = [ + mock.call( + self.local_coord, + dims, + src_metadata=None, + tgt_metadata=metadata, + ) + ] + self.assertEqual(expected, self.m_create_prepared_item.call_args_list) + + def test_get_local_aux_coord__from_src(self): + created_local_item = sentinel.created_local_item + self.m_create_prepared_item.return_value = created_local_item + metadata = self.local_aux_metadata + result = self.resolve._get_prepared_item( + metadata, self.category_local, from_local=True + ) + expected = created_local_item + self.assertEqual(expected, result) + self.assertEqual(2, len(self.resolve.prepared_category.items_aux)) + self.assertEqual(expected, self.resolve.prepared_category.items_aux[1]) + self.assertEqual(1, self.m_create_prepared_item.call_count) + dims = (self.resolve.mapping[self.local_coord_dims[0]],) + expected = [ + mock.call( + self.local_coord, + dims, + src_metadata=metadata, + tgt_metadata=None, + ) + ] + self.assertEqual(expected, self.m_create_prepared_item.call_args_list) + + def test_get_local_aux_coord__from_tgt(self): + created_local_item = sentinel.created_local_item + self.m_create_prepared_item.return_value = created_local_item + metadata = self.local_aux_metadata + result = self.resolve._get_prepared_item( + metadata, self.category_local, from_src=False, 
from_local=True + ) + expected = created_local_item + self.assertEqual(expected, result) + self.assertEqual(2, len(self.resolve.prepared_category.items_aux)) + self.assertEqual(expected, self.resolve.prepared_category.items_aux[1]) + self.assertEqual(1, self.m_create_prepared_item.call_count) + dims = self.local_coord_dims + expected = [ + mock.call( + self.local_coord, + dims, + src_metadata=None, + tgt_metadata=metadata, + ) + ] + self.assertEqual(expected, self.m_create_prepared_item.call_args_list) + + def test_get_local_scalar_coord__from_src(self): + created_local_item = sentinel.created_local_item + self.m_create_prepared_item.return_value = created_local_item + metadata = self.local_scalar_metadata + result = self.resolve._get_prepared_item( + metadata, self.category_local, from_local=True + ) + expected = created_local_item + self.assertEqual(expected, result) + self.assertEqual(2, len(self.resolve.prepared_category.items_scalar)) + self.assertEqual( + expected, self.resolve.prepared_category.items_scalar[1] + ) + self.assertEqual(1, self.m_create_prepared_item.call_count) + dims = (self.resolve.mapping[self.local_coord_dims[0]],) + expected = [ + mock.call( + self.local_coord, + dims, + src_metadata=metadata, + tgt_metadata=None, + ) + ] + self.assertEqual(expected, self.m_create_prepared_item.call_args_list) + + def test_get_local_scalar_coord__from_tgt(self): + created_local_item = sentinel.created_local_item + self.m_create_prepared_item.return_value = created_local_item + metadata = self.local_scalar_metadata + result = self.resolve._get_prepared_item( + metadata, self.category_local, from_src=False, from_local=True + ) + expected = created_local_item + self.assertEqual(expected, result) + self.assertEqual(2, len(self.resolve.prepared_category.items_scalar)) + self.assertEqual( + expected, self.resolve.prepared_category.items_scalar[1] + ) + self.assertEqual(1, self.m_create_prepared_item.call_count) + dims = self.local_coord_dims + expected = [ + 
mock.call( + self.local_coord, + dims, + src_metadata=None, + tgt_metadata=metadata, + ) + ] + self.assertEqual(expected, self.m_create_prepared_item.call_args_list) + + +class Test_cube(tests.IrisTest): + def setUp(self): + self.shape = (2, 3) + self.data = np.zeros(np.multiply(*self.shape), dtype=np.int8).reshape( + self.shape + ) + self.bad_data = np.zeros(np.multiply(*self.shape), dtype=np.int8) + self.resolve = Resolve() + self.resolve.map_rhs_to_lhs = True + self.resolve._broadcast_shape = self.shape + self.cube_metadata = CubeMetadata( + standard_name="air_temperature", + long_name="air temp", + var_name="airT", + units=Unit("K"), + attributes={}, + cell_methods=(), + ) + lhs_cube = Cube(self.data) + lhs_cube.metadata = self.cube_metadata + self.resolve.lhs_cube = lhs_cube + rhs_cube = Cube(self.data) + rhs_cube.metadata = self.cube_metadata + self.resolve.rhs_cube = rhs_cube + self.m_add_dim_coord = self.patch("iris.cube.Cube.add_dim_coord") + self.m_add_aux_coord = self.patch("iris.cube.Cube.add_aux_coord") + self.m_add_aux_factory = self.patch("iris.cube.Cube.add_aux_factory") + self.m_coord = self.patch("iris.cube.Cube.coord") + # + # prepared coordinates + # + prepared_category = _CategoryItems( + items_dim=[], items_aux=[], items_scalar=[] + ) + # prepared dim coordinates + self.prepared_dim_0_metadata = _PreparedMetadata( + combined=sentinel.prepared_dim_0_metadata_combined, + src=None, + tgt=None, + ) + self.prepared_dim_0_points = sentinel.prepared_dim_0_points + self.prepared_dim_0_bounds = sentinel.prepared_dim_0_bounds + self.prepared_dim_0_dims = (0,) + self.prepared_dim_0_coord = mock.Mock(metadata=None) + self.prepared_dim_0_container = mock.Mock( + return_value=self.prepared_dim_0_coord + ) + self.prepared_dim_0 = _PreparedItem( + metadata=self.prepared_dim_0_metadata, + points=self.prepared_dim_0_points, + bounds=self.prepared_dim_0_bounds, + dims=self.prepared_dim_0_dims, + container=self.prepared_dim_0_container, + ) + 
prepared_category.items_dim.append(self.prepared_dim_0) + self.prepared_dim_1_metadata = _PreparedMetadata( + combined=sentinel.prepared_dim_1_metadata_combined, + src=None, + tgt=None, + ) + self.prepared_dim_1_points = sentinel.prepared_dim_1_points + self.prepared_dim_1_bounds = sentinel.prepared_dim_1_bounds + self.prepared_dim_1_dims = (1,) + self.prepared_dim_1_coord = mock.Mock(metadata=None) + self.prepared_dim_1_container = mock.Mock( + return_value=self.prepared_dim_1_coord + ) + self.prepared_dim_1 = _PreparedItem( + metadata=self.prepared_dim_1_metadata, + points=self.prepared_dim_1_points, + bounds=self.prepared_dim_1_bounds, + dims=self.prepared_dim_1_dims, + container=self.prepared_dim_1_container, + ) + prepared_category.items_dim.append(self.prepared_dim_1) + + # prepared auxiliary coordinates + self.prepared_aux_0_metadata = _PreparedMetadata( + combined=sentinel.prepared_aux_0_metadata_combined, + src=None, + tgt=None, + ) + self.prepared_aux_0_points = sentinel.prepared_aux_0_points + self.prepared_aux_0_bounds = sentinel.prepared_aux_0_bounds + self.prepared_aux_0_dims = (0,) + self.prepared_aux_0_coord = mock.Mock(metadata=None) + self.prepared_aux_0_container = mock.Mock( + return_value=self.prepared_aux_0_coord + ) + self.prepared_aux_0 = _PreparedItem( + metadata=self.prepared_aux_0_metadata, + points=self.prepared_aux_0_points, + bounds=self.prepared_aux_0_bounds, + dims=self.prepared_aux_0_dims, + container=self.prepared_aux_0_container, + ) + prepared_category.items_aux.append(self.prepared_aux_0) + self.prepared_aux_1_metadata = _PreparedMetadata( + combined=sentinel.prepared_aux_1_metadata_combined, + src=None, + tgt=None, + ) + self.prepared_aux_1_points = sentinel.prepared_aux_1_points + self.prepared_aux_1_bounds = sentinel.prepared_aux_1_bounds + self.prepared_aux_1_dims = (1,) + self.prepared_aux_1_coord = mock.Mock(metadata=None) + self.prepared_aux_1_container = mock.Mock( + return_value=self.prepared_aux_1_coord + ) + 
self.prepared_aux_1 = _PreparedItem( + metadata=self.prepared_aux_1_metadata, + points=self.prepared_aux_1_points, + bounds=self.prepared_aux_1_bounds, + dims=self.prepared_aux_1_dims, + container=self.prepared_aux_1_container, + ) + prepared_category.items_aux.append(self.prepared_aux_1) + + # prepare scalar coordinates + self.prepared_scalar_0_metadata = _PreparedMetadata( + combined=sentinel.prepared_scalar_0_metadata_combined, + src=None, + tgt=None, + ) + self.prepared_scalar_0_points = sentinel.prepared_scalar_0_points + self.prepared_scalar_0_bounds = sentinel.prepared_scalar_0_bounds + self.prepared_scalar_0_dims = () + self.prepared_scalar_0_coord = mock.Mock(metadata=None) + self.prepared_scalar_0_container = mock.Mock( + return_value=self.prepared_scalar_0_coord + ) + self.prepared_scalar_0 = _PreparedItem( + metadata=self.prepared_scalar_0_metadata, + points=self.prepared_scalar_0_points, + bounds=self.prepared_scalar_0_bounds, + dims=self.prepared_scalar_0_dims, + container=self.prepared_scalar_0_container, + ) + prepared_category.items_scalar.append(self.prepared_scalar_0) + self.prepared_scalar_1_metadata = _PreparedMetadata( + combined=sentinel.prepared_scalar_1_metadata_combined, + src=None, + tgt=None, + ) + self.prepared_scalar_1_points = sentinel.prepared_scalar_1_points + self.prepared_scalar_1_bounds = sentinel.prepared_scalar_1_bounds + self.prepared_scalar_1_dims = () + self.prepared_scalar_1_coord = mock.Mock(metadata=None) + self.prepared_scalar_1_container = mock.Mock( + return_value=self.prepared_scalar_1_coord + ) + self.prepared_scalar_1 = _PreparedItem( + metadata=self.prepared_scalar_1_metadata, + points=self.prepared_scalar_1_points, + bounds=self.prepared_scalar_1_bounds, + dims=self.prepared_scalar_1_dims, + container=self.prepared_scalar_1_container, + ) + prepared_category.items_scalar.append(self.prepared_scalar_1) + # + # prepared factories + # + prepared_factories = [] + self.aux_factory = sentinel.aux_factory + 
self.prepared_factory_container = mock.Mock( + return_value=self.aux_factory + ) + self.prepared_factory_metadata_a = _PreparedMetadata( + combined=sentinel.prepared_factory_metadata_a_combined, + src=None, + tgt=None, + ) + self.prepared_factory_metadata_b = _PreparedMetadata( + combined=sentinel.prepared_factory_metadata_b_combined, + src=None, + tgt=None, + ) + self.prepared_factory_metadata_c = _PreparedMetadata( + combined=sentinel.prepared_factory_metadata_c_combined, + src=None, + tgt=None, + ) + self.prepared_factory_dependencies = dict( + name_a=self.prepared_factory_metadata_a, + name_b=self.prepared_factory_metadata_b, + name_c=self.prepared_factory_metadata_c, + ) + self.prepared_factory = _PreparedFactory( + container=self.prepared_factory_container, + dependencies=self.prepared_factory_dependencies, + ) + prepared_factories.append(self.prepared_factory) + self.prepared_factory_side_effect = ( + sentinel.prepared_factory_coord_a, + sentinel.prepared_factory_coord_b, + sentinel.prepared_factory_coord_c, + ) + self.m_coord.side_effect = self.prepared_factory_side_effect + self.resolve.prepared_category = prepared_category + self.resolve.prepared_factories = prepared_factories + + def test_no_resolved_shape(self): + self.resolve._broadcast_shape = None + data = None + emsg = "Cannot resolve resultant cube, as no candidate cubes have been provided" + with self.assertRaisesRegex(ValueError, emsg): + _ = self.resolve.cube(data) + + def test_bad_data_shape(self): + emsg = "Cannot resolve resultant cube, as the provided data must have shape" + with self.assertRaisesRegex(ValueError, emsg): + _ = self.resolve.cube(self.bad_data) + + def test_bad_data_shape__inplace(self): + self.resolve.lhs_cube = Cube(self.bad_data) + emsg = "Cannot resolve resultant cube in-place" + with self.assertRaisesRegex(ValueError, emsg): + _ = self.resolve.cube(self.data, in_place=True) + + def _check(self): + # check dim coordinate 0 + self.assertEqual(1, 
self.prepared_dim_0.container.call_count) + expected = [ + mock.call( + self.prepared_dim_0_points, bounds=self.prepared_dim_0_bounds + ) + ] + self.assertEqual( + expected, self.prepared_dim_0.container.call_args_list + ) + self.assertEqual( + self.prepared_dim_0_coord.metadata, + self.prepared_dim_0_metadata.combined, + ) + # check dim coordinate 1 + self.assertEqual(1, self.prepared_dim_1.container.call_count) + expected = [ + mock.call( + self.prepared_dim_1_points, bounds=self.prepared_dim_1_bounds + ) + ] + self.assertEqual( + expected, self.prepared_dim_1.container.call_args_list + ) + self.assertEqual( + self.prepared_dim_1_coord.metadata, + self.prepared_dim_1_metadata.combined, + ) + # check add_dim_coord + self.assertEqual(2, self.m_add_dim_coord.call_count) + expected = [ + mock.call(self.prepared_dim_0_coord, self.prepared_dim_0_dims), + mock.call(self.prepared_dim_1_coord, self.prepared_dim_1_dims), + ] + self.assertEqual(expected, self.m_add_dim_coord.call_args_list) + + # check aux coordinate 0 + self.assertEqual(1, self.prepared_aux_0.container.call_count) + expected = [ + mock.call( + self.prepared_aux_0_points, bounds=self.prepared_aux_0_bounds + ) + ] + self.assertEqual( + expected, self.prepared_aux_0.container.call_args_list + ) + self.assertEqual( + self.prepared_aux_0_coord.metadata, + self.prepared_aux_0_metadata.combined, + ) + # check aux coordinate 1 + self.assertEqual(1, self.prepared_aux_1.container.call_count) + expected = [ + mock.call( + self.prepared_aux_1_points, bounds=self.prepared_aux_1_bounds + ) + ] + self.assertEqual( + expected, self.prepared_aux_1.container.call_args_list + ) + self.assertEqual( + self.prepared_aux_1_coord.metadata, + self.prepared_aux_1_metadata.combined, + ) + # check scalar coordinate 0 + self.assertEqual(1, self.prepared_scalar_0.container.call_count) + expected = [ + mock.call( + self.prepared_scalar_0_points, + bounds=self.prepared_scalar_0_bounds, + ) + ] + self.assertEqual( + expected, 
self.prepared_scalar_0.container.call_args_list + ) + self.assertEqual( + self.prepared_scalar_0_coord.metadata, + self.prepared_scalar_0_metadata.combined, + ) + # check scalar coordinate 1 + self.assertEqual(1, self.prepared_scalar_1.container.call_count) + expected = [ + mock.call( + self.prepared_scalar_1_points, + bounds=self.prepared_scalar_1_bounds, + ) + ] + self.assertEqual( + expected, self.prepared_scalar_1.container.call_args_list + ) + self.assertEqual( + self.prepared_scalar_1_coord.metadata, + self.prepared_scalar_1_metadata.combined, + ) + # check add_aux_coord + self.assertEqual(4, self.m_add_aux_coord.call_count) + expected = [ + mock.call(self.prepared_aux_0_coord, self.prepared_aux_0_dims), + mock.call(self.prepared_aux_1_coord, self.prepared_aux_1_dims), + mock.call( + self.prepared_scalar_0_coord, self.prepared_scalar_0_dims + ), + mock.call( + self.prepared_scalar_1_coord, self.prepared_scalar_1_dims + ), + ] + self.assertEqual(expected, self.m_add_aux_coord.call_args_list) + + # check auxiliary factories + self.assertEqual(1, self.m_add_aux_factory.call_count) + expected = [mock.call(self.aux_factory)] + self.assertEqual(expected, self.m_add_aux_factory.call_args_list) + self.assertEqual(1, self.prepared_factory_container.call_count) + expected = [ + mock.call( + **{ + name: value + for name, value in zip( + sorted(self.prepared_factory_dependencies.keys()), + self.prepared_factory_side_effect, + ) + } + ) + ] + self.assertEqual( + expected, self.prepared_factory_container.call_args_list + ) + self.assertEqual(3, self.m_coord.call_count) + expected = [ + mock.call(self.prepared_factory_metadata_a.combined), + mock.call(self.prepared_factory_metadata_b.combined), + mock.call(self.prepared_factory_metadata_c.combined), + ] + self.assertEqual(expected, self.m_coord.call_args_list) + + def test_resolve(self): + result = self.resolve.cube(self.data) + self.assertEqual(self.cube_metadata, result.metadata) + self._check() + 
self.assertIsNot(self.resolve.lhs_cube, result) + + def test_resolve__inplace(self): + result = self.resolve.cube(self.data, in_place=True) + self.assertEqual(self.cube_metadata, result.metadata) + self._check() + self.assertIs(self.resolve.lhs_cube, result) + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/unit/cube/test_Cube.py b/lib/iris/tests/unit/cube/test_Cube.py index 01dfe365b4f..ded401cab31 100644 --- a/lib/iris/tests/unit/cube/test_Cube.py +++ b/lib/iris/tests/unit/cube/test_Cube.py @@ -336,6 +336,108 @@ def test_non_lazy_aggregator(self): self.assertArrayEqual(result.data, np.mean(self.data, axis=1)) +class Test_collapsed__multidim_weighted(tests.IrisTest): + def setUp(self): + self.data = np.arange(6.0).reshape((2, 3)) + self.lazydata = as_lazy_data(self.data) + # Test cubes wth (same-valued) real and lazy data + cube_real = Cube(self.data) + for i_dim, name in enumerate(("y", "x")): + npts = cube_real.shape[i_dim] + coord = DimCoord(np.arange(npts), long_name=name) + cube_real.add_dim_coord(coord, i_dim) + self.cube_real = cube_real + self.cube_lazy = cube_real.copy(data=self.lazydata) + # Test weights and expected result for a y-collapse + self.y_weights = np.array([0.3, 0.5]) + self.full_weights_y = np.broadcast_to( + self.y_weights.reshape((2, 1)), cube_real.shape + ) + self.expected_result_y = np.array([1.875, 2.875, 3.875]) + # Test weights and expected result for an x-collapse + self.x_weights = np.array([0.7, 0.4, 0.6]) + self.full_weights_x = np.broadcast_to( + self.x_weights.reshape((1, 3)), cube_real.shape + ) + self.expected_result_x = np.array([0.941176, 3.941176]) + + def test_weighted_fullweights_real_y(self): + # Supplying full-shape weights for collapsing over a single dimension. 
+ cube_collapsed = self.cube_real.collapsed( + "y", MEAN, weights=self.full_weights_y + ) + self.assertArrayAlmostEqual( + cube_collapsed.data, self.expected_result_y + ) + + def test_weighted_fullweights_lazy_y(self): + # Full-shape weights, lazy data : Check lazy result, same values as real calc. + cube_collapsed = self.cube_lazy.collapsed( + "y", MEAN, weights=self.full_weights_y + ) + self.assertTrue(cube_collapsed.has_lazy_data()) + self.assertArrayAlmostEqual( + cube_collapsed.data, self.expected_result_y + ) + + def test_weighted_1dweights_real_y(self): + # 1-D weights, real data : Check same results as full-shape. + cube_collapsed = self.cube_real.collapsed( + "y", MEAN, weights=self.y_weights + ) + self.assertArrayAlmostEqual( + cube_collapsed.data, self.expected_result_y + ) + + def test_weighted_1dweights_lazy_y(self): + # 1-D weights, lazy data : Check lazy result, same values as real calc. + cube_collapsed = self.cube_lazy.collapsed( + "y", MEAN, weights=self.y_weights + ) + self.assertTrue(cube_collapsed.has_lazy_data()) + self.assertArrayAlmostEqual( + cube_collapsed.data, self.expected_result_y + ) + + def test_weighted_fullweights_real_x(self): + # Full weights, real data, ** collapse X ** : as for 'y' case above + cube_collapsed = self.cube_real.collapsed( + "x", MEAN, weights=self.full_weights_x + ) + self.assertArrayAlmostEqual( + cube_collapsed.data, self.expected_result_x + ) + + def test_weighted_fullweights_lazy_x(self): + # Full weights, lazy data, ** collapse X ** : as for 'y' case above + cube_collapsed = self.cube_lazy.collapsed( + "x", MEAN, weights=self.full_weights_x + ) + self.assertTrue(cube_collapsed.has_lazy_data()) + self.assertArrayAlmostEqual( + cube_collapsed.data, self.expected_result_x + ) + + def test_weighted_1dweights_real_x(self): + # 1-D weights, real data, ** collapse X ** : as for 'y' case above + cube_collapsed = self.cube_real.collapsed( + "x", MEAN, weights=self.x_weights + ) + self.assertArrayAlmostEqual( + 
cube_collapsed.data, self.expected_result_x + ) + + def test_weighted_1dweights_lazy_x(self): + # 1-D weights, lazy data, ** collapse X ** : as for 'y' case above + cube_collapsed = self.cube_lazy.collapsed( + "x", MEAN, weights=self.x_weights + ) + self.assertTrue(cube_collapsed.has_lazy_data()) + self.assertArrayAlmostEqual( + cube_collapsed.data, self.expected_result_x + ) + + class Test_collapsed__cellmeasure_ancils(tests.IrisTest): def setUp(self): cube = Cube(np.arange(6.0).reshape((2, 3))) @@ -484,6 +586,16 @@ def test_ancillary_variable(self): ) self.assertEqual(cube.summary(), expected_summary) + def test_similar_coords(self): + coord1 = AuxCoord( + 42, long_name="foo", attributes=dict(bar=np.array([2, 5])) + ) + coord2 = coord1.copy() + coord2.attributes = dict(bar="baz") + for coord in [coord1, coord2]: + self.cube.add_aux_coord(coord) + self.assertIn("baz", self.cube.summary()) + class Test_is_compatible(tests.IrisTest): def setUp(self): @@ -2044,6 +2156,14 @@ def test_remove_ancilliary_variable(self): ) self.assertEqual(self.cube._ancillary_variables_and_dims, []) + def test_remove_ancilliary_variable_by_name(self): + self.cube.remove_ancillary_variable("Quality of Detection") + self.assertEqual(self.cube._ancillary_variables_and_dims, []) + + def test_fail_remove_ancilliary_variable_by_name(self): + with self.assertRaises(AncillaryVariableNotFoundError): + self.cube.remove_ancillary_variable("notname") + class Test__getitem_CellMeasure(tests.IrisTest): def setUp(self): @@ -2146,6 +2266,16 @@ def test_fail_ancill_variable_dims(self): with self.assertRaises(AncillaryVariableNotFoundError): self.cube.ancillary_variable_dims(ancillary_variable) + def test_ancillary_variable_dims_by_name(self): + ancill_var_dims = self.cube.ancillary_variable_dims( + "number_of_observations" + ) + self.assertEqual(ancill_var_dims, (0, 1)) + + def test_fail_ancillary_variable_dims_by_name(self): + with self.assertRaises(AncillaryVariableNotFoundError): + 
self.cube.ancillary_variable_dims("notname") + class TestCellMeasures(tests.IrisTest): def setUp(self): @@ -2194,6 +2324,14 @@ def test_fail_cell_measure_dims(self): with self.assertRaises(CellMeasureNotFoundError): _ = self.cube.cell_measure_dims(a_cell_measure) + def test_cell_measure_dims_by_name(self): + cm_dims = self.cube.cell_measure_dims("area") + self.assertEqual(cm_dims, (0, 1)) + + def test_fail_cell_measure_dims_by_name(self): + with self.assertRaises(CellMeasureNotFoundError): + self.cube.cell_measure_dims("notname") + class Test_transpose(tests.IrisTest): def setUp(self): diff --git a/lib/iris/tests/unit/fileformats/netcdf/test__load_aux_factory.py b/lib/iris/tests/unit/fileformats/netcdf/test__load_aux_factory.py index 48cc9c0d1a7..c8f9460e0f3 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/test__load_aux_factory.py +++ b/lib/iris/tests/unit/fileformats/netcdf/test__load_aux_factory.py @@ -53,8 +53,44 @@ def test_formula_terms_ap(self): self.assertEqual(factory.surface_air_pressure, self.ps) def test_formula_terms_a_p0(self): - coord_a = DimCoord(np.arange(5), units="Pa") - coord_p0 = DimCoord(10, units="1") + coord_a = DimCoord(np.arange(5), units="1") + coord_p0 = DimCoord(10, units="Pa") + coord_expected = DimCoord( + np.arange(5) * 10, + units="Pa", + long_name="vertical pressure", + var_name="ap", + ) + self.cube_parts["coordinates"].extend( + [(coord_a, "a"), (coord_p0, "p0")] + ) + self.requires["formula_terms"] = dict(a="a", b="b", ps="ps", p0="p0") + _load_aux_factory(self.engine, self.cube) + # Check cube.coord_dims method. + self.assertEqual(self.cube.coord_dims.call_count, 1) + args, _ = self.cube.coord_dims.call_args + self.assertEqual(len(args), 1) + self.assertIs(args[0], coord_a) + # Check cube.add_aux_coord method. 
+ self.assertEqual(self.cube.add_aux_coord.call_count, 1) + args, _ = self.cube.add_aux_coord.call_args + self.assertEqual(len(args), 2) + self.assertEqual(args[0], coord_expected) + self.assertIsInstance(args[1], mock.Mock) + # Check cube.add_aux_factory method. + self.assertEqual(self.cube.add_aux_factory.call_count, 1) + args, _ = self.cube.add_aux_factory.call_args + self.assertEqual(len(args), 1) + factory = args[0] + self.assertEqual(factory.delta, coord_expected) + self.assertEqual(factory.sigma, mock.sentinel.b) + self.assertEqual(factory.surface_air_pressure, self.ps) + + def test_formula_terms_a_p0__promote_a_units_unknown_to_dimensionless( + self, + ): + coord_a = DimCoord(np.arange(5), units="unknown") + coord_p0 = DimCoord(10, units="Pa") coord_expected = DimCoord( np.arange(5) * 10, units="Pa", @@ -71,6 +107,7 @@ def test_formula_terms_a_p0(self): args, _ = self.cube.coord_dims.call_args self.assertEqual(len(args), 1) self.assertIs(args[0], coord_a) + self.assertEqual("1", args[0].units) # Check cube.add_aux_coord method. self.assertEqual(self.cube.add_aux_coord.call_count, 1) args, _ = self.cube.add_aux_coord.call_args diff --git a/lib/iris/tests/unit/quickplot/test_plot.py b/lib/iris/tests/unit/quickplot/test_plot.py index 0a88107a6fa..9bc4a7dca3c 100644 --- a/lib/iris/tests/unit/quickplot/test_plot.py +++ b/lib/iris/tests/unit/quickplot/test_plot.py @@ -8,6 +8,7 @@ # Import iris.tests first so that some things can be initialised before # importing anything else. 
import iris.tests as tests +from iris.tests.stock import simple_1d from iris.tests.unit.plot import TestGraphicStringCoord if tests.MPL_AVAILABLE: @@ -29,5 +30,29 @@ def test_xaxis_labels(self): self.assertBoundsTickLabels("xaxis") +class TestAxisLabels(tests.GraphicsTest): + def test_xy_cube(self): + c = simple_1d() + qplt.plot(c) + ax = qplt.plt.gca() + x = ax.xaxis.get_label().get_text() + self.assertEqual(x, "Foo") + y = ax.yaxis.get_label().get_text() + self.assertEqual(y, "Thingness") + + def test_yx_cube(self): + c = simple_1d() + c.transpose() + # Making the cube a vertical coordinate should change the default + # orientation of the plot. + c.coord("foo").attributes["positive"] = "up" + qplt.plot(c) + ax = qplt.plt.gca() + x = ax.xaxis.get_label().get_text() + self.assertEqual(x, "Thingness") + y = ax.yaxis.get_label().get_text() + self.assertEqual(y, "Foo") + + if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/unit/representation/__init__.py b/lib/iris/tests/unit/representation/__init__.py new file mode 100644 index 00000000000..e943ad149b7 --- /dev/null +++ b/lib/iris/tests/unit/representation/__init__.py @@ -0,0 +1,6 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +"""Unit tests for the :mod:`iris._representation` module.""" diff --git a/lib/iris/tests/unit/representation/test_representation.py b/lib/iris/tests/unit/representation/test_representation.py new file mode 100644 index 00000000000..212f454e707 --- /dev/null +++ b/lib/iris/tests/unit/representation/test_representation.py @@ -0,0 +1,187 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
+"""Unit tests for the :mod:`iris._representation` module.""" + +import numpy as np +import iris.tests as tests +import iris._representation +from iris.cube import Cube +from iris.coords import ( + DimCoord, + AuxCoord, + CellMeasure, + AncillaryVariable, + CellMethod, +) + + +def example_cube(): + cube = Cube( + np.arange(6).reshape([3, 2]), + standard_name="air_temperature", + long_name="screen_air_temp", + var_name="airtemp", + units="K", + ) + lat = DimCoord([0, 1, 2], standard_name="latitude", units="degrees") + cube.add_dim_coord(lat, 0) + return cube + + +class Test_CubeSummary(tests.IrisTest): + def setUp(self): + self.cube = example_cube() + + def test_header(self): + rep = iris._representation.CubeSummary(self.cube) + header_left = rep.header.nameunit + header_right = rep.header.dimension_header.contents + + self.assertEqual(header_left, "air_temperature / (K)") + self.assertEqual(header_right, ["latitude: 3", "-- : 2"]) + + def test_blank_cube(self): + cube = Cube([1, 2]) + rep = iris._representation.CubeSummary(cube) + + self.assertEqual(rep.header.nameunit, "unknown / (unknown)") + self.assertEqual(rep.header.dimension_header.contents, ["-- : 2"]) + + expected_vector_sections = [ + "Dimension coordinates:", + "Auxiliary coordinates:", + "Derived coordinates:", + "Cell Measures:", + "Ancillary Variables:", + ] + self.assertEqual( + list(rep.vector_sections.keys()), expected_vector_sections + ) + for title in expected_vector_sections: + vector_section = rep.vector_sections[title] + self.assertEqual(vector_section.contents, []) + self.assertTrue(vector_section.is_empty()) + + expected_scalar_sections = [ + "Scalar Coordinates:", + "Scalar cell measures:", + "Attributes:", + "Cell methods:", + ] + + self.assertEqual( + list(rep.scalar_sections.keys()), expected_scalar_sections + ) + for title in expected_scalar_sections: + scalar_section = rep.scalar_sections[title] + self.assertEqual(scalar_section.contents, []) + 
self.assertTrue(scalar_section.is_empty()) + + def test_vector_coord(self): + rep = iris._representation.CubeSummary(self.cube) + dim_section = rep.vector_sections["Dimension coordinates:"] + + self.assertEqual(len(dim_section.contents), 1) + self.assertFalse(dim_section.is_empty()) + + dim_summary = dim_section.contents[0] + + name = dim_summary.name + dim_chars = dim_summary.dim_chars + extra = dim_summary.extra + + self.assertEqual(name, "latitude") + self.assertEqual(dim_chars, ["x", "-"]) + self.assertEqual(extra, "") + + def test_scalar_coord(self): + cube = self.cube + scalar_coord_no_bounds = AuxCoord([10], long_name="bar", units="K") + scalar_coord_with_bounds = AuxCoord( + [10], long_name="foo", units="K", bounds=[(5, 15)] + ) + scalar_coord_text = AuxCoord( + ["a\nb\nc"], long_name="foo", attributes={"key": "value"} + ) + cube.add_aux_coord(scalar_coord_no_bounds) + cube.add_aux_coord(scalar_coord_with_bounds) + cube.add_aux_coord(scalar_coord_text) + rep = iris._representation.CubeSummary(cube) + + scalar_section = rep.scalar_sections["Scalar Coordinates:"] + + self.assertEqual(len(scalar_section.contents), 3) + + no_bounds_summary = scalar_section.contents[0] + bounds_summary = scalar_section.contents[1] + text_summary = scalar_section.contents[2] + + self.assertEqual(no_bounds_summary.name, "bar") + self.assertEqual(no_bounds_summary.content, "10 K") + self.assertEqual(no_bounds_summary.extra, "") + + self.assertEqual(bounds_summary.name, "foo") + self.assertEqual(bounds_summary.content, "10 K, bound=(5, 15) K") + self.assertEqual(bounds_summary.extra, "") + + self.assertEqual(text_summary.name, "foo") + self.assertEqual(text_summary.content, "a\nb\nc") + self.assertEqual(text_summary.extra, "key='value'") + + def test_cell_measure(self): + cube = self.cube + cell_measure = CellMeasure([1, 2, 3], long_name="foo") + cube.add_cell_measure(cell_measure, 0) + rep = iris._representation.CubeSummary(cube) + + cm_section = rep.vector_sections["Cell 
Measures:"] + self.assertEqual(len(cm_section.contents), 1) + + cm_summary = cm_section.contents[0] + self.assertEqual(cm_summary.name, "foo") + self.assertEqual(cm_summary.dim_chars, ["x", "-"]) + + def test_ancillary_variable(self): + cube = self.cube + cell_measure = AncillaryVariable([1, 2, 3], long_name="foo") + cube.add_ancillary_variable(cell_measure, 0) + rep = iris._representation.CubeSummary(cube) + + av_section = rep.vector_sections["Ancillary Variables:"] + self.assertEqual(len(av_section.contents), 1) + + av_summary = av_section.contents[0] + self.assertEqual(av_summary.name, "foo") + self.assertEqual(av_summary.dim_chars, ["x", "-"]) + + def test_attributes(self): + cube = self.cube + cube.attributes = {"a": 1, "b": "two"} + rep = iris._representation.CubeSummary(cube) + + attribute_section = rep.scalar_sections["Attributes:"] + attribute_contents = attribute_section.contents + expected_contents = ["a: 1", "b: two"] + + self.assertEqual(attribute_contents, expected_contents) + + def test_cell_methods(self): + cube = self.cube + x = AuxCoord(1, long_name="x") + y = AuxCoord(1, long_name="y") + cell_method_xy = CellMethod("mean", [x, y]) + cell_method_x = CellMethod("mean", x) + cube.add_cell_method(cell_method_xy) + cube.add_cell_method(cell_method_x) + + rep = iris._representation.CubeSummary(cube) + cell_method_section = rep.scalar_sections["Cell methods:"] + expected_contents = ["mean: x, y", "mean: x"] + self.assertEqual(cell_method_section.contents, expected_contents) + + +if __name__ == "__main__": + tests.main() diff --git a/noxfile.py b/noxfile.py new file mode 100644 index 00000000000..b6f9480290e --- /dev/null +++ b/noxfile.py @@ -0,0 +1,276 @@ +""" +Perform test automation with nox. + +For further details, see https://nox.thea.codes/en/stable/# + +""" + +import hashlib +import os +from pathlib import Path + +import nox + + +#: Default to reusing any pre-existing nox environments. 
+nox.options.reuse_existing_virtualenvs = True + +#: Name of the package to test. +PACKAGE = str("lib" / Path("iris")) + +#: Cirrus-CI environment variable hook. +PY_VER = os.environ.get("PY_VER", ["3.6", "3.7"]) + +#: Default cartopy cache directory. +CARTOPY_CACHE_DIR = os.environ.get("HOME") / Path(".local/share/cartopy") + + +def venv_cached(session): + """ + Determine whether the nox session environment has been cached. + + Parameters + ---------- + session: object + A `nox.sessions.Session` object. + + Returns + ------- + bool + Whether the session has been cached. + + """ + result = False + yml = Path(f"requirements/ci/py{session.python.replace('.', '')}.yml") + tmp_dir = Path(session.create_tmp()) + cache = tmp_dir / yml.name + if cache.is_file(): + with open(yml, "rb") as fi: + expected = hashlib.sha256(fi.read()).hexdigest() + with open(cache, "r") as fi: + actual = fi.read() + result = actual == expected + return result + + +def cache_venv(session): + """ + Cache the nox session environment. + + This consists of saving a hexdigest (sha256) of the associated + conda requirements YAML file. + + Parameters + ---------- + session: object + A `nox.sessions.Session` object. + + """ + yml = Path(f"requirements/ci/py{session.python.replace('.', '')}.yml") + with open(yml, "rb") as fi: + hexdigest = hashlib.sha256(fi.read()).hexdigest() + tmp_dir = Path(session.create_tmp()) + cache = tmp_dir / yml.name + with open(cache, "w") as fo: + fo.write(hexdigest) + + +def cache_cartopy(session): + """ + Determine whether to cache the cartopy natural earth shapefiles. + + Parameters + ---------- + session: object + A `nox.sessions.Session` object. + + """ + if not CARTOPY_CACHE_DIR.is_dir(): + session.run( + "python", + "-c", + "import cartopy; cartopy.io.shapereader.natural_earth()", + ) + + +def prepare_venv(session): + """ + Create and cache the nox session conda environment, and additionally + provide conda environment package details and info. 
+ + Note that, iris is installed into the environment using pip. + + Parameters + ---------- + session: object + A `nox.sessions.Session` object. + + Notes + ----- + See + - https://github.com/theacodes/nox/issues/346 + - https://github.com/theacodes/nox/issues/260 + + """ + if not venv_cached(session): + # Determine the conda requirements yaml file. + fname = f"requirements/ci/py{session.python.replace('.', '')}.yml" + # Back-door approach to force nox to use "conda env update". + command = ( + "conda", + "env", + "update", + f"--prefix={session.virtualenv.location}", + f"--file={fname}", + "--prune", + ) + session._run(*command, silent=True, external="error") + cache_venv(session) + + cache_cartopy(session) + session.install("--no-deps", "--editable", ".") + + # Determine whether verbose diagnostics have been requested + # from the command line. + verbose = "-v" in session.posargs or "--verbose" in session.posargs + + if verbose: + session.run("conda", "info") + session.run("conda", "list", f"--prefix={session.virtualenv.location}") + session.run( + "conda", + "list", + f"--prefix={session.virtualenv.location}", + "--explicit", + ) + + +@nox.session +def flake8(session): + """ + Perform flake8 linting of iris. + + Parameters + ---------- + session: object + A `nox.sessions.Session` object. + + """ + # Pip install the session requirements. + session.install("flake8") + # Execute the flake8 linter on the package. + session.run("flake8", PACKAGE) + # Execute the flake8 linter on this file. + session.run("flake8", __file__) + + +@nox.session +def black(session): + """ + Perform black format checking of iris. + + Parameters + ---------- + session: object + A `nox.sessions.Session` object. + + """ + # Pip install the session requirements. + session.install("black==20.8b1") + # Execute the black format checker on the package. + session.run("black", "--check", PACKAGE) + # Execute the black format checker on this file. 
+ session.run("black", "--check", __file__) + + +@nox.session(python=PY_VER, venv_backend="conda") +def tests(session): + """ + Perform iris system, integration and unit tests. + + Parameters + ---------- + session: object + A `nox.sessions.Session` object. + + """ + prepare_venv(session) + session.run( + "python", + "-m", + "iris.tests.runner", + "--default-tests", + "--system-tests", + ) + + +@nox.session(python=PY_VER, venv_backend="conda") +def gallery(session): + """ + Perform iris gallery doc-tests. + + Parameters + ---------- + session: object + A `nox.sessions.Session` object. + + """ + prepare_venv(session) + session.run( + "python", + "-m", + "iris.tests.runner", + "--gallery-tests", + ) + + +@nox.session(python=PY_VER, venv_backend="conda") +def doctest(session): + """ + Perform iris doc-tests. + + Parameters + ---------- + session: object + A `nox.sessions.Session` object. + + """ + prepare_venv(session) + session.cd("docs") + session.run( + "make", + "clean", + "html", + external=True, + ) + session.run( + "make", + "doctest", + external=True, + ) + + +@nox.session(python=PY_VER, venv_backend="conda") +def linkcheck(session): + """ + Perform iris doc link check. + + Parameters + ---------- + session: object + A `nox.sessions.Session` object. + + """ + prepare_venv(session) + session.cd("docs") + session.run( + "make", + "clean", + "html", + external=True, + ) + session.run( + "make", + "linkcheck", + external=True, + ) diff --git a/requirements/ci/py36.yml b/requirements/ci/py36.yml index 8cc6ab2308f..4d9d25d7c61 100644 --- a/requirements/ci/py36.yml +++ b/requirements/ci/py36.yml @@ -13,7 +13,7 @@ dependencies: # Core dependencies. - cartopy>=0.18 - cf-units>=2 - - cftime + - cftime<1.3.0 - dask>=2 - matplotlib - netcdf4 @@ -35,6 +35,7 @@ dependencies: - asv - black=20.8b1 - filelock + - flake8 - imagehash>=4.0 - nose - pillow<7 @@ -43,12 +44,8 @@ dependencies: # Documentation dependencies. 
- sphinx + - sphinxcontrib-napoleon - sphinx-copybutton - sphinx-gallery + - sphinx-panels - sphinx_rtd_theme - - pip - - pip: - - sphinxcontrib-napoleon - - sphinx-panels - - sphinxcontrib-spelling - - pyenchant diff --git a/requirements/ci/py37.yml b/requirements/ci/py37.yml index dc9de8daefe..23fda873657 100644 --- a/requirements/ci/py37.yml +++ b/requirements/ci/py37.yml @@ -13,7 +13,7 @@ dependencies: # Core dependencies. - cartopy>=0.18 - cf-units>=2 - - cftime + - cftime<1.3.0 - dask>=2 - matplotlib - netcdf4 @@ -35,6 +35,7 @@ dependencies: - asv - black=20.8b1 - filelock + - flake8 - imagehash>=4.0 - nose - pillow<7 @@ -43,12 +44,12 @@ dependencies: # Documentation dependencies. - sphinx + - sphinxcontrib-napoleon - sphinx-copybutton - sphinx-gallery + - sphinx-panels - sphinx_rtd_theme - pip - pip: - - sphinxcontrib-napoleon - - sphinx-panels - sphinxcontrib-spelling - pyenchant diff --git a/requirements/core.txt b/requirements/core.txt index 0b59c573ec1..9e0c4fb1bbb 100644 --- a/requirements/core.txt +++ b/requirements/core.txt @@ -2,7 +2,7 @@ cartopy>=0.18 cf-units>=2 -cftime +cftime<1.3.0 dask[array]>=2 matplotlib netcdf4 diff --git a/requirements/setup.txt b/requirements/setup.txt index 2e14da49055..9232946a6aa 100644 --- a/requirements/setup.txt +++ b/requirements/setup.txt @@ -1,6 +1,4 @@ # Dependencies necessary to run setup.py of iris # ---------------------------------------------- -scitools-pyke setuptools>=40.8.0 -wheel diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index a87902cbfda..00000000000 --- a/setup.cfg +++ /dev/null @@ -1,44 +0,0 @@ -[flake8] -ignore = E402,\ # Due to conditional imports - E226 # Due to whitespace around operators (e.g. 
2*x + 3) -exclude = */iris/std_names.py,\ - */iris/fileformats/cf.py,\ - */iris/fileformats/dot.py,\ - */iris/fileformats/pp_load_rules.py,\ - */iris/fileformats/rules.py,\ - */iris/fileformats/um_cf_map.py,\ - */iris/fileformats/_pyke_rules/compiled_krb/*,\ - */iris/io/__init__.py,\ - */iris/io/format_picker.py,\ - */iris/tests/__init__.py,\ - */iris/tests/pp.py,\ - */iris/tests/system_test.py,\ - */iris/tests/test_analysis.py,\ - */iris/tests/test_analysis_calculus.py,\ - */iris/tests/test_basic_maths.py,\ - */iris/tests/test_cartography.py,\ - */iris/tests/test_cdm.py,\ - */iris/tests/test_cell.py,\ - */iris/tests/test_cf.py,\ - */iris/tests/test_constraints.py,\ - */iris/tests/test_coord_api.py,\ - */iris/tests/test_coord_categorisation.py,\ - */iris/tests/test_coordsystem.py,\ - */iris/tests/test_cube_to_pp.py,\ - */iris/tests/test_file_load.py,\ - */iris/tests/test_file_save.py,\ - */iris/tests/test_hybrid.py,\ - */iris/tests/test_intersect.py,\ - */iris/tests/test_io_init.py,\ - */iris/tests/test_iterate.py,\ - */iris/tests/test_load.py,\ - */iris/tests/test_merge.py,\ - */iris/tests/test_pp_cf.py,\ - */iris/tests/test_pp_module.py,\ - */iris/tests/test_pp_stash.py,\ - */iris/tests/test_pp_to_cube.py,\ - */iris/tests/test_quickplot.py,\ - */iris/tests/test_std_names.py,\ - */iris/tests/test_uri_callback.py,\ - */iris/tests/test_util.py -