diff --git a/docs/src/further_topics/controlling_merge.rst b/docs/src/further_topics/controlling_merge.rst
index 3f788a8493..85fe46e745 100644
--- a/docs/src/further_topics/controlling_merge.rst
+++ b/docs/src/further_topics/controlling_merge.rst
@@ -4,6 +4,21 @@
Controlling Merge and Concatenate
=================================
+
+Preliminaries
+-------------
+
+The following code would have been necessary with the loading behaviour prior to version 3.11.0. For the sake of
+demonstration, we revert to this legacy loading behaviour as follows:
+
+ >>> iris.LOAD_POLICY.set("legacy")
+
+.. note::
+    The default settings for :data:`iris.LOAD_POLICY` effectively implement a version of the following
+    demonstration automatically upon loading. It may still be worth knowing how to handle this manually,
+    in case an even finer degree of control is required.
+
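+Once finished, the default behaviour can be restored with
+``iris.LOAD_POLICY.set("default")``, or the legacy behaviour can instead be applied
+only within a limited scope, via ``with iris.LOAD_POLICY.context("legacy"): ...``.
+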
+How to Merge Cubes When Coordinates Differ
+------------------------------------------
+
Sometimes it is not possible to appropriately combine a CubeList using merge and concatenate on their own. In such cases
it is possible to achieve much more control over cube combination by using the :func:`~iris.util.new_axis` utility.
@@ -200,5 +215,8 @@ Once merged, we can now concatenate all these cubes into a single result cube, w
source 'Data from Met Office Unified Model'
um_version '12.1'
-.. todo::
- Mention the work done in #6168
+See Also
+--------
+* :data:`iris.LOAD_POLICY` can be controlled to apply similar operations
+ within the load functions, i.e. :func:`~iris.load`, :func:`~iris.load_cube` and
+ :func:`~iris.load_cubes`.
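+
+For example, a sketch of applying a stricter combine policy around a single load
+(with a hypothetical ``filename``)::
+
+    with iris.LOAD_POLICY.context("comprehensive"):
+        cubes = iris.load(filename)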
diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst
index 58e3f5f956..cb1ccdf348 100644
--- a/docs/src/whatsnew/latest.rst
+++ b/docs/src/whatsnew/latest.rst
@@ -45,6 +45,10 @@ This document explains the changes made to Iris for this release
Requested in :issue:`5970`, actioned in :pull:`6169`.
+#. `@pp-mo`_ and `@stephenworsley`_ added support for hybrid coordinates whose
+   references are split across multiple input fields, and :data:`~iris.LOAD_POLICY` to
+ control it, as requested in :issue:`5369`, actioned in :pull:`6168`.
+
🐛 Bugs Fixed
=============
diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py
index a06e36a2e2..d4454efe89 100644
--- a/lib/iris/__init__.py
+++ b/lib/iris/__init__.py
@@ -94,7 +94,7 @@ def callback(cube, field, filename):
import itertools
import os.path
import threading
-from typing import Callable, Literal
+from typing import Callable, Literal, Mapping
import iris._constraints
import iris.config
@@ -121,6 +121,8 @@ def callback(cube, field, filename):
"FUTURE",
"Future",
"IrisDeprecation",
+ "LOAD_POLICY",
+ "LoadPolicy",
"NameConstraint",
"load",
"load_cube",
@@ -292,8 +294,17 @@ def _generate_cubes(uris, callback, constraints):
def _load_collection(uris, constraints=None, callback=None):
from iris.cube import _CubeFilterCollection
+ from iris.fileformats.rules import _MULTIREF_DETECTION
try:
+        # This routine is called once per iris load operation.
+        # Control of the "multiple refs" handling is implicit in this routine.
+        # NOTE: detection of multiple reference fields, and its enabling of
+        # post-load concatenation, is triggered **per-load, not per-cube**.
+        # This behaves unexpectedly for "iris.load_cubes": a post-load
+        # concatenation is triggered for all cubes or none, not per-cube
+        # (i.e. per constraint).
+ _MULTIREF_DETECTION.found_multiple_refs = False
+
cubes = _generate_cubes(uris, callback, constraints)
result = _CubeFilterCollection.from_cubes(cubes, constraints)
except EOFError as e:
@@ -303,6 +314,326 @@ def _load_collection(uris, constraints=None, callback=None):
return result
+class LoadPolicy(threading.local):
+ """A container for loading strategy options.
+
+ Controls merge/concatenate usage during loading.
+
+ Also controls the detection and handling of cases where a hybrid coordinate
+    uses multiple reference fields: for example, a UM file containing a series of
+ fields describing time-varying orography.
+
+ Options can be set directly, or via :meth:`~iris.LoadPolicy.set`, or changed for
+ the scope of a code block with :meth:`~iris.LoadPolicy.context`.
+
+ .. note ::
+
+ The default behaviour will "fix" loading for cases like the one just described.
+ However this is not strictly backwards-compatible. If this causes problems,
+ you can force identical loading behaviour to earlier Iris versions with
+ ``LOAD_POLICY.set("legacy")`` or equivalent.
+
+ .. testsetup::
+
+ from iris import LOAD_POLICY
+
+ Notes
+ -----
+    The individual configurable options are:
+
+ * ``support_multiple_references`` = True / False
+ When enabled, the presence of multiple aux-factory reference cubes, which merge
+        to define an extra dimension, will add that dimension to the loaded cubes.
+ This is essential for correct support of time-dependent hybrid coordinates (i.e.
+ aux factories) when loading from fields-based data (e.g. PP or GRIB).
+ For example (notably) time-dependent orography in UM data on hybrid-heights.
+
+ In addition, when such multiple references are detected, an extra concatenate
+ step is added to the 'merge_concat_sequence' (see below), if none is already
+ configured there.
+
+ * ``merge_concat_sequence`` = "m" / "c" / "cm" / "mc"
+ Specifies whether to merge, or concatenate, or both in either order.
+ This is the "combine" operation which is applied to loaded data.
+
+ * ``repeat_until_unchanged`` = True / False
+ When enabled, the configured "combine" operation will be repeated until the
+ result is stable (no more cubes are combined).
+
+    Several common sets of options are provided in :data:`~iris.LoadPolicy.SETTINGS`:
+
+ * ``"legacy"``
+ Produces results identical to Iris versions < 3.11, i.e. before the varying
+ hybrid references were supported.
+
+ * ``"default"``
+ As "legacy" except that ``support_multiple_references=True``. This differs
+ from "legacy" only when multiple mergeable reference fields are encountered,
+ in which case incoming cubes are extended into the extra dimension, and a
+ concatenate step is added.
+
+ * ``"recommended"``
+ Enables multiple reference handling, and applies a merge step followed by
+ a concatenate step.
+
+ * ``"comprehensive"``
+ Like "recommended", but will also *repeat* the merge+concatenate steps until no
+ further change is produced.
+
+ .. note ::
+
+ The 'comprehensive' policy makes a maximum effort to reduce the number of
+ cubes to a minimum. However, it still cannot combine cubes with a mixture
+ of matching dimension and scalar coordinates. This may be supported at
+ some later date, but for now is not possible without specific user actions.
+
+    .. note ::
+
+ See also : :ref:`controlling_merge`.
+
+ Examples
+ --------
+ >>> LOAD_POLICY.set("legacy")
+ >>> print(LOAD_POLICY)
+ LoadPolicy(support_multiple_references=False, merge_concat_sequence='m', repeat_until_unchanged=False)
+ >>> LOAD_POLICY.support_multiple_references = True
+ >>> print(LOAD_POLICY)
+ LoadPolicy(support_multiple_references=True, merge_concat_sequence='m', repeat_until_unchanged=False)
+ >>> LOAD_POLICY.set(merge_concat_sequence="cm")
+ >>> print(LOAD_POLICY)
+ LoadPolicy(support_multiple_references=True, merge_concat_sequence='cm', repeat_until_unchanged=False)
+ >>> with LOAD_POLICY.context("comprehensive"):
+ ... print(LOAD_POLICY)
+ LoadPolicy(support_multiple_references=True, merge_concat_sequence='mc', repeat_until_unchanged=True)
+ >>> print(LOAD_POLICY)
+ LoadPolicy(support_multiple_references=True, merge_concat_sequence='cm', repeat_until_unchanged=False)
+
+ """
+
+ # Useful constants
+ OPTION_KEYS = (
+ "support_multiple_references",
+ "merge_concat_sequence",
+ "repeat_until_unchanged",
+ )
+ _OPTIONS_ALLOWED_VALUES = {
+ "support_multiple_references": (False, True),
+ "merge_concat_sequence": ("", "m", "c", "mc", "cm"),
+ "repeat_until_unchanged": (False, True),
+ }
+ SETTINGS = {
+ "legacy": dict(
+ support_multiple_references=False,
+ merge_concat_sequence="m",
+ repeat_until_unchanged=False,
+ ),
+ "default": dict(
+ support_multiple_references=True,
+ merge_concat_sequence="m",
+ repeat_until_unchanged=False,
+ ),
+ "recommended": dict(
+ support_multiple_references=True,
+ merge_concat_sequence="mc",
+ repeat_until_unchanged=False,
+ ),
+ "comprehensive": dict(
+ support_multiple_references=True,
+ merge_concat_sequence="mc",
+ repeat_until_unchanged=True,
+ ),
+ }
+
+ def __init__(self, options: str | dict | None = None, **kwargs):
+ """Create loading strategy control object."""
+ self.set("default")
+ self.set(options, **kwargs)
+
+ def __setattr__(self, key, value):
+ if key not in self.OPTION_KEYS:
+ raise KeyError(f"LoadPolicy object has no property '{key}'.")
+
+ allowed_values = self._OPTIONS_ALLOWED_VALUES[key]
+ if value not in allowed_values:
+ msg = (
+ f"{value!r} is not a valid setting for LoadPolicy.{key} : "
+ f"must be one of '{allowed_values}'."
+ )
+ raise ValueError(msg)
+
+ self.__dict__[key] = value
+
+ def set(self, options: str | dict | None = None, **kwargs):
+ """Set new options.
+
+ Parameters
+ ----------
+        options : str or dict, optional
+            A dictionary of options values, or the name of one of the
+            :data:`~iris.LoadPolicy.SETTINGS` standard option sets,
+            e.g. "legacy" or "comprehensive".
+        **kwargs : dict
+            Individual option settings, from :data:`~iris.LoadPolicy.OPTION_KEYS`.
+
+ Note
+ ----
+ Keyword arguments are applied after the 'options' arg, and
+ so will take precedence.
+
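+        For example, ``LOAD_POLICY.set("legacy", merge_concat_sequence="mc")``
+        applies the "legacy" settings, but with a merge-then-concatenate sequence.
+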
+ """
+ if options is None:
+ options = {}
+ elif isinstance(options, str) and options in self.SETTINGS:
+ options = self.SETTINGS[options]
+ elif not isinstance(options, Mapping):
+ msg = (
+ f"Invalid arg options={options!r} : "
+ f"must be a dict, or one of {tuple(self.SETTINGS.keys())}"
+ )
+ raise TypeError(msg)
+
+        # Override any options with keywords, taking a copy so that the
+        # SETTINGS presets (or a caller's dict) are never modified in-place.
+        options = dict(options)
+        options.update(**kwargs)
+ bad_keys = [key for key in options if key not in self.OPTION_KEYS]
+ if bad_keys:
+ msg = f"Unknown options {bad_keys} : valid options are {self.OPTION_KEYS}."
+ raise ValueError(msg)
+
+ # Implement all options by changing own content.
+ for key, value in options.items():
+ setattr(self, key, value)
+
+ def settings(self):
+ """Return an options dict containing the current settings."""
+ return {key: getattr(self, key) for key in self.OPTION_KEYS}
+
+ def __repr__(self):
+ msg = f"{self.__class__.__name__}("
+ msg += ", ".join(f"{key}={getattr(self, key)!r}" for key in self.OPTION_KEYS)
+ msg += ")"
+ return msg
+
+ @contextlib.contextmanager
+ def context(self, settings=None, **kwargs):
+ """Return a context manager applying given options.
+
+ Parameters
+ ----------
+ settings : str or dict
+ Options dictionary or name, as for :meth:`~LoadPolicy.set`.
+        **kwargs : dict
+ Option values, as for :meth:`~LoadPolicy.set`.
+
+ Examples
+ --------
+ .. testsetup::
+
+ import iris
+ from iris import LOAD_POLICY, sample_data_path
+
+ >>> path = sample_data_path("time_varying_hybrid_height", "*.pp")
+ >>> with LOAD_POLICY.context("legacy"):
+ ... cubes = iris.load(path, "x_wind")
+ >>> print(cubes)
+ 0: x_wind / (m s-1) (time: 2; model_level_number: 5; latitude: 144; longitude: 192)
+ 1: x_wind / (m s-1) (time: 12; model_level_number: 5; latitude: 144; longitude: 192)
+ 2: x_wind / (m s-1) (model_level_number: 5; latitude: 144; longitude: 192)
+ >>>
+ >>> with LOAD_POLICY.context("recommended"):
+ ... cubes = iris.load(path, "x_wind")
+ >>> print(cubes)
+ 0: x_wind / (m s-1) (model_level_number: 5; time: 15; latitude: 144; longitude: 192)
+ """
+ # Save the current state
+ saved_settings = self.settings()
+
+ # Apply the new options and execute the context
+ try:
+ self.set(settings, **kwargs)
+ yield
+ finally:
+ # Re-establish the former state
+ self.set(saved_settings)
+
+
+#: A control object containing the current file loading options.
+LOAD_POLICY = LoadPolicy()
+
+
+def _combine_cubes(cubes, options, merge_require_unique):
+ """Combine cubes as for load, according to "loading policy" options.
+
+ Applies :meth:`~iris.cube.CubeList.merge`/:meth:`~iris.cube.CubeList.concatenate`
+    steps to the given cubes, as determined by the given 'options'.
+
+ Parameters
+ ----------
+ cubes : list of :class:`~iris.cube.Cube`
+ A list of cubes to combine.
+ options : dict
+        Settings, as described for :meth:`iris.LOAD_POLICY.set`.
+        Typically the current :meth:`iris.LOAD_POLICY.settings`.
+ merge_require_unique : bool
+ Value for the 'unique' keyword in any merge operations.
+
+ Returns
+ -------
+ :class:`~iris.cube.CubeList`
+
+    .. note::
+        The ``support_multiple_references`` keyword/property has no effect on the
+        :func:`_combine_cubes` operation: it only takes effect during a load operation.
+
+ Notes
+ -----
+ TODO: make this public API in future.
+ At that point, change the API to support (options=None, **kwargs) + add testing of
+ those modes (notably arg type = None / str / dict).
+
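+    Examples
+    --------
+    A sketch of the intended (currently private) usage, applying the current
+    load policy settings::
+
+        from iris import LOAD_POLICY
+        combined = _combine_cubes(
+            cubes, LOAD_POLICY.settings(), merge_require_unique=False
+        )
+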
+ """
+ from iris.cube import CubeList
+
+ if not isinstance(cubes, CubeList):
+ cubes = CubeList(cubes)
+
+ while True:
+ n_original_cubes = len(cubes)
+ sequence = options["merge_concat_sequence"]
+
+        if sequence[:1] == "c":
+            # concat if it comes first
+            cubes = cubes.concatenate()
+        if "m" in sequence:
+            # merge if requested
+            cubes = cubes.merge(unique=merge_require_unique)
+        if len(sequence) > 1 and sequence[-1] == "c":
+            # concat if it comes last (and wasn't already done first)
+            cubes = cubes.concatenate()
+
+ # Repeat if requested, *and* this step reduced the number of cubes
+ if not options["repeat_until_unchanged"] or len(cubes) >= n_original_cubes:
+ break
+
+ return cubes
+
+
+def _combine_load_cubes(cubes, merge_require_unique=False):
+ # A special version to call _combine_cubes while also implementing the
+ # _MULTIREF_DETECTION behaviour
+ options = LOAD_POLICY.settings()
+ if (
+ options["support_multiple_references"]
+ and "c" not in options["merge_concat_sequence"]
+ ):
+ # Add a concatenate to implement the "multiref triggers concatenate" mechanism
+ from iris.fileformats.rules import _MULTIREF_DETECTION
+
+ if _MULTIREF_DETECTION.found_multiple_refs:
+ options["merge_concat_sequence"] += "c"
+
+ return _combine_cubes(cubes, options, merge_require_unique=merge_require_unique)
+
+
def load(uris, constraints=None, callback=None):
"""Load any number of Cubes for each constraint.
@@ -327,7 +658,8 @@ def load(uris, constraints=None, callback=None):
were random.
"""
- return _load_collection(uris, constraints, callback).merged().cubes()
+ cubes = _load_collection(uris, constraints, callback).combined().cubes()
+ return cubes
def load_cube(uris, constraint=None, callback=None):
@@ -355,9 +687,11 @@ def load_cube(uris, constraint=None, callback=None):
if len(constraints) != 1:
raise ValueError("only a single constraint is allowed")
- cubes = _load_collection(uris, constraints, callback).cubes()
+ cubes = _load_collection(uris, constraints, callback).combined(unique=False).cubes()
try:
+ # NOTE: this call currently retained to preserve the legacy exceptions
+ # TODO: replace with simple testing to duplicate the relevant error cases
cube = cubes.merge_cube()
except iris.exceptions.MergeError as e:
raise iris.exceptions.ConstraintMismatchError(str(e))
@@ -392,7 +726,7 @@ def load_cubes(uris, constraints=None, callback=None):
"""
# Merge the incoming cubes
- collection = _load_collection(uris, constraints, callback).merged()
+ collection = _load_collection(uris, constraints, callback).combined()
# Make sure we have exactly one merged cube per constraint
bad_pairs = [pair for pair in collection.pairs if len(pair) != 1]
diff --git a/lib/iris/cube.py b/lib/iris/cube.py
index 40e50da4ff..30ac3432b7 100644
--- a/lib/iris/cube.py
+++ b/lib/iris/cube.py
@@ -78,8 +78,10 @@ def add(self, cube):
if sub_cube is not None:
self.cubes.append(sub_cube)
- def merged(self, unique=False):
- """Return a new :class:`_CubeFilter` by merging the list of cubes.
+ def combined(self, unique=False):
+ """Return a new :class:`_CubeFilter` by combining the list of cubes.
+
+ Combines the list of cubes with :func:`~iris._combine_load_cubes`.
Parameters
----------
@@ -88,7 +90,12 @@ def merged(self, unique=False):
duplicate cubes are detected.
"""
- return _CubeFilter(self.constraint, self.cubes.merge(unique))
+ from iris import _combine_load_cubes
+
+ return _CubeFilter(
+ self.constraint,
+ _combine_load_cubes(self.cubes, merge_require_unique=unique),
+ )
class _CubeFilterCollection:
@@ -113,14 +120,18 @@ def add_cube(self, cube):
pair.add(cube)
def cubes(self):
- """Return all the cubes in this collection concatenated into a single :class:`CubeList`."""
+ """Return all the cubes in this collection in a single :class:`CubeList`."""
+ from iris.cube import CubeList
+
result = CubeList()
for pair in self.pairs:
result.extend(pair.cubes)
return result
- def merged(self, unique=False):
- """Return a new :class:`_CubeFilterCollection` by merging all the cube lists of this collection.
+ def combined(self, unique=False):
+ """Return a new :class:`_CubeFilterCollection` by combining all the cube lists of this collection.
+
+ Combines each list of cubes using :func:`~iris._combine_load_cubes`.
Parameters
----------
@@ -129,7 +140,7 @@ def merged(self, unique=False):
duplicate cubes are detected.
"""
- return _CubeFilterCollection([pair.merged(unique) for pair in self.pairs])
+ return _CubeFilterCollection([pair.combined(unique) for pair in self.pairs])
class CubeList(list):
diff --git a/lib/iris/fileformats/rules.py b/lib/iris/fileformats/rules.py
index 8299021fb5..2a1a74f374 100644
--- a/lib/iris/fileformats/rules.py
+++ b/lib/iris/fileformats/rules.py
@@ -5,6 +5,7 @@
"""Generalised mechanisms for metadata translation and cube construction."""
import collections
+import threading
import warnings
import cf_units
@@ -143,7 +144,11 @@ class _ReferenceError(Exception):
def _dereference_args(factory, reference_targets, regrid_cache, cube):
- """Convert all the arguments for a factory into concrete coordinates."""
+ """Convert all the arguments for a factory into concrete coordinates.
+
+ Note: where multiple reference fields define an additional dimension, this routine
+ returns a modified 'cube', with the necessary additional dimensions.
+ """
args = []
for arg in factory.args:
if isinstance(arg, Reference):
@@ -151,7 +156,7 @@ def _dereference_args(factory, reference_targets, regrid_cache, cube):
src = reference_targets[arg.name].as_cube()
# If necessary, regrid the reference cube to
# match the grid of this cube.
- src = _ensure_aligned(regrid_cache, src, cube)
+ src, cube = _ensure_aligned(regrid_cache, src, cube)
if src is not None:
new_coord = iris.coords.AuxCoord(
src.data,
@@ -178,7 +183,8 @@ def _dereference_args(factory, reference_targets, regrid_cache, cube):
# If it wasn't a Reference, then arg is a dictionary
# of keyword arguments for cube.coord(...).
args.append(cube.coord(**arg))
- return args
+
+ return args, cube
def _regrid_to_target(src_cube, target_coords, target_cube):
@@ -211,9 +217,9 @@ def _ensure_aligned(regrid_cache, src_cube, target_cube):
# Check that each of src_cube's dim_coords matches up with a single
# coord on target_cube.
try:
- target_coords = []
+ target_dimcoords = []
for dim_coord in src_cube.dim_coords:
- target_coords.append(target_cube.coord(dim_coord))
+ target_dimcoords.append(target_cube.coord(dim_coord))
except iris.exceptions.CoordinateNotFoundError:
# One of the src_cube's dim_coords didn't exist on the
# target_cube... so we can't regrid (i.e. just return None).
@@ -222,7 +228,32 @@ def _ensure_aligned(regrid_cache, src_cube, target_cube):
# So we can use `iris.analysis.interpolate.linear()` later,
# ensure each target coord is either a scalar or maps to a
# single, distinct dimension.
- target_dims = [target_cube.coord_dims(coord) for coord in target_coords]
+    # First, where needed, promote relevant scalar coords on the target into
+    # dimension coords.
+    from iris import LOAD_POLICY
+
+    for target_coord in target_dimcoords:
+        if (
+            not target_cube.coord_dims(target_coord)
+            and LOAD_POLICY.support_multiple_references
+        ):
+ # The chosen coord is not a dimcoord in the target (yet)
+ # Make it one with 'new_axis'
+ from iris.util import new_axis
+
+ _MULTIREF_DETECTION.found_multiple_refs = True
+            # Also promote any other coords which span that dim in the src
+            # (e.g. 'forecast_period' along with 'time'), so that the new axis
+            # carries all the coordinates which vary along it.
+ (src_dim,) = src_cube.coord_dims(target_coord) # should have 1 dim
+ promote_other_coords = [
+ target_cube.coord(src_coord)
+ for src_coord in src_cube.coords(contains_dimension=src_dim)
+ if src_coord.name() != target_coord.name()
+ ]
+ target_cube = new_axis(
+ target_cube, target_coord, expand_extras=promote_other_coords
+ )
+ target_dims = [target_cube.coord_dims(coord) for coord in target_dimcoords]
target_dims = list(filter(None, target_dims))
unique_dims = set()
for dims in target_dims:
@@ -236,19 +267,19 @@ def _ensure_aligned(regrid_cache, src_cube, target_cube):
grids, cubes = regrid_cache[cache_key]
# 'grids' is a list of tuples of coordinates, so convert
# the 'target_coords' list into a tuple to be consistent.
- target_coords = tuple(target_coords)
+ target_dimcoords = tuple(target_dimcoords)
try:
# Look for this set of target coordinates in the cache.
- i = grids.index(target_coords)
+ i = grids.index(target_dimcoords)
result_cube = cubes[i]
except ValueError:
# Not already cached, so do the hard work of interpolating.
- result_cube = _regrid_to_target(src_cube, target_coords, target_cube)
+ result_cube = _regrid_to_target(src_cube, target_dimcoords, target_cube)
# Add it to the cache.
- grids.append(target_coords)
+ grids.append(target_dimcoords)
cubes.append(result_cube)
- return result_cube
+ return result_cube, target_cube
class Loader(
@@ -331,7 +362,7 @@ def _resolve_factory_references(
# across multiple result cubes.
for factory in factories:
try:
- args = _dereference_args(
+ args, cube = _dereference_args(
factory, concrete_reference_targets, regrid_cache, cube
)
except _ReferenceError as e:
@@ -345,6 +376,34 @@ def _resolve_factory_references(
aux_factory = factory.factory_class(*args)
cube.add_aux_factory(aux_factory)
+ # In the case of multiple references which vary on a new dimension
+ # (such as time-dependent orography or surface-pressure), the cube may get replaced
+ # by one with a new dimension.
+ # In that case we must update the factory so its dependencies are coords of the
+ # new cube.
+ cube_coord_ids = [
+ id(coord) for coord, _ in cube._dim_coords_and_dims + cube._aux_coords_and_dims
+ ]
+ for factory in cube.aux_factories:
+ for name, dep in list(factory.dependencies.items()):
+ if dep and id(dep) not in cube_coord_ids:
+ factory.update(dep, cube.coord(dep))
+
+ return cube
+
+
+class MultipleReferenceFieldDetector(threading.local):
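+    """Thread-local flag, recording whether multiple reference fields have been
+    detected during the current load operation."""
+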
+ def __init__(self):
+ self.found_multiple_refs = False
+
+
+# A single global object (per thread) to record whether multiple reference fields
+# (e.g. time-dependent orography, or surface pressure fields) have been detected
+# during the latest load operation.
+# This is used purely to implement the "multiple references trigger concatenate"
+# behaviour controlled by iris.LOAD_POLICY.support_multiple_references.
+_MULTIREF_DETECTION = MultipleReferenceFieldDetector()
+
def _load_pairs_from_fields_and_filenames(
fields_and_filenames, converter, user_callback_wrapper=None
@@ -355,6 +414,7 @@ def _load_pairs_from_fields_and_filenames(
# needs a filename associated with each field to support the load callback.
concrete_reference_targets = {}
results_needing_reference = []
+
for field, filename in fields_and_filenames:
# Convert the field to a Cube, passing down the 'converter' function.
cube, factories, references = _make_cube(field, converter)
@@ -383,7 +443,7 @@ def _load_pairs_from_fields_and_filenames(
regrid_cache = {}
for cube, factories, field in results_needing_reference:
- _resolve_factory_references(
+ cube = _resolve_factory_references(
cube, factories, concrete_reference_targets, regrid_cache
)
yield (cube, field)
diff --git a/lib/iris/tests/integration/test_trajectory.py b/lib/iris/tests/integration/test_trajectory.py
index aa4ce67a3b..f56970f9fa 100644
--- a/lib/iris/tests/integration/test_trajectory.py
+++ b/lib/iris/tests/integration/test_trajectory.py
@@ -11,6 +11,7 @@
import numpy as np
import iris
+from iris import LOAD_POLICY
from iris._lazy_data import as_lazy_data
from iris.analysis.trajectory import Trajectory
from iris.analysis.trajectory import interpolate as traj_interpolate
@@ -22,7 +23,9 @@ class TestColpex(tests.IrisTest):
def setUp(self):
# Load the COLPEX data => TZYX
path = tests.get_data_path(["PP", "COLPEX", "theta_and_orog_subset.pp"])
- cube = iris.load_cube(path, "air_potential_temperature")
+ # Fix to ignore time-varying orography, for the purposes of these tests
+ with LOAD_POLICY.context(support_multiple_references=False):
+ cube = iris.load_cube(path, "air_potential_temperature")
cube.coord("grid_latitude").bounds = None
cube.coord("grid_longitude").bounds = None
# TODO: Workaround until regrid can handle factories
diff --git a/lib/iris/tests/integration/varying_references/__init__.py b/lib/iris/tests/integration/varying_references/__init__.py
new file mode 100644
index 0000000000..3c37f02242
--- /dev/null
+++ b/lib/iris/tests/integration/varying_references/__init__.py
@@ -0,0 +1,12 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the BSD license.
+# See LICENSE in the root of the repository for full licensing details.
+"""Integration tests for loading with varying references.
+
+Practically, this mostly means loading from fields-based file formats such as PP and
+GRIB, in cases where hybrid vertical coordinates have time-varying reference fields,
+e.g. hybrid height with time-varying orography, or hybrid pressure with time-varying
+surface pressure.
+
+"""
diff --git a/lib/iris/tests/integration/varying_references/test_realdata_load.py b/lib/iris/tests/integration/varying_references/test_realdata_load.py
new file mode 100644
index 0000000000..edf2b00824
--- /dev/null
+++ b/lib/iris/tests/integration/varying_references/test_realdata_load.py
@@ -0,0 +1,58 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the BSD license.
+# See LICENSE in the root of the repository for full licensing details.
+"""Test loading PP data with time-varying orography."""
+
+import pytest
+
+import iris
+from iris import LOAD_POLICY, sample_data_path
+
+
+@pytest.fixture(params=["default", "recommended", "legacy"])
+def load_policy(request):
+ return request.param
+
+
+def test_load_pp_timevarying_orography(load_policy):
+ testdata_dirpath = sample_data_path("time_varying_hybrid_height", "*.pp")
+
+ with LOAD_POLICY.context(load_policy):
+ cubes = iris.load(testdata_dirpath)
+
+ n_cubes = len(cubes)
+ if load_policy == "legacy":
+        # This doesn't merge fully: we get a separate phenomenon cube per reference field.
+ assert n_cubes == 4
+ else:
+ # Other load policies load with full merge, producing a 4D result.
+ assert n_cubes == 2
+ phenom_cube = cubes.extract_cube("x_wind")
+ ref_cube = cubes.extract_cube("surface_altitude")
+
+ cube_dims = [
+ phenom_cube.coord(dim_coords=True, dimensions=i_dim).name()
+ for i_dim in range(phenom_cube.ndim)
+ ]
+ assert cube_dims == ["model_level_number", "time", "latitude", "longitude"]
+
+ ref_coord = phenom_cube.coord("surface_altitude")
+ ref_coord_dims = [
+ phenom_cube.coord(dim_coords=True, dimensions=i_dim).name()
+ for i_dim in phenom_cube.coord_dims(ref_coord)
+ ]
+ assert ref_coord_dims == ["time", "latitude", "longitude"]
+
+ ref_cube_dims = [
+ ref_cube.coord(dim_coords=True, dimensions=i_dim).name()
+ for i_dim in range(ref_cube.ndim)
+ ]
+    assert ref_cube_dims == ["time", "latitude", "longitude"]
+
+ derived_coord = phenom_cube.coord("altitude")
+ derived_dims = [
+ phenom_cube.coord(dim_coords=True, dimensions=i_dim).name()
+ for i_dim in phenom_cube.coord_dims(derived_coord)
+ ]
+ assert derived_dims == ["model_level_number", "time", "latitude", "longitude"]
diff --git a/lib/iris/tests/integration/varying_references/test_roundtrip_time_varying_references.py b/lib/iris/tests/integration/varying_references/test_roundtrip_time_varying_references.py
new file mode 100644
index 0000000000..0ad4b5a941
--- /dev/null
+++ b/lib/iris/tests/integration/varying_references/test_roundtrip_time_varying_references.py
@@ -0,0 +1,341 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the BSD license.
+# See LICENSE in the root of the repository for full licensing details.
+"""Code to save and re-load hybrid vertical coordinates with variable reference fields.
+
+Tests all combinations of:
+ * file format: PP, GRIB and NetCDF
+ * reference fields: static (for legacy reference) and time-dependent
+ * hybrid coordinate fields:
+ * hybrid-height levels with orography, and
+ * hybrid-pressure levels with surface-pressure
+"""
+
+import numpy as np
+import pytest
+
+import iris
+from iris import LOAD_POLICY
+from iris.aux_factory import HybridHeightFactory, HybridPressureFactory
+from iris.coord_systems import GeogCS
+from iris.coords import AuxCoord, DimCoord
+from iris.cube import Cube, CubeList
+from iris.fileformats.pp import EARTH_RADIUS, STASH
+from iris.util import new_axis
+
+try:
+ import iris_grib
+except ImportError:
+ iris_grib = None
+
+# General test dimensions = (timepoints, levels, lats, lons)
+NT, NZ, NY, NX = (3, 4, 5, 6)
+
+
+def make_hybrid_z_testdata(
+ hybrid_zcoord_type="height",
+ make_reference_time_dependent=True,
+ include_reference_as_cube=False,
+):
+ """Construct a realistic synthetic data cube with a hybrid vertical coordinate.
+
+ Parameters
+ ----------
+    hybrid_zcoord_type : str, default "height"
+        Either "height" or "pressure".
+    make_reference_time_dependent : bool, default True
+        If True, the reference coord has dims (t, y, x), otherwise just (y, x).
+    include_reference_as_cube : bool, default False
+        If True, the result includes a separate cube of the reference values,
+        since this must be separately passed to save for the fields-based formats.
+
+ Returns
+ -------
+ cubes
+ A list containing a cube with (t, z, y, x) dims and the appropriate
+ aux-factory.
+ Optionally, if "include_reference_as_cube" is True, an extra cube
+    containing the reference data is also returned.
+
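+    Examples
+    --------
+    Typical testcase construction, e.g. for saving to a fields-based format::
+
+        cubes = make_hybrid_z_testdata(
+            hybrid_zcoord_type="pressure",
+            include_reference_as_cube=True,
+        )
+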
+ """
+ crs = GeogCS(EARTH_RADIUS)
+ z_dim, t_dim, y_dim, x_dim = 0, 1, 2, 3
+ co_t = DimCoord(
+ np.arange(NT, dtype=np.float32),
+ standard_name="time",
+ units="days since 2000-01-01",
+ )
+ co_z = DimCoord(
+ np.arange(1, NZ + 1, dtype=np.int32),
+ standard_name="model_level_number",
+ units=1,
+ )
+ co_y = DimCoord(
+ np.linspace(0, 120.0, NY, dtype=np.float32),
+ standard_name="latitude",
+ units="degrees",
+ coord_system=crs,
+ )
+ co_x = DimCoord(
+ np.linspace(-30.0, 50.0, NX, dtype=np.float32),
+ standard_name="longitude",
+ units="degrees",
+ coord_system=crs,
+ )
+ cube = Cube(
+ np.zeros((NZ, NT, NY, NX), dtype=np.float32),
+ standard_name="air_temperature",
+ units="K",
+ dim_coords_and_dims=zip((co_t, co_z, co_y, co_x), (t_dim, z_dim, y_dim, x_dim)),
+ )
+
+ delta_vals = np.linspace(200.0, 600, NZ, dtype=np.float32)
+ if hybrid_zcoord_type == "pressure":
+ co_delta = DimCoord(delta_vals, long_name="delta", units="hPa")
+ elif hybrid_zcoord_type == "height":
+ co_delta = DimCoord(delta_vals, long_name="level_height", units="m")
+ else:
+ raise ValueError(f"Unknown hybrid coordinate type: {hybrid_zcoord_type}")
+
+ sigma_vals = np.linspace(0.2, 0.8, NZ, dtype=np.float32)
+ co_sigma = DimCoord(sigma_vals, long_name="sigma", units=1)
+
+    # Note: will not save as hybrid-height (HH) to PP without bounds on delta+sigma
+ for coord in (co_delta, co_sigma):
+ coord.guess_bounds()
+ cube.add_aux_coord(co_delta, z_dim)
+ cube.add_aux_coord(co_sigma, z_dim)
+
+ refdata = np.arange(NT * NY * NX, dtype=np.float32)
+ refdata = 1000.0 + refdata.reshape(NT, NY, NX)
+ if hybrid_zcoord_type == "pressure":
+ co_ref = AuxCoord(
+ refdata,
+ standard_name="surface_air_pressure",
+ units="hPa",
+ attributes={"STASH": STASH(model=1, section=0, item=409)},
+ )
+ elif hybrid_zcoord_type == "height":
+ co_ref = AuxCoord(
+ refdata,
+ standard_name="surface_altitude",
+ units="m",
+ attributes={"STASH": STASH(model=1, section=0, item=33)},
+ )
+ else:
+ raise ValueError(f"Unknown hybrid type: {hybrid_zcoord_type}")
+
+ ref_dims = (t_dim, y_dim, x_dim)
+ if not make_reference_time_dependent:
+ co_ref = co_ref[0]
+ ref_dims = ref_dims[1:]
+
+ cube.add_aux_coord(co_ref, ref_dims)
+ if hybrid_zcoord_type == "pressure":
+ factory = HybridPressureFactory(
+ sigma=co_sigma, delta=co_delta, surface_air_pressure=co_ref
+ )
+ elif hybrid_zcoord_type == "height":
+ factory = HybridHeightFactory(sigma=co_sigma, delta=co_delta, orography=co_ref)
+ else:
+ raise ValueError(f"Unknown hybrid type: {hybrid_zcoord_type}")
+
+ cube.add_aux_factory(factory)
+
+ cubes = CubeList([cube])
+
+ if include_reference_as_cube:
+ ref_dimcoords = [
+ cube.coord(dim_coords=True, dimensions=cube_refdim)
+ for cube_refdim in cube.coord_dims(co_ref)
+ ]
+ reference_cube = Cube(
+ co_ref.points,
+ standard_name=co_ref.standard_name,
+ units=co_ref.units,
+ dim_coords_and_dims=[
+ (ref_dimcoord, i_refdim)
+ for i_refdim, ref_dimcoord in enumerate(ref_dimcoords)
+ ],
+ attributes=co_ref.attributes,
+ )
+ if not reference_cube.coords("time"):
+ # Add a dummy time coordinate to non-time-dependent reference cube
+ # - mostly because otherwise it cannot be saved to GRIB format
+            # NOTE: we give this a different nominal time from any of the data:
+            # when there is only one reference field, its recorded time value
+            # should be **ignored** by the loader.
+ reference_cube.add_aux_coord(
+ DimCoord(
+ np.array(0, dtype=np.float32),
+ standard_name="time",
+ units="days since 1900-01-01",
+ )
+ )
+ cubes.append(reference_cube)
+
+ return cubes
+
+
+def check_expected(result_cubes, file_extension, time_dependence, zcoord_type):
+ assert len(result_cubes) == 2
+ result_phenom = result_cubes.extract_cube("air_temperature")
+
+ if zcoord_type == "pressure":
+ ref_coord_name = ref_cube_name = "surface_air_pressure"
+ if file_extension == "grib2":
+ ref_cube_name = "air_pressure"
+ elif zcoord_type == "height":
+ ref_coord_name = ref_cube_name = "surface_altitude"
+ else:
+ raise ValueError(f"Unknown hybrid coordinate type: {zcoord_type}")
+
+ result_ref_cube = result_cubes.extract_cube(ref_cube_name)
+ result_ref_coord = result_phenom.coord(ref_coord_name)
+
+ # Check that the reference cube and the coord are equivalent
+ assert result_ref_coord.shape == result_ref_cube.shape
+ assert np.array_equal(result_ref_cube.data, result_ref_coord.points)
+    assert result_ref_coord.bounds is None  # bounds are unused in our testcases
+
+ # Check the expected phenomenon shape
+ if time_dependence == "static" and file_extension in ("pp", "grib2"):
+ phenom_shape = (NT, NZ, NY, NX)
+ else:
+ phenom_shape = (NZ, NT, NY, NX)
+ assert result_phenom.shape == phenom_shape
+
+ # Check expected reference values against calculated values.
+ # This shows that the reference was correctly divided into 2d fields and
+ # reconstructed on load to match the original (for fields-based formats).
+ if time_dependence == "static":
+ ref_shape = (NY, NX)
+ else:
+ ref_shape = (NT, NY, NX)
+ ref_data = 1000.0 + np.arange(np.prod(ref_shape)).reshape(ref_shape)
+ if zcoord_type == "pressure" and file_extension == "grib2":
+ # values come back in Pa not hPa
+ ref_data *= 100.0
+ assert np.array_equal(ref_data, result_ref_cube.data)
+
+
+_file_formats = ["pp", "nc"]
+if iris_grib:
+ _file_formats += ["grib2"]
+
+
+@pytest.fixture(params=_file_formats)
+def file_extension(request):
+ return request.param
+
+
+@pytest.fixture(params=["static", "time_varying"])
+def time_dependence(request):
+ return request.param
+
+
+@pytest.fixture(params=["height", "pressure"])
+def zcoord_type(request):
+ return request.param
+
+
+@pytest.fixture(params=[f"{name}_policy" for name in LOAD_POLICY.SETTINGS])
+def load_policy(request):
+ return request.param
+
+
+def test_roundtrip(file_extension, time_dependence, zcoord_type, load_policy, tmp_path):
+ if (
+ load_policy == "legacy_policy"
+ and time_dependence == "time_varying"
+ and file_extension in ("pp", "grib2")
+ ):
+ pytest.skip("Testcase not supported in 'legacy' mode.")
+
+ filepath = tmp_path / f"tmp.{file_extension}"
+ include_ref = file_extension in ("grib2", "pp")
+ is_time_dependent = time_dependence == "time_varying"
+ data = make_hybrid_z_testdata(
+ hybrid_zcoord_type=zcoord_type,
+ include_reference_as_cube=include_ref,
+ make_reference_time_dependent=is_time_dependent,
+ )
+
+ iris.save(data, filepath)
+
+ policy_name = load_policy.split("_")[0]
+ with LOAD_POLICY.context(policy_name):
+        # NOTE: "legacy" mode would fail for the time-varying fields-based
+        # cases, but those are skipped above.
+ readback = iris.load(filepath)
+
+ check_expected(
+ readback,
+ file_extension=file_extension,
+ time_dependence=time_dependence,
+ zcoord_type=zcoord_type,
+ )
+
+
+def test_split_netcdf_roundtrip(zcoord_type, load_policy, tmp_path):
+ # NetCDF special test : split the data into 2D slices (like "fields"),
+ # and save each to a different file.
+ policy_name = load_policy.split("_")[0]
+ reference_surface_name = {
+ "pressure": "surface_air_pressure",
+ "height": "surface_altitude",
+ }[zcoord_type]
+
+ data = make_hybrid_z_testdata(
+ hybrid_zcoord_type=zcoord_type,
+ include_reference_as_cube=False,
+ make_reference_time_dependent=True,
+ )
+
+    # There is just 1 cube for netcdf (no separate reference cube).
+    (data,) = data
+    # Split it into 2D YX "field" cubes.
+ field_cubes = list(data.slices(("latitude", "longitude")))
+ # Reinstate a length-1 "time" dimension in each cube.
+ field_cubes = [
+ new_axis(field_cube, "time", expand_extras=[reference_surface_name])
+ for field_cube in field_cubes
+ ]
+ # Save to 1 file per 'field_cube'
+ result_paths = [
+ tmp_path / f"field_{i_field:02d}.nc" for i_field in range(len(field_cubes))
+ ]
+ for field_cube, path in zip(field_cubes, result_paths):
+ iris.save(field_cube, path)
+
+ # load back with the chosen policy.
+ with LOAD_POLICY.context(policy_name):
+ readback = iris.load(result_paths)
+
+ n_cubes = len(readback)
+ n_datacubes = len(readback.extract("air_temperature"))
+    if policy_name in ("legacy", "default"):
+        assert (n_cubes, n_datacubes) == (15, 3)
+    elif policy_name in ("recommended", "comprehensive"):
+        assert (n_cubes, n_datacubes) == (5, 1)
+ else:
+ raise ValueError(f"unknown policy {policy_name!r}")
+
+ if n_datacubes == 1:
+ check_expected(
+ CubeList(
+ [
+ readback.extract_cube("air_temperature"),
+ # include only 1 of N (identical) reference cubes
+ # (all this would be easier if we could rely on load-cube ordering!)
+ readback.extract(reference_surface_name)[0],
+ ]
+ ),
+            file_extension="nc",
+            time_dependence="time_varying",
+ zcoord_type=zcoord_type,
+ )
diff --git a/lib/iris/tests/results/cube_io/pickling/cubelist.cml b/lib/iris/tests/results/cube_io/pickling/cubelist.cml
index c52486b1d0..6510b200e6 100644
--- a/lib/iris/tests/results/cube_io/pickling/cubelist.cml
+++ b/lib/iris/tests/results/cube_io/pickling/cubelist.cml
@@ -7,398 +7,2360 @@
 [CML reference data regenerated: the derived "altitude" coordinate now has
 shape (70, 6, 100, 100), gaining a time dimension; full numeric point-value
 dump omitted.]
diff --git a/lib/iris/tests/results/cube_io/pickling/single_cube.cml b/lib/iris/tests/results/cube_io/pickling/single_cube.cml
index eb3e9d0112..aead0825ea 100644
--- a/lib/iris/tests/results/cube_io/pickling/single_cube.cml
+++ b/lib/iris/tests/results/cube_io/pickling/single_cube.cml
@@ -7,398 +7,2360 @@
-
-
+
+
+ [[[32192.732 , 32192.732 , 32192.732 , ...,
+ 32192.732 , 32192.732 , 32192.732 ],
+ [32192.732 , 32192.732 , 32192.732 , ...,
+ 32192.732 , 32192.732 , 32192.732 ],
+ [32192.732 , 32192.732 , 32192.732 , ...,
+ 32192.732 , 32192.732 , 32192.732 ],
+ ...,
+ [32192.732 , 32192.732 , 32192.732 , ...,
+ 32192.732 , 32192.732 , 32192.732 ],
+ [32192.732 , 32192.732 , 32192.732 , ...,
+ 32192.732 , 32192.732 , 32192.732 ],
+ [32192.732 , 32192.732 , 32192.732 , ...,
+ 32192.732 , 32192.732 , 32192.732 ]],
+
+ [[32192.732 , 32192.732 , 32192.732 , ...,
+ 32192.732 , 32192.732 , 32192.732 ],
+ [32192.732 , 32192.732 , 32192.732 , ...,
+ 32192.732 , 32192.732 , 32192.732 ],
+ [32192.732 , 32192.732 , 32192.732 , ...,
+ 32192.732 , 32192.732 , 32192.732 ],
+ ...,
+ [32192.732 , 32192.732 , 32192.732 , ...,
+ 32192.732 , 32192.732 , 32192.732 ],
+ [32192.732 , 32192.732 , 32192.732 , ...,
+ 32192.732 , 32192.732 , 32192.732 ],
+ [32192.732 , 32192.732 , 32192.732 , ...,
+ 32192.732 , 32192.732 , 32192.732 ]],
+
+ [[32192.732 , 32192.732 , 32192.732 , ...,
+ 32192.732 , 32192.732 , 32192.732 ],
+ [32192.732 , 32192.732 , 32192.732 , ...,
+ 32192.732 , 32192.732 , 32192.732 ],
+ [32192.732 , 32192.732 , 32192.732 , ...,
+ 32192.732 , 32192.732 , 32192.732 ],
+ ...,
+ [32192.732 , 32192.732 , 32192.732 , ...,
+ 32192.732 , 32192.732 , 32192.732 ],
+ [32192.732 , 32192.732 , 32192.732 , ...,
+ 32192.732 , 32192.732 , 32192.732 ],
+ [32192.732 , 32192.732 , 32192.732 , ...,
+ 32192.732 , 32192.732 , 32192.732 ]],
+
+ [[32192.732 , 32192.732 , 32192.732 , ...,
+ 32192.732 , 32192.732 , 32192.732 ],
+ [32192.732 , 32192.732 , 32192.732 , ...,
+ 32192.732 , 32192.732 , 32192.732 ],
+ [32192.732 , 32192.732 , 32192.732 , ...,
+ 32192.732 , 32192.732 , 32192.732 ],
+ ...,
+ [32192.732 , 32192.732 , 32192.732 , ...,
+ 32192.732 , 32192.732 , 32192.732 ],
+ [32192.732 , 32192.732 , 32192.732 , ...,
+ 32192.732 , 32192.732 , 32192.732 ],
+ [32192.732 , 32192.732 , 32192.732 , ...,
+ 32192.732 , 32192.732 , 32192.732 ]],
+
+ [[32192.732 , 32192.732 , 32192.732 , ...,
+ 32192.732 , 32192.732 , 32192.732 ],
+ [32192.732 , 32192.732 , 32192.732 , ...,
+ 32192.732 , 32192.732 , 32192.732 ],
+ [32192.732 , 32192.732 , 32192.732 , ...,
+ 32192.732 , 32192.732 , 32192.732 ],
+ ...,
+ [32192.732 , 32192.732 , 32192.732 , ...,
+ 32192.732 , 32192.732 , 32192.732 ],
+ [32192.732 , 32192.732 , 32192.732 , ...,
+ 32192.732 , 32192.732 , 32192.732 ],
+ [32192.732 , 32192.732 , 32192.732 , ...,
+ 32192.732 , 32192.732 , 32192.732 ]],
+
+ [[32192.732 , 32192.732 , 32192.732 , ...,
+ 32192.732 , 32192.732 , 32192.732 ],
+ [32192.732 , 32192.732 , 32192.732 , ...,
+ 32192.732 , 32192.732 , 32192.732 ],
+ [32192.732 , 32192.732 , 32192.732 , ...,
+ 32192.732 , 32192.732 , 32192.732 ],
+ ...,
+ [32192.732 , 32192.732 , 32192.732 , ...,
+ 32192.732 , 32192.732 , 32192.732 ],
+ [32192.732 , 32192.732 , 32192.732 , ...,
+ 32192.732 , 32192.732 , 32192.732 ],
+ [32192.732 , 32192.732 , 32192.732 , ...,
+ 32192.732 , 32192.732 , 32192.732 ]]],
+
+
+ [[[35845.004 , 35845.004 , 35845.004 , ...,
+ 35845.004 , 35845.004 , 35845.004 ],
+ [35845.004 , 35845.004 , 35845.004 , ...,
+ 35845.004 , 35845.004 , 35845.004 ],
+ [35845.004 , 35845.004 , 35845.004 , ...,
+ 35845.004 , 35845.004 , 35845.004 ],
+ ...,
+ [35845.004 , 35845.004 , 35845.004 , ...,
+ 35845.004 , 35845.004 , 35845.004 ],
+ [35845.004 , 35845.004 , 35845.004 , ...,
+ 35845.004 , 35845.004 , 35845.004 ],
+ [35845.004 , 35845.004 , 35845.004 , ...,
+ 35845.004 , 35845.004 , 35845.004 ]],
+
+ [[35845.004 , 35845.004 , 35845.004 , ...,
+ 35845.004 , 35845.004 , 35845.004 ],
+ [35845.004 , 35845.004 , 35845.004 , ...,
+ 35845.004 , 35845.004 , 35845.004 ],
+ [35845.004 , 35845.004 , 35845.004 , ...,
+ 35845.004 , 35845.004 , 35845.004 ],
+ ...,
+ [35845.004 , 35845.004 , 35845.004 , ...,
+ 35845.004 , 35845.004 , 35845.004 ],
+ [35845.004 , 35845.004 , 35845.004 , ...,
+ 35845.004 , 35845.004 , 35845.004 ],
+ [35845.004 , 35845.004 , 35845.004 , ...,
+ 35845.004 , 35845.004 , 35845.004 ]],
+
+ [[35845.004 , 35845.004 , 35845.004 , ...,
+ 35845.004 , 35845.004 , 35845.004 ],
+ [35845.004 , 35845.004 , 35845.004 , ...,
+ 35845.004 , 35845.004 , 35845.004 ],
+ [35845.004 , 35845.004 , 35845.004 , ...,
+ 35845.004 , 35845.004 , 35845.004 ],
+ ...,
+ [35845.004 , 35845.004 , 35845.004 , ...,
+ 35845.004 , 35845.004 , 35845.004 ],
+ [35845.004 , 35845.004 , 35845.004 , ...,
+ 35845.004 , 35845.004 , 35845.004 ],
+ [35845.004 , 35845.004 , 35845.004 , ...,
+ 35845.004 , 35845.004 , 35845.004 ]],
+
+ [[35845.004 , 35845.004 , 35845.004 , ...,
+ 35845.004 , 35845.004 , 35845.004 ],
+ [35845.004 , 35845.004 , 35845.004 , ...,
+ 35845.004 , 35845.004 , 35845.004 ],
+ [35845.004 , 35845.004 , 35845.004 , ...,
+ 35845.004 , 35845.004 , 35845.004 ],
+ ...,
+ [35845.004 , 35845.004 , 35845.004 , ...,
+ 35845.004 , 35845.004 , 35845.004 ],
+ [35845.004 , 35845.004 , 35845.004 , ...,
+ 35845.004 , 35845.004 , 35845.004 ],
+ [35845.004 , 35845.004 , 35845.004 , ...,
+ 35845.004 , 35845.004 , 35845.004 ]],
+
+ [[35845.004 , 35845.004 , 35845.004 , ...,
+ 35845.004 , 35845.004 , 35845.004 ],
+ [35845.004 , 35845.004 , 35845.004 , ...,
+ 35845.004 , 35845.004 , 35845.004 ],
+ [35845.004 , 35845.004 , 35845.004 , ...,
+ 35845.004 , 35845.004 , 35845.004 ],
+ ...,
+ [35845.004 , 35845.004 , 35845.004 , ...,
+ 35845.004 , 35845.004 , 35845.004 ],
+ [35845.004 , 35845.004 , 35845.004 , ...,
+ 35845.004 , 35845.004 , 35845.004 ],
+ [35845.004 , 35845.004 , 35845.004 , ...,
+ 35845.004 , 35845.004 , 35845.004 ]],
+
+ [[35845.004 , 35845.004 , 35845.004 , ...,
+ 35845.004 , 35845.004 , 35845.004 ],
+ [35845.004 , 35845.004 , 35845.004 , ...,
+ 35845.004 , 35845.004 , 35845.004 ],
+ [35845.004 , 35845.004 , 35845.004 , ...,
+ 35845.004 , 35845.004 , 35845.004 ],
+ ...,
+ [35845.004 , 35845.004 , 35845.004 , ...,
+ 35845.004 , 35845.004 , 35845.004 ],
+ [35845.004 , 35845.004 , 35845.004 , ...,
+ 35845.004 , 35845.004 , 35845.004 ],
+ [35845.004 , 35845.004 , 35845.004 , ...,
+ 35845.004 , 35845.004 , 35845.004 ]]],
+
+
+ [[[40000. , 40000. , 40000. , ...,
+ 40000. , 40000. , 40000. ],
+ [40000. , 40000. , 40000. , ...,
+ 40000. , 40000. , 40000. ],
+ [40000. , 40000. , 40000. , ...,
+ 40000. , 40000. , 40000. ],
+ ...,
+ [40000. , 40000. , 40000. , ...,
+ 40000. , 40000. , 40000. ],
+ [40000. , 40000. , 40000. , ...,
+ 40000. , 40000. , 40000. ],
+ [40000. , 40000. , 40000. , ...,
+ 40000. , 40000. , 40000. ]],
+
+ [[40000. , 40000. , 40000. , ...,
+ 40000. , 40000. , 40000. ],
+ [40000. , 40000. , 40000. , ...,
+ 40000. , 40000. , 40000. ],
+ [40000. , 40000. , 40000. , ...,
+ 40000. , 40000. , 40000. ],
+ ...,
+ [40000. , 40000. , 40000. , ...,
+ 40000. , 40000. , 40000. ],
+ [40000. , 40000. , 40000. , ...,
+ 40000. , 40000. , 40000. ],
+ [40000. , 40000. , 40000. , ...,
+ 40000. , 40000. , 40000. ]],
+
+ [[40000. , 40000. , 40000. , ...,
+ 40000. , 40000. , 40000. ],
+ [40000. , 40000. , 40000. , ...,
+ 40000. , 40000. , 40000. ],
+ [40000. , 40000. , 40000. , ...,
+ 40000. , 40000. , 40000. ],
+ ...,
+ [40000. , 40000. , 40000. , ...,
+ 40000. , 40000. , 40000. ],
+ [40000. , 40000. , 40000. , ...,
+ 40000. , 40000. , 40000. ],
+ [40000. , 40000. , 40000. , ...,
+ 40000. , 40000. , 40000. ]],
+
+ [[40000. , 40000. , 40000. , ...,
+ 40000. , 40000. , 40000. ],
+ [40000. , 40000. , 40000. , ...,
+ 40000. , 40000. , 40000. ],
+ [40000. , 40000. , 40000. , ...,
+ 40000. , 40000. , 40000. ],
+ ...,
+ [40000. , 40000. , 40000. , ...,
+ 40000. , 40000. , 40000. ],
+ [40000. , 40000. , 40000. , ...,
+ 40000. , 40000. , 40000. ],
+ [40000. , 40000. , 40000. , ...,
+ 40000. , 40000. , 40000. ]],
+
+ [[40000. , 40000. , 40000. , ...,
+ 40000. , 40000. , 40000. ],
+ [40000. , 40000. , 40000. , ...,
+ 40000. , 40000. , 40000. ],
+ [40000. , 40000. , 40000. , ...,
+ 40000. , 40000. , 40000. ],
+ ...,
+ [40000. , 40000. , 40000. , ...,
+ 40000. , 40000. , 40000. ],
+ [40000. , 40000. , 40000. , ...,
+ 40000. , 40000. , 40000. ],
+ [40000. , 40000. , 40000. , ...,
+ 40000. , 40000. , 40000. ]],
+
+ [[40000. , 40000. , 40000. , ...,
+ 40000. , 40000. , 40000. ],
+ [40000. , 40000. , 40000. , ...,
+ 40000. , 40000. , 40000. ],
+ [40000. , 40000. , 40000. , ...,
+ 40000. , 40000. , 40000. ],
+ ...,
+ [40000. , 40000. , 40000. , ...,
+ 40000. , 40000. , 40000. ],
+ [40000. , 40000. , 40000. , ...,
+ 40000. , 40000. , 40000. ],
+ [40000. , 40000. , 40000. , ...,
+ 40000. , 40000. , 40000. ]]]]" shape="(70, 6, 100, 100)" standard_name="altitude" units="Unit('m')" value_type="float32">
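The bulk of the new reference output above is a derived altitude coordinate of shape ``(70, 6, 100, 100)``: once orography may vary with time, the altitude derived from a hybrid-height coordinate acquires a time dimension too. As a minimal sketch of how such a coordinate arises (``delta``, ``sigma``, ``orog`` and ``cube`` here are hypothetical, not taken from the test data):

    >>> from iris.aux_factory import HybridHeightFactory
    >>> # altitude = delta + sigma * orography ; if the orography reference
    >>> # carries a time dimension, the derived altitude is broadcast over it.
    >>> cube.add_aux_factory(HybridHeightFactory(delta=delta, sigma=sigma, orography=orog))
    >>> cube.coord("altitude").shape
    (70, 6, 100, 100)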
diff --git a/lib/iris/tests/test_plot.py b/lib/iris/tests/test_plot.py
index 50773f0d24..b263313b90 100644
--- a/lib/iris/tests/test_plot.py
+++ b/lib/iris/tests/test_plot.py
@@ -821,6 +821,9 @@ def setUp(self):
super().setUp()
filename = tests.get_data_path(("PP", "COLPEX", "theta_and_orog_subset.pp"))
self.cube = load_cube_once(filename, "air_potential_temperature")
+ if self.cube.coord_dims("time") != (0,):
+ # A quick fix for data which has changed since we added support for time-varying orography
+ self.cube.transpose((1, 0, 2, 3))
self.draw_module = iris.plot
self.contourf = LambdaStr(
diff --git a/lib/iris/tests/test_pp_to_cube.py b/lib/iris/tests/test_pp_to_cube.py
index da49ff8188..a61703761f 100644
--- a/lib/iris/tests/test_pp_to_cube.py
+++ b/lib/iris/tests/test_pp_to_cube.py
@@ -81,7 +81,7 @@ def test_regrid_missing_coord(self):
# If the target cube is missing one of the source dimension
# coords, ensure the re-grid fails nicely - i.e. returns None.
self.target.remove_coord("bar")
- new_ref = iris.fileformats.rules._ensure_aligned({}, self.ref, self.target)
+ new_ref, _ = iris.fileformats.rules._ensure_aligned({}, self.ref, self.target)
self.assertIsNone(new_ref)
def test_regrid_codimension(self):
@@ -92,11 +92,11 @@ def test_regrid_codimension(self):
new_foo = self.target.coord("bar").copy()
new_foo.rename("foo")
self.target.add_aux_coord(new_foo, 0)
- new_ref = iris.fileformats.rules._ensure_aligned({}, self.ref, self.target)
+ new_ref, _ = iris.fileformats.rules._ensure_aligned({}, self.ref, self.target)
self.assertIsNone(new_ref)
def test_regrid_identity(self):
- new_ref = iris.fileformats.rules._ensure_aligned({}, self.ref, self.target)
+ new_ref, _ = iris.fileformats.rules._ensure_aligned({}, self.ref, self.target)
# Bounds don't make it through the re-grid process
self.ref.coord("bar").bounds = None
self.ref.coord("foo").bounds = None
diff --git a/lib/iris/tests/test_quickplot.py b/lib/iris/tests/test_quickplot.py
index 8469aa0776..25bd8904a7 100644
--- a/lib/iris/tests/test_quickplot.py
+++ b/lib/iris/tests/test_quickplot.py
@@ -49,6 +49,9 @@ def setUp(self):
tests.GraphicsTest.setUp(self)
filename = tests.get_data_path(("PP", "COLPEX", "theta_and_orog_subset.pp"))
self.cube = test_plot.load_cube_once(filename, "air_potential_temperature")
+ if self.cube.coord_dims("time") != (0,):
+ # A quick fix for data which has changed since we added support for time-varying orography
+ self.cube.transpose((1, 0, 2, 3))
self.draw_module = iris.quickplot
self.contourf = test_plot.LambdaStr(
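Both plot-test fixtures apply the same guard: :meth:`~iris.cube.Cube.transpose` permutes the cube's data dimensions in place, so the fix simply moves ``time`` back to dimension 0 when the new time-varying-orography ordering puts it second. A minimal sketch with a hypothetical 4-d cube:

    >>> cube.coord_dims("time")
    (1,)
    >>> cube.transpose((1, 0, 2, 3))  # swap the first two dimensions
    >>> cube.coord_dims("time")
    (0,)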
diff --git a/lib/iris/tests/unit/fileformats/test_load_functions.py b/lib/iris/tests/unit/fileformats/test_load_functions.py
new file mode 100644
index 0000000000..3c3d361080
--- /dev/null
+++ b/lib/iris/tests/unit/fileformats/test_load_functions.py
@@ -0,0 +1,231 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the BSD license.
+# See LICENSE in the root of the repository for full licensing details.
+"""Unit tests for iris load functions.
+
+* :func:`iris.load`
+* :func:`iris.load_cube`
+* :func:`iris.load_cubes`
+* :func:`iris.load_raw`
+"""
+
+import re
+from typing import Iterable
+from unittest import mock
+
+import numpy as np
+import pytest
+
+import iris
+from iris.coords import AuxCoord, DimCoord
+from iris.cube import Cube
+
+_time_unit = "days since 2001-01-01"
+
+
+def cu(n="a", t=0, z=0):
+ """Create a single test cube.
+
+ All cubes have, potentially, 4 dimensions (z, t, y, x).
+ The (y, x) dims are always the same, but (z, t) can be scalar or of various lengths.
+ Scalar t/z values produce scalar coordinates; vector values produce vector coordinates.
+ """
+ yco = DimCoord(np.arange(3), long_name="latitude", units="degrees")
+ xco = DimCoord(np.arange(4), long_name="longitude", units="degrees")
+ dim_coords = [yco, xco]
+ shape = [3, 4] # the xy shape
+ scalar_coords = []
+ tco = DimCoord(
+ np.array(t, dtype=np.float32), standard_name="time", units=_time_unit
+ )
+ zco = DimCoord(np.array(z, dtype=np.float32), standard_name="height", units="m")
+ for tz, tzco in [(t, tco), (z, zco)]:
+ if isinstance(tz, Iterable):
+ # N.B. insert an extra dim at the front
+ dim_coords[:0] = [tzco]
+ shape[:0] = tzco.shape[:1]
+ else:
+ scalar_coords.append(tzco)
+
+ cube = Cube(
+ data=np.zeros(shape),
+ long_name=n,
+ dim_coords_and_dims=[(dim, i_dim) for i_dim, dim in enumerate(dim_coords)],
+ aux_coords_and_dims=[(dim, ()) for dim in scalar_coords],
+ )
+ return cube
+
+
+@pytest.fixture(params=["load", "load_cube", "load_cubes", "load_raw"])
+def loadfunc_name(request):
+ # N.B. "request" is a standard PyTest fixture
+ return request.param # Return the name of the load function to test.
+
+
+def run_testcase(input_cubes, loadfunc_name, constraints=None):
+ loadfunc = getattr(iris, loadfunc_name)
+
+ def mock_generate_cubes(uris, callback, constraints):
+ for cube in input_cubes:
+ yield cube
+
+ try:
+ with mock.patch("iris._generate_cubes", mock_generate_cubes):
+ result = loadfunc(input_cubes, constraints)
+ except Exception as e:
+ result = e
+
+ return result
+
+
+def debug_result(cubes):
+ print()
+ print(cubes)
+ if isinstance(cubes, iris.cube.CubeList):
+ print(len(cubes), " cubes..")
+ for i_cube, cube in enumerate(cubes):
+ vh = cube.coord("height").points
+ vt = cube.coord("time").points
+ print(i_cube, cube.name(), ": h=", vh, " :: t=", vt)
+
+
+def check_result(input_cubes, loadfunc_name, result, expected_results):
+ if "load_raw" not in expected_results and loadfunc_name == "load_raw":
+ expected = input_cubes
+ else:
+ expected = expected_results[loadfunc_name]
+
+ if isinstance(expected, str):
+ # We expect an error result : stored 'expected' is a regexp to match its repr
+ assert re.search(expected, repr(result))
+ else:
+ assert result == expected
+
+
+class TestLoadFunctions:
+ def test_mergeable(self, loadfunc_name):
+ _cube = cu(t=(0, 1), z=(0, 1))
+ input_cubes = [cu(t=i_t, z=i_z) for i_t in (0, 1) for i_z in (0, 1)]
+ expected_results = {
+ "load": [_cube],
+ "load_cube": _cube,
+ "load_cubes": [_cube],
+ }
+ result = run_testcase(input_cubes, loadfunc_name)
+ check_result(input_cubes, loadfunc_name, result, expected_results)
+
+ def test_multiple(self, loadfunc_name):
+ input_cubes = [cu(), cu(n="b")]
+ expected_results = {
+ "load": [cu(), cu(n="b")],
+ "load_cube": "ConstraintMismatchError.*failed to merge into a single cube",
+ "load_cubes": r"ConstraintMismatchError.*-> \d+ cubes",
+ }
+ result = run_testcase(input_cubes, loadfunc_name)
+ check_result(input_cubes, loadfunc_name, result, expected_results)
+
+ def test_multiple_constrained(self, loadfunc_name):
+ cube, cube_b = cu(), cu(n="b")
+ input_cubes = [cube, cube_b]
+ constraint = "a"
+ expected_results = {
+ "load": [cube],
+ "load_cube": cube,
+ "load_cubes": [cube],
+ "load_raw": [cube],
+ }
+ result = run_testcase(input_cubes, loadfunc_name, constraints=constraint)
+ check_result(input_cubes, loadfunc_name, result, expected_results)
+
+ def test_multiple_multi_constraints(self, loadfunc_name):
+ ca, cb, cc = cu(), cu(n="b"), cu(n="c")
+ input_cubes = [ca, cb, cc]
+ constraints = ["c", "a"]
+ expected_results = {
+ "load": [cc, ca],
+ "load_cube": "ValueError.*only a single constraint is allowed",
+ "load_cubes": [cc, ca],
+ "load_raw": [cc, ca],
+ }
+ result = run_testcase(input_cubes, loadfunc_name, constraints=constraints)
+ check_result(input_cubes, loadfunc_name, result, expected_results)
+
+ def test_nonmergeable_part_missing(self, loadfunc_name):
+ c1, c2, c3, c4 = [cu(t=i_t, z=i_z) for i_t in (0, 1) for i_z in (0, 1)]
+ input_cubes = [c1, c2, c4]
+
+ c124 = cu(t=(0, 1, 2))
+ c124.remove_coord("time") # we now have an unnamed dimension
+ c124.remove_coord("height") # we now have an unnamed dimension
+ c124.add_aux_coord(AuxCoord([0.0, 1, 1], standard_name="height", units="m"), 0)
+ c124.add_aux_coord(
+ AuxCoord([0.0, 0, 1], standard_name="time", units=_time_unit), 0
+ )
+ expected_results = {
+ "load": [c124],
+ "load_cube": c124,
+ "load_cubes": [c124],
+ }
+ result = run_testcase(input_cubes, loadfunc_name)
+ check_result(input_cubes, loadfunc_name, result, expected_results)
+
+ def test_nonmergeable_part_extra(self, loadfunc_name):
+ c1, c2, c3, c4 = [cu(t=i_t, z=i_z) for i_t in (0, 1) for i_z in (0, 1)]
+ c5 = cu(t=5)
+ input_cubes = [c1, c2, c5, c4, c3] # scramble order, just to test
+
+ cx = cu(t=range(5))
+ cx.remove_coord("time") # we now have an unnamed dimension
+ cx.remove_coord("height") # we now have an unnamed dimension
+ cx.add_aux_coord(
+ AuxCoord([0.0, 1, 0, 1, 0], standard_name="height", units="m"), 0
+ )
+ cx.add_aux_coord(
+ AuxCoord([0.0, 0, 5, 1, 1], standard_name="time", units=_time_unit), 0
+ )
+ expected_results = {
+ "load": [cx],
+ "load_cube": cx,
+ "load_cubes": [cx],
+ }
+ result = run_testcase(input_cubes, loadfunc_name)
+ check_result(input_cubes, loadfunc_name, result, expected_results)
+
+ def test_constraint_overlap(self, loadfunc_name):
+ c1, c2, c3, c4, c5, c6 = (cu(z=ind) for ind in (1, 2, 3, 4, 5, 6))
+ input_cubes = [c1, c2, c3, c4, c5, c6]
+ constraints = [
+ iris.Constraint(height=[1, 2]),
+ iris.Constraint(height=[1, 4, 5]),
+ ]
+ c12 = cu(z=[1, 2])
+ c145 = cu(z=[1, 4, 5])
+ expected_results = {
+ "load": [c12, c145],
+ "load_cube": "ValueError.*only a single constraint is allowed",
+ "load_cubes": [c12, c145], # selected parts merge, as for load
+ "load_raw": [c1, c2, c1, c4, c5], # THIS VERY STRANGE BEHAVIOUR!!
+ }
+ result = run_testcase(input_cubes, loadfunc_name, constraints=constraints)
+ check_result(input_cubes, loadfunc_name, result, expected_results)
+
+ def test_multiple_match(self, loadfunc_name):
+ c1 = cu(z=1)
+ c2 = cu(z=2)
+ c3 = cu(n="b", z=1)
+ c4 = cu(n="b", z=2)
+ input_cubes = [c1, c2, c3, c4]
+ constraints = [
+ iris.Constraint("a") & iris.Constraint(height=1),
+ iris.Constraint(height=2),
+ ]
+ expected_results = {
+ "load": [c1, c2, c4],
+ "load_cube": "ValueError.*only a single constraint is allowed",
+ "load_cubes": r"ConstraintMismatchError.*-> \d+ cubes",
+ "load_raw": [c1, c2, c4],
+ }
+ result = run_testcase(input_cubes, loadfunc_name, constraints=constraints)
+ debug_result(result)
+ check_result(input_cubes, loadfunc_name, result, expected_results)
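For reference, the behaviours pinned down above follow directly from the public load functions. A hedged sketch with a hypothetical ``filename`` (real loads go through files, whereas these tests feed cubes in via a mocked ``iris._generate_cubes``):

    >>> import iris
    >>> cubes = iris.load(filename)                   # combined CubeList
    >>> cube = iris.load_cube(filename, "a")          # exactly one cube, else ConstraintMismatchError
    >>> pair = iris.load_cubes(filename, ["a", "b"])  # one cube per constraint
    >>> raw = iris.load_raw(filename)                 # no merging of any kind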
diff --git a/lib/iris/tests/unit/test_LoadPolicy.py b/lib/iris/tests/unit/test_LoadPolicy.py
new file mode 100644
index 0000000000..8772b089c1
--- /dev/null
+++ b/lib/iris/tests/unit/test_LoadPolicy.py
@@ -0,0 +1,144 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the BSD license.
+# See LICENSE in the root of the repository for full licensing details.
+"""Unit tests for the :mod:`iris.io.loading.LoadPolicy` package."""
+
+from unittest import mock
+
+import pytest
+
+from iris import LoadPolicy
+
+
+class TestInit:
+ def test_init_empty(self):
+ # Check how a bare init works
+ options = LoadPolicy()
+ assert options.settings() == LoadPolicy.SETTINGS["default"]
+
+ def test_init_args_kwargs(self):
+ # Check that init with args, kwargs equates to a pair of set() calls.
+ with mock.patch("iris.LoadPolicy.set") as mock_set:
+ test_option = mock.sentinel.option
+ test_kwargs = {"junk": "invalid"}
+ LoadPolicy(options=test_option, **test_kwargs)
+ assert mock_set.call_args_list == [
+ mock.call("default"),
+ mock.call(test_option, **test_kwargs),
+ ]
+
+
+class Test_settings:
+ """The .settings() returns a dict full of the settings."""
+
+ def test_settings(self):
+ options = LoadPolicy()
+ settings = options.settings()
+ assert isinstance(settings, dict)
+ assert tuple(settings.keys()) == LoadPolicy.OPTION_KEYS
+ for key in LoadPolicy.OPTION_KEYS:
+ assert settings[key] == getattr(options, key)
+
+
+class Test_set:
+ """Check the .set(arg, **kwargs) behaviour."""
+
+ def test_empty(self):
+ options = LoadPolicy()
+ orig_settings = options.settings()
+ options.set()
+ assert options.settings() == orig_settings
+
+ def test_arg_dict(self):
+ options = LoadPolicy()
+ assert options.settings()["merge_concat_sequence"] == "m"
+ assert options.settings()["repeat_until_unchanged"] is False
+ options.set({"merge_concat_sequence": "c", "repeat_until_unchanged": True})
+ assert options.settings()["merge_concat_sequence"] == "c"
+ assert options.settings()["repeat_until_unchanged"] is True
+
+ def test_arg_string(self):
+ options = LoadPolicy()
+ assert options.settings()["merge_concat_sequence"] == "m"
+ assert options.settings()["repeat_until_unchanged"] is False
+ options.set("comprehensive")
+ assert options.settings()["merge_concat_sequence"] == "mc"
+ assert options.settings()["repeat_until_unchanged"] is True
+
+ def test_arg_bad_dict(self):
+ options = LoadPolicy()
+ expected = "Unknown options.*'junk'.* : valid options are"
+ with pytest.raises(ValueError, match=expected):
+ options.set({"junk": "invalid"})
+
+ def test_arg_bad_string(self):
+ options = LoadPolicy()
+ expected = "Invalid arg options='unknown' : must be a dict, or one of"
+ with pytest.raises(TypeError, match=expected):
+ options.set("unknown")
+
+ def test_arg_bad_type(self):
+ options = LoadPolicy()
+ expected = "must be a dict, or one of"
+ with pytest.raises(TypeError, match=expected):
+ options.set((1, 2, 3))
+
+ def test_kwargs(self):
+ options = LoadPolicy()
+ assert options.settings()["merge_concat_sequence"] == "m"
+ assert options.settings()["repeat_until_unchanged"] is False
+ options.set(merge_concat_sequence="c", repeat_until_unchanged=True)
+ assert options.settings()["merge_concat_sequence"] == "c"
+ assert options.settings()["repeat_until_unchanged"] is True
+
+ def test_arg_kwargs(self):
+ # Show that kwargs override arg
+ options = LoadPolicy(
+ support_multiple_references=False,
+ merge_concat_sequence="",
+ repeat_until_unchanged=False,
+ )
+ options.set(
+ dict(merge_concat_sequence="c", repeat_until_unchanged=True),
+ merge_concat_sequence="mc",
+ )
+ assert options.merge_concat_sequence == "mc"
+ assert options.repeat_until_unchanged is True
+
+ def test_bad_kwarg(self):
+ options = LoadPolicy()
+ expected = "Unknown options.*'junk'.* : valid options are"
+ with pytest.raises(ValueError, match=expected):
+ options.set({"junk": "invalid"})
+
+
+class Test_AttributeAccess:
+ """Check operation of direct property access (with ".")."""
+
+ def test_getattr(self):
+ options = LoadPolicy(merge_concat_sequence="m")
+ assert options.merge_concat_sequence == "m"
+
+ def test_getattr_badname(self):
+ options = LoadPolicy()
+ expected = "'LoadPolicy' object has no attribute 'unknown'"
+ with pytest.raises(AttributeError, match=expected):
+ options.unknown
+
+ def test_setattr(self):
+ options = LoadPolicy(merge_concat_sequence="m")
+ options.merge_concat_sequence = "mc"
+ assert options.merge_concat_sequence == "mc"
+
+ def test_setattr_badname(self):
+ options = LoadPolicy()
+ expected = "LoadPolicy object has no property 'anyold_property'"
+ with pytest.raises(KeyError, match=expected):
+ options.anyold_property = "x"
+
+ def test_setattr_badvalue(self):
+ options = LoadPolicy()
+ expected = "'mcm' is not a valid.*merge_concat_sequence : must be one of"
+ with pytest.raises(ValueError, match=expected):
+ options.merge_concat_sequence = "mcm"
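Taken together, these tests document the two supported ways of adjusting the policy: by named settings bundle, or by individual option. A brief usage sketch, using values exercised in the tests above:

    >>> from iris import LOAD_POLICY
    >>> LOAD_POLICY.set("comprehensive")  # select a named settings bundle
    >>> LOAD_POLICY.set(merge_concat_sequence="c", repeat_until_unchanged=True)
    >>> LOAD_POLICY.merge_concat_sequence  # options double as plain attributes
    'c'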
diff --git a/lib/iris/tests/unit/test_combine_cubes.py b/lib/iris/tests/unit/test_combine_cubes.py
new file mode 100644
index 0000000000..e159582497
--- /dev/null
+++ b/lib/iris/tests/unit/test_combine_cubes.py
@@ -0,0 +1,89 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the BSD license.
+# See LICENSE in the root of the repository for full licensing details.
+"""Unit tests for the :func:`iris.io.loading.combine_cubes` function.
+
+Note: These tests are fairly extensive to cover functional uses within the loading
+operations.
+TODO: when function is public API, extend testing to the extended API options,
+i.e. different types + defaulting of the 'options' arg, and **kwargs support.
+"""
+
+import pytest
+
+from iris import LoadPolicy, _combine_cubes
+from iris.tests.unit.fileformats.test_load_functions import cu
+
+
+@pytest.fixture(params=list(LoadPolicy.SETTINGS.keys()))
+def options(request):
+ # N.B. "request" is a standard PyTest fixture
+ return request.param # Return the name of the settings bundle to test.
+
+
+# Interface to convert a settings name / kwargs into an options dict.
+# TODO: remove this wrapper when the API of "combine_cubes" is opened up.
+def combine_cubes(cubes, settings_name="default", **kwargs):
+ options = LoadPolicy.SETTINGS[settings_name]
+ options.update(kwargs)
+ return _combine_cubes(cubes, options, merge_require_unique=False)
+
+
+class Test:
+ def test_mergeable(self, options):
+ c1, c2 = cu(t=1), cu(t=2)
+ c12 = cu(t=(1, 2))
+ input_cubes = [c1, c2]
+ result = combine_cubes(input_cubes, options)
+ expected = [c12] # same in all cases
+ assert result == expected
+
+ def test_catable(self, options):
+ c1, c2 = cu(t=(1, 2)), cu(t=(3, 4))
+ c12 = cu(t=(1, 2, 3, 4))
+ input_cubes = [c1, c2]
+ result = combine_cubes(input_cubes, options)
+ expected = {
+ "legacy": [c1, c2], # standard options can't do this ..
+ "default": [c1, c2],
+ "recommended": [c12], # .. but it works if you enable concatenate
+ "comprehensive": [c12],
+ }[options]
+ assert result == expected
+
+ def test_cat_enables_merge(self, options):
+ c1, c2 = cu(t=(1, 2), z=1), cu(t=(3, 4, 5), z=1)
+ c3, c4 = cu(t=(1, 2, 3), z=2), cu(t=(4, 5), z=2)
+ c1234 = cu(t=(1, 2, 3, 4, 5), z=(1, 2))
+ c12 = cu(t=(1, 2, 3, 4, 5), z=1)
+ c34 = cu(t=(1, 2, 3, 4, 5), z=2)
+ input_cubes = [c1, c2, c3, c4]
+ result = combine_cubes(input_cubes, options)
+ expected = {
+ "legacy": input_cubes,
+ "default": input_cubes,
+ "recommended": [c12, c34], # standard "mc" sequence can't do this one..
+ "comprehensive": [c1234], # .. but works if you repeat
+ }[options]
+ assert result == expected
+
+ def test_cat_enables_merge__custom(self):
+ c1, c2 = cu(t=(1, 2), z=1), cu(t=(3, 4, 5), z=1)
+ c3, c4 = cu(t=(1, 2, 3), z=2), cu(t=(4, 5), z=2)
+ c1234 = cu(t=(1, 2, 3, 4, 5), z=(1, 2))
+ input_cubes = [c1, c2, c3, c4]
+ result = combine_cubes(input_cubes, merge_concat_sequence="cm")
+ assert result == [c1234]
+
+ def test_nocombine_overlapping(self, options):
+ c1, c2 = cu(t=(1, 3)), cu(t=(2, 4))
+ input_cubes = [c1, c2]
+ result = combine_cubes(input_cubes, options)
+ assert result == input_cubes # same in all cases : can't do this
+
+ def test_nocombine_dim_scalar(self, options):
+ c1, c2 = cu(t=(1,)), cu(t=2)
+ input_cubes = [c1, c2]
+ result = combine_cubes(input_cubes, options)
+ assert result == input_cubes # can't do this at present
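The ``test_cat_enables_merge`` cases above correspond to chaining the public :class:`~iris.cube.CubeList` operations by hand; a minimal sketch using the same ``c1``..``c4`` cubes (hedged: the tests exercise the private ``_combine_cubes`` rather than this public route):

    >>> from iris.cube import CubeList
    >>> cubes = CubeList([c1, c2, c3, c4])
    >>> cubes = cubes.concatenate()  # join the split time ranges at each z
    >>> cubes = cubes.merge()        # then merge the z=1 and z=2 cubes into one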