From dc39651cb9958b6e66e361d3253690251772a304 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 9 Oct 2024 08:49:06 +0100 Subject: [PATCH 01/33] Enable factory references to create new dimensions on load. --- lib/iris/fileformats/rules.py | 45 +++++++++++++++++++++++++---------- 1 file changed, 33 insertions(+), 12 deletions(-) diff --git a/lib/iris/fileformats/rules.py b/lib/iris/fileformats/rules.py index 8299021fb5..0c57607ff6 100644 --- a/lib/iris/fileformats/rules.py +++ b/lib/iris/fileformats/rules.py @@ -151,7 +151,7 @@ def _dereference_args(factory, reference_targets, regrid_cache, cube): src = reference_targets[arg.name].as_cube() # If necessary, regrid the reference cube to # match the grid of this cube. - src = _ensure_aligned(regrid_cache, src, cube) + src, cube = _ensure_aligned(regrid_cache, src, cube) if src is not None: new_coord = iris.coords.AuxCoord( src.data, @@ -178,7 +178,7 @@ def _dereference_args(factory, reference_targets, regrid_cache, cube): # If it wasn't a Reference, then arg is a dictionary # of keyword arguments for cube.coord(...). args.append(cube.coord(**arg)) - return args + return args, cube def _regrid_to_target(src_cube, target_coords, target_cube): @@ -211,9 +211,9 @@ def _ensure_aligned(regrid_cache, src_cube, target_cube): # Check that each of src_cube's dim_coords matches up with a single # coord on target_cube. try: - target_coords = [] + target_dimcoords = [] for dim_coord in src_cube.dim_coords: - target_coords.append(target_cube.coord(dim_coord)) + target_dimcoords.append(target_cube.coord(dim_coord)) except iris.exceptions.CoordinateNotFoundError: # One of the src_cube's dim_coords didn't exist on the # target_cube... so we can't regrid (i.e. just return None). @@ -222,7 +222,26 @@ def _ensure_aligned(regrid_cache, src_cube, target_cube): # So we can use `iris.analysis.interpolate.linear()` later, # ensure each target coord is either a scalar or maps to a # single, distinct dimension. - target_dims = [target_cube.coord_dims(coord) for coord in target_coords] + # PP-MOD: first promote any scalar coords when needed as dims + for target_coord in target_dimcoords: + if not target_cube.coord_dims(target_coord): + # The chosen coord is not a dimcoord in the target (yet) + # Make it one with 'new_axis' + from iris.util import new_axis + + # Include the other coords on that dim in the src : this means the + # src merge identifies which belong on that dim + # (e.g. 'forecast_period' along with 'time') + (src_dim,) = src_cube.coord_dims(target_coord) # should have 1 dim + promote_other_coords = [ + target_cube.coord(src_coord) + for src_coord in src_cube.coords(dimensions=src_dim) + if src_coord.name() != target_coord.name() + ] + target_cube = new_axis( + target_cube, target_coord, expand_extras=promote_other_coords + ) + target_dims = [target_cube.coord_dims(coord) for coord in target_dimcoords] target_dims = list(filter(None, target_dims)) unique_dims = set() for dims in target_dims: @@ -236,19 +255,19 @@ def _ensure_aligned(regrid_cache, src_cube, target_cube): grids, cubes = regrid_cache[cache_key] # 'grids' is a list of tuples of coordinates, so convert # the 'target_coords' list into a tuple to be consistent. - target_coords = tuple(target_coords) + target_dimcoords = tuple(target_dimcoords) try: # Look for this set of target coordinates in the cache. - i = grids.index(target_coords) + i = grids.index(target_dimcoords) result_cube = cubes[i] except ValueError: # Not already cached, so do the hard work of interpolating. 
- result_cube = _regrid_to_target(src_cube, target_coords, target_cube) + result_cube = _regrid_to_target(src_cube, target_dimcoords, target_cube) # Add it to the cache. - grids.append(target_coords) + grids.append(target_dimcoords) cubes.append(result_cube) - return result_cube + return result_cube, target_cube class Loader( @@ -331,7 +350,7 @@ def _resolve_factory_references( # across multiple result cubes. for factory in factories: try: - args = _dereference_args( + args, cube = _dereference_args( factory, concrete_reference_targets, regrid_cache, cube ) except _ReferenceError as e: @@ -345,6 +364,8 @@ def _resolve_factory_references( aux_factory = factory.factory_class(*args) cube.add_aux_factory(aux_factory) + return cube + def _load_pairs_from_fields_and_filenames( fields_and_filenames, converter, user_callback_wrapper=None @@ -383,7 +404,7 @@ def _load_pairs_from_fields_and_filenames( regrid_cache = {} for cube, factories, field in results_needing_reference: - _resolve_factory_references( + cube = _resolve_factory_references( cube, factories, concrete_reference_targets, regrid_cache ) yield (cube, field) From d7126824c937f5bc553f0584464c004da30a5e14 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 9 Oct 2024 10:46:43 +0100 Subject: [PATCH 02/33] Skip hanging tests. --- lib/iris/tests/test_plot.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/lib/iris/tests/test_plot.py b/lib/iris/tests/test_plot.py index 50773f0d24..916e205dd8 100644 --- a/lib/iris/tests/test_plot.py +++ b/lib/iris/tests/test_plot.py @@ -2,6 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. +import pytest # import iris tests first so that some things can be initialised before # importing anything else @@ -816,6 +817,7 @@ def __repr__(self): @tests.skip_data @tests.skip_plot +@pytest.mark.skip class TestPlotCoordinatesGiven(tests.GraphicsTest): def setUp(self): super().setUp() From 5e53a08d057c878438170bcbf32db30de75c991e Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 9 Oct 2024 10:55:10 +0100 Subject: [PATCH 03/33] Skip more hanging tests. --- lib/iris/tests/test_quickplot.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/lib/iris/tests/test_quickplot.py b/lib/iris/tests/test_quickplot.py index fdd534a2c5..0714e90f36 100644 --- a/lib/iris/tests/test_quickplot.py +++ b/lib/iris/tests/test_quickplot.py @@ -8,6 +8,7 @@ import iris.tests as tests # isort:skip import numpy as np +import pytest import iris import iris.tests.test_plot as test_plot @@ -104,6 +105,7 @@ def setUp(self): @tests.skip_data @tests.skip_plot +@pytest.mark.skip class TestLabels(tests.GraphicsTest): def setUp(self): super().setUp() From 92369eccaea7dc7fa9aafe200dc1b91bd4caf755 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Thu, 10 Oct 2024 17:26:16 +0100 Subject: [PATCH 04/33] Adjust misleading comment. 
--- lib/iris/cube.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/lib/iris/cube.py b/lib/iris/cube.py
index 40e50da4ff..beda79ea6a 100644
--- a/lib/iris/cube.py
+++ b/lib/iris/cube.py
@@ -113,7 +113,7 @@ def add_cube(self, cube):
             pair.add(cube)

     def cubes(self):
-        """Return all the cubes in this collection concatenated into a single :class:`CubeList`."""
+        """Return all the cubes in this collection in a single :class:`CubeList`."""
         result = CubeList()
         for pair in self.pairs:
             result.extend(pair.cubes)

From 85634553f9b905e256e87fc144d54b886252b856 Mon Sep 17 00:00:00 2001
From: Patrick Peglar
Date: Fri, 11 Oct 2024 15:18:16 +0100
Subject: [PATCH 05/33] Add policy control and auto-detect.

NOTE: for now only load, not load_cubes/load_cube

---
 lib/iris/__init__.py          | 126 +++++++++++++++++++++++++++++++++-
 lib/iris/fileformats/rules.py |  45 +++++++++++-
 2 files changed, 166 insertions(+), 5 deletions(-)

diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py
index a06e36a2e2..d58d4b5dec 100644
--- a/lib/iris/__init__.py
+++ b/lib/iris/__init__.py
@@ -292,8 +292,17 @@ def _generate_cubes(uris, callback, constraints):

 def _load_collection(uris, constraints=None, callback=None):
     from iris.cube import _CubeFilterCollection
+    from iris.fileformats.rules import _MULTIREF_DETECTION

     try:
+        # This routine is called once per iris load operation.
+        # Control of the "multiple refs" handling is implicit in this routine.
+        # NOTE: detection of multiple reference fields, and its enabling of post-load
+        # concatenation, is triggered **per-load, not per-cube**.
+        # This behaves unexpectedly for "iris.load_cubes" : a post-concatenation is
+        # triggered for all cubes or none, not per-cube (i.e. per constraint).
+        _MULTIREF_DETECTION.found_multiple_refs = False
+
         cubes = _generate_cubes(uris, callback, constraints)
         result = _CubeFilterCollection.from_cubes(cubes, constraints)
     except EOFError as e:
@@ -303,7 +312,118 @@ def _load_collection(uris, constraints=None, callback=None):
     return result


-def load(uris, constraints=None, callback=None):
+class LoadPolicy(threading.local):
+    """Object defining a general loading strategy."""
+
+    _allkeys = (
+        "support_multiple_references",
+        "multiref_triggers_concatenate",
+        "use_concatenate",
+        "use_merge",
+        "cat_before_merge",
+        "repeat_until_done",
+    )
+
+    def __init__(
+        self,
+        support_multiple_references: bool = False,
+        multiref_triggers_concatenate: bool = False,
+        use_concatenate: bool = False,
+        use_merge: bool = True,
+        cat_before_merge: bool = False,
+        repeat_until_done: bool = False,
+    ):
+        """Container for loading controls."""
+        self.support_multiple_references = support_multiple_references
+        self.multiref_triggers_concatenate = multiref_triggers_concatenate
+        self.use_concatenate = use_concatenate
+        self.use_merge = use_merge
+        self.cat_before_merge = cat_before_merge
+        self.repeat_until_done = repeat_until_done
+
+    def __repr__(self):
+        msg = (
+            "LoadPolicy("
+            f"support_multiple_references={self.support_multiple_references}, "
+            f"multiref_triggers_concatenate={self.multiref_triggers_concatenate}, "
+            f"use_concatenate={self.use_concatenate}, "
+            f"use_merge={self.use_merge}, "
+            f"cat_before_merge={self.cat_before_merge}, "
+            f"repeat_until_done={self.repeat_until_done}"
+            ")"
+        )
+        return msg
+
+    def copy(self):
+        return LoadPolicy(**{key: getattr(self, key) for key in self._allkeys})
+
+    @contextlib.contextmanager
+    def context(self, policy=None, **kwargs):
+        """Return context manager for temporary options.
+
+        Modifies the given parameters within a context, for the active thread.
+        """
+        # Save the current state
+        current_state = self.__dict__.copy()
+
+        # Update the state from given policy object and/or method keywords
+        for name in self._allkeys:
+            value = getattr(self, name)
+            if policy and hasattr(policy, name):
+                value = getattr(policy, name)
+            if name in kwargs:
+                value = kwargs[name]
+            setattr(self, name, value)
+
+        try:
+            # Execute the context
+            yield
+        finally:
+            # Restore the saved state
+            self.__dict__.clear()
+            self.__dict__.update(current_state)
+
+
+LOAD_POLICY = LoadPolicy()
+LOAD_POLICY_LEGACY = LoadPolicy()
+LOAD_POLICY_RECOMMENDED = LoadPolicy(
+    support_multiple_references=True, multiref_triggers_concatenate=True
+)
+LOAD_POLICY_COMPREHENSIVE = LoadPolicy(
+    support_multiple_references=True, use_concatenate=True, repeat_until_done=True
+)
+
+
+def _current_effective_policy():
+    policy = LOAD_POLICY
+    if not policy.use_concatenate and policy.multiref_triggers_concatenate:
+        from iris.fileformats.rules import _MULTIREF_DETECTION
+
+        if _MULTIREF_DETECTION.found_multiple_refs:
+            policy = policy.copy()
+            policy.use_concatenate = True
+    return policy
+
+
+def _apply_loading_policy(cubes, policy=None):
+    if not policy:
+        policy = _current_effective_policy()
+    while True:
+        n_original_cubes = len(cubes)
+        if policy.use_concatenate and policy.cat_before_merge:
+            cubes = cubes.concatenate()
+        if policy.use_merge:
+            cubes = cubes.merge()
+        if policy.use_concatenate and not policy.cat_before_merge:
+            cubes = cubes.concatenate()
+        n_new_cubes = len(cubes)
+        if not policy.repeat_until_done or n_new_cubes >= n_original_cubes:
+            break
+
+    return cubes
+
+
+def load(uris, constraints=None, callback=None, policy=None):
     """Load any number of Cubes for each constraint.

     For a full description of the arguments, please see the module
@@ -327,7 +447,9 @@ def load(uris, constraints=None, callback=None):
         were random.

     """
-    return _load_collection(uris, constraints, callback).merged().cubes()
+    cubes = _load_collection(uris, constraints, callback).cubes()
+    cubes = _apply_loading_policy(cubes)
+    return cubes


 def load_cube(uris, constraint=None, callback=None):
diff --git a/lib/iris/fileformats/rules.py b/lib/iris/fileformats/rules.py
index 0c57607ff6..79c02736fa 100644
--- a/lib/iris/fileformats/rules.py
+++ b/lib/iris/fileformats/rules.py
@@ -5,6 +5,7 @@
 """Generalised mechanisms for metadata translation and cube construction."""

 import collections
+import threading
 import warnings

 import cf_units
@@ -143,7 +144,11 @@ class _ReferenceError(Exception):

 def _dereference_args(factory, reference_targets, regrid_cache, cube):
-    """Convert all the arguments for a factory into concrete coordinates."""
+    """Convert all the arguments for a factory into concrete coordinates.
+
+    Note: where multiple reference fields define an additional dimension, this routine
+    returns a modified 'cube', with the necessary additional dimensions.
+    """
     args = []
     for arg in factory.args:
         if isinstance(arg, Reference):
@@ -178,6 +183,7 @@ def _dereference_args(factory, reference_targets, regrid_cache, cube):
             # If it wasn't a Reference, then arg is a dictionary
             # of keyword arguments for cube.coord(...).
             args.append(cube.coord(**arg))
+
     return args, cube

@@ -224,18 +230,24 @@ def _ensure_aligned(regrid_cache, src_cube, target_cube):
         # single, distinct dimension.
# PP-MOD: first promote any scalar coords when needed as dims for target_coord in target_dimcoords: - if not target_cube.coord_dims(target_coord): + from iris import LOAD_POLICY + + if ( + not target_cube.coord_dims(target_coord) + and LOAD_POLICY.support_multiple_references + ): # The chosen coord is not a dimcoord in the target (yet) # Make it one with 'new_axis' from iris.util import new_axis + _MULTIREF_DETECTION.found_multiple_refs = True # Include the other coords on that dim in the src : this means the # src merge identifies which belong on that dim # (e.g. 'forecast_period' along with 'time') (src_dim,) = src_cube.coord_dims(target_coord) # should have 1 dim promote_other_coords = [ target_cube.coord(src_coord) - for src_coord in src_cube.coords(dimensions=src_dim) + for src_coord in src_cube.coords(contains_dimension=src_dim) if src_coord.name() != target_coord.name() ] target_cube = new_axis( @@ -364,9 +376,35 @@ def _resolve_factory_references( aux_factory = factory.factory_class(*args) cube.add_aux_factory(aux_factory) + # In the case of multiple references which vary on a new dimension + # (such as time-dependent orography or surface-pressure), the cube may get replaced + # by one with a new dimension. + # In that case we must update the factory so its dependencies are coords of the + # new cube. + cube_coord_ids = [ + id(coord) for coord, _ in cube._dim_coords_and_dims + cube._aux_coords_and_dims + ] + for factory in cube.aux_factories: + for name, dep in list(factory.dependencies.items()): + if id(dep) not in cube_coord_ids: + factory.update(dep, cube.coord(dep)) + return cube +class MultipleReferenceFieldDetector(threading.local): + def __init__(self): + self.found_multiple_refs = False + + +# A single global object (per thread) to record whether multiple reference fields +# (e.g. time-dependent orography, or surface pressure fields) have been detected during +# the latest load operation. +# This is used purely to implement the iris.LOAD_POLICY.multiref_triggers_concatenate +# functionality. +_MULTIREF_DETECTION = MultipleReferenceFieldDetector() + + def _load_pairs_from_fields_and_filenames( fields_and_filenames, converter, user_callback_wrapper=None ): @@ -376,6 +414,7 @@ def _load_pairs_from_fields_and_filenames( # needs a filename associated with each field to support the load callback. concrete_reference_targets = {} results_needing_reference = [] + for field, filename in fields_and_filenames: # Convert the field to a Cube, passing down the 'converter' function. cube, factories, references = _make_cube(field, converter) From a6928629700f47821e567311359122d862febf04 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Fri, 11 Oct 2024 15:22:33 +0100 Subject: [PATCH 06/33] Add temporary testcode. NB no actual test, just printout. 
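
The module name does not match the usual "test_*.py" pattern, so it is not
expected to be collected automatically.  It can be exercised by hand - a
minimal sketch, assuming an importable source checkout:

    # Run the temporary round-trip check directly : it only prints cubes.
    from iris.tests.testroundtrip_hybrid_factory_H_or_P import test_roundtrip

    test_roundtrip()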
--- .../testroundtrip_hybrid_factory_H_or_P.py | 197 ++++++++++++++++++ 1 file changed, 197 insertions(+) create mode 100644 lib/iris/tests/testroundtrip_hybrid_factory_H_or_P.py diff --git a/lib/iris/tests/testroundtrip_hybrid_factory_H_or_P.py b/lib/iris/tests/testroundtrip_hybrid_factory_H_or_P.py new file mode 100644 index 0000000000..257ca6b0b1 --- /dev/null +++ b/lib/iris/tests/testroundtrip_hybrid_factory_H_or_P.py @@ -0,0 +1,197 @@ +import numpy as np + +import iris +from iris.aux_factory import HybridHeightFactory, HybridPressureFactory +from iris.coord_systems import GeogCS +from iris.coords import AuxCoord, DimCoord +from iris.cube import Cube, CubeList +from iris.fileformats.pp import EARTH_RADIUS, STASH + + +def make_hybrid_z_testdata( + nt=2, + nz=3, + ny=4, + nx=3, + hybrid_zcoord_type="height", + make_reference_time_dependent=True, + include_reference_as_cube=False, +): + crs = GeogCS(EARTH_RADIUS) + t_dim, z_dim, y_dim, x_dim = 0, 1, 2, 3 + co_t = DimCoord( + np.arange(nt, dtype=np.float32), + standard_name="time", + units="days since 2000-01-01", + ) + co_z = DimCoord( + np.arange(1, nz + 1, dtype=np.int32), + standard_name="model_level_number", + units=1, + ) + co_y = DimCoord( + np.linspace(0, 120.0, ny, dtype=np.float32), + standard_name="latitude", + units="degrees", + coord_system=crs, + ) + co_x = DimCoord( + np.linspace(-30.0, 50.0, nx, dtype=np.float32), + standard_name="longitude", + units="degrees", + coord_system=crs, + ) + cube = Cube( + np.zeros((nt, nz, ny, nx), dtype=np.float32), + standard_name="air_temperature", + units="K", + dim_coords_and_dims=zip((co_t, co_z, co_y, co_x), (t_dim, z_dim, y_dim, x_dim)), + ) + + delta_vals = np.linspace(200.0, 600, nz, dtype=np.float32) + if hybrid_zcoord_type == "pressure": + co_delta = DimCoord(delta_vals, long_name="delta", units="hPa") + elif hybrid_zcoord_type == "height": + co_delta = DimCoord(delta_vals, long_name="level_height", units="m") + else: + raise ValueError(f"Unknown hybrid type: {hybrid_zcoord_type}") + + sigma_vals = np.linspace(0.2, 0.8, nz, dtype=np.float32) + co_sigma = DimCoord(sigma_vals, long_name="sigma", units=1) + + # Note: will not save as HH to PP without bounds on delta+sigma + for coord in (co_delta, co_sigma): + coord.guess_bounds() + cube.add_aux_coord(co_delta, z_dim) + cube.add_aux_coord(co_sigma, z_dim) + + refdata = np.arange(nt * ny * nx, dtype=np.float32) + refdata = 1000.0 + refdata.reshape(nt, ny, nx) + if hybrid_zcoord_type == "pressure": + co_ref = AuxCoord( + refdata, + standard_name="surface_air_pressure", + units="hPa", + attributes={"STASH": STASH(model=1, section=0, item=409)}, + ) + elif hybrid_zcoord_type == "height": + co_ref = AuxCoord( + refdata, + standard_name="surface_altitude", + units="m", + attributes={"STASH": STASH(model=1, section=0, item=33)}, + ) + else: + raise ValueError(f"Unknown hybrid type: {hybrid_zcoord_type}") + + ref_dims = (t_dim, y_dim, x_dim) + if not make_reference_time_dependent: + co_ref = co_ref[0] + ref_dims = ref_dims[1:] + + cube.add_aux_coord(co_ref, ref_dims) + if hybrid_zcoord_type == "pressure": + factory = HybridPressureFactory( + sigma=co_sigma, delta=co_delta, surface_air_pressure=co_ref + ) + elif hybrid_zcoord_type == "height": + factory = HybridHeightFactory(sigma=co_sigma, delta=co_delta, orography=co_ref) + else: + raise ValueError(f"Unknown hybrid type: {hybrid_zcoord_type}") + + cube.add_aux_factory(factory) + + cubes = CubeList([cube]) + + if include_reference_as_cube: + ref_dimcoords = [ + cube.coord(dim_coords=True, 
dimensions=cube_refdim)
+            for cube_refdim in cube.coord_dims(co_ref)
+        ]
+        reference_cube = Cube(
+            co_ref.points,
+            standard_name=co_ref.standard_name,
+            units=co_ref.units,
+            dim_coords_and_dims=[
+                (ref_dimcoord, i_refdim)
+                for i_refdim, ref_dimcoord in enumerate(ref_dimcoords)
+            ],
+            attributes=co_ref.attributes,
+        )
+        if not reference_cube.coords("time"):
+            # Add a dummy time coordinate to non-time-dependent reference cube
+            # - mostly because otherwise it cannot be saved to GRIB format
+            # NOTE: we give this a different nominal time to any of the data : when
+            # there is only one reference field, its recorded time value should be
+            # **ignored** by the loader
+            reference_cube.add_aux_coord(
+                DimCoord(
+                    np.array(0, dtype=np.float32),
+                    standard_name="time",
+                    units="days since 1900-01-01",
+                )
+            )
+        cubes.append(reference_cube)
+
+    return cubes
+
+
+# _HYBRID_ZCOORD_TYPE = "height"
+_HYBRID_ZCOORD_TYPE = "pressure"
+
+# _FILENAME = "tmp.nc"  # Naturally, this "just works"
+_FILENAME = "tmp.pp"
+# _FILENAME = "tmp.grib2"
+
+_TEST_TIME_DEPENDENT = True
+# _TEST_TIME_DEPENDENT = False
+
+
+def check_create():
+    global _FILENAME, _HYBRID_ZCOORD_TYPE, _TEST_TIME_DEPENDENT
+    file_ext = _FILENAME.split(".")[-1]
+    include_ref = file_ext in ("grib2", "pp")
+
+    data = make_hybrid_z_testdata(
+        hybrid_zcoord_type=_HYBRID_ZCOORD_TYPE,
+        include_reference_as_cube=include_ref,
+        make_reference_time_dependent=_TEST_TIME_DEPENDENT,
+    )
+
+    print()
+    print(f"Cubes saving to {_FILENAME}:")
+    print(data)
+    for cube in data:
+        print(cube)
+
+    _EXTRA_COORDS_DEBUG = False
+    if _EXTRA_COORDS_DEBUG:
+        (datacube,) = [cube for cube in data if "surface" not in cube.name()]
+        for name in ("level_height", "sigma", "surface_altitude"):
+            print(f"Coord({name}):")
+            print(datacube.coord(name))
+        print("Ref cube:")
+        print(data.extract_cube("surface_altitude"))
+
+    iris.save(data, _FILENAME)
+    readback = iris.load(_FILENAME)
+    # Apply extra concat : as "raw" cubes with a time dimension won't merge
+    readback = readback.concatenate()
+    print()
+    print("Readback cubes:")
+    print(readback)
+    for cube in readback:
+        print(cube)
+
+
+def test_roundtrip():
+    print("Check with Iris from : ", iris.__file__)
+    from iris import (
+        LOAD_POLICY,
+        LOAD_POLICY_RECOMMENDED,
+        # LOAD_POLICY_LEGACY,
+        # LOAD_POLICY_COMPREHENSIVE
+    )
+
+    with LOAD_POLICY.context(LOAD_POLICY_RECOMMENDED):
+        check_create()

From 653561f725777597b81cc1c68792e1984fd60734 Mon Sep 17 00:00:00 2001
From: Patrick Peglar
Date: Mon, 14 Oct 2024 17:39:42 +0100
Subject: [PATCH 07/33] Replaced _CubeFilterCollection.merged() with
 combined(); replace uses in load / load_cube / load_cubes.
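
With this change the load functions all route through the policy-driven
combine step.  A usage sketch (the filename here is hypothetical; the
keywords are those defined by the LoadPolicy object added in PATCH 05):

    import iris

    # Temporarily enable a concatenate step, repeated until the result is stable.
    with iris.LOAD_POLICY.context(use_concatenate=True, repeat_until_done=True):
        cubes = iris.load("my_data.pp")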
--- lib/iris/__init__.py | 13 +++++++------ lib/iris/cube.py | 17 +++++++++++++---- 2 files changed, 20 insertions(+), 10 deletions(-) diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py index d58d4b5dec..8b6d74d804 100644 --- a/lib/iris/__init__.py +++ b/lib/iris/__init__.py @@ -405,7 +405,7 @@ def _current_effective_policy(): return policy -def _apply_loading_policy(cubes, policy=None): +def _combine_with_loading_policy(cubes, policy=None, merge_require_unique=False): if not policy: policy = _current_effective_policy() while True: @@ -413,7 +413,7 @@ def _apply_loading_policy(cubes, policy=None): if policy.use_concatenate and policy.cat_before_merge: cubes = cubes.concatenate() if policy.use_merge: - cubes = cubes.merge() + cubes = cubes.merge(unique=merge_require_unique) if policy.use_concatenate and not policy.cat_before_merge: cubes = cubes.concatenate() n_new_cubes = len(cubes) @@ -447,8 +447,7 @@ def load(uris, constraints=None, callback=None, policy=None): were random. """ - cubes = _load_collection(uris, constraints, callback).cubes() - cubes = _apply_loading_policy(cubes) + cubes = _load_collection(uris, constraints, callback).combined().cubes() return cubes @@ -477,9 +476,11 @@ def load_cube(uris, constraint=None, callback=None): if len(constraints) != 1: raise ValueError("only a single constraint is allowed") - cubes = _load_collection(uris, constraints, callback).cubes() + cubes = _load_collection(uris, constraints, callback).combined(unique=False).cubes() try: + # NOTE: this call currently retained to preserve the legacy exceptions + # TODO: replace with simple testing to duplicate the relevant error cases cube = cubes.merge_cube() except iris.exceptions.MergeError as e: raise iris.exceptions.ConstraintMismatchError(str(e)) @@ -514,7 +515,7 @@ def load_cubes(uris, constraints=None, callback=None): """ # Merge the incoming cubes - collection = _load_collection(uris, constraints, callback).merged() + collection = _load_collection(uris, constraints, callback).combined() # Make sure we have exactly one merged cube per constraint bad_pairs = [pair for pair in collection.pairs if len(pair) != 1] diff --git a/lib/iris/cube.py b/lib/iris/cube.py index beda79ea6a..38ec3896ef 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -64,6 +64,8 @@ class _CubeFilter: """A constraint, paired with a list of cubes matching that constraint.""" def __init__(self, constraint, cubes=None): + from iris.cube import CubeList + self.constraint = constraint if cubes is None: cubes = CubeList() @@ -78,7 +80,7 @@ def add(self, cube): if sub_cube is not None: self.cubes.append(sub_cube) - def merged(self, unique=False): + def combined(self, unique=False): """Return a new :class:`_CubeFilter` by merging the list of cubes. Parameters @@ -88,7 +90,12 @@ def merged(self, unique=False): duplicate cubes are detected. """ - return _CubeFilter(self.constraint, self.cubes.merge(unique)) + from iris import _combine_with_loading_policy + + return _CubeFilter( + self.constraint, + _combine_with_loading_policy(self.cubes, merge_require_unique=unique), + ) class _CubeFilterCollection: @@ -114,12 +121,14 @@ def add_cube(self, cube): def cubes(self): """Return all the cubes in this collection in a single :class:`CubeList`.""" + from iris.cube import CubeList + result = CubeList() for pair in self.pairs: result.extend(pair.cubes) return result - def merged(self, unique=False): + def combined(self, unique=False): """Return a new :class:`_CubeFilterCollection` by merging all the cube lists of this collection. 
Parameters @@ -129,7 +138,7 @@ def merged(self, unique=False): duplicate cubes are detected. """ - return _CubeFilterCollection([pair.merged(unique) for pair in self.pairs]) + return _CubeFilterCollection([pair.combined(unique) for pair in self.pairs]) class CubeList(list): From 393be734e9ddcfc43c56e0694c95eb1670e76b4b Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Mon, 14 Oct 2024 18:25:05 +0100 Subject: [PATCH 08/33] Fix licence header --- lib/iris/tests/testroundtrip_hybrid_factory_H_or_P.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/lib/iris/tests/testroundtrip_hybrid_factory_H_or_P.py b/lib/iris/tests/testroundtrip_hybrid_factory_H_or_P.py index 257ca6b0b1..30546e9db3 100644 --- a/lib/iris/tests/testroundtrip_hybrid_factory_H_or_P.py +++ b/lib/iris/tests/testroundtrip_hybrid_factory_H_or_P.py @@ -1,3 +1,9 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. +"""Temporary exercising code for multiple-factory-reference changes.""" + import numpy as np import iris From ca3955f961a7a48baeafac9c74ef2e7fd8a7ebe1 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Mon, 14 Oct 2024 18:25:36 +0100 Subject: [PATCH 09/33] Fix to handle empty reference correctly. --- lib/iris/fileformats/rules.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/iris/fileformats/rules.py b/lib/iris/fileformats/rules.py index 79c02736fa..2a1a74f374 100644 --- a/lib/iris/fileformats/rules.py +++ b/lib/iris/fileformats/rules.py @@ -386,7 +386,7 @@ def _resolve_factory_references( ] for factory in cube.aux_factories: for name, dep in list(factory.dependencies.items()): - if id(dep) not in cube_coord_ids: + if dep and id(dep) not in cube_coord_ids: factory.update(dep, cube.coord(dep)) return cube From 5fa2b4604d2d515eaeb8e8445e2022cedc561224 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Mon, 14 Oct 2024 18:26:02 +0100 Subject: [PATCH 10/33] Fix tests. --- lib/iris/tests/test_pp_to_cube.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/iris/tests/test_pp_to_cube.py b/lib/iris/tests/test_pp_to_cube.py index da49ff8188..a61703761f 100644 --- a/lib/iris/tests/test_pp_to_cube.py +++ b/lib/iris/tests/test_pp_to_cube.py @@ -81,7 +81,7 @@ def test_regrid_missing_coord(self): # If the target cube is missing one of the source dimension # coords, ensure the re-grid fails nicely - i.e. returns None. 
self.target.remove_coord("bar") - new_ref = iris.fileformats.rules._ensure_aligned({}, self.ref, self.target) + new_ref, _ = iris.fileformats.rules._ensure_aligned({}, self.ref, self.target) self.assertIsNone(new_ref) def test_regrid_codimension(self): @@ -92,11 +92,11 @@ def test_regrid_codimension(self): new_foo = self.target.coord("bar").copy() new_foo.rename("foo") self.target.add_aux_coord(new_foo, 0) - new_ref = iris.fileformats.rules._ensure_aligned({}, self.ref, self.target) + new_ref, _ = iris.fileformats.rules._ensure_aligned({}, self.ref, self.target) self.assertIsNone(new_ref) def test_regrid_identity(self): - new_ref = iris.fileformats.rules._ensure_aligned({}, self.ref, self.target) + new_ref, _ = iris.fileformats.rules._ensure_aligned({}, self.ref, self.target) # Bounds don't make it through the re-grid process self.ref.coord("bar").bounds = None self.ref.coord("foo").bounds = None From 801f9e2f4f809c6e59e7c04b4b5de4992ee63075 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Tue, 15 Oct 2024 14:25:47 +0100 Subject: [PATCH 11/33] Simplify policy options and tidy api. --- lib/iris/__init__.py | 287 +++++++++++++----- lib/iris/cube.py | 12 +- .../testroundtrip_hybrid_factory_H_or_P.py | 16 +- 3 files changed, 225 insertions(+), 90 deletions(-) diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py index 8b6d74d804..43869c20b7 100644 --- a/lib/iris/__init__.py +++ b/lib/iris/__init__.py @@ -94,7 +94,7 @@ def callback(cube, field, filename): import itertools import os.path import threading -from typing import Callable, Literal +from typing import Callable, Literal, Mapping import iris._constraints import iris.config @@ -121,7 +121,10 @@ def callback(cube, field, filename): "FUTURE", "Future", "IrisDeprecation", + "LOAD_POLICY", + "LoadPolicy", "NameConstraint", + "combine_cubes", "load", "load_cube", "load_cubes", @@ -313,116 +316,242 @@ def _load_collection(uris, constraints=None, callback=None): class LoadPolicy(threading.local): - """Object defining a general loading strategy.""" + """A container for loading strategy options. - _allkeys = ( + Controls merge/concatenate usage, and the handling of cases where multiple reference + fields merge to define an additional dimension (e.g. a time-varying orography). + + Options can be set directly, or via :meth:`~iris.LoadPolicy.set`, or changed for + the scope of a code block with :meth:`~iris.LoadPolicy.context`. + + .. testsetup:: + + from iris import LOAD_POLICY + + Examples + -------- + >>> LOAD_POLICY.set("legacy") + >>> print(LOAD_POLICY) + LoadPolicy(support_multiple_references=False, merge_concat_sequence='m', repeat_until_unchanged=False) + >>> LOAD_POLICY.support_multiple_references = True + >>> print(LOAD_POLICY) + LoadPolicy(support_multiple_references=True, merge_concat_sequence='m', repeat_until_unchanged=False) + >>> LOAD_POLICY.set(merge_concat_sequence="cm") + >>> print(LOAD_POLICY) + LoadPolicy(support_multiple_references=True, merge_concat_sequence='cm', repeat_until_unchanged=False) + >>> with LOAD_POLICY.context("comprehensive"): + ... print(LOAD_POLICY) + ... 
print(LOAD_POLICY) + LoadPolicy(support_multiple_references=True, merge_concat_sequence='mc', repeat_until_unchanged=True) + LoadPolicy(support_multiple_references=True, merge_concat_sequence='cm', repeat_until_unchanged=False) + + """ + + # Useful constants + OPTION_KEYS = ( "support_multiple_references", - "multiref_triggers_concatenate", - "use_concatenate", - "use_merge", - "cat_before_merge", - "repeat_until_done", + "merge_concat_sequence", + "repeat_until_unchanged", ) + _OPTIONS_ALLOWED_VALUES = { + "support_multiple_references": (False, True), + "merge_concat_sequence": ("", "m", "c", "mc", "cm"), + "repeat_until_unchanged": (False, True), + } + SETTINGS = { + "legacy": dict( + support_multiple_references=False, + merge_concat_sequence="m", + repeat_until_unchanged=False, + ), + "default": dict( + support_multiple_references=True, + merge_concat_sequence="m", + repeat_until_unchanged=False, + ), + "recommended": dict( + support_multiple_references=True, + merge_concat_sequence="mc", + repeat_until_unchanged=False, + ), + "comprehensive": dict( + support_multiple_references=True, + merge_concat_sequence="mc", + repeat_until_unchanged=True, + ), + } + + def __init__(self, options: str | dict | None = None, **kwargs): + """Create loading strategy control object.""" + self.set("default") + self.set(options, **kwargs) + + def __setattr__(self, key, value): + if key not in self.OPTION_KEYS: + raise KeyError(f"LoadPolicy object has no property '{key}'.") + + allowed_values = self._OPTIONS_ALLOWED_VALUES[key] + if value not in allowed_values: + msg = ( + f"{value!r} is not a valid setting for LoadPolicy.{key} : " + f"must be one of '{allowed_values}'." + ) + raise ValueError(msg) + + self.__dict__[key] = value + + def set(self, options: str | dict | None = None, **kwargs): + """Set new options. - def __init__( - self, - support_multiple_references: bool = False, - multiref_triggers_concatenate: bool = False, - use_concatenate: bool = False, - use_merge: bool = True, - cat_before_merge: bool = False, - repeat_until_done: bool = False, - ): - """Container for loading controls.""" - self.support_multiple_references = support_multiple_references - self.multiref_triggers_concatenate = multiref_triggers_concatenate - self.use_concatenate = use_concatenate - self.use_merge = use_merge - self.cat_before_merge = cat_before_merge - self.repeat_until_done = repeat_until_done + Parameters + ---------- + * options : str or dict, optional + A dictionary of options values, or the name of one of the + :data:`~iris.LoadPolicy.SETTINGS` standard option sets, + e.g. "legacy" or "comprehensive". + * kwargs : dict + Individual options options, from :data:`~iris.LoadPolicy.OPTION_KEYS`. + + Note + ---- + Keyword arguments are applied after the 'options' arg, and + so will take precedence. + + """ + if options is None: + options = {} + elif isinstance(options, str) and options in self.SETTINGS: + options = self.SETTINGS[options] + elif not isinstance(options, Mapping): + msg = ( + f"Invalid arg options='{options!r}' : " + f"must be a dict, or one of {self.SETTINGS.keys()}" + ) + raise ValueError(msg) + + # Override any options with keywords + options.update(**kwargs) + bad_keys = [key for key in options if key not in self.OPTION_KEYS] + if bad_keys: + msg = f"Unknown options {bad_keys} : valid options are {self.OPTION_KEYS}." + raise ValueError(msg) + + # Implement all options by changing own content. 
+ for key, value in options.items(): + setattr(self, key, value) + + def settings(self): + """Return a options dict containing the current state.""" + return {key: getattr(self, key) for key in self.OPTION_KEYS} def __repr__(self): - msg = ( - "LoadPolicy(" - f"support_multiple_references={self.support_multiple_references}, " - f"multiref_triggers_concatenate={self.multiref_triggers_concatenate}, " - f"use_concatenate={self.use_concatenate}, " - f"use_merge={self.use_merge}, " - f"cat_before_merge={self.cat_before_merge}, " - f"repeat_until_done={self.repeat_until_done}" - ")" - ) + msg = f"{self.__class__.__name__}(" + msg += ", ".join(f"{key}={getattr(self, key)!r}" for key in self.OPTION_KEYS) + msg += ")" return msg - def copy(self): - return LoadPolicy(**{key: getattr(self, key) for key in self._allkeys}) - @contextlib.contextmanager - def context(self, policy=None, **kwargs): - """Return context manager for temporary options. + def context(self, settings=None, **kwargs): + """Return a context manager applying given options. - Modifies the given parameters within a context, for the active thread. - """ - # Save the current statr - current_state = self.__dict__.copy() + Parameters + ---------- + settings : str or dict + Options dictionary or name, as for :meth:`~LoadPolicy.set`. + kwargs : dict + Option values, as for :meth:`~LoadPolicy.set`. - # Update the state from given policy object and/or method keywords - for name in self._allkeys: - value = getattr(self, name) - if policy and hasattr(policy, name): - value = getattr(policy, name) - if name in kwargs: - value = kwargs[name] - setattr(self, name, value) + Examples + -------- + .. testsetup:: + + import iris + from iris import LOAD_POLICY, sample_data_path + path = sample_data_path("hybrid_height.nc") + + >>> with LOAD_POLICY.context("comprehensive"): + ... cubes = iris.load(path) + """ + # Save the current state + saved_settings = self.settings() + + # Apply the new options and execute the context try: - # Execute the context + self.set(settings, **kwargs) yield finally: - # Return the state - self.__dict__.clear() - self.__dict__.update(current_state) + # Re-establish the former state + self.set(saved_settings) -LOAD_POLICY = LoadPolicy() -LOAD_POLICY_LEGACY = LoadPolicy() -LOAD_POLICY_RECOMMENDED = LoadPolicy( - support_multiple_references=True, multiref_triggers_concatenate=True -) -LOAD_POLICY_COMPREHENSIVE = LoadPolicy( - support_multiple_references=True, use_concatenate=True, repeat_until_done=True -) +# The unique (singleton) policy object +# N.B. FOR NOW, our starting point is "legacy" rather than "default" +# TODO: resolve tests as needed, to pass with "default". +LOAD_POLICY = LoadPolicy("legacy") -def _current_effective_policy(): - policy = LOAD_POLICY - if not policy.use_concatenate and policy.multiref_triggers_concatenate: - from iris.fileformats.rules import _MULTIREF_DETECTION +def combine_cubes(cubes, options=None, merge_require_unique=False): + """Combine cubes as for load, according to "loading policy" options. - if _MULTIREF_DETECTION.found_multiple_refs: - policy = policy.copy() - policy.use_concatenate = True - return policy + Applies :meth:`~iris.cube.CubeList.merge`/:meth:`~iris.cube.CubeList.concatenate` + steps to the given cubes, as determined by the 'settings'. + Parameters + ---------- + cubes : list of :class:`~iris.cube.Cube` + A list of cubes to combine. + options : dict or str + Settings, as described for :meth:`iris.LOAD_POLICY.set`. + Defaults to current :meth:`iris.LOAD_POLICY.settings`. 
+ merge_require_unique : bool + Value for the 'unique' keyword in any merge operations. + + Returns + ------- + list of :class:`~iris.cube.Cube` -def _combine_with_loading_policy(cubes, policy=None, merge_require_unique=False): - if not policy: - policy = _current_effective_policy() + """ + if not options: + options = LOAD_POLICY.settings() while True: n_original_cubes = len(cubes) - if policy.use_concatenate and policy.cat_before_merge: + sequence = options["merge_concat_sequence"] + + if sequence[0] == "c": + # concat if it comes first cubes = cubes.concatenate() - if policy.use_merge: + if "m" in sequence: + # merge if requested cubes = cubes.merge(unique=merge_require_unique) - if policy.use_concatenate and not policy.cat_before_merge: + if sequence[-1] == "c": + # concat if it comes last cubes = cubes.concatenate() - n_new_cubes = len(cubes) - if not policy.repeat_until_done or n_new_cubes >= n_original_cubes: + + # Repeat if requested, and this step reduced the number of cubes + if not options["repeat_until_unchanged"] or len(cubes) < n_original_cubes: break return cubes +def _combine_load_cubes(cubes, merge_require_unique=False): + # A special version to call combine_cubes while also implementing the + # _MULTIREF_DETECTION behaviour + options = LOAD_POLICY.settings() + if ( + options["support_multiple_references"] + and "c" not in options["merge_concat_sequence"] + ): + # Add a concatenate to implement the "multiref triggers concatenate" mechanism + from iris.fileformats.rules import _MULTIREF_DETECTION + + if _MULTIREF_DETECTION.found_multiple_refs: + options["merge_concat_sequence"] += "c" + + return combine_cubes(cubes, options, merge_require_unique=merge_require_unique) + + def load(uris, constraints=None, callback=None, policy=None): """Load any number of Cubes for each constraint. diff --git a/lib/iris/cube.py b/lib/iris/cube.py index 38ec3896ef..389ef5f981 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -81,7 +81,9 @@ def add(self, cube): self.cubes.append(sub_cube) def combined(self, unique=False): - """Return a new :class:`_CubeFilter` by merging the list of cubes. + """Return a new :class:`_CubeFilter` by combining the list of cubes. + + Combines the list of cubes with :func:`~iris._combine_load_cubes`. Parameters ---------- @@ -90,11 +92,11 @@ def combined(self, unique=False): duplicate cubes are detected. """ - from iris import _combine_with_loading_policy + from iris import _combine_load_cubes return _CubeFilter( self.constraint, - _combine_with_loading_policy(self.cubes, merge_require_unique=unique), + _combine_load_cubes(self.cubes, merge_require_unique=unique), ) @@ -129,7 +131,9 @@ def cubes(self): return result def combined(self, unique=False): - """Return a new :class:`_CubeFilterCollection` by merging all the cube lists of this collection. + """Return a new :class:`_CubeFilterCollection` by combining all the cube lists of this collection. + + Combines each list of cubes using :func:`~iris._combine_load_cubes`. 
Parameters
         ----------
         unique : bool
             If True, raise an error if
             duplicate cubes are detected.

         """
-        return _CubeFilterCollection([pair.merged(unique) for pair in self.pairs])
+        return _CubeFilterCollection([pair.combined(unique) for pair in self.pairs])


 class CubeList(list):
diff --git a/lib/iris/tests/testroundtrip_hybrid_factory_H_or_P.py b/lib/iris/tests/testroundtrip_hybrid_factory_H_or_P.py
index 30546e9db3..9b88ffc489 100644
--- a/lib/iris/tests/testroundtrip_hybrid_factory_H_or_P.py
+++ b/lib/iris/tests/testroundtrip_hybrid_factory_H_or_P.py
@@ -192,12 +192,14 @@ def check_create():

 def test_roundtrip():
     print("Check with Iris from : ", iris.__file__)
-    from iris import (
-        LOAD_POLICY,
-        LOAD_POLICY_RECOMMENDED,
-        # LOAD_POLICY_LEGACY,
-        # LOAD_POLICY_COMPREHENSIVE
-    )
+    from iris import LOAD_POLICY

-    with LOAD_POLICY.context(LOAD_POLICY_RECOMMENDED):
+    # print(LOAD_POLICY)
+    # LOAD_POLICY.repeat_until_unchanged = 4
+    with LOAD_POLICY.context("default"):
+        # print(LOAD_POLICY)
+        # print("merge/concat = ", LOAD_POLICY.merge_concat_sequence)
         check_create()
+
+    # print(LOAD_POLICY)
+    # print("now legacy mode ? ", LOAD_POLICY.settings() == LOAD_POLICY.SETTINGS["legacy"])

From edfea0554efdcb563fb1ddd31c61a971cb4d3939 Mon Sep 17 00:00:00 2001
From: Patrick Peglar
Date: Tue, 15 Oct 2024 15:36:31 +0100
Subject: [PATCH 12/33] More documentation of loading options.

---
 lib/iris/__init__.py | 53 +++++++++++++++++++++++++++++++++++++++++++-
 1 file changed, 52 insertions(+), 1 deletion(-)

diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py
index 43869c20b7..34ab336c5d 100644
--- a/lib/iris/__init__.py
+++ b/lib/iris/__init__.py
@@ -328,6 +328,56 @@ class LoadPolicy(threading.local):

         from iris import LOAD_POLICY

+    Notes
+    -----
+    The individual configurable options are :
+
+    * ``support_multiple_references`` = True / False
+        When enabled, the presence of multiple aux-factory reference cubes, which merge
+        to define an extra dimension, will add that dimension to the loaded cubes.
+        This is essential for correct support of time-dependent hybrid coordinates (i.e.
+        aux factories) when loading from fields-based data (e.g. PP or GRIB).
+        For example (notably) time-dependent orography in UM data on hybrid-heights.
+
+        In addition, when such multiple references are detected, an extra concatenate
+        step is added to the 'merge_concat_sequence' (see below), if none is already
+        configured there.
+
+    * ``merge_concat_sequence`` = "m" / "c" / "cm" / "mc"
+        Specifies whether to merge, or concatenate, or both in either order.
+        This is the :func:`~iris.combine_cubes` operation applied to loaded data.
+
+    * ``repeat_until_unchanged`` = True / False
+        When enabled, the configured "combine" operation will be repeated until the
+        result is stable (no more cubes are combined).
+
+    Several common sets of options are provided in :data:`~iris.LOAD_POLICY.SETTINGS` :
+
+    * ``"legacy"``
+        Produces results identical to Iris versions < 3.11, i.e. before the varying
+        hybrid references were supported.
+
+    * ``"default"``
+        As "legacy" except that ``support_multiple_references=True``. This differs
+        from "legacy" only when multiple mergeable reference fields are encountered,
+        in which case incoming cubes are extended into the extra dimension, and a
+        concatenate step is added
+
+    * ``"recommended"``
+        Enables multiple reference handling, and applies a merge step followed by
+        a concatenate step.
+
+    * ``"comprehensive"``
+        Like "recommended", but will also *repeat* the merge+concatenate steps until no
+        further change is produced.
+
+        .. note ::
+
+            The 'comprehensive' makes the maximum effort to reduce the number of cubes
+            to a minimum. However, it still cannot combine cubes with a mixture of
+            matching dimension and scalar coordinates.
This may be supported at some + later date, but for now is not possible without specific user intervention. + Examples -------- >>> LOAD_POLICY.set("legacy") @@ -485,10 +535,11 @@ def context(self, settings=None, **kwargs): self.set(saved_settings) +#: Object containing file loading options. +LOAD_POLICY = LoadPolicy("legacy") # The unique (singleton) policy object # N.B. FOR NOW, our starting point is "legacy" rather than "default" # TODO: resolve tests as needed, to pass with "default". -LOAD_POLICY = LoadPolicy("legacy") def combine_cubes(cubes, options=None, merge_require_unique=False): From 984a59c9ac96f5f15c54a02f4b972c2cb7208074 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Fri, 18 Oct 2024 20:46:15 +0100 Subject: [PATCH 13/33] Fix doctest. --- lib/iris/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py index 34ab336c5d..61a05818ab 100644 --- a/lib/iris/__init__.py +++ b/lib/iris/__init__.py @@ -391,8 +391,8 @@ class LoadPolicy(threading.local): LoadPolicy(support_multiple_references=True, merge_concat_sequence='cm', repeat_until_unchanged=False) >>> with LOAD_POLICY.context("comprehensive"): ... print(LOAD_POLICY) - ... print(LOAD_POLICY) LoadPolicy(support_multiple_references=True, merge_concat_sequence='mc', repeat_until_unchanged=True) + >>> print(LOAD_POLICY) LoadPolicy(support_multiple_references=True, merge_concat_sequence='cm', repeat_until_unchanged=False) """ From 027b7a0a84d3809aee2229283aa3b9f2f71abed9 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Sun, 20 Oct 2024 18:08:26 +0100 Subject: [PATCH 14/33] Fix repeated combination. --- lib/iris/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py index 61a05818ab..47e6a554d2 100644 --- a/lib/iris/__init__.py +++ b/lib/iris/__init__.py @@ -580,7 +580,7 @@ def combine_cubes(cubes, options=None, merge_require_unique=False): cubes = cubes.concatenate() # Repeat if requested, and this step reduced the number of cubes - if not options["repeat_until_unchanged"] or len(cubes) < n_original_cubes: + if not options["repeat_until_unchanged"] or len(cubes) >= n_original_cubes: break return cubes From 381e7da19f582158783e7b62ad2c7bed04344b2a Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Mon, 21 Oct 2024 17:06:11 +0100 Subject: [PATCH 15/33] Minor docs improvements. --- lib/iris/__init__.py | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py index 47e6a554d2..89bdc063a9 100644 --- a/lib/iris/__init__.py +++ b/lib/iris/__init__.py @@ -361,7 +361,7 @@ class LoadPolicy(threading.local): As "legacy" except that ``support_multiple_references=True``. This differs from "legacy" only when multiple mergeable reference fields are encountered, in which case incoming cubes are extended into the extra dimension, and a - concatenate step is added + concatenate step is added. * ``"recommended"`` Enables multiple reference handling, and applies a merge step followed by @@ -373,10 +373,12 @@ class LoadPolicy(threading.local): .. note :: - The 'comprehensive' makes the maximum effort to reduce the number of cubes - to a minimum. However, it still cannot combine cubes with a mixture of - matching dimension and scalar coordinates. This may be supported at some - later date, but for now is not possible without specific user intervention. 
+ The 'comprehensive' policy makes a maximum effort to reduce the number of + cubes to a minimum. However, it still cannot combine cubes with a mixture + of matching dimension and scalar coordinates. This may be supported at + some later date, but for now is not possible without specific user actions. + + TODO: reference the newer advice on "new_axis" usage. Examples -------- @@ -460,7 +462,7 @@ def set(self, options: str | dict | None = None, **kwargs): :data:`~iris.LoadPolicy.SETTINGS` standard option sets, e.g. "legacy" or "comprehensive". * kwargs : dict - Individual options options, from :data:`~iris.LoadPolicy.OPTION_KEYS`. + Individual option settings, from :data:`~iris.LoadPolicy.OPTION_KEYS`. Note ---- @@ -491,7 +493,7 @@ def set(self, options: str | dict | None = None, **kwargs): setattr(self, key, value) def settings(self): - """Return a options dict containing the current state.""" + """Return an options dict containing the current settings.""" return {key: getattr(self, key) for key in self.OPTION_KEYS} def __repr__(self): @@ -579,7 +581,7 @@ def combine_cubes(cubes, options=None, merge_require_unique=False): # concat if it comes last cubes = cubes.concatenate() - # Repeat if requested, and this step reduced the number of cubes + # Repeat if requested, *and* this step reduced the number of cubes if not options["repeat_until_unchanged"] or len(cubes) >= n_original_cubes: break From 022f94af8f81c06ec7ba65f4e93722d42ab61b33 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Fri, 18 Oct 2024 20:42:32 +0100 Subject: [PATCH 16/33] Initial load functions testing (WIP). More --- .../unit/fileformats/test_load_functions.py | 222 ++++++++++++++++++ 1 file changed, 222 insertions(+) create mode 100644 lib/iris/tests/unit/fileformats/test_load_functions.py diff --git a/lib/iris/tests/unit/fileformats/test_load_functions.py b/lib/iris/tests/unit/fileformats/test_load_functions.py new file mode 100644 index 0000000000..080760c4d5 --- /dev/null +++ b/lib/iris/tests/unit/fileformats/test_load_functions.py @@ -0,0 +1,222 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. +"""Unit tests for iris load functions. + +* :func:`iris.load` +* :func:`iris.load_cube` +* :func:`iris.load_cubes` +* :func:`iris.load_raw` +""" + +import re +from typing import Iterable +from unittest import mock + +import numpy as np +import pytest + +import iris +from iris.coords import AuxCoord, DimCoord +from iris.cube import Cube + +_time_unit = "days since 2001-01-01" + + +def cu(n="a", t=0, z=0): + """Create a single test cube. + + All cubes have, potentially, 4 dimensions (z, t, y, x). + The (y, x) dims are always the same, but (z, t) can be scalar, or various lengths. + t/z values which are scalar/vector produce likewise scalar/vector coordinates. + """ + yco = DimCoord(np.arange(3), long_name="latitude", units="degrees") + xco = DimCoord(np.arange(4), long_name="longitude", units="degrees") + dim_coords = [yco, xco] + shape = [3, 4] # the xy shape + scalar_coords = [] + tco = DimCoord( + np.array(t, dtype=np.float32), standard_name="time", units=_time_unit + ) + zco = DimCoord(np.array(z, dtype=np.float32), standard_name="height", units="m") + for tz, tzco in [(t, tco), (z, zco)]: + if isinstance(tz, Iterable): + # N.B. 
insert an extra dim at the front + dim_coords[:0] = [tzco] + shape[:0] = tzco.shape[:1] + else: + scalar_coords.append(tzco) + + cube = Cube( + data=np.zeros(shape), + long_name=n, + dim_coords_and_dims=[(dim, i_dim) for i_dim, dim in enumerate(dim_coords)], + aux_coords_and_dims=[(dim, ()) for dim in scalar_coords], + ) + return cube + + +@pytest.fixture(params=["load", "load_cube", "load_cubes", "load_raw"]) +def loadfunc_name(request): + # N.B. "request" is a standard PyTest fixture + return request.param # Return the name of the attribute to test. + + +def run_testcase(input_cubes, loadfunc_name, constraints=None): + loadfunc = getattr(iris, loadfunc_name) + + def mock_generate_cubes(uris, callback, constraints): + for cube in input_cubes: + yield cube + + try: + with mock.patch("iris._generate_cubes", mock_generate_cubes): + result = loadfunc(input_cubes, constraints) + except Exception as e: + result = e + + return result + + +def debug_result(cubes): + print() + print(cubes) + if isinstance(cubes, iris.cube.CubeList): + print(len(cubes), " cubes..") + for i_cube, cube in enumerate(cubes): + vh = cube.coord("height").points + vt = cube.coord("time").points + print(i_cube, cube.name(), ": h=", vh, " :: t=", vt) + + +def check_result(input_cubes, loadfunc_name, result, expected_results): + if "load_raw" not in expected_results and loadfunc_name == "load_raw": + expected = input_cubes + else: + expected = expected_results[loadfunc_name] + + if isinstance(expected, str): + # We expect an error result : stored 'expected' is a regexp to match its repr + assert re.search(expected, repr(result)) + else: + assert result == expected + + +class TestLoadFunctions: + def test_mergeable(self, loadfunc_name): + _cube = cu(t=(0, 1), z=(0, 1)) + input_cubes = [cu(t=i_t, z=i_z) for i_t in (0, 1) for i_z in (0, 1)] + expected_results = { + "load": [_cube], + "load_cube": _cube, + "load_cubes": [_cube], + } + result = run_testcase(input_cubes, loadfunc_name) + check_result(input_cubes, loadfunc_name, result, expected_results) + + def test_multiple(self, loadfunc_name): + input_cubes = [cu(), cu(n="b")] + expected_results = { + "load": [cu(), cu(n="b")], + "load_cube": "ConstraintMismatchError.*failed to merge into a single cube", + "load_cubes": r"ConstraintMismatchError.*-> \d+ cubes", + } + result = run_testcase(input_cubes, loadfunc_name) + check_result(input_cubes, loadfunc_name, result, expected_results) + + def test_multiple_constrained(self, loadfunc_name): + cube, cube_b = cu(), cu(n="b") + input_cubes = [cube, cube_b] + constraint = "a" + expected_results = { + "load": [cube], + "load_cube": cube, + "load_cubes": [cube], + "load_raw": [cube], + } + result = run_testcase(input_cubes, loadfunc_name, constraints=constraint) + check_result(input_cubes, loadfunc_name, result, expected_results) + + def test_multiple_multi_constraints(self, loadfunc_name): + ca, cb, cc = cu(), cu(n="b"), cu(n="c") + input_cubes = [ca, cb, cc] + constraints = ["c", "a"] + expected_results = { + "load": [cc, ca], + "load_cube": "ValueError.*only a single constraint is allowed", + "load_cubes": [cc, ca], + "load_raw": [cc, ca], + } + result = run_testcase(input_cubes, loadfunc_name, constraints=constraints) + check_result(input_cubes, loadfunc_name, result, expected_results) + + def test_nonmergeable_part_missing(self, loadfunc_name): + c1, c2, c3, c4 = [cu(t=i_t, z=i_z) for i_t in (0, 1) for i_z in (0, 1)] + input_cubes = [c1, c2, c4] + + c124 = cu(t=(0, 1, 2)) + c124.remove_coord("time") # we now have an unnamed 
dimension + c124.remove_coord("height") # we now have an unnamed dimension + c124.add_aux_coord(AuxCoord([0.0, 1, 1], standard_name="height", units="m"), 0) + c124.add_aux_coord( + AuxCoord([0.0, 0, 1], standard_name="time", units=_time_unit), 0 + ) + expected_results = { + "load": [c124], + "load_cube": c124, + "load_cubes": [c124], + } + result = run_testcase(input_cubes, loadfunc_name) + check_result(input_cubes, loadfunc_name, result, expected_results) + + def test_nonmergeable_part_extra(self, loadfunc_name): + cube_all = cu(t=(0, 1), z=(0, 1)) + c1, c2, c3, c4 = [cu(t=i_t, z=i_z) for i_t in (0, 1) for i_z in (0, 1)] + c5 = cu(t=5) + input_cubes = [c1, c2, c5, c4, c3] # scramble order, just to test + expected_results = { + "load": [cube_all, c5], + "load_cube": "ConstraintMismatchError.*failed to merge into a single cube", + "load_cubes": "ConstraintMismatchError.*-> 2 cubes", + } + result = run_testcase(input_cubes, loadfunc_name) + check_result(input_cubes, loadfunc_name, result, expected_results) + + def test_constraint_overlap(self, loadfunc_name): + c1, c2, c3, c4, c5, c6 = (cu(z=ind) for ind in (1, 2, 3, 4, 5, 6)) + input_cubes = [c1, c2, c3, c4, c5, c6] + constraints = [ + iris.Constraint(height=[1, 2]), + iris.Constraint(height=[1, 4, 5]), + ] + c12 = cu(z=[1, 2]) + c145 = cu(z=[1, 4, 5]) + expected_results = { + "load": [c12, c145], + "load_cube": "ValueError.*only a single constraint is allowed", + "load_cubes": [c12, c145], # selected parts merge, as for load + "load_raw": [c1, c2, c1, c4, c5], # THIS VERY STRANGE BEHAVIOUR!! + } + result = run_testcase(input_cubes, loadfunc_name, constraints=constraints) + check_result(input_cubes, loadfunc_name, result, expected_results) + + def test_multiple_match(self, loadfunc_name): + c1 = cu(z=1) + c2 = cu(z=2) + c3 = cu(n="b", z=1) + c4 = cu(n="b", z=2) + input_cubes = [c1, c2, c3, c4] + constraints = [ + iris.Constraint("a") & iris.Constraint(height=1), + iris.Constraint(height=2), + ] + expected_results = { + "load": [c1, c2, c4], + "load_cube": "ValueError.*only a single constraint is allowed", + "load_cubes": r"ConstraintMismatchError.*-> \d+ cubes", + "load_raw": [c1, c2, c4], + } + result = run_testcase(input_cubes, loadfunc_name, constraints=constraints) + debug_result(result) + check_result(input_cubes, loadfunc_name, result, expected_results) From 8aadc27aaadcc795be366de820ae63986133a8be Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Tue, 22 Oct 2024 13:39:14 +0100 Subject: [PATCH 17/33] Integration tests for time-varying reference fields. --- lib/iris/__init__.py | 4 + .../varying_references/__init__.py | 12 + .../varying_references/test_realdata_load.py | 58 ++++ .../test_roundtrip_time_varying_references.py | 266 ++++++++++++++++++ .../testroundtrip_hybrid_factory_H_or_P.py | 205 -------------- 5 files changed, 340 insertions(+), 205 deletions(-) create mode 100644 lib/iris/tests/integration/varying_references/__init__.py create mode 100644 lib/iris/tests/integration/varying_references/test_realdata_load.py create mode 100644 lib/iris/tests/integration/varying_references/test_roundtrip_time_varying_references.py delete mode 100644 lib/iris/tests/testroundtrip_hybrid_factory_H_or_P.py diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py index 89bdc063a9..497aaed323 100644 --- a/lib/iris/__init__.py +++ b/lib/iris/__init__.py @@ -564,6 +564,10 @@ def combine_cubes(cubes, options=None, merge_require_unique=False): ------- list of :class:`~iris.cube.Cube` + .. 
+        The ``support_multiple_references`` keyword/property has no effect on the
+        :func:`combine_cubes` operation : it only takes effect during a load operation.
+
     """
     if not options:
         options = LOAD_POLICY.settings()

diff --git a/lib/iris/tests/integration/varying_references/__init__.py b/lib/iris/tests/integration/varying_references/__init__.py
new file mode 100644
index 0000000000..3c37f02242
--- /dev/null
+++ b/lib/iris/tests/integration/varying_references/__init__.py
@@ -0,0 +1,12 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the BSD license.
+# See LICENSE in the root of the repository for full licensing details.
+"""Integration tests for loading with varying references.
+
+Practically, this mostly means loading from fields-based file formats such as PP and
+GRIB, where hybrid vertical coordinates can have time-varying reference fields,
+e.g. hybrid height with time-varying orography, or hybrid-pressure with time-varying
+surface pressure.
+
+"""

diff --git a/lib/iris/tests/integration/varying_references/test_realdata_load.py b/lib/iris/tests/integration/varying_references/test_realdata_load.py
new file mode 100644
index 0000000000..edf2b00824
--- /dev/null
+++ b/lib/iris/tests/integration/varying_references/test_realdata_load.py
@@ -0,0 +1,58 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the BSD license.
+# See LICENSE in the root of the repository for full licensing details.
+"""Test loading PP data with time-varying orography."""
+
+import pytest
+
+import iris
+from iris import LOAD_POLICY, sample_data_path
+
+
+@pytest.fixture(params=["default", "recommended", "legacy"])
+def load_policy(request):
+    return request.param
+
+
+def test_load_pp_timevarying_orography(load_policy):
+    testdata_dirpath = sample_data_path("time_varying_hybrid_height", "*.pp")
+
+    with LOAD_POLICY.context(load_policy):
+        cubes = iris.load(testdata_dirpath)
+
+    n_cubes = len(cubes)
+    if load_policy == "legacy":
+        # This doesn't merge fully: get a phenomenon cube for each reference field
+        assert n_cubes == 4
+    else:
+        # Other load policies load with full merge, producing a 4D result.
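+        # (i.e. the time-varying orography reference merges into the phenomenon
+        # cube, so the derived "altitude" coordinate gains a time dimension -
+        # see the dimension checks below)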
+        assert n_cubes == 2
+    phenom_cube = cubes.extract_cube("x_wind")
+    ref_cube = cubes.extract_cube("surface_altitude")
+
+    cube_dims = [
+        phenom_cube.coord(dim_coords=True, dimensions=i_dim).name()
+        for i_dim in range(phenom_cube.ndim)
+    ]
+    assert cube_dims == ["model_level_number", "time", "latitude", "longitude"]
+
+    ref_coord = phenom_cube.coord("surface_altitude")
+    ref_coord_dims = [
+        phenom_cube.coord(dim_coords=True, dimensions=i_dim).name()
+        for i_dim in phenom_cube.coord_dims(ref_coord)
+    ]
+    assert ref_coord_dims == ["time", "latitude", "longitude"]
+
+    ref_cube_dims = [
+        ref_cube.coord(dim_coords=True, dimensions=i_dim).name()
+        for i_dim in range(ref_cube.ndim)
+    ]
+    assert ref_cube_dims == ["time", "latitude", "longitude"]
+
+    derived_coord = phenom_cube.coord("altitude")
+    derived_dims = [
+        phenom_cube.coord(dim_coords=True, dimensions=i_dim).name()
+        for i_dim in phenom_cube.coord_dims(derived_coord)
+    ]
+    assert derived_dims == ["model_level_number", "time", "latitude", "longitude"]

diff --git a/lib/iris/tests/integration/varying_references/test_roundtrip_time_varying_references.py b/lib/iris/tests/integration/varying_references/test_roundtrip_time_varying_references.py
new file mode 100644
index 0000000000..4d20a9de51
--- /dev/null
+++ b/lib/iris/tests/integration/varying_references/test_roundtrip_time_varying_references.py
@@ -0,0 +1,266 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the BSD license.
+# See LICENSE in the root of the repository for full licensing details.
+"""Code to save and re-load hybrid vertical coordinates with variable reference fields.
+
+Tests all combinations of:
+  * file format: PP, GRIB and NetCDF
+  * reference fields: static (for legacy reference) and time-dependent
+  * hybrid coordinate fields:
+    * hybrid-height levels with orography, and
+    * hybrid-pressure levels with surface-pressure
+"""
+
+import numpy as np
+import pytest
+
+import iris
+from iris import LOAD_POLICY
+from iris.aux_factory import HybridHeightFactory, HybridPressureFactory
+from iris.coord_systems import GeogCS
+from iris.coords import AuxCoord, DimCoord
+from iris.cube import Cube, CubeList
+from iris.fileformats.pp import EARTH_RADIUS, STASH
+
+# General test dimensions = (timepoints, levels, lats, lons)
+NT, NZ, NY, NX = (3, 4, 5, 6)
+
+
+def make_hybrid_z_testdata(
+    hybrid_zcoord_type="height",
+    make_reference_time_dependent=True,
+    include_reference_as_cube=False,
+):
+    """Construct a realistic synthetic data cube with a hybrid vertical coordinate.
+
+    Parameters
+    ----------
+    hybrid_zcoord_type : string, default "height"
+        either "height" or "pressure"
+    make_reference_time_dependent : bool, default True
+        if True, the reference coord has dims (t, y, x), otherwise just (y, x)
+    include_reference_as_cube : bool, default False
+        if True, the result includes a separate cube of the reference values.
+        (Because this must be separately passed to save for the fields-based formats.)
+
+    Returns
+    -------
+    cubes
+        A list containing a cube with (t, z, y, x) dims and the appropriate
+        aux-factory.
+        Optionally, if "include_reference_as_cube" is True, an extra cube
+        containing the reference data is also returned.
+ + """ + crs = GeogCS(EARTH_RADIUS) + z_dim, t_dim, y_dim, x_dim = 0, 1, 2, 3 + co_t = DimCoord( + np.arange(NT, dtype=np.float32), + standard_name="time", + units="days since 2000-01-01", + ) + co_z = DimCoord( + np.arange(1, NZ + 1, dtype=np.int32), + standard_name="model_level_number", + units=1, + ) + co_y = DimCoord( + np.linspace(0, 120.0, NY, dtype=np.float32), + standard_name="latitude", + units="degrees", + coord_system=crs, + ) + co_x = DimCoord( + np.linspace(-30.0, 50.0, NX, dtype=np.float32), + standard_name="longitude", + units="degrees", + coord_system=crs, + ) + cube = Cube( + np.zeros((NZ, NT, NY, NX), dtype=np.float32), + standard_name="air_temperature", + units="K", + dim_coords_and_dims=zip((co_t, co_z, co_y, co_x), (t_dim, z_dim, y_dim, x_dim)), + ) + + delta_vals = np.linspace(200.0, 600, NZ, dtype=np.float32) + if hybrid_zcoord_type == "pressure": + co_delta = DimCoord(delta_vals, long_name="delta", units="hPa") + elif hybrid_zcoord_type == "height": + co_delta = DimCoord(delta_vals, long_name="level_height", units="m") + else: + raise ValueError(f"Unknown hybrid coordinate type: {hybrid_zcoord_type}") + + sigma_vals = np.linspace(0.2, 0.8, NZ, dtype=np.float32) + co_sigma = DimCoord(sigma_vals, long_name="sigma", units=1) + + # Note: will not save as HH to PP without bounds on delta+sigma + for coord in (co_delta, co_sigma): + coord.guess_bounds() + cube.add_aux_coord(co_delta, z_dim) + cube.add_aux_coord(co_sigma, z_dim) + + refdata = np.arange(NT * NY * NX, dtype=np.float32) + refdata = 1000.0 + refdata.reshape(NT, NY, NX) + if hybrid_zcoord_type == "pressure": + co_ref = AuxCoord( + refdata, + standard_name="surface_air_pressure", + units="hPa", + attributes={"STASH": STASH(model=1, section=0, item=409)}, + ) + elif hybrid_zcoord_type == "height": + co_ref = AuxCoord( + refdata, + standard_name="surface_altitude", + units="m", + attributes={"STASH": STASH(model=1, section=0, item=33)}, + ) + else: + raise ValueError(f"Unknown hybrid type: {hybrid_zcoord_type}") + + ref_dims = (t_dim, y_dim, x_dim) + if not make_reference_time_dependent: + co_ref = co_ref[0] + ref_dims = ref_dims[1:] + + cube.add_aux_coord(co_ref, ref_dims) + if hybrid_zcoord_type == "pressure": + factory = HybridPressureFactory( + sigma=co_sigma, delta=co_delta, surface_air_pressure=co_ref + ) + elif hybrid_zcoord_type == "height": + factory = HybridHeightFactory(sigma=co_sigma, delta=co_delta, orography=co_ref) + else: + raise ValueError(f"Unknown hybrid type: {hybrid_zcoord_type}") + + cube.add_aux_factory(factory) + + cubes = CubeList([cube]) + + if include_reference_as_cube: + ref_dimcoords = [ + cube.coord(dim_coords=True, dimensions=cube_refdim) + for cube_refdim in cube.coord_dims(co_ref) + ] + reference_cube = Cube( + co_ref.points, + standard_name=co_ref.standard_name, + units=co_ref.units, + dim_coords_and_dims=[ + (ref_dimcoord, i_refdim) + for i_refdim, ref_dimcoord in enumerate(ref_dimcoords) + ], + attributes=co_ref.attributes, + ) + if not reference_cube.coords("time"): + # Add a dummy time coordinate to non-time-dependent reference cube + # - mostly because otherwise it cannot be saved to GRIB format + # NOTE: we give this a different nominal time to any of the data : when + # there is only one reference field, it's recorded time value should be + # **ignored** by the loader + reference_cube.add_aux_coord( + DimCoord( + np.array(0, dtype=np.float32), + standard_name="time", + units="days since 1900-01-01", + ) + ) + cubes.append(reference_cube) + + return cubes + + +def 
+    assert len(result_cubes) == 2
+    result_phenom = result_cubes.extract_cube("air_temperature")
+
+    if zcoord_type == "pressure":
+        ref_coord_name = ref_cube_name = "surface_air_pressure"
+        if file_extension == "grib2":
+            ref_cube_name = "air_pressure"
+    elif zcoord_type == "height":
+        ref_coord_name = ref_cube_name = "surface_altitude"
+    else:
+        raise ValueError(f"Unknown hybrid coordinate type: {zcoord_type}")
+
+    result_ref_cube = result_cubes.extract_cube(ref_cube_name)
+    result_ref_coord = result_phenom.coord(ref_coord_name)
+
+    # Check that the reference cube and the coord are equivalent
+    assert result_ref_coord.shape == result_ref_cube.shape
+    assert np.array_equal(result_ref_cube.data, result_ref_coord.points)
+    assert not result_ref_coord.bounds  # bounds are unused in our testcases
+
+    # Check the expected phenomenon shape
+    if time_dependence == "static" and file_extension in ("pp", "grib2"):
+        phenom_shape = (NT, NZ, NY, NX)
+    else:
+        phenom_shape = (NZ, NT, NY, NX)
+    assert result_phenom.shape == phenom_shape
+
+    # Check expected reference values against calculated values.
+    # This shows that the reference was correctly divided into 2D fields and
+    # reconstructed on load to match the original (for fields-based formats).
+    if time_dependence == "static":
+        ref_shape = (NY, NX)
+    else:
+        ref_shape = (NT, NY, NX)
+    ref_data = 1000.0 + np.arange(np.prod(ref_shape)).reshape(ref_shape)
+    if zcoord_type == "pressure" and file_extension == "grib2":
+        # values come back in Pa not hPa
+        ref_data *= 100.0
+    assert np.array_equal(ref_data, result_ref_cube.data)
+
+
+@pytest.fixture(params=["pp", "grib2", "nc"])
+def file_extension(request):
+    return request.param
+
+
+@pytest.fixture(params=["static", "time_varying"])
+def time_dependence(request):
+    return request.param
+
+
+@pytest.fixture(params=["height", "pressure"])
+def zcoord_type(request):
+    return request.param
+
+
+@pytest.fixture(params=["default_policy", "recommended_policy", "legacy_policy"])
+def load_policy(request):
+    return request.param
+
+
+def test_roundtrip(file_extension, time_dependence, zcoord_type, load_policy, tmp_path):
+    if (
+        load_policy == "legacy_policy"
+        and time_dependence == "time_varying"
+        and file_extension in ("pp", "grib2")
+    ):
+        pytest.skip("Testcase not supported in 'legacy' mode.")
+
+    filepath = tmp_path / f"tmp.{file_extension}"
+    include_ref = file_extension in ("grib2", "pp")
+    is_time_dependent = time_dependence == "time_varying"
+    data = make_hybrid_z_testdata(
+        hybrid_zcoord_type=zcoord_type,
+        include_reference_as_cube=include_ref,
+        make_reference_time_dependent=is_time_dependent,
+    )
+
+    iris.save(data, filepath)
+
+    policy_name = load_policy.split("_")[0]
+    with LOAD_POLICY.context(policy_name):
+        # NOTE: "legacy" mode would fail for some of these cases - those are
+        # skipped above
+        readback = iris.load(filepath)
+
+    check_expected(
+        readback,
+        file_extension=file_extension,
+        time_dependence=time_dependence,
+        zcoord_type=zcoord_type,
+    )

diff --git a/lib/iris/tests/testroundtrip_hybrid_factory_H_or_P.py b/lib/iris/tests/testroundtrip_hybrid_factory_H_or_P.py
deleted file mode 100644
index 9b88ffc489..0000000000
--- a/lib/iris/tests/testroundtrip_hybrid_factory_H_or_P.py
+++ /dev/null
@@ -1,205 +0,0 @@
-# Copyright Iris contributors
-#
-# This file is part of Iris and is released under the BSD license.
-# See LICENSE in the root of the repository for full licensing details.
-"""Temporary exercising code for multiple-factory-reference changes.""" - -import numpy as np - -import iris -from iris.aux_factory import HybridHeightFactory, HybridPressureFactory -from iris.coord_systems import GeogCS -from iris.coords import AuxCoord, DimCoord -from iris.cube import Cube, CubeList -from iris.fileformats.pp import EARTH_RADIUS, STASH - - -def make_hybrid_z_testdata( - nt=2, - nz=3, - ny=4, - nx=3, - hybrid_zcoord_type="height", - make_reference_time_dependent=True, - include_reference_as_cube=False, -): - crs = GeogCS(EARTH_RADIUS) - t_dim, z_dim, y_dim, x_dim = 0, 1, 2, 3 - co_t = DimCoord( - np.arange(nt, dtype=np.float32), - standard_name="time", - units="days since 2000-01-01", - ) - co_z = DimCoord( - np.arange(1, nz + 1, dtype=np.int32), - standard_name="model_level_number", - units=1, - ) - co_y = DimCoord( - np.linspace(0, 120.0, ny, dtype=np.float32), - standard_name="latitude", - units="degrees", - coord_system=crs, - ) - co_x = DimCoord( - np.linspace(-30.0, 50.0, nx, dtype=np.float32), - standard_name="longitude", - units="degrees", - coord_system=crs, - ) - cube = Cube( - np.zeros((nt, nz, ny, nx), dtype=np.float32), - standard_name="air_temperature", - units="K", - dim_coords_and_dims=zip((co_t, co_z, co_y, co_x), (t_dim, z_dim, y_dim, x_dim)), - ) - - delta_vals = np.linspace(200.0, 600, nz, dtype=np.float32) - if hybrid_zcoord_type == "pressure": - co_delta = DimCoord(delta_vals, long_name="delta", units="hPa") - elif hybrid_zcoord_type == "height": - co_delta = DimCoord(delta_vals, long_name="level_height", units="m") - else: - raise ValueError(f"Unknown hybrid type: {hybrid_zcoord_type}") - - sigma_vals = np.linspace(0.2, 0.8, nz, dtype=np.float32) - co_sigma = DimCoord(sigma_vals, long_name="sigma", units=1) - - # Note: will not save as HH to PP without bounds on delta+sigma - for coord in (co_delta, co_sigma): - coord.guess_bounds() - cube.add_aux_coord(co_delta, z_dim) - cube.add_aux_coord(co_sigma, z_dim) - - refdata = np.arange(nt * ny * nx, dtype=np.float32) - refdata = 1000.0 + refdata.reshape(nt, ny, nx) - if hybrid_zcoord_type == "pressure": - co_ref = AuxCoord( - refdata, - standard_name="surface_air_pressure", - units="hPa", - attributes={"STASH": STASH(model=1, section=0, item=409)}, - ) - elif hybrid_zcoord_type == "height": - co_ref = AuxCoord( - refdata, - standard_name="surface_altitude", - units="m", - attributes={"STASH": STASH(model=1, section=0, item=33)}, - ) - else: - raise ValueError(f"Unknown hybrid type: {hybrid_zcoord_type}") - - ref_dims = (t_dim, y_dim, x_dim) - if not make_reference_time_dependent: - co_ref = co_ref[0] - ref_dims = ref_dims[1:] - - cube.add_aux_coord(co_ref, ref_dims) - if hybrid_zcoord_type == "pressure": - factory = HybridPressureFactory( - sigma=co_sigma, delta=co_delta, surface_air_pressure=co_ref - ) - elif hybrid_zcoord_type == "height": - factory = HybridHeightFactory(sigma=co_sigma, delta=co_delta, orography=co_ref) - else: - raise ValueError(f"Unknown hybrid type: {hybrid_zcoord_type}") - - cube.add_aux_factory(factory) - - cubes = CubeList([cube]) - - if include_reference_as_cube: - ref_dimcoords = [ - cube.coord(dim_coords=True, dimensions=cube_refdim) - for cube_refdim in cube.coord_dims(co_ref) - ] - reference_cube = Cube( - co_ref.points, - standard_name=co_ref.standard_name, - units=co_ref.units, - dim_coords_and_dims=[ - (ref_dimcoord, i_refdim) - for i_refdim, ref_dimcoord in enumerate(ref_dimcoords) - ], - attributes=co_ref.attributes, - ) - if not reference_cube.coords("time"): - # Add a 
dummy time coordinate to non-time-dependent reference cube - # - mostly because otherwise it cannot be saved to GRIB format - # NOTE: we give this a different nominal time to any of the data : when - # there is only one reference field, it's recorded time value should be - # **ignored** by the loader - reference_cube.add_aux_coord( - DimCoord( - np.array(0, dtype=np.float32), - standard_name="time", - units="days since 1900-01-01", - ) - ) - cubes.append(reference_cube) - - return cubes - - -# _HYBRID_ZCOORD_TYPE = "height" -_HYBRID_ZCOORD_TYPE = "pressure" - -# _FILENAME = "tmp.nc" # Naturally, this "just works" -_FILENAME = "tmp.pp" -# _FILENAME = "tmp.grib2" - -_TEST_TIME_DEPENDENT = True -# _TEST_TIME_DEPENDENT = False - - -def check_create(): - global _FILENAME, _HYBRID_ZCOORD_TYPE, _TEST_TIME_DEPENDENT - file_ext = _FILENAME.split(".")[-1] - include_ref = file_ext in ("grib2", "pp") - - data = make_hybrid_z_testdata( - hybrid_zcoord_type=_HYBRID_ZCOORD_TYPE, - include_reference_as_cube=include_ref, - make_reference_time_dependent=_TEST_TIME_DEPENDENT, - ) - - print() - print(f"Cubes saving to {_FILENAME}:") - print(data) - for cube in data: - print(cube) - - _EXTRA_COORDS_DEBUG = False - if _EXTRA_COORDS_DEBUG: - (datacube,) = [cube for cube in data if "surface" not in cube.name()] - for name in ("level_height", "sigma", "surface_altitude"): - print(f"Coord({name}):") - print(datacube.coord(name)) - print("Ref cube:") - print(data.extract_cube("surface_altitude")) - - iris.save(data, _FILENAME) - readback = iris.load(_FILENAME) - # Apply extra concat : as "raw" cubes with a time dimension won't merge - readback = readback.concatenate() - print() - print("Readback cubes:") - print(readback) - for cube in readback: - print(cube) - - -def test_roundtrip(): - print("Check with Iris from : ", iris.__file__) - from iris import LOAD_POLICY - - # print(LOAD_POLICY) - # LOAD_POLICY.repeat_until_unchanged = 4 - with LOAD_POLICY.context("default"): - # print(LOAD_POLICY) - # print("merge/concat = ", LOAD_POLICY.merge_concat_sequence) - check_create() - - # print(LOAD_POLICY) - # print("now legacy mode ? ", LOAD_POLICY.settings() == LOAD_POLICY.SETTINGS["legacy"]) From 98b8da6a1204b0623dedc8781f9bab590717ae36 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Tue, 22 Oct 2024 13:54:43 +0100 Subject: [PATCH 18/33] Fix test result. 
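The "extra part" testcase now loads to a single combined result : "time" and
"height" become 1-D auxiliary coordinates attached to an anonymous dimension,
following the (scrambled) input order, where previously the expected "load"
result was a partial merge plus a leftover cube.  A rough sketch of the new
expected cube, using the "cu" helper from the test module (as in the diff
below) :

    cx = cu(t=range(5))        # five points along a single dimension
    cx.remove_coord("time")    # removing both dim-coords leaves the
    cx.remove_coord("height")  # dimension anonymous
    # ... "height" and "time" are then re-attached as AuxCoords on that
    # dimension, with values in input order.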
--- .../unit/fileformats/test_load_functions.py | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/test_load_functions.py b/lib/iris/tests/unit/fileformats/test_load_functions.py index 080760c4d5..3c3d361080 100644 --- a/lib/iris/tests/unit/fileformats/test_load_functions.py +++ b/lib/iris/tests/unit/fileformats/test_load_functions.py @@ -171,14 +171,23 @@ def test_nonmergeable_part_missing(self, loadfunc_name): check_result(input_cubes, loadfunc_name, result, expected_results) def test_nonmergeable_part_extra(self, loadfunc_name): - cube_all = cu(t=(0, 1), z=(0, 1)) c1, c2, c3, c4 = [cu(t=i_t, z=i_z) for i_t in (0, 1) for i_z in (0, 1)] c5 = cu(t=5) input_cubes = [c1, c2, c5, c4, c3] # scramble order, just to test + + cx = cu(t=range(5)) + cx.remove_coord("time") # we now have an unnamed dimension + cx.remove_coord("height") # we now have an unnamed dimension + cx.add_aux_coord( + AuxCoord([0.0, 1, 0, 1, 0], standard_name="height", units="m"), 0 + ) + cx.add_aux_coord( + AuxCoord([0.0, 0, 5, 1, 1], standard_name="time", units=_time_unit), 0 + ) expected_results = { - "load": [cube_all, c5], - "load_cube": "ConstraintMismatchError.*failed to merge into a single cube", - "load_cubes": "ConstraintMismatchError.*-> 2 cubes", + "load": [cx], + "load_cube": cx, + "load_cubes": [cx], } result = run_testcase(input_cubes, loadfunc_name) check_result(input_cubes, loadfunc_name, result, expected_results) From a62b452c0f9a0a3d3f72d93bc3a1ff196920dda6 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Tue, 22 Oct 2024 14:05:31 +0100 Subject: [PATCH 19/33] Make grib test optional. --- .../test_roundtrip_time_varying_references.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/lib/iris/tests/integration/varying_references/test_roundtrip_time_varying_references.py b/lib/iris/tests/integration/varying_references/test_roundtrip_time_varying_references.py index 4d20a9de51..3398013ff1 100644 --- a/lib/iris/tests/integration/varying_references/test_roundtrip_time_varying_references.py +++ b/lib/iris/tests/integration/varying_references/test_roundtrip_time_varying_references.py @@ -23,6 +23,11 @@ from iris.cube import Cube, CubeList from iris.fileformats.pp import EARTH_RADIUS, STASH +try: + import iris_grib +except ImportError: + iris_grib = None + # General test dimensions = (timepoints, levels, lats, lons) NT, NZ, NY, NX = (3, 4, 5, 6) @@ -214,7 +219,12 @@ def check_expected(result_cubes, file_extension, time_dependence, zcoord_type): assert np.array_equal(ref_data, result_ref_cube.data) -@pytest.fixture(params=["pp", "grib2", "nc"]) +_file_formats = ["pp", "nc"] +if iris_grib: + _file_formats += ["grib2"] + + +@pytest.fixture(params=_file_formats) def file_extension(request): return request.param From 4e265f2400bde57b8fe60578351ea27326c67e89 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 23 Oct 2024 20:06:30 +0100 Subject: [PATCH 20/33] Review changes --- docs/src/further_topics/controlling_merge.rst | 8 ++++++-- lib/iris/__init__.py | 19 +++++++++++++++---- lib/iris/cube.py | 2 -- 3 files changed, 21 insertions(+), 8 deletions(-) diff --git a/docs/src/further_topics/controlling_merge.rst b/docs/src/further_topics/controlling_merge.rst index 3f788a8493..6f223048ea 100644 --- a/docs/src/further_topics/controlling_merge.rst +++ b/docs/src/further_topics/controlling_merge.rst @@ -200,5 +200,9 @@ Once merged, we can now concatenate all these cubes into a single result cube, w source 'Data from Met 
Office Unified Model' um_version '12.1' -.. todo:: - Mention the work done in #6168 +See Also +-------- +* :func:`iris.combine_cubes` can perform similar operations automatically +* :data:`iris.LOAD_POLICY` controls the application of :func:`~iris.combine_cubes` + during the load operations, i.e. :func:`~iris.load`, :func:`~iris.load_cube` and + :func:`~iris.load_cubes`. diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py index 497aaed323..5255c5cc46 100644 --- a/lib/iris/__init__.py +++ b/lib/iris/__init__.py @@ -378,7 +378,9 @@ class LoadPolicy(threading.local): of matching dimension and scalar coordinates. This may be supported at some later date, but for now is not possible without specific user actions. - TODO: reference the newer advice on "new_axis" usage. + .. Note :: + + See also : :ref:`controlling_merge`. Examples -------- @@ -519,11 +521,20 @@ def context(self, settings=None, **kwargs): import iris from iris import LOAD_POLICY, sample_data_path - path = sample_data_path("hybrid_height.nc") - >>> with LOAD_POLICY.context("comprehensive"): + >>> path = sample_data_path("time_varying_hybrid_height", "*.pp") + >>> with LOAD_POLICY.context("legacy"): ... cubes = iris.load(path) - + >>> print(cubes) + 0: surface_altitude / (m) (time: 15; latitude: 144; longitude: 192) + 1: x_wind / (m s-1) (time: 2; model_level_number: 5; latitude: 144; longitude: 192) + 2: x_wind / (m s-1) (time: 12; model_level_number: 5; latitude: 144; longitude: 192) + 3: x_wind / (m s-1) (model_level_number: 5; latitude: 144; longitude: 192) + + >>> with LOAD_POLICY.context("recommended"): + ... cube = iris.load_cube(path, "x_wind") + >>> cube + """ # Save the current state saved_settings = self.settings() diff --git a/lib/iris/cube.py b/lib/iris/cube.py index 389ef5f981..30ac3432b7 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -64,8 +64,6 @@ class _CubeFilter: """A constraint, paired with a list of cubes matching that constraint.""" def __init__(self, constraint, cubes=None): - from iris.cube import CubeList - self.constraint = constraint if cubes is None: cubes = CubeList() From 3bf6cc68c9500450dfa3f6d46ef0043449cb2284 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Thu, 24 Oct 2024 10:30:59 +0100 Subject: [PATCH 21/33] Reinstate skipped tests. --- lib/iris/tests/test_plot.py | 2 -- lib/iris/tests/test_quickplot.py | 2 -- 2 files changed, 4 deletions(-) diff --git a/lib/iris/tests/test_plot.py b/lib/iris/tests/test_plot.py index 916e205dd8..50773f0d24 100644 --- a/lib/iris/tests/test_plot.py +++ b/lib/iris/tests/test_plot.py @@ -2,7 +2,6 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. 
-import pytest # import iris tests first so that some things can be initialised before # importing anything else @@ -817,7 +816,6 @@ def __repr__(self): @tests.skip_data @tests.skip_plot -@pytest.mark.skip class TestPlotCoordinatesGiven(tests.GraphicsTest): def setUp(self): super().setUp() diff --git a/lib/iris/tests/test_quickplot.py b/lib/iris/tests/test_quickplot.py index 598ae760a4..8469aa0776 100644 --- a/lib/iris/tests/test_quickplot.py +++ b/lib/iris/tests/test_quickplot.py @@ -8,7 +8,6 @@ import iris.tests as tests # isort:skip import numpy as np -import pytest import iris import iris.tests.test_plot as test_plot @@ -105,7 +104,6 @@ def setUp(self): @tests.skip_data @tests.skip_plot -@pytest.mark.skip class TestLabels(tests.GraphicsTest): def setUp(self): super().setUp() From cfa19740ba27dc5d3717325c67dcf8996f87139e Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Thu, 24 Oct 2024 11:52:55 +0100 Subject: [PATCH 22/33] Make combine_cubes work with plain lists; Make 'combine_cubes' private API. --- lib/iris/__init__.py | 28 ++++++++++++++++++---------- 1 file changed, 18 insertions(+), 10 deletions(-) diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py index 5255c5cc46..27689511f2 100644 --- a/lib/iris/__init__.py +++ b/lib/iris/__init__.py @@ -124,7 +124,6 @@ def callback(cube, field, filename): "LOAD_POLICY", "LoadPolicy", "NameConstraint", - "combine_cubes", "load", "load_cube", "load_cubes", @@ -345,7 +344,7 @@ class LoadPolicy(threading.local): * ``merge_concat_sequence`` = "m" / "c" / "cm" / "mc" Specifies whether to merge, or concatenate, or both in either order. - This is the :func:`~iris.combine_cubes` operation to loaded data. + This is the "combine" operation which is applied to loaded data. * ``repeat_until_unchanged`` = True / False When enabled, the configured "combine" operation will be repeated until the @@ -555,7 +554,7 @@ def context(self, settings=None, **kwargs): # TODO: resolve tests as needed, to pass with "default". -def combine_cubes(cubes, options=None, merge_require_unique=False): +def _combine_cubes(cubes, options, merge_require_unique): """Combine cubes as for load, according to "loading policy" options. Applies :meth:`~iris.cube.CubeList.merge`/:meth:`~iris.cube.CubeList.concatenate` @@ -565,7 +564,7 @@ def combine_cubes(cubes, options=None, merge_require_unique=False): ---------- cubes : list of :class:`~iris.cube.Cube` A list of cubes to combine. - options : dict or str + options : dict Settings, as described for :meth:`iris.LOAD_POLICY.set`. Defaults to current :meth:`iris.LOAD_POLICY.settings`. merge_require_unique : bool @@ -573,15 +572,24 @@ def combine_cubes(cubes, options=None, merge_require_unique=False): Returns ------- - list of :class:`~iris.cube.Cube` + :class:`~iris.cube.CubeList` .. Note:: The ``support_multiple_references`` keyword/property has no effect on the - :func:`combine_cubes` operation : it only takes effect during a load operation. + :func:`_combine_cubes` operation : it only takes effect during a load operation. + + Notes + ----- + TODO: make this public API in future. + At that point, change the API to support (options=None, **kwargs) + add testing of + those modes (notably arg type = None / str / dict). 
""" - if not options: - options = LOAD_POLICY.settings() + from iris.cube import CubeList + + if not isinstance(cubes, CubeList): + cubes = CubeList(cubes) + while True: n_original_cubes = len(cubes) sequence = options["merge_concat_sequence"] @@ -604,7 +612,7 @@ def combine_cubes(cubes, options=None, merge_require_unique=False): def _combine_load_cubes(cubes, merge_require_unique=False): - # A special version to call combine_cubes while also implementing the + # A special version to call _combine_cubes while also implementing the # _MULTIREF_DETECTION behaviour options = LOAD_POLICY.settings() if ( @@ -617,7 +625,7 @@ def _combine_load_cubes(cubes, merge_require_unique=False): if _MULTIREF_DETECTION.found_multiple_refs: options["merge_concat_sequence"] += "c" - return combine_cubes(cubes, options, merge_require_unique=merge_require_unique) + return _combine_cubes(cubes, options, merge_require_unique=merge_require_unique) def load(uris, constraints=None, callback=None, policy=None): From da85529d29443caa319fdad65eb0c760a80d6d29 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Thu, 24 Oct 2024 12:44:02 +0100 Subject: [PATCH 23/33] Add tests for combine_cubes. --- lib/iris/tests/unit/test_combine_cubes.py | 89 +++++++++++++++++++++++ 1 file changed, 89 insertions(+) create mode 100644 lib/iris/tests/unit/test_combine_cubes.py diff --git a/lib/iris/tests/unit/test_combine_cubes.py b/lib/iris/tests/unit/test_combine_cubes.py new file mode 100644 index 0000000000..e159582497 --- /dev/null +++ b/lib/iris/tests/unit/test_combine_cubes.py @@ -0,0 +1,89 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. +"""Unit tests for the :func:`iris.io.loading.combine_cubes` function. + +Note: These tests are fairly extensive to cover functional uses within the loading +operations. +TODO: when function is public API, extend testing to the extended API options, +i.e. different types + defaulting of the 'options' arg, and **kwargs support. +""" + +import pytest + +from iris import LoadPolicy, _combine_cubes +from iris.tests.unit.fileformats.test_load_functions import cu + + +@pytest.fixture(params=list(LoadPolicy.SETTINGS.keys())) +def options(request): + # N.B. "request" is a standard PyTest fixture + return request.param # Return the name of the attribute to test. + + +# Interface to convert settings-name / kwargs into an options dict, +# TODO: remove this wrapper when the API of "combine_cubes" is opened up. +def combine_cubes(cubes, settings_name="default", **kwargs): + options = LoadPolicy.SETTINGS[settings_name] + options.update(kwargs) + return _combine_cubes(cubes, options, merge_require_unique=False) + + +class Test: + def test_mergeable(self, options): + c1, c2 = cu(t=1), cu(t=2) + c12 = cu(t=(1, 2)) + input_cubes = [c1, c2] + result = combine_cubes(input_cubes, options) + expected = [c12] # same in all cases + assert result == expected + + def test_catable(self, options): + c1, c2 = cu(t=(1, 2)), cu(t=(3, 4)) + c12 = cu(t=(1, 2, 3, 4)) + input_cubes = [c1, c2] + result = combine_cubes(input_cubes, options) + expected = { + "legacy": [c1, c2], # standard options can't do this .. + "default": [c1, c2], + "recommended": [c12], # .. 
+            "comprehensive": [c12],
+        }[options]
+        assert result == expected
+
+    def test_cat_enables_merge(self, options):
+        c1, c2 = cu(t=(1, 2), z=1), cu(t=(3, 4, 5), z=1)
+        c3, c4 = cu(t=(1, 2, 3), z=2), cu(t=(4, 5), z=2)
+        c1234 = cu(t=(1, 2, 3, 4, 5), z=(1, 2))
+        c12 = cu(t=(1, 2, 3, 4, 5), z=1)
+        c34 = cu(t=(1, 2, 3, 4, 5), z=2)
+        input_cubes = [c1, c2, c3, c4]
+        result = combine_cubes(input_cubes, options)
+        expected = {
+            "legacy": input_cubes,
+            "default": input_cubes,
+            "recommended": [c12, c34],  # standard "mc" sequence can't do this one..
+            "comprehensive": [c1234],  # .. but works if you repeat
+        }[options]
+        assert result == expected
+
+    def test_cat_enables_merge__custom(self):
+        c1, c2 = cu(t=(1, 2), z=1), cu(t=(3, 4, 5), z=1)
+        c3, c4 = cu(t=(1, 2, 3), z=2), cu(t=(4, 5), z=2)
+        c1234 = cu(t=(1, 2, 3, 4, 5), z=(1, 2))
+        input_cubes = [c1, c2, c3, c4]
+        result = combine_cubes(input_cubes, merge_concat_sequence="cm")
+        assert result == [c1234]
+
+    def test_nocombine_overlapping(self, options):
+        c1, c2 = cu(t=(1, 3)), cu(t=(2, 4))
+        input_cubes = [c1, c2]
+        result = combine_cubes(input_cubes, options)
+        assert result == input_cubes  # same in all cases : can't do this
+
+    def test_nocombine_dim_scalar(self, options):
+        c1, c2 = cu(t=(1,)), cu(t=2)
+        input_cubes = [c1, c2]
+        result = combine_cubes(input_cubes, options)
+        assert result == input_cubes  # can't do this at present

From 80efa2dac51cf5ccef8d3950fcad7c2e97d96ce2 Mon Sep 17 00:00:00 2001
From: Patrick Peglar
Date: Thu, 24 Oct 2024 14:21:56 +0100
Subject: [PATCH 24/33] Add tests for LoadPolicy API.

---
 lib/iris/__init__.py | 6 +-
 lib/iris/tests/unit/test_LoadPolicy.py | 144 +++++++++++++++++++++++++
 2 files changed, 147 insertions(+), 3 deletions(-)
 create mode 100644 lib/iris/tests/unit/test_LoadPolicy.py

diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py
index 27689511f2..aef02ee88a 100644
--- a/lib/iris/__init__.py
+++ b/lib/iris/__init__.py
@@ -477,10 +477,10 @@ def set(self, options: str | dict | None = None, **kwargs):
             options = self.SETTINGS[options]
         elif not isinstance(options, Mapping):
             msg = (
-                f"Invalid arg options='{options!r}' : "
-                f"must be a dict, or one of {self.SETTINGS.keys()}"
+                f"Invalid arg options={options!r} : "
+                f"must be a dict, or one of {tuple(self.SETTINGS.keys())}"
             )
-            raise ValueError(msg)
+            raise TypeError(msg)
 
         # Override any options with keywords
         options.update(**kwargs)

diff --git a/lib/iris/tests/unit/test_LoadPolicy.py b/lib/iris/tests/unit/test_LoadPolicy.py
new file mode 100644
index 0000000000..8772b089c1
--- /dev/null
+++ b/lib/iris/tests/unit/test_LoadPolicy.py
@@ -0,0 +1,144 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the BSD license.
+# See LICENSE in the root of the repository for full licensing details.
+"""Unit tests for the :class:`iris.LoadPolicy` class."""
+
+from unittest import mock
+
+import pytest
+
+from iris import LoadPolicy
+
+
+class TestInit:
+    def test_init_empty(self):
+        # Check how a bare init works
+        options = LoadPolicy()
+        assert options.settings() == LoadPolicy.SETTINGS["default"]
+
+    def test_init_args_kwargs(self):
+        # Check that init with args, kwargs equates to a pair of set() calls.
+ with mock.patch("iris.LoadPolicy.set") as mock_set: + test_option = mock.sentinel.option + test_kwargs = {"junk": "invalid"} + LoadPolicy(options=test_option, **test_kwargs) + assert mock_set.call_args_list == [ + mock.call("default"), + mock.call(test_option, **test_kwargs), + ] + + +class Test_settings: + """The .settings() returns a dict full of the settings.""" + + def test_settings(self): + options = LoadPolicy() + settings = options.settings() + assert isinstance(settings, dict) + assert tuple(settings.keys()) == LoadPolicy.OPTION_KEYS + for key in LoadPolicy.OPTION_KEYS: + assert settings[key] == getattr(options, key) + + +class Test_set: + """Check the .set(arg, **kwargs) behaviour.""" + + def test_empty(self): + options = LoadPolicy() + orig_settings = options.settings() + options.set() + assert options.settings() == orig_settings + + def test_arg_dict(self): + options = LoadPolicy() + assert options.settings()["merge_concat_sequence"] == "m" + assert options.settings()["repeat_until_unchanged"] is False + options.set({"merge_concat_sequence": "c", "repeat_until_unchanged": True}) + assert options.settings()["merge_concat_sequence"] == "c" + assert options.settings()["repeat_until_unchanged"] is True + + def test_arg_string(self): + options = LoadPolicy() + assert options.settings()["merge_concat_sequence"] == "m" + assert options.settings()["repeat_until_unchanged"] is False + options.set("comprehensive") + assert options.settings()["merge_concat_sequence"] == "mc" + assert options.settings()["repeat_until_unchanged"] is True + + def test_arg_bad_dict(self): + options = LoadPolicy() + expected = "Unknown options.*'junk'.* : valid options are" + with pytest.raises(ValueError, match=expected): + options.set({"junk": "invalid"}) + + def test_arg_bad_string(self): + options = LoadPolicy() + expected = "Invalid arg options='unknown' : must be a dict, or one of" + with pytest.raises(TypeError, match=expected): + options.set("unknown") + + def test_arg_bad_type(self): + options = LoadPolicy() + expected = "must be a dict, or one of" + with pytest.raises(TypeError, match=expected): + options.set((1, 2, 3)) + + def test_kwargs(self): + options = LoadPolicy() + assert options.settings()["merge_concat_sequence"] == "m" + assert options.settings()["repeat_until_unchanged"] is False + options.set(merge_concat_sequence="c", repeat_until_unchanged=True) + assert options.settings()["merge_concat_sequence"] == "c" + assert options.settings()["repeat_until_unchanged"] is True + + def test_arg_kwargs(self): + # Show that kwargs override arg + options = LoadPolicy( + support_multiple_references=False, + merge_concat_sequence="", + repeat_until_unchanged=False, + ) + options.set( + dict(merge_concat_sequence="c", repeat_until_unchanged=True), + merge_concat_sequence="mc", + ) + assert options.merge_concat_sequence == "mc" + assert options.repeat_until_unchanged is True + + def test_bad_kwarg(self): + options = LoadPolicy() + expected = "Unknown options.*'junk'.* : valid options are" + with pytest.raises(ValueError, match=expected): + options.set({"junk": "invalid"}) + + +class Test_AttributeAccess: + """Check operation of direct property access (with ".").""" + + def test_getattr(self): + options = LoadPolicy(merge_concat_sequence="m") + assert options.merge_concat_sequence == "m" + + def test_getattr_badname(self): + options = LoadPolicy() + expected = "'LoadPolicy' object has no attribute 'unknown'" + with pytest.raises(AttributeError, match=expected): + options.unknown + + def 
test_setattr(self): + options = LoadPolicy(merge_concat_sequence="m") + options.merge_concat_sequence = "mc" + assert options.merge_concat_sequence == "mc" + + def test_setattr_badname(self): + options = LoadPolicy() + expected = "LoadPolicy object has no property 'anyold_property'" + with pytest.raises(KeyError, match=expected): + options.anyold_property = "x" + + def test_setattr_badvalue(self): + options = LoadPolicy() + expected = "'mcm' is not a valid.*merge_concat_sequence : must be one of" + with pytest.raises(ValueError, match=expected): + options.merge_concat_sequence = "mcm" From cb1ddff49e042705c221476137b81ee88033de9a Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Thu, 24 Oct 2024 15:57:25 +0100 Subject: [PATCH 25/33] Add special split-netcdf roundtrip testing. --- .../test_roundtrip_time_varying_references.py | 67 ++++++++++++++++++- 1 file changed, 66 insertions(+), 1 deletion(-) diff --git a/lib/iris/tests/integration/varying_references/test_roundtrip_time_varying_references.py b/lib/iris/tests/integration/varying_references/test_roundtrip_time_varying_references.py index 3398013ff1..0ad4b5a941 100644 --- a/lib/iris/tests/integration/varying_references/test_roundtrip_time_varying_references.py +++ b/lib/iris/tests/integration/varying_references/test_roundtrip_time_varying_references.py @@ -22,6 +22,7 @@ from iris.coords import AuxCoord, DimCoord from iris.cube import Cube, CubeList from iris.fileformats.pp import EARTH_RADIUS, STASH +from iris.util import new_axis try: import iris_grib @@ -239,7 +240,7 @@ def zcoord_type(request): return request.param -@pytest.fixture(params=["default_policy", "recommended_policy", "legacy_policy"]) +@pytest.fixture(params=[f"{name}_policy" for name in LOAD_POLICY.SETTINGS]) def load_policy(request): return request.param @@ -274,3 +275,67 @@ def test_roundtrip(file_extension, time_dependence, zcoord_type, load_policy, tm time_dependence=time_dependence, zcoord_type=zcoord_type, ) + + +def test_split_netcdf_roundtrip(zcoord_type, load_policy, tmp_path): + # NetCDF special test : split the data into 2D slices (like "fields"), + # and save each to a different file. + policy_name = load_policy.split("_")[0] + reference_surface_name = { + "pressure": "surface_air_pressure", + "height": "surface_altitude", + }[zcoord_type] + + data = make_hybrid_z_testdata( + hybrid_zcoord_type=zcoord_type, + include_reference_as_cube=False, + make_reference_time_dependent=True, + ) + + # There is just 1 cube + (data,) = data # just 1 cube for netcdf, no separate reference cube + # split it into 2D YX "field" cubes + field_cubes = list(data.slices(("latitude", "longitude"))) + # Reinstate a length-1 "time" dimension in each cube. + field_cubes = [ + new_axis(field_cube, "time", expand_extras=[reference_surface_name]) + for field_cube in field_cubes + ] + # Save to 1 file per 'field_cube' + result_paths = [ + tmp_path / f"field_{i_field:02d}.nc" for i_field in range(len(field_cubes)) + ] + for field_cube, path in zip(field_cubes, result_paths): + iris.save(field_cube, path) + + # load back with the chosen policy. 
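+    # (NB each per-field file was saved with its own copy of the reference
+    # surface field, so how far the results re-combine depends on the policy
+    # - see the cube counts asserted below)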
+    with LOAD_POLICY.context(policy_name):
+        readback = iris.load(result_paths)
+
+    n_cubes = len(readback)
+    n_datacubes = len(readback.extract("air_temperature"))
+    if policy_name == "legacy":
+        assert (n_cubes, n_datacubes) == (15, 3)
+    elif policy_name == "default":
+        assert (n_cubes, n_datacubes) == (15, 3)
+    elif policy_name == "recommended":
+        assert (n_cubes, n_datacubes) == (5, 1)
+    elif policy_name == "comprehensive":
+        assert (n_cubes, n_datacubes) == (5, 1)
+    else:
+        raise ValueError(f"unknown policy {policy_name!r}")
+
+    if n_datacubes == 1:
+        check_expected(
+            CubeList(
+                [
+                    readback.extract_cube("air_temperature"),
+                    # include only 1 of N (identical) reference cubes
+                    # (all this would be easier if we could rely on load-cube ordering!)
+                    readback.extract(reference_surface_name)[0],
+                ]
+            ),
+            file_extension="nc",
+            time_dependence="time_varying",
+            zcoord_type=zcoord_type,
+        )

From cb7fc0623b2576c69896e112ec0c1e4a4e9c8ae1 Mon Sep 17 00:00:00 2001
From: Patrick Peglar
Date: Thu, 24 Oct 2024 16:27:46 +0100
Subject: [PATCH 26/33] Removed unwanted 'policy' keyword from iris.load.

---
 lib/iris/__init__.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py
index aef02ee88a..fb92981250 100644
--- a/lib/iris/__init__.py
+++ b/lib/iris/__init__.py
@@ -628,7 +628,7 @@ def _combine_load_cubes(cubes, merge_require_unique=False):
     return _combine_cubes(cubes, options, merge_require_unique=merge_require_unique)
 
 
-def load(uris, constraints=None, callback=None, policy=None):
+def load(uris, constraints=None, callback=None):
     """Load any number of Cubes for each constraint.
 
     For a full description of the arguments, please see the module

From 54fe96d39e0619b8c7cecdb3c76680734e9c20c4 Mon Sep 17 00:00:00 2001
From: Patrick Peglar
Date: Thu, 24 Oct 2024 16:33:03 +0100
Subject: [PATCH 27/33] Make LoadPolicy examples more consistent.

---
 lib/iris/__init__.py | 17 ++++++++---------
 1 file changed, 8 insertions(+), 9 deletions(-)

diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py
index fb92981250..23495875ae 100644
--- a/lib/iris/__init__.py
+++ b/lib/iris/__init__.py
@@ -523,17 +523,16 @@ def context(self, settings=None, **kwargs):
 
     >>> path = sample_data_path("time_varying_hybrid_height", "*.pp")
     >>> with LOAD_POLICY.context("legacy"):
-    ...     cubes = iris.load(path)
+    ...     cubes = iris.load(path, "x_wind")
     >>> print(cubes)
-    0: surface_altitude / (m) (time: 15; latitude: 144; longitude: 192)
-    1: x_wind / (m s-1) (time: 2; model_level_number: 5; latitude: 144; longitude: 192)
-    2: x_wind / (m s-1) (time: 12; model_level_number: 5; latitude: 144; longitude: 192)
-    3: x_wind / (m s-1) (model_level_number: 5; latitude: 144; longitude: 192)
-
+    0: x_wind / (m s-1) (time: 2; model_level_number: 5; latitude: 144; longitude: 192)
+    1: x_wind / (m s-1) (time: 12; model_level_number: 5; latitude: 144; longitude: 192)
+    2: x_wind / (m s-1) (model_level_number: 5; latitude: 144; longitude: 192)
+    >>>
     >>> with LOAD_POLICY.context("recommended"):
-    ...     cube = iris.load_cube(path, "x_wind")
-    >>> cube
-
+    ...     cubes = iris.load(path, "x_wind")
+    >>> cubes
+    0: x_wind / (m s-1) (model_level_number: 5; time: 15; latitude: 144; longitude: 192)
     """
     # Save the current state
     saved_settings = self.settings()

From 03f0d2a5b9fd4bd7242a3f616fcff76a7faa4f0f Mon Sep 17 00:00:00 2001
From: Patrick Peglar
Date: Thu, 24 Oct 2024 17:09:37 +0100
Subject: [PATCH 28/33] Review changes : documentation improvements.
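The key user-facing point of these docs : the previous loading behaviour
remains available if the new default causes problems.  A minimal usage
sketch (the wildcard path is illustrative only) :

    import iris

    # Force loading behaviour identical to earlier Iris versions, globally ...
    iris.LOAD_POLICY.set("legacy")

    # ... or only within a limited scope :
    with iris.LOAD_POLICY.context("recommended"):
        cubes = iris.load("my_files_*.pp")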
---
 lib/iris/__init__.py | 16 +++++++++++++---
 1 file changed, 13 insertions(+), 3 deletions(-)

diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py
index 23495875ae..c0d9f29558 100644
--- a/lib/iris/__init__.py
+++ b/lib/iris/__init__.py
@@ -317,12 +317,22 @@ def _load_collection(uris, constraints=None, callback=None):
 class LoadPolicy(threading.local):
     """A container for loading strategy options.
 
-    Controls merge/concatenate usage, and the handling of cases where multiple reference
-    fields merge to define an additional dimension (e.g. a time-varying orography).
+    Controls merge/concatenate usage during loading.
+
+    Also controls the detection and handling of cases where a hybrid coordinate
+    uses multiple reference fields : for example, a UM file which contains a series of
+    fields describing time-varying orography.
 
     Options can be set directly, or via :meth:`~iris.LoadPolicy.set`, or changed for
     the scope of a code block with :meth:`~iris.LoadPolicy.context`.
 
+    .. note ::
+
+        The default behaviour will "fix" loading for cases like the one just described.
+        However, this is not strictly backwards-compatible. If this causes problems,
+        you can force identical loading behaviour to earlier Iris versions with
+        ``LOAD_POLICY.set("legacy")`` or equivalent.
+
     .. testsetup::
 
         from iris import LOAD_POLICY
@@ -546,7 +556,7 @@ def context(self, settings=None, **kwargs):
         self.set(saved_settings)
 
 
-#: Object containing file loading options.
+#: A control object containing the current file loading options.
 LOAD_POLICY = LoadPolicy("legacy")
 # The unique (singleton) policy object
 # N.B. FOR NOW, our starting point is "legacy" rather than "default"

From c30ab2f0202706110c21c5c2befbfc435317249c Mon Sep 17 00:00:00 2001
From: Patrick Peglar
Date: Thu, 24 Oct 2024 17:52:15 +0100
Subject: [PATCH 29/33] Doctest fix

---
 lib/iris/__init__.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py
index c0d9f29558..67c6accc1b 100644
--- a/lib/iris/__init__.py
+++ b/lib/iris/__init__.py
@@ -541,7 +541,7 @@ def context(self, settings=None, **kwargs):
     >>>
     >>> with LOAD_POLICY.context("recommended"):
     ...     cubes = iris.load(path, "x_wind")
-    >>> cubes
+    >>> print(cubes)
     0: x_wind / (m s-1) (model_level_number: 5; time: 15; latitude: 144; longitude: 192)
     """

From e1de23cf75598ad6fc345ed2820ac7d910831175 Mon Sep 17 00:00:00 2001
From: "stephen.worsley"
Date: Thu, 24 Oct 2024 17:56:31 +0100
Subject: [PATCH 30/33] fix controlling_merge docs

---
 docs/src/further_topics/controlling_merge.rst | 20 ++++++++++++++++---
 1 file changed, 17 insertions(+), 3 deletions(-)

diff --git a/docs/src/further_topics/controlling_merge.rst b/docs/src/further_topics/controlling_merge.rst
index 6f223048ea..85fe46e745 100644
--- a/docs/src/further_topics/controlling_merge.rst
+++ b/docs/src/further_topics/controlling_merge.rst
@@ -4,6 +4,21 @@
 Controlling Merge and Concatenate
 =================================
 
+Preliminaries
+-------------
+
+The following code would have been necessary with loading behaviour prior to version 3.11.0. For the sake of
+demonstration, we will revert to this legacy loading behaviour as follows:
+
+    >>> iris.LOAD_POLICY.set("legacy")
+
+.. note::
+    The default settings for :data:`iris.LOAD_POLICY` effectively implement some version of the following demonstration
+    automatically upon loading. It may still be worth being aware of how to handle this manually if an even finer degree
+    of control is required.
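+
+The same setting can also be applied temporarily, using the context manager form
+(a sketch only, with ``filename`` standing for any input path)::
+
+    with iris.LOAD_POLICY.context("legacy"):
+        cubes = iris.load(filename)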
+ +How to Merge Cubes When Coordinates Differ +------------------------------------------ Sometimes it is not possible to appropriately combine a CubeList using merge and concatenate on their own. In such cases it is possible to achieve much more control over cube combination by using the :func:`~iris.util.new_axis` utility. @@ -202,7 +217,6 @@ Once merged, we can now concatenate all these cubes into a single result cube, w See Also -------- -* :func:`iris.combine_cubes` can perform similar operations automatically -* :data:`iris.LOAD_POLICY` controls the application of :func:`~iris.combine_cubes` - during the load operations, i.e. :func:`~iris.load`, :func:`~iris.load_cube` and +* :data:`iris.LOAD_POLICY` can be controlled to apply similar operations + within the load functions, i.e. :func:`~iris.load`, :func:`~iris.load_cube` and :func:`~iris.load_cubes`. From 8a36d8839d0bb4070437021dce3df4e4ac357655 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Thu, 24 Oct 2024 17:35:46 +0100 Subject: [PATCH 31/33] LOAD_POLICY uses 'default' settings by default. --- lib/iris/__init__.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py index 67c6accc1b..d4454efe89 100644 --- a/lib/iris/__init__.py +++ b/lib/iris/__init__.py @@ -557,10 +557,7 @@ def context(self, settings=None, **kwargs): #: A control object containing the current file loading options. -LOAD_POLICY = LoadPolicy("legacy") -# The unique (singleton) policy object -# N.B. FOR NOW, our starting point is "legacy" rather than "default" -# TODO: resolve tests as needed, to pass with "default". +LOAD_POLICY = LoadPolicy() def _combine_cubes(cubes, options, merge_require_unique): From 3eba834fea937f45d15a59b0aba4ff77311f3ad6 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Thu, 24 Oct 2024 18:25:31 +0100 Subject: [PATCH 32/33] Various quick fixes to legacy tests. 
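Mostly, these fixes pin down older tests which assume the legacy
single-reference loading.  Where a test loads data with a time-varying
reference (e.g. the COLPEX files), the old behaviour can be restored locally
with the policy context, as in the trajectory test below :

    with LOAD_POLICY.context(support_multiple_references=False):
        cube = iris.load_cube(path, "air_potential_temperature")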
--- lib/iris/tests/integration/test_trajectory.py | 5 +- .../results/cube_io/pickling/cubelist.cml | 2690 +++++++++++++++-- .../results/cube_io/pickling/single_cube.cml | 2690 +++++++++++++++-- lib/iris/tests/test_plot.py | 3 + lib/iris/tests/test_quickplot.py | 3 + 5 files changed, 4732 insertions(+), 659 deletions(-) diff --git a/lib/iris/tests/integration/test_trajectory.py b/lib/iris/tests/integration/test_trajectory.py index aa4ce67a3b..f56970f9fa 100644 --- a/lib/iris/tests/integration/test_trajectory.py +++ b/lib/iris/tests/integration/test_trajectory.py @@ -11,6 +11,7 @@ import numpy as np import iris +from iris import LOAD_POLICY from iris._lazy_data import as_lazy_data from iris.analysis.trajectory import Trajectory from iris.analysis.trajectory import interpolate as traj_interpolate @@ -22,7 +23,9 @@ class TestColpex(tests.IrisTest): def setUp(self): # Load the COLPEX data => TZYX path = tests.get_data_path(["PP", "COLPEX", "theta_and_orog_subset.pp"]) - cube = iris.load_cube(path, "air_potential_temperature") + # Fix to ignore time-varying orography, for the purposes of these tests + with LOAD_POLICY.context(support_multiple_references=False): + cube = iris.load_cube(path, "air_potential_temperature") cube.coord("grid_latitude").bounds = None cube.coord("grid_longitude").bounds = None # TODO: Workaround until regrid can handle factories diff --git a/lib/iris/tests/results/cube_io/pickling/cubelist.cml b/lib/iris/tests/results/cube_io/pickling/cubelist.cml index c52486b1d0..6510b200e6 100644 --- a/lib/iris/tests/results/cube_io/pickling/cubelist.cml +++ b/lib/iris/tests/results/cube_io/pickling/cubelist.cml @@ -7,398 +7,2360 @@ - - + + + [[[32192.732 , 32192.732 , 32192.732 , ..., + 32192.732 , 32192.732 , 32192.732 ], + [32192.732 , 32192.732 , 32192.732 , ..., + 32192.732 , 32192.732 , 32192.732 ], + [32192.732 , 32192.732 , 32192.732 , ..., + 32192.732 , 32192.732 , 32192.732 ], + ..., + [32192.732 , 32192.732 , 32192.732 , ..., + 32192.732 , 32192.732 , 32192.732 ], + [32192.732 , 32192.732 , 32192.732 , ..., + 32192.732 , 32192.732 , 32192.732 ], + [32192.732 , 32192.732 , 32192.732 , ..., + 32192.732 , 32192.732 , 32192.732 ]], + + [[32192.732 , 32192.732 , 32192.732 , ..., + 32192.732 , 32192.732 , 32192.732 ], + [32192.732 , 32192.732 , 32192.732 , ..., + 32192.732 , 32192.732 , 32192.732 ], + [32192.732 , 32192.732 , 32192.732 , ..., + 32192.732 , 32192.732 , 32192.732 ], + ..., + [32192.732 , 32192.732 , 32192.732 , ..., + 32192.732 , 32192.732 , 32192.732 ], + [32192.732 , 32192.732 , 32192.732 , ..., + 32192.732 , 32192.732 , 32192.732 ], + [32192.732 , 32192.732 , 32192.732 , ..., + 32192.732 , 32192.732 , 32192.732 ]], + + [[32192.732 , 32192.732 , 32192.732 , ..., + 32192.732 , 32192.732 , 32192.732 ], + [32192.732 , 32192.732 , 32192.732 , ..., + 32192.732 , 32192.732 , 32192.732 ], + [32192.732 , 32192.732 , 32192.732 , ..., + 32192.732 , 32192.732 , 32192.732 ], + ..., + [32192.732 , 32192.732 , 32192.732 , ..., + 32192.732 , 32192.732 , 32192.732 ], + [32192.732 , 32192.732 , 32192.732 , ..., + 32192.732 , 32192.732 , 32192.732 ], + [32192.732 , 32192.732 , 32192.732 , ..., + 32192.732 , 32192.732 , 32192.732 ]], + + [[32192.732 , 32192.732 , 32192.732 , ..., + 32192.732 , 32192.732 , 32192.732 ], + [32192.732 , 32192.732 , 32192.732 , ..., + 32192.732 , 32192.732 , 32192.732 ], + [32192.732 , 32192.732 , 32192.732 , ..., + 32192.732 , 32192.732 , 32192.732 ], + ..., + [32192.732 , 32192.732 , 32192.732 , ..., + 32192.732 , 32192.732 , 32192.732 ], + 
diff --git a/lib/iris/tests/results/cube_io/pickling/cubelist.cml b/lib/iris/tests/results/cube_io/pickling/cubelist.cml
index c52486b1d0..6510b200e6 100644
--- a/lib/iris/tests/results/cube_io/pickling/cubelist.cml
+++ b/lib/iris/tests/results/cube_io/pickling/cubelist.cml
@@ -7,398 +7,2360 @@
[~2000 added lines omitted: the derived 'altitude' coordinate's points array,
written out in full as one constant value per model level (32192.732,
35845.004, ..., 40000.0), closing with:
  ...]]]]" shape="(70, 6, 100, 100)" standard_name="altitude" units="Unit('m')" value_type="float32">]
@@ -407,7 +2369,7 @@
@@ -436,7 +2398,7 @@
@@ -531,14 +2563,14 @@
[changed element lines truncated in the source]
diff --git a/lib/iris/tests/results/cube_io/pickling/single_cube.cml b/lib/iris/tests/results/cube_io/pickling/single_cube.cml
index eb3e9d0112..aead0825ea 100644
--- a/lib/iris/tests/results/cube_io/pickling/single_cube.cml
+++ b/lib/iris/tests/results/cube_io/pickling/single_cube.cml
@@ -7,398 +7,2360 @@
[identical ~2000-line 'altitude' points array as in cubelist.cml above,
ending with the same shape="(70, 6, 100, 100)" attributes]
@@ -407,7 +2369,7 @@
@@ -436,7 +2398,7 @@
@@ -531,13 +2563,13 @@
[changed element lines truncated in the source]
diff --git a/lib/iris/tests/test_plot.py b/lib/iris/tests/test_plot.py
index 50773f0d24..b263313b90 100644
--- a/lib/iris/tests/test_plot.py
+++ b/lib/iris/tests/test_plot.py
@@ -821,6 +821,9 @@ def setUp(self):
         super().setUp()
         filename = tests.get_data_path(("PP", "COLPEX", "theta_and_orog_subset.pp"))
         self.cube = load_cube_once(filename, "air_potential_temperature")
+        if self.cube.coord_dims("time") != (0,):
+            # Quick fix: the data's dimension order changed when we added time-varying orography support.
+            self.cube.transpose((1, 0, 2, 3))
         self.draw_module = iris.plot
 
         self.contourf = LambdaStr(
diff --git a/lib/iris/tests/test_quickplot.py b/lib/iris/tests/test_quickplot.py
index 8469aa0776..25bd8904a7 100644
--- a/lib/iris/tests/test_quickplot.py
+++ b/lib/iris/tests/test_quickplot.py
@@ -49,6 +49,9 @@ def setUp(self):
         tests.GraphicsTest.setUp(self)
         filename = tests.get_data_path(("PP", "COLPEX", "theta_and_orog_subset.pp"))
         self.cube = test_plot.load_cube_once(filename, "air_potential_temperature")
+        if self.cube.coord_dims("time") != (0,):
+            # Quick fix: the data's dimension order changed when we added time-varying orography support.
+            self.cube.transpose((1, 0, 2, 3))
         self.draw_module = iris.quickplot
 
         self.contourf = test_plot.LambdaStr(
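For illustration (not part of the patch): the guard added to both test modules,
pulled out as a sketch. The helper name 'ensure_time_leading' is hypothetical --
the patch inlines this logic in each setUp().

    def ensure_time_leading(cube):
        # coord_dims reports which data dimensions the named coordinate spans;
        # if 'time' is no longer dimension 0, the cube has loaded as ZTYX.
        if cube.coord_dims("time") != (0,):
            # Cube.transpose operates in place: swap the first two dimensions
            # to restore the TZYX order these plot tests were written against.
            cube.transpose((1, 0, 2, 3))
        return cube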
From e5b4ce646ca33bda30814ceb717cabd01c74d21f Mon Sep 17 00:00:00 2001
From: Patrick Peglar
Date: Thu, 24 Oct 2024 17:51:16 +0100
Subject: [PATCH 33/33] Added whatsnew.

---
 docs/src/whatsnew/latest.rst | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst
index 58e3f5f956..cb1ccdf348 100644
--- a/docs/src/whatsnew/latest.rst
+++ b/docs/src/whatsnew/latest.rst
@@ -45,6 +45,10 @@ This document explains the changes made to Iris for this release
    Requested in :issue:`5970`, actioned in :pull:`6169`.
 
+#. `@pp-mo`_ and `@stephenworsley`_ added support for hybrid coordinates whose
+   references are split across multiple input fields, and :data:`~iris.LOAD_POLICY`
+   to control it, as requested in :issue:`5369`, actioned in :pull:`6168`.
+
 
 🐛 Bugs Fixed
 =============
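For context on the reference-result changes above (not part of the patch): when
the orography reference varies with time, the derived altitude coordinate picks
up that time dimension. A minimal NumPy sketch, assuming the standard
hybrid-height formula altitude = level_height + sigma * orography, with
illustrative sizes matching the updated CML files:

    import numpy as np

    level_height = np.linspace(20.0, 40000.0, 70)  # (model_level_number,) -- illustrative values
    sigma = np.linspace(1.0, 0.0, 70)              # (model_level_number,) -- illustrative values
    orography = np.zeros((6, 100, 100))            # (time, y, x): one reference per time step

    # Broadcasting yields a ZTYX altitude array, matching the
    # shape="(70, 6, 100, 100)" recorded in the new reference results.
    altitude = (
        level_height[:, None, None, None]
        + sigma[:, None, None, None] * orography[None]
    )
    assert altitude.shape == (70, 6, 100, 100)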